diff --git a/src/fs/filter.rs b/src/fs/filter.rs
index 9284fef..4fb9aeb 100644
--- a/src/fs/filter.rs
+++ b/src/fs/filter.rs
@@ -7,27 +7,10 @@ pub struct FileFilter {
     max: Option<u64>,
     regex: Option<Regex>,
     glob: Option<GlobMatcher>,
-    #[cfg(unix)]
     inodes_filter: inode::Filter,
 }
 
 impl FileFilter {
-    #[cfg(not(unix))]
-    pub fn new(
-        min: Option<u64>,
-        max: Option<u64>,
-        regex: Option<Regex>,
-        glob: Option<GlobMatcher>,
-    ) -> Self {
-        Self {
-            min,
-            max,
-            regex,
-            glob,
-        }
-    }
-
-    #[cfg(unix)]
     pub fn new(
         min: Option<u64>,
         max: Option<u64>,
         regex: Option<Regex>,
         glob: Option<GlobMatcher>,
diff --git a/src/lib.rs b/src/lib.rs
index f653da9..aa5d10d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -4,7 +4,7 @@
 //!
 //! ```toml
 //! [dependencies]
-//! yadf = { version = "0.15.0", default-features = false }
+//! yadf = { version = "*", default-features = false }
 //! ```
 //!
 //! A collection of functions and structs to find duplicate files.
@@ -14,7 +14,7 @@
 //! Find and display all the duplicate files at the given paths :
 //!
 //! ```no_run
-//! # fn foo(paths: &[std::path::PathBuf]) {
+//! # fn foo(paths: Vec<std::path::PathBuf>) {
 //! let counter = yadf::Yadf::builder()
 //!     .paths(paths)
 //!     .build()
@@ -45,13 +45,13 @@ pub type FileReplicates<'a> = bag::Replicates<'a, u64, Path>;
 /// # Example
 ///
 /// ```no_run
-/// # fn foo(paths: &[std::path::PathBuf]) {
+/// # fn foo(paths: Vec<std::path::PathBuf>) {
 /// let counter = yadf::Yadf::builder()
 ///     .paths(paths) // required
 ///     .minimum_file_size(64) // optional
 ///     .maximum_file_size(1024 * 8) // optional
-///     .regex(None) // optional
-///     .glob(None) // optional
+///     .maybe_regex(None) // optional
+///     .maybe_glob(None) // optional
 ///     .build()
 ///     .scan::<seahash::SeaHasher>();
 /// # }
@@ -73,8 +73,7 @@ pub struct Yadf {
     regex: Option<Regex>,
     /// File name must match this glob
     glob: Option<Glob>,
-    #[cfg_attr(unix, doc = "Treat hard links as duplicates")]
-    #[cfg(unix)]
+    /// Treat hard links as duplicates
     #[builder(default)]
     hard_links: bool,
 }
@@ -85,7 +84,6 @@ impl Yadf {
     where
         H: Hasher + Default,
     {
-        #[cfg(unix)]
         let file_filter = fs::filter::FileFilter::new(
             self.minimum_file_size,
             self.maximum_file_size,
@@ -93,13 +91,6 @@
             self.glob.map(|g| g.compile_matcher()),
             self.hard_links,
         );
-        #[cfg(not(unix))]
-        let file_filter = fs::filter::FileFilter::new(
-            self.minimum_file_size,
-            self.maximum_file_size,
-            self.regex,
-            self.glob.map(|g| g.compile_matcher()),
-        );
         let bag = fs::find_dupes_partial::<H, P>(&self.paths, self.max_depth, file_filter);
         if log::log_enabled!(log::Level::Info) {
             log::info!(