diff --git a/czkawka_cli/src/main.rs b/czkawka_cli/src/main.rs
index f9a86fa..da81e8c 100644
--- a/czkawka_cli/src/main.rs
+++ b/czkawka_cli/src/main.rs
@@ -137,6 +137,8 @@ fn main() {
 
         df.find_duplicates();
 
+        df.print_duplicated_entries();
+
         if ArgumentsPair::has_command(&arguments, "-f") {
             df.save_results_to_file(&ArgumentsPair::get_argument(&arguments, "-f", false));
         }
diff --git a/czkawka_core/src/common.rs b/czkawka_core/src/common.rs
index b692b92..8d1d173 100644
--- a/czkawka_core/src/common.rs
+++ b/czkawka_core/src/common.rs
@@ -51,7 +51,10 @@ impl Common {
     /// Function to check if directory match expression
     pub fn regex_check(expression: &str, directory: &str) -> bool {
         if !expression.contains('*') {
-            println!("Expression should have *");
+            #[cfg(debug_assertions)]
+            {
+                println!("Invalid expression ERROR: Expression should have *");
+            }
             return false;
         }
 
@@ -116,6 +119,7 @@ mod test {
         assert!(!Common::regex_check("*home", "/homefasfasfasfasf/"));
         assert!(!Common::regex_check("*home", "/homefasfasfasfasf"));
         assert!(!Common::regex_check("rafal*afal*fal", "rafal"));
+        assert!(!Common::regex_check("rafal*a", "rafal"));
         assert!(!Common::regex_check("AAAAAAAA****", "/AAAAAAAAAAAAAAAAA"));
         assert!(!Common::regex_check("*.git/*", "/home/.git"));
         assert!(!Common::regex_check("*home/*koc", "/koc/home/"));
diff --git a/czkawka_core/src/duplicate.rs b/czkawka_core/src/duplicate.rs
index 69961bc..fdeff35 100644
--- a/czkawka_core/src/duplicate.rs
+++ b/czkawka_core/src/duplicate.rs
@@ -476,12 +476,22 @@ impl DuplicateFinder {
 
         if !self.files_with_identical_size.is_empty() {
             file.write_all(b"-------------------------------------------------Files with same size-------------------------------------------------\n").unwrap();
-            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_size.to_string().as_str() + " duplicated files which in " + self.infos.number_of_groups_by_size.to_string().as_str() + " groups.\n").as_bytes())
-                .unwrap();
+            file.write_all(
+                ("Found ".to_string()
+                    + self.infos.number_of_duplicated_files_by_size.to_string().as_str()
+                    + " duplicated files which in "
+                    + self.infos.number_of_groups_by_size.to_string().as_str()
+                    + " groups which takes "
+                    + &*self.infos.lost_space_by_size.file_size(options::BINARY).unwrap()
+                    + ".\n")
+                    .as_bytes(),
+            )
+            .unwrap();
             for (size, files) in self.files_with_identical_size.iter().rev() {
                 file.write_all(b"\n---- Size ").unwrap();
                 file.write_all(size.file_size(options::BINARY).unwrap().as_bytes()).unwrap();
                 file.write_all((" (".to_string() + size.to_string().as_str() + ")").as_bytes()).unwrap();
+                file.write_all((" - ".to_string() + files.len().to_string().as_str() + " files").as_bytes()).unwrap();
                 file.write_all(b"\n").unwrap();
                 for file_entry in files {
                     file.write_all((file_entry.path.clone() + "\n").as_bytes()).unwrap();
@@ -491,13 +501,23 @@ impl DuplicateFinder {
 
         if !self.files_with_identical_hashes.is_empty() {
             file.write_all(b"-------------------------------------------------Files with same hashes-------------------------------------------------\n").unwrap();
-            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_hash.to_string().as_str() + " duplicated files which in " + self.infos.number_of_groups_by_hash.to_string().as_str() + " groups.\n").as_bytes())
-                .unwrap();
+            file.write_all(
+                ("Found ".to_string()
+                    + self.infos.number_of_duplicated_files_by_hash.to_string().as_str()
+                    + " duplicated files which in "
+                    + self.infos.number_of_groups_by_hash.to_string().as_str()
+                    + " groups which takes "
+                    + &*self.infos.lost_space_by_hash.file_size(options::BINARY).unwrap()
+                    + ".\n")
+                    .as_bytes(),
+            )
+            .unwrap();
             for (size, files) in self.files_with_identical_hashes.iter().rev() {
                 for vector in files {
                     file.write_all(b"\n---- Size ").unwrap();
                     file.write_all(size.file_size(options::BINARY).unwrap().as_bytes()).unwrap();
                     file.write_all((" (".to_string() + size.to_string().as_str() + ")").as_bytes()).unwrap();
+                    file.write_all((" - ".to_string() + vector.len().to_string().as_str() + " files").as_bytes()).unwrap();
                     file.write_all(b"\n").unwrap();
                     for file_entry in vector {
                         file.write_all((file_entry.path.clone() + "\n").as_bytes()).unwrap();
@@ -628,12 +648,12 @@ impl DuplicateFinder {
     }
 
     /// Print information about duplicated entries
-    pub fn print_duplicated_entries(&self, check_method: &CheckingMethod) {
+    pub fn print_duplicated_entries(&self) {
         let start_time: SystemTime = SystemTime::now();
         let mut number_of_files: u64 = 0;
         let mut number_of_groups: u64 = 0;
 
-        match check_method {
+        match self.check_method {
             CheckingMethod::HASH => {
                 for (_size, vector) in self.files_with_identical_hashes.iter() {
                     for j in vector {
@@ -647,9 +667,9 @@
                     number_of_groups,
                     self.infos.lost_space_by_size.file_size(options::BINARY).unwrap()
                 );
-                for (key, vector) in self.files_with_identical_hashes.iter().rev() {
-                    println!("Size - {}", key.file_size(options::BINARY).unwrap());
+                for (size, vector) in self.files_with_identical_hashes.iter().rev() {
                     for j in vector {
+                        println!("Size - {} ({}) - {} files ", size.file_size(options::BINARY).unwrap(), size, j.len());
                         for k in j {
                             println!("{}", k.path);
                         }
@@ -669,9 +689,9 @@
                     number_of_groups,
                     self.infos.lost_space_by_size.file_size(options::BINARY).unwrap()
                 );
-                for i in &self.files_with_identical_size {
-                    println!("Size - {}", i.0);
-                    for j in i.1 {
+                for (size, vector) in &self.files_with_identical_size {
+                    println!("Size - {} ({}) - {} files ", size.file_size(options::BINARY).unwrap(), size, vector.len());
+                    for j in vector {
                         println!("{}", j.path);
                     }
                     println!();