From 249adfc8217665b4012282af01406fd2a5bf574e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rafa=C5=82=20Mikrut?=
Date: Thu, 17 Sep 2020 14:01:14 +0200
Subject: [PATCH] Merge function which finds duplication by size with removing
 entries which contains only 1 entry

---
 czkawka_core/src/duplicate.rs | 37 ++++++++++++++++---------------------
 1 file changed, 16 insertions(+), 21 deletions(-)

diff --git a/czkawka_core/src/duplicate.rs b/czkawka_core/src/duplicate.rs
index fdeff35..ce51d33 100644
--- a/czkawka_core/src/duplicate.rs
+++ b/czkawka_core/src/duplicate.rs
@@ -118,7 +118,6 @@ impl DuplicateFinder {
     pub fn find_duplicates(&mut self) {
         self.optimize_directories();
         self.check_files_size();
-        self.remove_files_with_unique_size();
         if self.check_method == CheckingMethod::HASH {
             self.check_files_hash();
         }
@@ -449,6 +448,22 @@ impl DuplicateFinder {
                 }
             }
         }
+
+        // Remove files with unique size
+        let mut new_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
+
+        self.infos.number_of_duplicated_files_by_size = 0;
+
+        for (size, vector) in &self.files_with_identical_size {
+            if vector.len() > 1 {
+                self.infos.number_of_duplicated_files_by_size += vector.len() - 1;
+                self.infos.number_of_groups_by_size += 1;
+                self.infos.lost_space_by_size += (vector.len() as u64 - 1) * size;
+                new_map.insert(*size, vector.clone());
+            }
+        }
+        self.files_with_identical_size = new_map;
+
         Common::print_time(start_time, SystemTime::now(), "check_files_size".to_string());
     }
     pub fn save_results_to_file(&mut self, file_name: &str) {
@@ -528,26 +543,6 @@ impl DuplicateFinder {
         Common::print_time(start_time, SystemTime::now(), "save_results_to_file".to_string());
     }
 
-    /// Remove files which have unique size
-    fn remove_files_with_unique_size(&mut self) {
-        let start_time: SystemTime = SystemTime::now();
-        let mut new_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
-
-        self.infos.number_of_duplicated_files_by_size = 0;
-
-        for (size, vector) in &self.files_with_identical_size {
-            if vector.len() > 1 {
-                self.infos.number_of_duplicated_files_by_size += vector.len() - 1;
-                self.infos.number_of_groups_by_size += 1;
-                self.infos.lost_space_by_size += (vector.len() as u64 - 1) * size;
-                new_map.insert(*size, vector.clone());
-            }
-        }
-        self.files_with_identical_size = new_map;
-
-        Common::print_time(start_time, SystemTime::now(), "remove_files_with_unique_size".to_string());
-    }
-
     /// Should be slower than checking in different ways, but still needs to be checked
     fn check_files_hash(&mut self) {
        let start_time: SystemTime = SystemTime::now();
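
For readers skimming the patch, below is a minimal, self-contained sketch of the filtering step this commit folds into check_files_size(): groups containing a single file cannot be duplicates, so they are dropped while the duplicate/group/lost-space counters are updated. FileEntry here is a simplified stand-in for czkawka's struct and retain_duplicate_size_groups is a hypothetical helper, not czkawka's API; the sketch also uses BTreeMap::retain instead of building a replacement map as the patch does, which is behaviourally equivalent for this step.

```rust
use std::collections::BTreeMap;

// Hypothetical, simplified stand-in for czkawka's FileEntry; the real struct
// carries more fields (modification date, etc.).
#[derive(Clone, Debug)]
struct FileEntry {
    path: String,
    size: u64,
}

// Keep only size groups with more than one file and tally the same statistics
// the patch maintains: duplicated files, duplicate groups, reclaimable space.
fn retain_duplicate_size_groups(
    files_with_identical_size: &mut BTreeMap<u64, Vec<FileEntry>>,
) -> (usize, usize, u64) {
    let mut duplicated_files = 0usize;
    let mut groups = 0usize;
    let mut lost_space = 0u64;

    files_with_identical_size.retain(|size, vector| {
        if vector.len() > 1 {
            duplicated_files += vector.len() - 1;
            groups += 1;
            lost_space += (vector.len() as u64 - 1) * size;
            true // at least two files of this size: a potential duplicate group
        } else {
            false // unique size: cannot be a duplicate, drop the group
        }
    });

    (duplicated_files, groups, lost_space)
}

fn main() {
    let mut map: BTreeMap<u64, Vec<FileEntry>> = BTreeMap::new();
    map.insert(
        10,
        vec![
            FileEntry { path: "a.txt".into(), size: 10 },
            FileEntry { path: "b.txt".into(), size: 10 },
        ],
    );
    map.insert(99, vec![FileEntry { path: "unique.bin".into(), size: 99 }]);

    let (dups, groups, lost) = retain_duplicate_size_groups(&mut map);
    // Only the size-10 group survives: 1 duplicated file, 1 group, 10 B reclaimable.
    println!("groups: {groups}, duplicated files: {dups}, lost space: {lost} B");
    assert_eq!(map.len(), 1);
}
```

Whether to rebuild the map (as the patch does) or retain in place is a design detail; either way the point of the commit is that the filtering now happens once, inside check_files_size(), instead of in a separate remove_files_with_unique_size() pass.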