From 99b85c73c28eb3d72c33a2d1e2e087b32c2ff883 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rafa=C5=82=20Mikrut?=
Date: Thu, 17 Sep 2020 12:18:01 +0200
Subject: [PATCH] Fixed invalid groups number when saving to file

---
 czkawka_core/src/duplicate.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/czkawka_core/src/duplicate.rs b/czkawka_core/src/duplicate.rs
index 5794f61..69961bc 100644
--- a/czkawka_core/src/duplicate.rs
+++ b/czkawka_core/src/duplicate.rs
@@ -476,7 +476,7 @@ impl DuplicateFinder {
 
         if !self.files_with_identical_size.is_empty() {
             file.write_all(b"-------------------------------------------------Files with same size-------------------------------------------------\n").unwrap();
-            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_size.to_string().as_str() + " duplicated files which in " + self.files_with_identical_size.len().to_string().as_str() + " groups.\n").as_bytes())
+            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_size.to_string().as_str() + " duplicated files which in " + self.infos.number_of_groups_by_size.to_string().as_str() + " groups.\n").as_bytes())
                 .unwrap();
             for (size, files) in self.files_with_identical_size.iter().rev() {
                 file.write_all(b"\n---- Size ").unwrap();
@@ -491,7 +491,7 @@ impl DuplicateFinder {
 
         if !self.files_with_identical_hashes.is_empty() {
             file.write_all(b"-------------------------------------------------Files with same hashes-------------------------------------------------\n").unwrap();
-            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_size.to_string().as_str() + " duplicated files which in " + self.files_with_identical_hashes.len().to_string().as_str() + " groups.\n").as_bytes())
+            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_hash.to_string().as_str() + " duplicated files which in " + self.infos.number_of_groups_by_hash.to_string().as_str() + " groups.\n").as_bytes())
                 .unwrap();
             for (size, files) in self.files_with_identical_hashes.iter().rev() {
                 for vector in files {