
Fixed invalid groups number when saving to file

Rafał Mikrut 2020-09-17 12:18:01 +02:00
parent 4757b28486
commit 99b85c73c2


@@ -476,7 +476,7 @@ impl DuplicateFinder {
         if !self.files_with_identical_size.is_empty() {
             file.write_all(b"-------------------------------------------------Files with same size-------------------------------------------------\n").unwrap();
-            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_size.to_string().as_str() + " duplicated files which in " + self.files_with_identical_size.len().to_string().as_str() + " groups.\n").as_bytes())
+            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_size.to_string().as_str() + " duplicated files which in " + self.infos.number_of_groups_by_size.to_string().as_str() + " groups.\n").as_bytes())
                 .unwrap();
             for (size, files) in self.files_with_identical_size.iter().rev() {
                 file.write_all(b"\n---- Size ").unwrap();
@@ -491,7 +491,7 @@ impl DuplicateFinder {
         if !self.files_with_identical_hashes.is_empty() {
             file.write_all(b"-------------------------------------------------Files with same hashes-------------------------------------------------\n").unwrap();
-            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_size.to_string().as_str() + " duplicated files which in " + self.files_with_identical_hashes.len().to_string().as_str() + " groups.\n").as_bytes())
+            file.write_all(("Found ".to_string() + self.infos.number_of_duplicated_files_by_hash.to_string().as_str() + " duplicated files which in " + self.infos.number_of_groups_by_hash.to_string().as_str() + " groups.\n").as_bytes())
                 .unwrap();
             for (size, files) in self.files_with_identical_hashes.iter().rev() {
                 for vector in files {
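
The second hunk also shows why .len() was the wrong group count: judging from the nested loop, files_with_identical_hashes maps a file size to a list of hash groups, so calling .len() on the map counts sizes, not groups, and undercounts whenever two distinct hash groups happen to share a size. Below is a minimal standalone sketch of that distinction; the BTreeMap layout and the sample data are assumptions inferred from the diff, not czkawka's actual types.

use std::collections::BTreeMap;

fn main() {
    // Hypothetical layout inferred from the diff's nested loop:
    // size -> list of hash groups, each group a list of file paths.
    let mut files_with_identical_hashes: BTreeMap<u64, Vec<Vec<String>>> = BTreeMap::new();

    // Two distinct hash groups that happen to share one size.
    files_with_identical_hashes.insert(
        1024,
        vec![
            vec!["a.txt".to_string(), "a_copy.txt".to_string()],
            vec!["b.txt".to_string(), "b_copy.txt".to_string()],
        ],
    );

    // Buggy count (pre-commit): entries in the outer map, i.e. distinct sizes.
    let groups_by_len = files_with_identical_hashes.len();

    // Correct count (what a precomputed number_of_groups_by_hash statistic
    // should hold): sum the hash groups stored under every size.
    let groups_by_hash: usize = files_with_identical_hashes
        .values()
        .map(|groups| groups.len())
        .sum();

    assert_eq!(groups_by_len, 1); // undercounts: only one size present
    assert_eq!(groups_by_hash, 2); // two real duplicate groups
    println!("len() = {}, groups = {}", groups_by_len, groups_by_hash);
}

The same reasoning motivates the first hunk: the report now uses the precomputed number_of_groups_by_size statistic instead of deriving the count from the container being iterated, which also fixes the copy-paste use of number_of_duplicated_files_by_size in the hash section.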