// czkawka/czkawka_core/src/duplicate.rs
use humansize::{file_size_opts as options, FileSize};
use std::collections::{BTreeMap, HashMap};
use std::fs;
use std::fs::{File, Metadata};
use std::io::prelude::*;
use std::time::{SystemTime, UNIX_EPOCH};
use crate::common::Common;
use crate::common_directory::Directories;
use crate::common_extensions::Extensions;
use crate::common_items::ExcludedItems;
use crate::common_messages::Messages;
use crate::common_traits::*;
const HASH_MB_LIMIT_BYTES: u64 = 1024 * 1024; // 1MB
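// With `CheckingMethod::HashMB`, only the first HASH_MB_LIMIT_BYTES of every
// file are hashed (see `check_files_hash`), trading accuracy for speed.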

#[derive(PartialEq, Eq, Clone, Debug)]
pub enum CheckingMethod {
    None,
    Size,
    Hash,
    HashMB,
}

/// Determines which files from a duplicate group get deleted.
#[derive(Eq, PartialEq, Clone, Debug)]
pub enum DeleteMethod {
    None,
    AllExceptNewest,
    AllExceptOldest,
    OneOldest,
    OneNewest,
}

#[derive(Clone)]
pub struct FileEntry {
    pub path: String,
    pub size: u64,
    pub modified_date: u64,
}

/// Info struct with helpful information about the results
pub struct Info {
    pub number_of_checked_files: usize,
    pub number_of_checked_folders: usize,
    pub number_of_ignored_files: usize,
    pub number_of_ignored_things: usize,
    pub number_of_groups_by_size: usize,
    pub number_of_duplicated_files_by_size: usize,
    pub number_of_groups_by_hash: usize,
    pub number_of_duplicated_files_by_hash: usize,
    pub lost_space_by_size: u64,
    pub lost_space_by_hash: u64,
    pub bytes_read_when_hashing: u64,
    pub number_of_removed_files: usize,
    pub number_of_failed_to_remove_files: usize,
    pub gained_space: u64,
}

impl Info {
    pub fn new() -> Info {
        Info {
            number_of_checked_files: 0,
            number_of_ignored_files: 0,
            number_of_checked_folders: 0,
            number_of_ignored_things: 0,
            number_of_groups_by_size: 0,
            number_of_duplicated_files_by_size: 0,
            number_of_groups_by_hash: 0,
            number_of_duplicated_files_by_hash: 0,
            lost_space_by_size: 0,
            lost_space_by_hash: 0,
            bytes_read_when_hashing: 0,
            number_of_removed_files: 0,
            number_of_failed_to_remove_files: 0,
            gained_space: 0,
        }
    }
}

impl Default for Info {
    fn default() -> Self {
        Self::new()
    }
}

/// Struct with the information required for the search to work
pub struct DuplicateFinder {
    text_messages: Messages,
    information: Info,
    files_with_identical_size: BTreeMap<u64, Vec<FileEntry>>,        // File Size, Vec<FileEntry>
    files_with_identical_hashes: BTreeMap<u64, Vec<Vec<FileEntry>>>, // File Size, Vec of groups of files with identical hashes
    directories: Directories,
    allowed_extensions: Extensions,
    excluded_items: ExcludedItems,
    recursive_search: bool,
    min_file_size: u64,
    check_method: CheckingMethod,
    delete_method: DeleteMethod,
}

impl DuplicateFinder {
    pub fn new() -> DuplicateFinder {
        DuplicateFinder {
            text_messages: Messages::new(),
            information: Info::new(),
            files_with_identical_size: Default::default(),
            files_with_identical_hashes: Default::default(),
            recursive_search: true,
            allowed_extensions: Extensions::new(),
            check_method: CheckingMethod::None,
            delete_method: DeleteMethod::None,
            min_file_size: 1024,
            directories: Directories::new(),
            excluded_items: ExcludedItems::new(),
        }
    }

    /// Finds duplicates and saves the results to internal struct fields.
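    ///
    /// A minimal usage sketch (illustrative; the module path is assumed from this file's location):
    ///
    /// ```no_run
    /// use czkawka_core::duplicate::{CheckingMethod, DuplicateFinder};
    ///
    /// let mut finder = DuplicateFinder::new();
    /// finder.set_included_directory("/home/user/".to_string());
    /// finder.set_check_method(CheckingMethod::Hash);
    /// finder.find_duplicates();
    /// println!("Duplicated files: {}", finder.get_information().number_of_duplicated_files_by_hash);
    /// ```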
    pub fn find_duplicates(&mut self) {
        self.directories.optimize_directories(self.recursive_search, &mut self.text_messages);
        self.check_files_size();
        if self.check_method == CheckingMethod::Hash || self.check_method == CheckingMethod::HashMB {
            self.check_files_hash();
        }
        self.delete_files();
        self.debug_print();
    }

    pub fn get_files_sorted_by_size(&self) -> &BTreeMap<u64, Vec<FileEntry>> {
        &self.files_with_identical_size
    }

    pub fn get_files_sorted_by_hash(&self) -> &BTreeMap<u64, Vec<Vec<FileEntry>>> {
        &self.files_with_identical_hashes
    }

    pub fn get_text_messages(&self) -> &Messages {
        &self.text_messages
    }

    pub fn get_information(&self) -> &Info {
        &self.information
    }

    pub fn set_check_method(&mut self, check_method: CheckingMethod) {
        self.check_method = check_method;
    }

    pub fn set_delete_method(&mut self, delete_method: DeleteMethod) {
        self.delete_method = delete_method;
    }

    pub fn set_min_file_size(&mut self, min_size: u64) {
        self.min_file_size = min_size;
    }

    pub fn set_recursive_search(&mut self, recursive_search: bool) {
        self.recursive_search = recursive_search;
    }

    pub fn set_included_directory(&mut self, included_directory: String) -> bool {
        self.directories.set_included_directory(included_directory, &mut self.text_messages)
    }

    pub fn set_excluded_directory(&mut self, excluded_directory: String) {
        self.directories.set_excluded_directory(excluded_directory, &mut self.text_messages);
    }

    pub fn set_allowed_extensions(&mut self, allowed_extensions: String) {
        self.allowed_extensions.set_allowed_extensions(allowed_extensions, &mut self.text_messages);
    }

    pub fn set_excluded_items(&mut self, excluded_items: String) {
        self.excluded_items.set_excluded_items(excluded_items, &mut self.text_messages);
    }

    /// Reads file sizes and groups the files into buckets, one per size.
    /// Buckets that end up holding only a single file are removed.
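    ///
    /// For example (illustrative values): sizes `[4096, 4096, 512]` produce the
    /// single bucket `{4096: [a, b]}` once the lone 512-byte entry is dropped.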
    fn check_files_size(&mut self) {
        // TODO: maybe add multithreaded checking of files
        let start_time: SystemTime = SystemTime::now();
        let mut folders_to_check: Vec<String> = Vec::with_capacity(1024 * 2); // This should be small enough to not waste much memory and big enough to store most paths without resizing the vector

        // Add root folders to check
        for id in &self.directories.included_directories {
            folders_to_check.push(id.to_string());
        }
        self.information.number_of_checked_folders += folders_to_check.len();

        let mut current_folder: String;
        let mut next_folder: String;
        while !folders_to_check.is_empty() {
            current_folder = folders_to_check.pop().unwrap();

            // Read the current dir; if permission is denied, just skip to the next one
            let read_dir = match fs::read_dir(&current_folder) {
                Ok(t) => t,
                Err(_) => {
                    self.text_messages.warnings.push("Cannot open dir ".to_string() + current_folder.as_str());
                    continue;
                } // Permissions denied
            };

            // Check every sub folder/file/link etc.
            for entry in read_dir {
                let entry_data = match entry {
                    Ok(t) => t,
                    Err(_) => {
                        self.text_messages.warnings.push("Cannot read entry in dir ".to_string() + current_folder.as_str());
                        continue;
                    } // Permissions denied
                };
                let metadata: Metadata = match entry_data.metadata() {
                    Ok(t) => t,
                    Err(_) => {
                        self.text_messages.warnings.push("Cannot read metadata in dir ".to_string() + current_folder.as_str());
                        continue;
                    } // Permissions denied
                };
                if metadata.is_dir() {
                    self.information.number_of_checked_folders += 1;
                    // if entry_data.file_name().into_string().is_err() { // Probably this can be removed; if the crash still happens, then uncomment this line
                    //     self.text_messages.warnings.push("Cannot read folder name in dir ".to_string() + current_folder.as_str());
                    //     continue; // Permissions denied
                    // }
                    if !self.recursive_search {
                        continue;
                    }
                    let mut is_excluded_dir = false;
                    next_folder = "".to_owned() + &current_folder + &entry_data.file_name().into_string().unwrap() + "/";
                    for ed in &self.directories.excluded_directories {
                        if next_folder == *ed {
                            is_excluded_dir = true;
                            break;
                        }
                    }
                    if !is_excluded_dir {
                        let mut found_expression: bool = false;
                        for expression in &self.excluded_items.items {
                            if Common::regex_check(expression, &next_folder) {
                                found_expression = true;
                                break;
                            }
                        }
                        if found_expression {
                            continue; // Skip this excluded folder but keep scanning the remaining entries
                        }
                        folders_to_check.push(next_folder);
                    }
                } else if metadata.is_file() {
                    let mut have_valid_extension: bool;
                    let file_name_lowercase: String = entry_data.file_name().into_string().unwrap().to_lowercase();

                    // Checking allowed extensions
                    if !self.allowed_extensions.file_extensions.is_empty() {
                        have_valid_extension = false;
                        for extension in &self.allowed_extensions.file_extensions {
                            if file_name_lowercase.ends_with((".".to_string() + extension.to_lowercase().as_str()).as_str()) {
                                have_valid_extension = true;
                                break;
                            }
                        }
                    } else {
                        have_valid_extension = true;
                    }

                    // Checking files
                    if metadata.len() >= self.min_file_size && have_valid_extension {
                        let current_file_name = "".to_owned() + &current_folder + &entry_data.file_name().into_string().unwrap();

                        // Checking expressions
                        let mut found_expression: bool = false;
                        for expression in &self.excluded_items.items {
                            if Common::regex_check(expression, &current_file_name) {
                                found_expression = true;
                                break;
                            }
                        }
                        if found_expression {
                            continue; // Skip this excluded file but keep scanning the remaining entries
                        }

                        // Creating new file entry
                        let fe: FileEntry = FileEntry {
                            path: current_file_name.clone(),
                            size: metadata.len(),
                            modified_date: match metadata.modified() {
                                Ok(t) => match t.duration_since(UNIX_EPOCH) {
                                    Ok(d) => d.as_secs(),
                                    Err(_) => {
                                        self.text_messages.warnings.push(format!("File {} seems to be modified before Unix Epoch.", current_file_name));
                                        0
                                    }
                                },
                                Err(_) => {
                                    self.text_messages.warnings.push("Unable to get modification date from file ".to_string() + current_file_name.as_str());
                                    continue;
                                } // Permissions denied
                            },
                        };

                        // Adding files to BTreeMap
                        self.files_with_identical_size.entry(metadata.len()).or_insert_with(Vec::new).push(fe);
                        self.information.number_of_checked_files += 1;
                    } else {
                        self.information.number_of_ignored_files += 1;
                    }
                } else {
                    // Probably a symbolic link, so it is safe to ignore
                    self.information.number_of_ignored_things += 1;
                }
            }
        }

        // Create a new BTreeMap without single-entry sizes (those files have no duplicates)
        let mut new_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
        self.information.number_of_duplicated_files_by_size = 0;
        for (size, vector) in &self.files_with_identical_size {
            if vector.len() > 1 {
                self.information.number_of_duplicated_files_by_size += vector.len() - 1;
                self.information.number_of_groups_by_size += 1;
                self.information.lost_space_by_size += (vector.len() as u64 - 1) * size;
                new_map.insert(*size, vector.clone());
            }
        }
        self.files_with_identical_size = new_map;

        Common::print_time(start_time, SystemTime::now(), "check_files_size".to_string());
    }

    /// The slowest checking method; it must run after grouping by size.
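    /// Every file in a same-size bucket is hashed with blake3 (only its first
    /// `HASH_MB_LIMIT_BYTES` for `CheckingMethod::HashMB`), and the bucket is then
    /// regrouped by hash, so only files with identical hashes stay together.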
    fn check_files_hash(&mut self) {
        let start_time: SystemTime = SystemTime::now();
        let mut file_handler: File;
        let mut hashmap_with_hash: HashMap<String, Vec<FileEntry>>;

        for (size, vector) in &self.files_with_identical_size {
            hashmap_with_hash = Default::default();

            for file_entry in vector {
                file_handler = match File::open(&file_entry.path) {
                    Ok(t) => t,
                    Err(_) => {
                        self.text_messages.warnings.push("Unable to check hash of file ".to_string() + file_entry.path.as_str());
                        continue;
                    }
                };

                let mut error_reading_file: bool = false;
                let mut hasher: blake3::Hasher = blake3::Hasher::new();
                let mut buffer = [0u8; 16384];
                let mut read_bytes: u64 = 0;
                loop {
                    let n = match file_handler.read(&mut buffer) {
                        Ok(t) => t,
                        Err(_) => {
                            self.text_messages.warnings.push("Error happened when checking hash of file ".to_string() + file_entry.path.as_str());
                            error_reading_file = true;
                            break;
                        }
                    };
                    if n == 0 {
                        break;
                    }
                    read_bytes += n as u64;
                    self.information.bytes_read_when_hashing += n as u64;
                    hasher.update(&buffer[..n]);
                    if self.check_method == CheckingMethod::HashMB && read_bytes >= HASH_MB_LIMIT_BYTES {
                        break;
                    }
                }
                if !error_reading_file {
                    let hash_string: String = hasher.finalize().to_hex().to_string();
                    hashmap_with_hash.entry(hash_string).or_insert_with(Vec::new).push(file_entry.to_owned());
                }
            }
            for (_hash, vector) in hashmap_with_hash {
                if vector.len() > 1 {
                    self.files_with_identical_hashes.entry(*size).or_insert_with(Vec::new).push(vector);
                }
            }
        }

        for (size, vector_vectors) in &self.files_with_identical_hashes {
            for vector in vector_vectors {
                self.information.number_of_duplicated_files_by_hash += vector.len() - 1;
                self.information.number_of_groups_by_hash += 1;
                self.information.lost_space_by_hash += (vector.len() as u64 - 1) * size;
            }
        }

        Common::print_time(start_time, SystemTime::now(), "check_files_hash".to_string());
    }

    /// Deletes files based on the previously filled BTreeMaps.
    /// Delegates to the free `delete_files` function to avoid duplicating code.
    fn delete_files(&mut self) {
        let start_time: SystemTime = SystemTime::now();
        match self.check_method {
            CheckingMethod::Hash | CheckingMethod::HashMB => {
                for vector_vectors in self.files_with_identical_hashes.values() {
                    for vector in vector_vectors.iter() {
                        let tuple: (u64, usize, usize) = delete_files(vector, &self.delete_method, &mut self.text_messages.warnings);
                        self.information.gained_space += tuple.0;
                        self.information.number_of_removed_files += tuple.1;
                        self.information.number_of_failed_to_remove_files += tuple.2;
                    }
                }
            }
            CheckingMethod::Size => {
                for vector in self.files_with_identical_size.values() {
                    let tuple: (u64, usize, usize) = delete_files(vector, &self.delete_method, &mut self.text_messages.warnings);
                    self.information.gained_space += tuple.0;
                    self.information.number_of_removed_files += tuple.1;
                    self.information.number_of_failed_to_remove_files += tuple.2;
                }
            }
            CheckingMethod::None => {
                panic!("Checking method should never be None here.");
            }
        }
        Common::print_time(start_time, SystemTime::now(), "delete_files".to_string());
    }
}

impl Default for DuplicateFinder {
    fn default() -> Self {
        Self::new()
    }
}

impl DebugPrint for DuplicateFinder {
    #[allow(dead_code)]
    #[allow(unreachable_code)]
    /// Debug printing; only does anything in debug builds.
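    /// The `#[cfg(not(debug_assertions))]` block below compiles to an immediate
    /// return in release builds, so all the printing after it is debug-only.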
    fn debug_print(&self) {
        #[cfg(not(debug_assertions))]
        {
            return;
        }

        println!("---------------DEBUG PRINT---------------");
        println!("### Information");
        println!("Errors size - {}", self.text_messages.errors.len());
        println!("Warnings size - {}", self.text_messages.warnings.len());
        println!("Messages size - {}", self.text_messages.messages.len());
        println!("Number of checked files - {}", self.information.number_of_checked_files);
        println!("Number of checked folders - {}", self.information.number_of_checked_folders);
        println!("Number of ignored files - {}", self.information.number_of_ignored_files);
        println!("Number of ignored things (like symbolic links) - {}", self.information.number_of_ignored_things);
        println!(
            "Number of duplicated files by size (in groups) - {} ({})",
            self.information.number_of_duplicated_files_by_size, self.information.number_of_groups_by_size
        );
        println!(
            "Number of duplicated files by hash (in groups) - {} ({})",
            self.information.number_of_duplicated_files_by_hash, self.information.number_of_groups_by_hash
        );
        println!("Lost space by size - {} ({} bytes)", self.information.lost_space_by_size.file_size(options::BINARY).unwrap(), self.information.lost_space_by_size);
        println!("Lost space by hash - {} ({} bytes)", self.information.lost_space_by_hash.file_size(options::BINARY).unwrap(), self.information.lost_space_by_hash);
        println!(
            "Gained space by removing duplicated entries - {} ({} bytes)",
            self.information.gained_space.file_size(options::BINARY).unwrap(),
            self.information.gained_space
        );
        println!(
            "Bytes read when hashing - {} ({} bytes)",
            self.information.bytes_read_when_hashing.file_size(options::BINARY).unwrap(),
            self.information.bytes_read_when_hashing
        );
        println!("Number of removed files - {}", self.information.number_of_removed_files);
        println!("Number of failed to remove files - {}", self.information.number_of_failed_to_remove_files);

        println!("### Other");
        println!("Files list size - {}", self.files_with_identical_size.len());
        println!("Hashed Files list size - {}", self.files_with_identical_hashes.len());
        println!("Allowed extensions - {:?}", self.allowed_extensions.file_extensions);
        println!("Excluded items - {:?}", self.excluded_items.items);
        println!("Included directories - {:?}", self.directories.included_directories);
        println!("Excluded directories - {:?}", self.directories.excluded_directories);
        println!("Recursive search - {}", self.recursive_search);
        println!("Minimum file size - {:?}", self.min_file_size);
        println!("Checking Method - {:?}", self.check_method);
        println!("Delete Method - {:?}", self.delete_method);
        println!("-----------------------------------------");
    }
}

impl SaveResults for DuplicateFinder {
    fn save_results_to_file(&mut self, file_name: &str) -> bool {
        let start_time: SystemTime = SystemTime::now();
        let file_name: String = match file_name {
            "" => "results.txt".to_string(),
            k => k.to_string(),
        };

        let mut file = match File::create(&file_name) {
            Ok(t) => t,
            Err(_) => {
                self.text_messages.errors.push(format!("Failed to create file {}", file_name));
                return false;
            }
        };

        match file.write_all(
            format!(
                "Results of searching {:?} with excluded directories {:?} and excluded items {:?}\n",
                self.directories.included_directories, self.directories.excluded_directories, self.excluded_items.items
            )
            .as_bytes(),
        ) {
            Ok(_) => (),
            Err(_) => {
                self.text_messages.errors.push(format!("Failed to save results to file {}", file_name));
                return false;
            }
        }

        if !self.files_with_identical_size.is_empty() {
            file.write_all(b"-------------------------------------------------Files with same size-------------------------------------------------\n").unwrap();
            file.write_all(
                format!(
                    "Found {} duplicated files in {} groups which take {}.\n",
                    self.information.number_of_duplicated_files_by_size,
                    self.information.number_of_groups_by_size,
                    self.information.lost_space_by_size.file_size(options::BINARY).unwrap()
                )
                .as_bytes(),
            )
            .unwrap();
            for (size, vector) in self.files_with_identical_size.iter().rev() {
                file.write_all(format!("\n---- Size {} ({}) - {} files \n", size.file_size(options::BINARY).unwrap(), size, vector.len()).as_bytes()).unwrap();
                for file_entry in vector {
                    file.write_all(format!("{} \n", file_entry.path).as_bytes()).unwrap();
                }
            }

            if !self.files_with_identical_hashes.is_empty() {
                file.write_all(b"-------------------------------------------------Files with same hashes-------------------------------------------------\n").unwrap();
                file.write_all(
                    format!(
                        "Found {} duplicated files in {} groups which take {}.\n",
                        self.information.number_of_duplicated_files_by_hash,
                        self.information.number_of_groups_by_hash,
                        self.information.lost_space_by_hash.file_size(options::BINARY).unwrap()
                    )
                    .as_bytes(),
                )
                .unwrap();
                for (size, vectors_vector) in self.files_with_identical_hashes.iter().rev() {
                    for vector in vectors_vector {
                        file.write_all(format!("\n---- Size {} ({}) - {} files \n", size.file_size(options::BINARY).unwrap(), size, vector.len()).as_bytes()).unwrap();
                        for file_entry in vector {
                            file.write_all(format!("{} \n", file_entry.path).as_bytes()).unwrap();
                        }
                    }
                }
            }
        } else {
            file.write_all(b"No duplicates found.").unwrap();
        }

        Common::print_time(start_time, SystemTime::now(), "save_results_to_file".to_string());
        true
    }
}

impl PrintResults for DuplicateFinder {
    /// Prints information about duplicated entries.
    /// Only needed by the CLI.
    fn print_results(&self) {
        let start_time: SystemTime = SystemTime::now();
        let mut number_of_files: u64 = 0;
        let mut number_of_groups: u64 = 0;

        match self.check_method {
            CheckingMethod::Hash | CheckingMethod::HashMB => {
                for (_size, vector) in self.files_with_identical_hashes.iter() {
                    for j in vector {
                        number_of_files += j.len() as u64;
                        number_of_groups += 1;
                    }
                }
                println!(
                    "Found {} duplicated files in {} groups with the same content, which take {}:",
                    number_of_files,
                    number_of_groups,
                    self.information.lost_space_by_hash.file_size(options::BINARY).unwrap()
                );
                for (size, vector) in self.files_with_identical_hashes.iter().rev() {
                    for j in vector {
                        println!("Size - {} ({}) - {} files ", size.file_size(options::BINARY).unwrap(), size, j.len());
                        for k in j {
                            println!("{}", k.path);
                        }
                        println!("----");
                    }
                    println!();
                }
            }
            CheckingMethod::Size => {
                for i in &self.files_with_identical_size {
                    number_of_files += i.1.len() as u64;
                    number_of_groups += 1;
                }
                println!(
                    "Found {} files in {} groups with the same size (their content may differ), which take {}:",
                    number_of_files,
                    number_of_groups,
                    self.information.lost_space_by_size.file_size(options::BINARY).unwrap()
                );
                for (size, vector) in &self.files_with_identical_size {
                    println!("Size - {} ({}) - {} files ", size.file_size(options::BINARY).unwrap(), size, vector.len());
                    for j in vector {
                        println!("{}", j.path);
                    }
                    println!();
                }
            }
            CheckingMethod::None => {
                panic!("Checking method should never be set to None");
            }
        }
        Common::print_time(start_time, SystemTime::now(), "print_results".to_string());
    }
}

/// Removes the files in the given slice using the provided `DeleteMethod`.
/// Returns (gained space in bytes, removed files, failed-to-remove files) and
/// appends any problems to the warnings list.
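///
/// A call sketch (illustrative; `group` and `warnings` are assumed bindings):
/// `let (gained, removed, failed) = delete_files(&group, &DeleteMethod::AllExceptNewest, &mut warnings);`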
fn delete_files(vector: &[FileEntry], delete_method: &DeleteMethod, warnings: &mut Vec<String>) -> (u64, usize, usize) {
    assert!(vector.len() > 1, "Vector length must be bigger than 1 (this should be ensured in previous steps).");
    let mut q_index: usize = 0;
    let mut q_time: u64 = 0;
    let mut gained_space: u64 = 0;
    let mut removed_files: usize = 0;
    let mut failed_to_remove_files: usize = 0;
    match delete_method {
        DeleteMethod::OneOldest => {
            for (index, file) in vector.iter().enumerate() {
                if q_time == 0 || q_time > file.modified_date {
                    q_time = file.modified_date;
                    q_index = index;
                }
            }
            match fs::remove_file(vector[q_index].path.clone()) {
                Ok(_) => {
                    removed_files += 1;
                    gained_space += vector[q_index].size;
                }
                Err(_) => {
                    failed_to_remove_files += 1;
                    warnings.push("Failed to delete ".to_string() + vector[q_index].path.as_str());
                }
            };
        }
        DeleteMethod::OneNewest => {
            for (index, file) in vector.iter().enumerate() {
                if q_time == 0 || q_time < file.modified_date {
                    q_time = file.modified_date;
                    q_index = index;
                }
            }
            match fs::remove_file(vector[q_index].path.clone()) {
                Ok(_) => {
                    removed_files += 1;
                    gained_space += vector[q_index].size;
                }
                Err(_) => {
                    failed_to_remove_files += 1;
                    warnings.push("Failed to delete ".to_string() + vector[q_index].path.as_str());
                }
            };
        }
        DeleteMethod::AllExceptOldest => {
            for (index, file) in vector.iter().enumerate() {
                if q_time == 0 || q_time > file.modified_date {
                    q_time = file.modified_date;
                    q_index = index;
                }
            }
            for (index, file) in vector.iter().enumerate() {
                if q_index != index {
                    match fs::remove_file(file.path.clone()) {
                        Ok(_) => {
                            removed_files += 1;
                            gained_space += file.size;
                        }
                        Err(_) => {
                            failed_to_remove_files += 1;
                            warnings.push("Failed to delete ".to_string() + file.path.as_str());
                        }
                    };
                }
            }
        }
        DeleteMethod::AllExceptNewest => {
            for (index, file) in vector.iter().enumerate() {
                if q_time == 0 || q_time < file.modified_date {
                    q_time = file.modified_date;
                    q_index = index;
                }
            }
            for (index, file) in vector.iter().enumerate() {
                if q_index != index {
                    match fs::remove_file(file.path.clone()) {
                        Ok(_) => {
                            removed_files += 1;
                            gained_space += file.size;
                        }
                        Err(_) => {
                            failed_to_remove_files += 1;
                            warnings.push("Failed to delete ".to_string() + file.path.as_str());
                        }
                    };
                }
            }
        }
        DeleteMethod::None => {
            // Just don't remove any files
        }
    };
    (gained_space, removed_files, failed_to_remove_files)
}