use std::collections::BTreeMap;
#[cfg(target_family = "unix")]
use std::collections::HashSet;
use std::fs::{File, Metadata, OpenOptions};
use std::hash::Hasher;
use std::io::prelude::*;
use std::io::{self, Error, ErrorKind};
use std::io::{BufReader, BufWriter};
#[cfg(target_family = "unix")]
use std::os::unix::fs::MetadataExt;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread::sleep;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use std::{fs, mem, thread};
use crossbeam_channel::Receiver;
use directories_next::ProjectDirs;
use humansize::{file_size_opts as options, FileSize};
use rayon::prelude::*;
use crate::common::Common;
use crate::common_directory::Directories;
use crate::common_extensions::Extensions;
use crate::common_items::ExcludedItems;
use crate::common_messages::Messages;
use crate::common_traits::*;
use crate::fl;
use crate::localizer::generate_translation_hashmap;
#[derive(Debug)]
pub struct ProgressData {
    pub checking_method: CheckingMethod,
    pub current_stage: u8,
    pub max_stage: u8,
    pub files_checked: usize,
    pub files_to_check: usize,
}

#[derive(PartialEq, Eq, Clone, Debug, Copy)]
pub enum CheckingMethod {
    None,
    Name,
    Size,
    Hash,
}

#[derive(PartialEq, Eq, Clone, Debug, Copy)]
pub enum HashType {
    Blake3,
    Crc32,
    Xxh3,
}

impl HashType {
    fn hasher(&self) -> Box<dyn MyHasher> {
        match self {
            HashType::Blake3 => Box::new(blake3::Hasher::new()),
            HashType::Crc32 => Box::new(crc32fast::Hasher::new()),
            HashType::Xxh3 => Box::new(xxhash_rust::xxh3::Xxh3::new()),
        }
    }
}
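
// A minimal usage sketch for `HashType::hasher` (assuming the `MyHasher`
// trait from `common_traits` exposes `update(&[u8])` and a `finalize` that
// yields the hash as a `String`, as the hashing code further down relies on):
//
//     let mut hasher = HashType::Blake3.hasher();
//     hasher.update(b"some file contents");
//     let digest = hasher.finalize();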

#[derive(Eq, PartialEq, Clone, Debug)]
pub enum DeleteMethod {
    None,
    AllExceptNewest,
    AllExceptOldest,
    OneOldest,
    OneNewest,
    HardLink,
}

#[derive(Clone, Debug, PartialEq, Default)]
pub struct FileEntry {
    pub path: PathBuf,
    pub size: u64,
    pub modified_date: u64,
    pub hash: String,
}

/// Info struct with helpful information about results
#[derive(Default)]
pub struct Info {
    pub number_of_groups_by_size: usize,
    pub number_of_duplicated_files_by_size: usize,
    pub number_of_groups_by_hash: usize,
    pub number_of_duplicated_files_by_hash: usize,
    pub number_of_groups_by_name: usize,
    pub number_of_duplicated_files_by_name: usize,
    pub lost_space_by_size: u64,
    pub lost_space_by_hash: u64,
    pub gained_space: u64,
}

impl Info {
    pub fn new() -> Self {
        Default::default()
    }
}

/// Struct with the information required to perform a search
pub struct DuplicateFinder {
    text_messages: Messages,
    information: Info,
    files_with_identical_names: BTreeMap<String, Vec<FileEntry>>, // File name, files sharing that name
    files_with_identical_size: BTreeMap<u64, Vec<FileEntry>>,     // File size, files sharing that size
    files_with_identical_hashes: BTreeMap<u64, Vec<Vec<FileEntry>>>, // File size, groups of files with identical hashes
    directories: Directories,
    allowed_extensions: Extensions,
    excluded_items: ExcludedItems,
    recursive_search: bool,
    minimal_file_size: u64,
    maximal_file_size: u64,
    check_method: CheckingMethod,
    delete_method: DeleteMethod,
    hash_type: HashType,
    ignore_hard_links: bool,
    dryrun: bool,
    stopped_search: bool,
    use_cache: bool,
    use_prehash_cache: bool,
    minimal_cache_file_size: u64,
    minimal_prehash_cache_file_size: u64,
    delete_outdated_cache: bool,
}

impl DuplicateFinder {
    pub fn new() -> Self {
        Self {
            text_messages: Messages::new(),
            information: Info::new(),
            files_with_identical_names: Default::default(),
            files_with_identical_size: Default::default(),
            files_with_identical_hashes: Default::default(),
            recursive_search: true,
            allowed_extensions: Extensions::new(),
            check_method: CheckingMethod::None,
            delete_method: DeleteMethod::None,
            minimal_file_size: 8192,
            maximal_file_size: u64::MAX,
            directories: Directories::new(),
            excluded_items: ExcludedItems::new(),
            stopped_search: false,
            ignore_hard_links: true,
            hash_type: HashType::Blake3,
            dryrun: false,
            use_cache: true,
            use_prehash_cache: true,
            minimal_cache_file_size: 1024 * 1024 / 4, // By default, cache only files >= 256 KiB
            minimal_prehash_cache_file_size: 0,
            delete_outdated_cache: true,
        }
    }
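
    // A minimal configuration sketch (directory path is hypothetical),
    // assuming the setters below behave as their names suggest:
    //
    //     let mut finder = DuplicateFinder::new();
    //     finder.set_included_directory(vec![PathBuf::from("/home/user/photos")]);
    //     finder.set_check_method(CheckingMethod::Hash);
    //     finder.set_minimal_file_size(1024);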

    pub fn find_duplicates(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) {
        self.directories.optimize_directories(self.recursive_search, &mut self.text_messages);

        match self.check_method {
            CheckingMethod::Name => {
                if !self.check_files_name(stop_receiver, progress_sender) {
                    self.stopped_search = true;
                    return;
                }
            }
            CheckingMethod::Size => {
                if !self.check_files_size(stop_receiver, progress_sender) {
                    self.stopped_search = true;
                    return;
                }
            }
            CheckingMethod::Hash => {
                if !self.check_files_size(stop_receiver, progress_sender) {
                    self.stopped_search = true;
                    return;
                }
                if !self.check_files_hash(stop_receiver, progress_sender) {
                    self.stopped_search = true;
                    return;
                }
            }
            CheckingMethod::None => {
                panic!();
            }
        }
        self.delete_files();
        self.debug_print();
    }
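
    // A minimal invocation sketch: the search runs to completion unless a unit
    // value arrives on the optional stop channel (no progress reporting here):
    //
    //     let (_stop_sender, stop_receiver) = crossbeam_channel::unbounded::<()>();
    //     finder.find_duplicates(Some(&stop_receiver), None);
    //     for (size, groups) in finder.get_files_sorted_by_hash() {
    //         println!("{} bytes: {} group(s) of identical files", size, groups.len());
    //     }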

    pub fn set_delete_outdated_cache(&mut self, delete_outdated_cache: bool) {
        self.delete_outdated_cache = delete_outdated_cache;
    }

    pub const fn get_check_method(&self) -> &CheckingMethod {
        &self.check_method
    }

    pub fn get_stopped_search(&self) -> bool {
        self.stopped_search
    }

    pub fn set_minimal_cache_file_size(&mut self, minimal_cache_file_size: u64) {
        self.minimal_cache_file_size = minimal_cache_file_size;
    }

    pub fn set_minimal_prehash_cache_file_size(&mut self, minimal_prehash_cache_file_size: u64) {
        self.minimal_prehash_cache_file_size = minimal_prehash_cache_file_size;
    }

    pub const fn get_files_sorted_by_names(&self) -> &BTreeMap<String, Vec<FileEntry>> {
        &self.files_with_identical_names
    }

    pub fn set_use_cache(&mut self, use_cache: bool) {
        self.use_cache = use_cache;
    }

    pub fn set_use_prehash_cache(&mut self, use_prehash_cache: bool) {
        self.use_prehash_cache = use_prehash_cache;
    }

    pub const fn get_files_sorted_by_size(&self) -> &BTreeMap<u64, Vec<FileEntry>> {
        &self.files_with_identical_size
    }

    pub const fn get_files_sorted_by_hash(&self) -> &BTreeMap<u64, Vec<Vec<FileEntry>>> {
        &self.files_with_identical_hashes
    }

    pub fn set_maximal_file_size(&mut self, maximal_file_size: u64) {
        self.maximal_file_size = match maximal_file_size {
            0 => 1,
            t => t,
        };
    }

    pub const fn get_text_messages(&self) -> &Messages {
        &self.text_messages
    }

    pub const fn get_information(&self) -> &Info {
        &self.information
    }

    pub fn set_hash_type(&mut self, hash_type: HashType) {
        self.hash_type = hash_type;
    }

    pub fn set_ignore_hard_links(&mut self, ignore_hard_links: bool) {
        self.ignore_hard_links = ignore_hard_links;
    }

    pub fn set_dryrun(&mut self, dryrun: bool) {
        self.dryrun = dryrun;
    }

    pub fn set_check_method(&mut self, check_method: CheckingMethod) {
        self.check_method = check_method;
    }

    pub fn set_delete_method(&mut self, delete_method: DeleteMethod) {
        self.delete_method = delete_method;
    }

    pub fn set_minimal_file_size(&mut self, minimal_file_size: u64) {
        self.minimal_file_size = match minimal_file_size {
            0 => 1,
            t => t,
        };
    }

    pub fn set_recursive_search(&mut self, recursive_search: bool) {
        self.recursive_search = recursive_search;
    }

    pub fn set_included_directory(&mut self, included_directory: Vec<PathBuf>) -> bool {
        self.directories.set_included_directory(included_directory, &mut self.text_messages)
    }

    pub fn set_excluded_directory(&mut self, excluded_directory: Vec<PathBuf>) {
        self.directories.set_excluded_directory(excluded_directory, &mut self.text_messages);
    }

    pub fn set_allowed_extensions(&mut self, allowed_extensions: String) {
        self.allowed_extensions.set_allowed_extensions(allowed_extensions, &mut self.text_messages);
    }

    pub fn set_excluded_items(&mut self, excluded_items: Vec<String>) {
        self.excluded_items.set_excluded_items(excluded_items, &mut self.text_messages);
    }

    fn check_files_name(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
        let start_time: SystemTime = SystemTime::now();
        let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // Small enough not to waste much memory, big enough to store most paths without resizing the vector

        // Add root folders to start the search from
        for id in &self.directories.included_directories {
            folders_to_check.push(id.clone());
        }

        //// PROGRESS THREAD START
        const LOOP_DURATION: u32 = 200; // in ms
        let progress_thread_run = Arc::new(AtomicBool::new(true));
        let atomic_file_counter = Arc::new(AtomicUsize::new(0));
        let progress_thread_handle = if let Some(progress_sender) = progress_sender {
            let progress_send = progress_sender.clone();
            let progress_thread_run = progress_thread_run.clone();
            let atomic_file_counter = atomic_file_counter.clone();
            thread::spawn(move || loop {
                progress_send
                    .unbounded_send(ProgressData {
                        checking_method: CheckingMethod::Name,
                        current_stage: 0,
                        max_stage: 0,
                        files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
                        files_to_check: 0,
                    })
                    .unwrap();
                if !progress_thread_run.load(Ordering::Relaxed) {
                    break;
                }
                sleep(Duration::from_millis(LOOP_DURATION as u64));
            })
        } else {
            thread::spawn(|| {})
        };
        //// PROGRESS THREAD END
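
        // The pattern above recurs in this file: a helper thread periodically
        // pushes `ProgressData` snapshots to the GUI, and every exit path must
        // flip `progress_thread_run` to false and join `progress_thread_handle`
        // so the sender thread is not left running.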

        while !folders_to_check.is_empty() {
            if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
                // Stop the thread that sends progress info to the GUI
                progress_thread_run.store(false, Ordering::Relaxed);
                progress_thread_handle.join().unwrap();
                return false;
            }

            let segments: Vec<_> = folders_to_check
                .par_iter()
                .map(|current_folder| {
                    let mut dir_result = vec![];
                    let mut warnings = vec![];
                    let mut fe_result = vec![];
                    // Read children of the current dir
                    let read_dir = match fs::read_dir(&current_folder) {
                        Ok(t) => t,
                        Err(e) => {
                            warnings.push(fl!(
                                "core_cannot_open_dir",
                                generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                            ));
                            return (dir_result, warnings, fe_result);
                        }
                    };

                    // Check every sub folder/file/link etc.
                    'dir: for entry in read_dir {
                        let entry_data = match entry {
                            Ok(t) => t,
                            Err(e) => {
                                warnings.push(fl!(
                                    "core_cannot_read_entry_dir",
                                    generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                                ));
                                continue 'dir;
                            }
                        };
                        let metadata: Metadata = match entry_data.metadata() {
                            Ok(t) => t,
                            Err(e) => {
                                warnings.push(fl!(
                                    "core_cannot_read_metadata_dir",
                                    generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                                ));
                                continue 'dir;
                            }
                        };
                        if metadata.is_dir() {
                            if !self.recursive_search {
                                continue 'dir;
                            }

                            let next_folder = current_folder.join(entry_data.file_name());
                            if self.directories.is_excluded(&next_folder) {
                                continue 'dir;
                            }

                            if self.excluded_items.is_excluded(&next_folder) {
                                continue 'dir;
                            }

                            dir_result.push(next_folder);
                        } else if metadata.is_file() {
                            atomic_file_counter.fetch_add(1, Ordering::Relaxed);

                            let file_name_lowercase: String = match entry_data.file_name().into_string() {
                                Ok(t) => t,
                                Err(_inspected) => {
                                    warnings.push(fl!(
                                        "core_file_not_utf8_name",
                                        generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
                                    ));
                                    continue 'dir;
                                }
                            }
                            .to_lowercase();

                            if !self.allowed_extensions.matches_filename(&file_name_lowercase) {
                                continue 'dir;
                            }

                            if (self.minimal_file_size..=self.maximal_file_size).contains(&metadata.len()) {
                                let current_file_name = current_folder.join(entry_data.file_name());
                                if self.excluded_items.is_excluded(&current_file_name) {
                                    continue 'dir;
                                }

                                let fe: FileEntry = FileEntry {
                                    path: current_file_name.clone(),
                                    size: metadata.len(),
                                    modified_date: match metadata.modified() {
                                        Ok(t) => match t.duration_since(UNIX_EPOCH) {
                                            Ok(d) => d.as_secs(),
                                            Err(_inspected) => {
                                                warnings.push(fl!(
                                                    "core_file_modified_before_epoch",
                                                    generate_translation_hashmap(vec![("name", current_file_name.display().to_string())])
                                                ));
                                                0
                                            }
                                        },
                                        Err(e) => {
                                            warnings.push(fl!(
                                                "core_file_no_modification_date",
                                                generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())])
                                            ));
                                            0
                                        }
                                    },
                                    hash: "".to_string(),
                                };

                                fe_result.push((entry_data.file_name().to_string_lossy().to_string(), fe));
                            }
                        }
                    }
                    (dir_result, warnings, fe_result)
                })
                .collect();

            // Advance the frontier
            folders_to_check.clear();

            // Process collected data
            for (segment, warnings, fe_result) in segments {
                folders_to_check.extend(segment);
                self.text_messages.warnings.extend(warnings);
                for (name, fe) in fe_result {
                    self.files_with_identical_names.entry(name).or_insert_with(Vec::new).push(fe);
                }
            }
        }

        // Stop the thread that sends progress info to the GUI
        progress_thread_run.store(false, Ordering::Relaxed);
        progress_thread_handle.join().unwrap();

        // Create a new BTreeMap without single entries (files that have no duplicates)
        let mut new_map: BTreeMap<String, Vec<FileEntry>> = Default::default();

        for (name, vector) in &self.files_with_identical_names {
            if vector.len() > 1 {
                self.information.number_of_duplicated_files_by_name += vector.len() - 1;
                self.information.number_of_groups_by_name += 1;
                new_map.insert(name.clone(), vector.clone());
            }
        }
        self.files_with_identical_names = new_map;

        Common::print_time(start_time, SystemTime::now(), "check_files_name".to_string());
        true
    }

    /// Reads the length of each file and groups files into buckets by size.
    /// Buckets that contain only one file are removed, since a single file cannot be a duplicate.
    fn check_files_size(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
        let start_time: SystemTime = SystemTime::now();
        let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // Small enough not to waste much memory, big enough to store most paths without resizing the vector

        // Add root folders to start the search from
        for id in &self.directories.included_directories {
            folders_to_check.push(id.clone());
        }

        //// PROGRESS THREAD START
        const LOOP_DURATION: u32 = 200; // in ms
        let progress_thread_run = Arc::new(AtomicBool::new(true));
        let atomic_file_counter = Arc::new(AtomicUsize::new(0));
        let progress_thread_handle = if let Some(progress_sender) = progress_sender {
            let progress_send = progress_sender.clone();
            let progress_thread_run = progress_thread_run.clone();
            let atomic_file_counter = atomic_file_counter.clone();
            let checking_method = self.check_method;
            let max_stage = match self.check_method {
                CheckingMethod::Size => 0,
                CheckingMethod::Hash => 2,
                _ => panic!(),
            };
            thread::spawn(move || loop {
                progress_send
                    .unbounded_send(ProgressData {
                        checking_method,
                        current_stage: 0,
                        max_stage,
                        files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
                        files_to_check: 0,
                    })
                    .unwrap();
                if !progress_thread_run.load(Ordering::Relaxed) {
                    break;
                }
                sleep(Duration::from_millis(LOOP_DURATION as u64));
            })
        } else {
            thread::spawn(|| {})
        };
        //// PROGRESS THREAD END

        while !folders_to_check.is_empty() {
            if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
                // Stop the thread that sends progress info to the GUI
                progress_thread_run.store(false, Ordering::Relaxed);
                progress_thread_handle.join().unwrap();
                return false;
            }

            let segments: Vec<_> = folders_to_check
                .par_iter()
                .map(|current_folder| {
                    let mut dir_result = vec![];
                    let mut warnings = vec![];
                    let mut fe_result = vec![];
                    // Read children of the current dir
                    let read_dir = match fs::read_dir(&current_folder) {
                        Ok(t) => t,
                        Err(e) => {
                            warnings.push(fl!(
                                "core_cannot_open_dir",
                                generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                            ));
                            return (dir_result, warnings, fe_result);
                        }
                    };

                    // Check every sub folder/file/link etc.
                    'dir: for entry in read_dir {
                        let entry_data = match entry {
                            Ok(t) => t,
                            Err(e) => {
                                warnings.push(fl!(
                                    "core_cannot_read_entry_dir",
                                    generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                                ));
                                continue 'dir;
                            }
                        };
                        let metadata: Metadata = match entry_data.metadata() {
                            Ok(t) => t,
                            Err(e) => {
                                warnings.push(fl!(
                                    "core_cannot_read_metadata_dir",
                                    generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                                ));
                                continue 'dir;
                            }
                        };
                        if metadata.is_dir() {
                            if !self.recursive_search {
                                continue 'dir;
                            }

                            let next_folder = current_folder.join(entry_data.file_name());
                            if self.directories.is_excluded(&next_folder) {
                                continue 'dir;
                            }

                            if self.excluded_items.is_excluded(&next_folder) {
                                continue 'dir;
                            }

                            dir_result.push(next_folder);
                        } else if metadata.is_file() {
                            atomic_file_counter.fetch_add(1, Ordering::Relaxed);

                            let file_name_lowercase: String = match entry_data.file_name().into_string() {
                                Ok(t) => t,
                                Err(_inspected) => {
                                    warnings.push(fl!(
                                        "core_file_not_utf8_name",
                                        generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
                                    ));
                                    continue 'dir;
                                }
                            }
                            .to_lowercase();

                            if !self.allowed_extensions.matches_filename(&file_name_lowercase) {
                                continue 'dir;
                            }

                            if (self.minimal_file_size..=self.maximal_file_size).contains(&metadata.len()) {
                                let current_file_name = current_folder.join(entry_data.file_name());
                                if self.excluded_items.is_excluded(&current_file_name) {
                                    continue 'dir;
                                }

                                // Create a new file entry
                                let fe: FileEntry = FileEntry {
                                    path: current_file_name.clone(),
                                    size: metadata.len(),
                                    modified_date: match metadata.modified() {
                                        Ok(t) => match t.duration_since(UNIX_EPOCH) {
                                            Ok(d) => d.as_secs(),
                                            Err(_inspected) => {
                                                warnings.push(fl!(
                                                    "core_file_modified_before_epoch",
                                                    generate_translation_hashmap(vec![("name", current_file_name.display().to_string())])
                                                ));
                                                0
                                            }
                                        },
                                        Err(e) => {
                                            warnings.push(fl!(
                                                "core_file_no_modification_date",
                                                generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())])
                                            ));
                                            0
                                        }
                                    },
                                    hash: "".to_string(),
                                };

                                fe_result.push(fe);
                            }
                        }
                    }