1
0
Fork 0
mirror of synced 2024-05-07 14:03:48 +12:00

Implement DirTraversal (#531)

* Implement DirTraversal

* rustfmt

* Fix for options

* Fix entries

* Fix symlinks

* Fix other entry type
This commit is contained in:
Peter Blackson 2022-01-01 21:07:20 +01:00 committed by GitHub
parent 58c19dd912
commit 51df63b8ea
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
18 changed files with 961 additions and 983 deletions

View file

@ -3,7 +3,8 @@ use std::path::PathBuf;
use img_hash::{FilterType, HashAlg};
use structopt::StructOpt;
use czkawka_core::duplicate::{CheckingMethod, DeleteMethod, HashType};
use czkawka_core::common_dir_traversal::CheckingMethod;
use czkawka_core::duplicate::{DeleteMethod, HashType};
use czkawka_core::same_music::MusicSimilarity;
use czkawka_core::similar_images::SimilarityPreset;

View file

@ -0,0 +1,665 @@
use std::collections::BTreeMap;
use std::fs::Metadata;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread::sleep;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use std::{fs, thread};
use crossbeam_channel::Receiver;
use rayon::prelude::*;
use crate::common_directory::Directories;
use crate::common_extensions::Extensions;
use crate::common_items::ExcludedItems;
use crate::fl;
use crate::localizer::generate_translation_hashmap;
/// Progress snapshot periodically pushed to the GUI over an unbounded channel
/// by the traversal's progress thread.
#[derive(Debug)]
pub struct ProgressData {
    /// Checking method the current operation runs with.
    pub checking_method: CheckingMethod,
    /// Index of the current stage (traversal itself always reports stage 0).
    pub current_stage: u8,
    /// Highest stage index of the whole operation.
    pub max_stage: u8,
    /// Number of entries processed so far.
    pub entries_checked: usize,
    /// Total number of entries; during traversal this is 0 because the total
    /// is not known while the directory tree is still being walked.
    pub entries_to_check: usize,
}
/// How duplicate candidates are compared (shared by the duplicate finder and
/// reported back through [`ProgressData`]).
#[derive(PartialEq, Eq, Clone, Debug, Copy)]
pub enum CheckingMethod {
    /// No method selected.
    None,
    /// Compare by file name.
    Name,
    /// Compare by file size.
    Size,
    /// Compare by content hash.
    Hash,
}
/// A single file found during traversal.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct FileEntry {
    /// Full path to the file.
    pub path: PathBuf,
    /// File size in bytes (0 for invalid-symlink entries).
    pub size: u64,
    /// Modification time as seconds since the Unix epoch (0 when unavailable).
    pub modified_date: u64,
    /// Content hash; filled in later by the caller, empty after traversal.
    pub hash: String,
    /// Set only for entries produced by the invalid-symlink scan.
    pub symlink_info: Option<SymlinkInfo>,
}
// Symlinks
/// Maximum number of link hops followed before a chain is declared recursive.
const MAX_NUMBER_OF_SYMLINK_JUMPS: i32 = 20;
/// Details about a broken symbolic link.
#[derive(Clone, Debug, PartialEq)]
pub struct SymlinkInfo {
    /// First target the link points at (may itself be another link).
    pub destination_path: PathBuf,
    /// Why the link is considered invalid.
    pub type_of_error: ErrorType,
}
/// Classification of an invalid symlink.
#[derive(Clone, Debug, PartialEq)]
pub enum ErrorType {
    /// Following the chain exceeded `MAX_NUMBER_OF_SYMLINK_JUMPS` hops.
    InfiniteRecursion,
    /// The link target does not exist.
    NonExistentFile,
}
// Empty folders
/// Enum with values which show if folder is empty.
/// In function "optimize_folders" automatically "Maybe" is changed to "Yes", so it is not necessary to put it here
#[derive(Eq, PartialEq, Copy, Clone)]
enum FolderEmptiness {
    /// A file/symlink (or excluded child) was found inside, so definitely not empty.
    No,
    /// Nothing found yet; may still turn out to be empty.
    Maybe,
}
/// Struct assigned to each checked folder with parent path(used to ignore parent if children are not empty) and flag which shows if folder is empty
#[derive(Clone)]
pub struct FolderEntry {
    parent_path: Option<PathBuf>, // Usable only when finding; None for root folders
    is_empty: FolderEmptiness,
    /// Modification time as seconds since the Unix epoch (0 for roots / on error).
    pub modified_date: u64,
}
// Collection mode (files / empty folders)
/// What the traversal should collect; selects which match arms run per entry
/// and which `DirTraversalResult` variant is produced.
#[derive(Copy, Clone, Eq, PartialEq)]
pub enum Collect {
    /// Collect folders that contain nothing.
    EmptyFolders,
    /// Collect broken symbolic links as `FileEntry` values with `symlink_info`.
    InvalidSymlinks,
    /// Collect regular files as `FileEntry` values.
    Files,
}
/// Internal classification of a directory entry, derived from its metadata.
#[derive(Eq, PartialEq)]
enum EntryType {
    File,
    Dir,
    Symlink,
    /// Anything else (device nodes, sockets, ...); always ignored.
    Other,
}
/// Builder for [`DirTraversal`].
///
/// `F` is the grouping closure type; it starts as `()` and is fixed by
/// `group_by`. Fields wrapped in `Option` are either required (checked in
/// `build`) or have defaults applied there.
pub struct DirTraversalBuilder<'a, 'b, F> {
    group_by: Option<F>,
    root_dirs: Vec<PathBuf>,
    stop_receiver: Option<&'a Receiver<()>>,
    progress_sender: Option<&'b futures::channel::mpsc::UnboundedSender<ProgressData>>,
    minimal_file_size: Option<u64>,
    maximal_file_size: Option<u64>,
    checking_method: CheckingMethod,
    max_stage: u8,
    collect: Collect,
    recursive_search: bool,
    directories: Option<Directories>,
    excluded_items: Option<ExcludedItems>,
    allowed_extensions: Option<Extensions>,
}
/// Fully-configured directory traversal; construct via [`DirTraversalBuilder`]
/// and execute with `run`.
pub struct DirTraversal<'a, 'b, F> {
    /// Closure computing the grouping key for each found `FileEntry`.
    group_by: F,
    /// Directories where the walk starts.
    root_dirs: Vec<PathBuf>,
    /// Optional channel polled each pass; a message aborts the traversal.
    stop_receiver: Option<&'a Receiver<()>>,
    /// Optional channel receiving periodic [`ProgressData`] updates.
    progress_sender: Option<&'b futures::channel::mpsc::UnboundedSender<ProgressData>>,
    recursive_search: bool,
    directories: Directories,
    excluded_items: ExcludedItems,
    allowed_extensions: Extensions,
    /// Inclusive size bounds applied when collecting files.
    minimal_file_size: u64,
    maximal_file_size: u64,
    checking_method: CheckingMethod,
    max_stage: u8,
    collect: Collect,
}
impl<'a, 'b> Default for DirTraversalBuilder<'a, 'b, ()> {
fn default() -> Self {
Self::new()
}
}
impl<'a, 'b> DirTraversalBuilder<'a, 'b, ()> {
    /// Creates an empty builder: no grouping closure, no root directories,
    /// no channels, and every option left unset or at its neutral default.
    pub fn new() -> DirTraversalBuilder<'a, 'b, ()> {
        DirTraversalBuilder {
            group_by: None,
            root_dirs: Vec::new(),
            stop_receiver: None,
            progress_sender: None,
            minimal_file_size: None,
            maximal_file_size: None,
            checking_method: CheckingMethod::None,
            max_stage: 0,
            collect: Collect::Files,
            recursive_search: false,
            directories: None,
            excluded_items: None,
            allowed_extensions: None,
        }
    }
}
impl<'a, 'b, F> DirTraversalBuilder<'a, 'b, F> {
    /// Sets the directories where the walk starts.
    pub fn root_dirs(mut self, dirs: Vec<PathBuf>) -> Self {
        self.root_dirs = dirs;
        self
    }
    /// Sets the optional channel used to abort the traversal early.
    pub fn stop_receiver(mut self, stop_receiver: Option<&'a Receiver<()>>) -> Self {
        self.stop_receiver = stop_receiver;
        self
    }
    /// Sets the optional channel receiving periodic [`ProgressData`] updates.
    pub fn progress_sender(mut self, progress_sender: Option<&'b futures::channel::mpsc::UnboundedSender<ProgressData>>) -> Self {
        self.progress_sender = progress_sender;
        self
    }
    /// Sets the checking method reported in progress updates.
    pub fn checking_method(mut self, checking_method: CheckingMethod) -> Self {
        self.checking_method = checking_method;
        self
    }
    /// Sets the maximum stage index reported in progress updates.
    pub fn max_stage(mut self, max_stage: u8) -> Self {
        self.max_stage = max_stage;
        self
    }
    /// Sets the inclusive lower size bound for collected files (default 0).
    pub fn minimal_file_size(mut self, minimal_file_size: u64) -> Self {
        self.minimal_file_size = Some(minimal_file_size);
        self
    }
    /// Sets the inclusive upper size bound for collected files (default `u64::MAX`).
    pub fn maximal_file_size(mut self, maximal_file_size: u64) -> Self {
        self.maximal_file_size = Some(maximal_file_size);
        self
    }
    /// Sets what the traversal collects (files, empty folders, or invalid symlinks).
    pub fn collect(mut self, collect: Collect) -> Self {
        self.collect = collect;
        self
    }
    /// Sets the include/exclude directory configuration (required).
    pub fn directories(mut self, directories: Directories) -> Self {
        self.directories = Some(directories);
        self
    }
    /// Sets the allowed-extension filter (required).
    pub fn allowed_extensions(mut self, allowed_extensions: Extensions) -> Self {
        self.allowed_extensions = Some(allowed_extensions);
        self
    }
    /// Sets the excluded-item filter (required).
    pub fn excluded_items(mut self, excluded_items: ExcludedItems) -> Self {
        self.excluded_items = Some(excluded_items);
        self
    }
    /// Enables or disables descending into subdirectories.
    pub fn recursive_search(mut self, recursive_search: bool) -> Self {
        self.recursive_search = recursive_search;
        self
    }
    /// Sets the closure that computes the grouping key for each found file,
    /// fixing the builder's closure type parameter (required).
    pub fn group_by<G, T>(self, group_by: G) -> DirTraversalBuilder<'a, 'b, G>
    where
        G: Fn(&FileEntry) -> T,
    {
        DirTraversalBuilder {
            group_by: Some(group_by),
            root_dirs: self.root_dirs,
            stop_receiver: self.stop_receiver,
            progress_sender: self.progress_sender,
            directories: self.directories,
            allowed_extensions: self.allowed_extensions,
            excluded_items: self.excluded_items,
            recursive_search: self.recursive_search,
            maximal_file_size: self.maximal_file_size,
            minimal_file_size: self.minimal_file_size,
            collect: self.collect,
            checking_method: self.checking_method,
            max_stage: self.max_stage,
        }
    }
    /// Finalizes the builder into a runnable [`DirTraversal`], applying the
    /// default size bounds (`0..=u64::MAX`) when none were set.
    ///
    /// # Panics
    /// Panics when any required field was not set: `group_by`, `directories`,
    /// `excluded_items`, or `allowed_extensions`. Each panic message names the
    /// missing field so misconfiguration is easy to diagnose.
    pub fn build(self) -> DirTraversal<'a, 'b, F> {
        DirTraversal {
            group_by: self.group_by.expect("could not build - group_by is not set"),
            root_dirs: self.root_dirs,
            stop_receiver: self.stop_receiver,
            progress_sender: self.progress_sender,
            checking_method: self.checking_method,
            max_stage: self.max_stage,
            minimal_file_size: self.minimal_file_size.unwrap_or(0),
            maximal_file_size: self.maximal_file_size.unwrap_or(u64::MAX),
            collect: self.collect,
            directories: self.directories.expect("could not build - directories is not set"),
            excluded_items: self.excluded_items.expect("could not build - excluded_items is not set"),
            allowed_extensions: self.allowed_extensions.expect("could not build - allowed_extensions is not set"),
            recursive_search: self.recursive_search,
        }
    }
}
/// Outcome of a [`DirTraversal`] run.
///
/// Which success variant is produced depends on the configured [`Collect`]
/// mode: `Files`/`InvalidSymlinks` yield `SuccessFiles`, `EmptyFolders`
/// yields `SuccessFolders`.
pub enum DirTraversalResult<T: Ord + PartialOrd> {
    SuccessFiles {
        /// When the traversal started (for timing logs).
        start_time: SystemTime,
        /// Non-fatal problems encountered while walking (unreadable dirs, etc.).
        warnings: Vec<String>,
        /// Found files, keyed by the `group_by` closure's result.
        grouped_file_entries: BTreeMap<T, Vec<FileEntry>>,
    },
    SuccessFolders {
        start_time: SystemTime,
        warnings: Vec<String>,
        folder_entries: BTreeMap<PathBuf, FolderEntry>, // Path, FolderEntry
    },
    /// The traversal was aborted via the stop receiver.
    Stopped,
}
impl<'a, 'b, F, T> DirTraversal<'a, 'b, F>
where
    F: Fn(&FileEntry) -> T,
    T: Ord + PartialOrd,
{
    /// Walks the root directories breadth-first, one "frontier" of folders at
    /// a time, scanning each frontier in parallel with rayon.
    ///
    /// While running, a background thread periodically sends [`ProgressData`]
    /// to `progress_sender` (if any); a message on `stop_receiver` aborts the
    /// walk and yields [`DirTraversalResult::Stopped`]. Otherwise the result
    /// variant matches the configured [`Collect`] mode.
    pub fn run(self) -> DirTraversalResult<T> {
        let mut all_warnings = vec![];
        let mut grouped_file_entries: BTreeMap<T, Vec<FileEntry>> = BTreeMap::new();
        let mut folder_entries: BTreeMap<PathBuf, FolderEntry> = BTreeMap::new();

        let start_time: SystemTime = SystemTime::now();

        // This should be small enough to not see too big a difference, and big enough
        // to store most sets of paths without needing to resize the vector.
        let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2);

        // When scanning for empty folders, every root starts as "maybe empty"
        // with no parent; children discovered later record their parent path.
        if self.collect == Collect::EmptyFolders {
            for dir in &self.root_dirs {
                folder_entries.insert(
                    dir.clone(),
                    FolderEntry {
                        parent_path: None,
                        is_empty: FolderEmptiness::Maybe,
                        modified_date: 0,
                    },
                );
            }
        }

        // Add root folders for finding
        folders_to_check.extend(self.root_dirs);

        //// PROGRESS THREAD START
        const LOOP_DURATION: u32 = 200; //in ms
        let progress_thread_run = Arc::new(AtomicBool::new(true));

        let atomic_entry_counter = Arc::new(AtomicUsize::new(0));

        let progress_thread_handle = if let Some(progress_sender) = self.progress_sender {
            let progress_send = progress_sender.clone();
            let progress_thread_run = progress_thread_run.clone();
            let atomic_entry_counter = atomic_entry_counter.clone();
            let checking_method = self.checking_method;
            let max_stage = self.max_stage;
            // Sends a progress snapshot every LOOP_DURATION ms until told to stop.
            thread::spawn(move || loop {
                progress_send
                    .unbounded_send(ProgressData {
                        checking_method,
                        current_stage: 0,
                        max_stage,
                        entries_checked: atomic_entry_counter.load(Ordering::Relaxed) as usize,
                        // Total is unknown while walking, so always reported as 0 here.
                        entries_to_check: 0,
                    })
                    .unwrap();
                if !progress_thread_run.load(Ordering::Relaxed) {
                    break;
                }
                sleep(Duration::from_millis(LOOP_DURATION as u64));
            })
        } else {
            // No sender configured - spawn a no-op thread so join() below is uniform.
            thread::spawn(|| {})
        };
        //// PROGRESS THREAD END

        // Move the configuration out of self; group_by stays behind (partial move)
        // and is called through self.group_by when merging results.
        let DirTraversal {
            collect,
            directories,
            excluded_items,
            allowed_extensions,
            recursive_search,
            minimal_file_size,
            maximal_file_size,
            stop_receiver,
            ..
        } = self;

        while !folders_to_check.is_empty() {
            // Check for an abort request once per frontier.
            if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
                // End thread which send info to gui
                progress_thread_run.store(false, Ordering::Relaxed);
                progress_thread_handle.join().unwrap();
                return DirTraversalResult::Stopped;
            }

            // Scan every folder of the current frontier in parallel; each folder
            // yields (subdirs to visit next, warnings, file entries,
            // folders proven non-empty, new folder entries).
            let segments: Vec<_> = folders_to_check
                .par_iter()
                .map(|current_folder| {
                    let mut dir_result = vec![];
                    let mut warnings = vec![];
                    let mut fe_result = vec![];
                    let mut set_as_not_empty_folder_list = vec![];
                    let mut folder_entries_list = vec![];

                    // Read current dir childrens
                    let read_dir = match fs::read_dir(&current_folder) {
                        Ok(t) => t,
                        Err(e) => {
                            warnings.push(fl!(
                                "core_cannot_open_dir",
                                generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                            ));
                            return (dir_result, warnings, fe_result, set_as_not_empty_folder_list, folder_entries_list);
                        }
                    };

                    // Check every sub folder/file/link etc.
                    'dir: for entry in read_dir {
                        let entry_data = match entry {
                            Ok(t) => t,
                            Err(e) => {
                                warnings.push(fl!(
                                    "core_cannot_read_entry_dir",
                                    generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                                ));
                                continue 'dir;
                            }
                        };
                        // NOTE(review): DirEntry::metadata does not follow symlinks,
                        // which is what allows the Symlink branch below — confirm on all platforms.
                        let metadata: Metadata = match entry_data.metadata() {
                            Ok(t) => t,
                            Err(e) => {
                                warnings.push(fl!(
                                    "core_cannot_read_metadata_dir",
                                    generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
                                ));
                                continue 'dir;
                            }
                        };

                        // Classify the entry; everything unknown falls into Other.
                        let entry_type;
                        if metadata.is_dir() {
                            entry_type = EntryType::Dir;
                        } else if metadata.file_type().is_symlink() {
                            entry_type = EntryType::Symlink;
                        } else if metadata.is_file() {
                            entry_type = EntryType::File;
                        } else {
                            entry_type = EntryType::Other;
                        }

                        // Dispatch on (what the entry is, what we are collecting).
                        match (entry_type, collect) {
                            (EntryType::Dir, Collect::Files) | (EntryType::Dir, Collect::InvalidSymlinks) => {
                                // Queue subdirectory for the next frontier, unless filtered out.
                                if !recursive_search {
                                    continue 'dir;
                                }
                                let next_folder = current_folder.join(entry_data.file_name());
                                if directories.is_excluded(&next_folder) {
                                    continue 'dir;
                                }
                                if excluded_items.is_excluded(&next_folder) {
                                    continue 'dir;
                                }
                                dir_result.push(next_folder);
                            }
                            (EntryType::Dir, Collect::EmptyFolders) => {
                                atomic_entry_counter.fetch_add(1, Ordering::Relaxed);
                                let next_folder = current_folder.join(entry_data.file_name());
                                if excluded_items.is_excluded(&next_folder) || directories.is_excluded(&next_folder) {
                                    // An excluded child still proves the parent is not empty.
                                    set_as_not_empty_folder_list.push(current_folder.clone());
                                    continue 'dir;
                                }
                                dir_result.push(next_folder.clone());
                                folder_entries_list.push((
                                    next_folder.clone(),
                                    FolderEntry {
                                        parent_path: Some(current_folder.clone()),
                                        is_empty: FolderEmptiness::Maybe,
                                        modified_date: match metadata.modified() {
                                            Ok(t) => match t.duration_since(UNIX_EPOCH) {
                                                Ok(d) => d.as_secs(),
                                                Err(_inspected) => {
                                                    // Pre-epoch mtime - warn and fall back to 0.
                                                    warnings.push(fl!(
                                                        "core_folder_modified_before_epoch",
                                                        generate_translation_hashmap(vec![("name", current_folder.display().to_string())])
                                                    ));
                                                    0
                                                }
                                            },
                                            Err(e) => {
                                                warnings.push(fl!(
                                                    "core_folder_no_modification_date",
                                                    generate_translation_hashmap(vec![("name", current_folder.display().to_string()), ("reason", e.to_string())])
                                                ));
                                                0
                                            }
                                        },
                                    },
                                ));
                            }
                            (EntryType::File, Collect::Files) => {
                                atomic_entry_counter.fetch_add(1, Ordering::Relaxed);

                                // Names that are not valid UTF-8 cannot be matched against
                                // extension filters, so they are skipped with a warning.
                                let file_name_lowercase: String = match entry_data.file_name().into_string() {
                                    Ok(t) => t,
                                    Err(_inspected) => {
                                        warnings.push(fl!(
                                            "core_file_not_utf8_name",
                                            generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
                                        ));
                                        continue 'dir;
                                    }
                                }
                                .to_lowercase();

                                if !allowed_extensions.matches_filename(&file_name_lowercase) {
                                    continue 'dir;
                                }

                                // Both size bounds are inclusive.
                                if (minimal_file_size..=maximal_file_size).contains(&metadata.len()) {
                                    let current_file_name = current_folder.join(entry_data.file_name());
                                    if excluded_items.is_excluded(&current_file_name) {
                                        continue 'dir;
                                    }

                                    // Creating new file entry
                                    let fe: FileEntry = FileEntry {
                                        path: current_file_name.clone(),
                                        size: metadata.len(),
                                        modified_date: match metadata.modified() {
                                            Ok(t) => match t.duration_since(UNIX_EPOCH) {
                                                Ok(d) => d.as_secs(),
                                                Err(_inspected) => {
                                                    warnings.push(fl!(
                                                        "core_file_modified_before_epoch",
                                                        generate_translation_hashmap(vec![("name", current_file_name.display().to_string())])
                                                    ));
                                                    0
                                                }
                                            },
                                            Err(e) => {
                                                warnings.push(fl!(
                                                    "core_file_no_modification_date",
                                                    generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())])
                                                ));
                                                0
                                            }
                                        },
                                        hash: "".to_string(),
                                        symlink_info: None,
                                    };

                                    fe_result.push(fe);
                                }
                            }
                            (EntryType::File, Collect::EmptyFolders) | (EntryType::Symlink, Collect::EmptyFolders) => {
                                // Any file or symlink inside means this folder is not empty.
                                set_as_not_empty_folder_list.push(current_folder.clone());
                            }
                            (EntryType::File, Collect::InvalidSymlinks) => {
                                // Regular files are only counted for progress in this mode.
                                atomic_entry_counter.fetch_add(1, Ordering::Relaxed);
                            }
                            (EntryType::Symlink, Collect::InvalidSymlinks) => {
                                atomic_entry_counter.fetch_add(1, Ordering::Relaxed);

                                let file_name_lowercase: String = match entry_data.file_name().into_string() {
                                    Ok(t) => t,
                                    Err(_inspected) => {
                                        warnings.push(fl!(
                                            "core_file_not_utf8_name",
                                            generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
                                        ));
                                        continue 'dir;
                                    }
                                }
                                .to_lowercase();

                                if !allowed_extensions.matches_filename(&file_name_lowercase) {
                                    continue 'dir;
                                }

                                let current_file_name = current_folder.join(entry_data.file_name());
                                if excluded_items.is_excluded(&current_file_name) {
                                    continue 'dir;
                                }

                                // Follow the link chain to decide whether it is broken,
                                // giving up after MAX_NUMBER_OF_SYMLINK_JUMPS hops.
                                let mut destination_path = PathBuf::new();
                                let type_of_error;

                                match current_file_name.read_link() {
                                    Ok(t) => {
                                        destination_path.push(t);
                                        let mut number_of_loop = 0;
                                        let mut current_path = current_file_name.clone();
                                        loop {
                                            if number_of_loop == 0 && !current_path.exists() {
                                                type_of_error = ErrorType::NonExistentFile;
                                                break;
                                            }
                                            if number_of_loop == MAX_NUMBER_OF_SYMLINK_JUMPS {
                                                type_of_error = ErrorType::InfiniteRecursion;
                                                break;
                                            }

                                            current_path = match current_path.read_link() {
                                                Ok(t) => t,
                                                Err(_inspected) => {
                                                    // Looks that some next symlinks are broken, but we do nothing with it - TODO why they are broken
                                                    continue 'dir;
                                                }
                                            };

                                            number_of_loop += 1;
                                        }
                                    }
                                    Err(_inspected) => {
                                        // Failed to load info about it
                                        type_of_error = ErrorType::NonExistentFile;
                                    }
                                }

                                // Creating new file entry
                                let fe: FileEntry = FileEntry {
                                    path: current_file_name.clone(),
                                    modified_date: match metadata.modified() {
                                        Ok(t) => match t.duration_since(UNIX_EPOCH) {
                                            Ok(d) => d.as_secs(),
                                            Err(_inspected) => {
                                                warnings.push(fl!(
                                                    "core_file_modified_before_epoch",
                                                    generate_translation_hashmap(vec![("name", current_file_name.display().to_string())])
                                                ));
                                                0
                                            }
                                        },
                                        Err(e) => {
                                            warnings.push(fl!(
                                                "core_file_no_modification_date",
                                                generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())])
                                            ));
                                            0
                                        }
                                    },
                                    size: 0,
                                    hash: "".to_string(),
                                    symlink_info: Some(SymlinkInfo { destination_path, type_of_error }),
                                };

                                // Adding files to Vector
                                fe_result.push(fe);
                            }
                            (EntryType::Symlink, Collect::Files) | (EntryType::Other, _) => {
                                // nothing to do
                            }
                        }
                    }
                    (dir_result, warnings, fe_result, set_as_not_empty_folder_list, folder_entries_list)
                })
                .collect();

            // Advance the frontier
            folders_to_check.clear();

            // Process collected data
            for (segment, warnings, fe_result, set_as_not_empty_folder_list, fe_list) in segments {
                folders_to_check.extend(segment);
                all_warnings.extend(warnings);
                for fe in fe_result {
                    // group_by was not moved out of self above, so it is still usable here.
                    let key = (self.group_by)(&fe);
                    grouped_file_entries.entry(key).or_insert_with(Vec::new).push(fe);
                }
                for current_folder in &set_as_not_empty_folder_list {
                    set_as_not_empty_folder(&mut folder_entries, current_folder);
                }
                for (path, entry) in fe_list {
                    folder_entries.insert(path, entry);
                }
            }
        }

        // End thread which send info to gui
        progress_thread_run.store(false, Ordering::Relaxed);
        progress_thread_handle.join().unwrap();

        match collect {
            Collect::Files | Collect::InvalidSymlinks => DirTraversalResult::SuccessFiles {
                start_time,
                grouped_file_entries,
                warnings: all_warnings,
            },
            Collect::EmptyFolders => DirTraversalResult::SuccessFolders {
                start_time,
                folder_entries,
                warnings: all_warnings,
            },
        }
    }
}
/// Marks `current_folder` and all of its recorded ancestors as not empty.
///
/// Called when a file, symlink, or excluded child was found inside
/// `current_folder`: neither it nor any folder above it can be reported as an
/// empty folder.
///
/// # Panics
/// Panics if `current_folder` or one of its recorded parents is missing from
/// `folder_entries`; traversal registers every visited folder, so a miss
/// indicates a bug in the caller.
fn set_as_not_empty_folder(folder_entries: &mut BTreeMap<PathBuf, FolderEntry>, current_folder: &Path) {
    // Walk up the parent chain, flagging each folder exactly once per step.
    // (The original did a redundant extra lookup+assignment for the first
    // folder before entering the loop; one lookup per folder suffices.)
    let mut current = current_folder.to_path_buf();
    loop {
        let entry = folder_entries
            .get_mut(&current)
            .expect("folder visited during traversal must have an entry");
        entry.is_empty = FolderEmptiness::No;
        match &entry.parent_path {
            Some(parent) => current = parent.clone(),
            None => break, // reached a root folder
        }
    }
}

View file

@ -4,7 +4,7 @@ use std::time::SystemTime;
use crate::common::Common;
use crate::common_messages::Messages;
#[derive(Default)]
#[derive(Clone, Default)]
pub struct Directories {
pub excluded_directories: Vec<PathBuf>,
pub included_directories: Vec<PathBuf>,

View file

@ -3,7 +3,7 @@ use std::time::SystemTime;
use crate::common::Common;
use crate::common_messages::Messages;
#[derive(Default)]
#[derive(Clone, Default)]
pub struct Extensions {
file_extensions: Vec<String>,
}

View file

@ -4,7 +4,7 @@ use std::time::SystemTime;
use crate::common::Common;
use crate::common_messages::Messages;
#[derive(Default)]
#[derive(Clone, Default)]
pub struct ExcludedItems {
pub items: Vec<String>,
}

View file

@ -1,7 +1,7 @@
use std::collections::BTreeMap;
#[cfg(target_family = "unix")]
use std::collections::HashSet;
use std::fs::{File, Metadata, OpenOptions};
use std::fs::{File, OpenOptions};
use std::hash::Hasher;
use std::io::prelude::*;
use std::io::{self, Error, ErrorKind};
@ -12,7 +12,7 @@ use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread::sleep;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use std::time::{Duration, SystemTime};
use std::{fs, mem, thread};
use crossbeam_channel::Receiver;
@ -21,30 +21,12 @@ use humansize::{file_size_opts as options, FileSize};
use rayon::prelude::*;
use crate::common::Common;
use crate::common_dir_traversal::{CheckingMethod, DirTraversalBuilder, DirTraversalResult, FileEntry, ProgressData};
use crate::common_directory::Directories;
use crate::common_extensions::Extensions;
use crate::common_items::ExcludedItems;
use crate::common_messages::Messages;
use crate::common_traits::*;
use crate::fl;
use crate::localizer::generate_translation_hashmap;
#[derive(Debug)]
pub struct ProgressData {
pub checking_method: CheckingMethod,
pub current_stage: u8,
pub max_stage: u8,
pub files_checked: usize,
pub files_to_check: usize,
}
#[derive(PartialEq, Eq, Clone, Debug, Copy)]
pub enum CheckingMethod {
None,
Name,
Size,
Hash,
}
#[derive(PartialEq, Eq, Clone, Debug, Copy)]
pub enum HashType {
@ -73,14 +55,6 @@ pub enum DeleteMethod {
HardLink,
}
#[derive(Clone, Debug, PartialEq, Default)]
pub struct FileEntry {
pub path: PathBuf,
pub size: u64,
pub modified_date: u64,
pub hash: String,
}
/// Info struck with helpful information's about results
#[derive(Default)]
pub struct Info {
@ -318,490 +292,191 @@ impl DuplicateFinder {
}
fn check_files_name(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
let start_time: SystemTime = SystemTime::now();
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector
// Add root folders for finding
for id in &self.directories.included_directories {
folders_to_check.push(id.clone());
}
//// PROGRESS THREAD START
const LOOP_DURATION: u32 = 200; //in ms
let progress_thread_run = Arc::new(AtomicBool::new(true));
let atomic_file_counter = Arc::new(AtomicUsize::new(0));
let progress_thread_handle = if let Some(progress_sender) = progress_sender {
let progress_send = progress_sender.clone();
let progress_thread_run = progress_thread_run.clone();
let atomic_file_counter = atomic_file_counter.clone();
thread::spawn(move || loop {
progress_send
.unbounded_send(ProgressData {
checking_method: CheckingMethod::Name,
current_stage: 0,
max_stage: 0,
files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
files_to_check: 0,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
break;
}
sleep(Duration::from_millis(LOOP_DURATION as u64));
})
} else {
thread::spawn(|| {})
};
//// PROGRESS THREAD END
while !folders_to_check.is_empty() {
if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
// End thread which send info to gui
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
return false;
}
let segments: Vec<_> = folders_to_check
.par_iter()
.map(|current_folder| {
let mut dir_result = vec![];
let mut warnings = vec![];
let mut fe_result = vec![];
// Read current dir childrens
let read_dir = match fs::read_dir(&current_folder) {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_open_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
return (dir_result, warnings, fe_result);
}
};
// Check every sub folder/file/link etc.
'dir: for entry in read_dir {
let entry_data = match entry {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_read_entry_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
continue 'dir;
}
};
let metadata: Metadata = match entry_data.metadata() {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_read_metadata_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
continue 'dir;
}
};
if metadata.is_dir() {
if !self.recursive_search {
continue 'dir;
}
let next_folder = current_folder.join(entry_data.file_name());
if self.directories.is_excluded(&next_folder) {
continue 'dir;
}
if self.excluded_items.is_excluded(&next_folder) {
continue 'dir;
}
dir_result.push(next_folder);
} else if metadata.is_file() {
atomic_file_counter.fetch_add(1, Ordering::Relaxed);
let file_name_lowercase: String = match entry_data.file_name().into_string() {
Ok(t) => t,
Err(_inspected) => {
warnings.push(fl!(
"core_file_not_utf8_name",
generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
));
continue 'dir;
}
}
.to_lowercase();
if !self.allowed_extensions.matches_filename(&file_name_lowercase) {
continue 'dir;
}
if (self.minimal_file_size..=self.maximal_file_size).contains(&metadata.len()) {
let current_file_name = current_folder.join(entry_data.file_name());
if self.excluded_items.is_excluded(&current_file_name) {
continue 'dir;
}
let fe: FileEntry = FileEntry {
path: current_file_name.clone(),
size: metadata.len(),
modified_date: match metadata.modified() {
Ok(t) => match t.duration_since(UNIX_EPOCH) {
Ok(d) => d.as_secs(),
Err(_inspected) => {
warnings.push(fl!(
"core_file_modified_before_epoch",
generate_translation_hashmap(vec![("name", current_file_name.display().to_string())])
));
0
}
},
Err(e) => {
warnings.push(fl!(
"core_file_no_modification_date",
generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())])
));
0
}
},
hash: "".to_string(),
};
fe_result.push((entry_data.file_name().to_string_lossy().to_string(), fe));
}
}
}
(dir_result, warnings, fe_result)
})
.collect();
// Advance the frontier
folders_to_check.clear();
// Process collected data
for (segment, warnings, fe_result) in segments {
folders_to_check.extend(segment);
let result = DirTraversalBuilder::new()
.root_dirs(self.directories.included_directories.clone())
.group_by(|fe| fe.path.file_name().unwrap().to_string_lossy().to_string())
.stop_receiver(stop_receiver)
.progress_sender(progress_sender)
.checking_method(CheckingMethod::Name)
.directories(self.directories.clone())
.allowed_extensions(self.allowed_extensions.clone())
.excluded_items(self.excluded_items.clone())
.recursive_search(self.recursive_search)
.minimal_file_size(self.minimal_file_size)
.maximal_file_size(self.maximal_file_size)
.build()
.run();
match result {
DirTraversalResult::SuccessFiles {
start_time,
grouped_file_entries,
warnings,
} => {
self.files_with_identical_names = grouped_file_entries;
self.text_messages.warnings.extend(warnings);
for (name, fe) in fe_result {
self.files_with_identical_names.entry(name.clone()).or_insert_with(Vec::new);
self.files_with_identical_names.get_mut(&name).unwrap().push(fe);
// Create new BTreeMap without single size entries(files have not duplicates)
let mut new_map: BTreeMap<String, Vec<FileEntry>> = Default::default();
for (name, vector) in &self.files_with_identical_names {
if vector.len() > 1 {
new_map.insert(name.clone(), vector.clone());
}
}
}
}
self.files_with_identical_names = new_map;
// End thread which send info to gui
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
// Reference - only use in size, because later hash will be counted differently
if self.use_reference_folders {
let mut btree_map = Default::default();
mem::swap(&mut self.files_with_identical_names, &mut btree_map);
let reference_directories = self.directories.reference_directories.clone();
let vec = btree_map
.into_iter()
.filter_map(|(_size, vec_file_entry)| {
let mut files_from_referenced_folders = Vec::new();
let mut normal_files = Vec::new();
for file_entry in vec_file_entry {
if reference_directories.iter().any(|e| file_entry.path.starts_with(&e)) {
files_from_referenced_folders.push(file_entry);
} else {
normal_files.push(file_entry);
}
}
// Create new BTreeMap without single size entries(files have not duplicates)
let mut new_map: BTreeMap<String, Vec<FileEntry>> = Default::default();
for (name, vector) in &self.files_with_identical_names {
if vector.len() > 1 {
new_map.insert(name.clone(), vector.clone());
}
}
self.files_with_identical_names = new_map;
// Reference - only use in size, because later hash will be counted differently
if self.use_reference_folders {
let mut btree_map = Default::default();
mem::swap(&mut self.files_with_identical_names, &mut btree_map);
let reference_directories = self.directories.reference_directories.clone();
let vec = btree_map
.into_iter()
.filter_map(|(_size, vec_file_entry)| {
let mut files_from_referenced_folders = Vec::new();
let mut normal_files = Vec::new();
for file_entry in vec_file_entry {
if reference_directories.iter().any(|e| file_entry.path.starts_with(&e)) {
files_from_referenced_folders.push(file_entry);
} else {
normal_files.push(file_entry);
}
if files_from_referenced_folders.is_empty() || normal_files.is_empty() {
None
} else {
Some((files_from_referenced_folders.pop().unwrap(), normal_files))
}
})
.collect::<Vec<(FileEntry, Vec<FileEntry>)>>();
for (fe, vec_fe) in vec {
self.files_with_identical_names_referenced.insert(fe.path.to_string_lossy().to_string(), (fe, vec_fe));
}
}
if files_from_referenced_folders.is_empty() || normal_files.is_empty() {
None
} else {
Some((files_from_referenced_folders.pop().unwrap(), normal_files))
if self.use_reference_folders {
for (_fe, vector) in self.files_with_identical_names_referenced.values() {
self.information.number_of_duplicated_files_by_name += vector.len();
self.information.number_of_groups_by_name += 1;
}
})
.collect::<Vec<(FileEntry, Vec<FileEntry>)>>();
for (fe, vec_fe) in vec {
self.files_with_identical_names_referenced.insert(fe.path.to_string_lossy().to_string(), (fe, vec_fe));
}
}
} else {
for vector in self.files_with_identical_names.values() {
self.information.number_of_duplicated_files_by_name += vector.len() - 1;
self.information.number_of_groups_by_name += 1;
}
}
if self.use_reference_folders {
for (_fe, vector) in self.files_with_identical_names_referenced.values() {
self.information.number_of_duplicated_files_by_name += vector.len();
self.information.number_of_groups_by_name += 1;
Common::print_time(start_time, SystemTime::now(), "check_files_name".to_string());
true
}
} else {
for vector in self.files_with_identical_names.values() {
self.information.number_of_duplicated_files_by_name += vector.len() - 1;
self.information.number_of_groups_by_name += 1;
DirTraversalResult::SuccessFolders { .. } => {
unreachable!()
}
DirTraversalResult::Stopped => false,
}
Common::print_time(start_time, SystemTime::now(), "check_files_name".to_string());
true
}
/// Read file length and puts it to different boxes(each for different lengths)
/// If in box is only 1 result, then it is removed
fn check_files_size(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
let start_time: SystemTime = SystemTime::now();
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector
// Add root folders for finding
for id in &self.directories.included_directories {
folders_to_check.push(id.clone());
}
//// PROGRESS THREAD START
const LOOP_DURATION: u32 = 200; //in ms
let progress_thread_run = Arc::new(AtomicBool::new(true));
let atomic_file_counter = Arc::new(AtomicUsize::new(0));
let progress_thread_handle = if let Some(progress_sender) = progress_sender {
let progress_send = progress_sender.clone();
let progress_thread_run = progress_thread_run.clone();
let atomic_file_counter = atomic_file_counter.clone();
let checking_method = self.check_method;
let max_stage = match self.check_method {
CheckingMethod::Size => 0,
CheckingMethod::Hash => 2,
_ => panic!(),
};
thread::spawn(move || loop {
progress_send
.unbounded_send(ProgressData {
checking_method,
current_stage: 0,
max_stage,
files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
files_to_check: 0,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
break;
}
sleep(Duration::from_millis(LOOP_DURATION as u64));
})
} else {
thread::spawn(|| {})
let max_stage = match self.check_method {
CheckingMethod::Size => 0,
CheckingMethod::Hash => 2,
_ => panic!(),
};
//// PROGRESS THREAD END
while !folders_to_check.is_empty() {
if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
// End thread which send info to gui
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
return false;
}
let segments: Vec<_> = folders_to_check
.par_iter()
.map(|current_folder| {
let mut dir_result = vec![];
let mut warnings = vec![];
let mut fe_result = vec![];
// Read current dir childrens
let read_dir = match fs::read_dir(&current_folder) {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_open_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
return (dir_result, warnings, fe_result);
}
};
// Check every sub folder/file/link etc.
'dir: for entry in read_dir {
let entry_data = match entry {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_read_entry_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
continue 'dir;
}
};
let metadata: Metadata = match entry_data.metadata() {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_read_metadata_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
continue 'dir;
}
};
if metadata.is_dir() {
if !self.recursive_search {
continue 'dir;
}
let next_folder = current_folder.join(entry_data.file_name());
if self.directories.is_excluded(&next_folder) {
continue 'dir;
}
if self.excluded_items.is_excluded(&next_folder) {
continue 'dir;
}
dir_result.push(next_folder);
} else if metadata.is_file() {
atomic_file_counter.fetch_add(1, Ordering::Relaxed);
let file_name_lowercase: String = match entry_data.file_name().into_string() {
Ok(t) => t,
Err(_inspected) => {
warnings.push(fl!(
"core_file_not_utf8_name",
generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
));
continue 'dir;
}
}
.to_lowercase();
if !self.allowed_extensions.matches_filename(&file_name_lowercase) {
continue 'dir;
}
if (self.minimal_file_size..=self.maximal_file_size).contains(&metadata.len()) {
let current_file_name = current_folder.join(entry_data.file_name());
if self.excluded_items.is_excluded(&current_file_name) {
continue 'dir;
}
// Creating new file entry
let fe: FileEntry = FileEntry {
path: current_file_name.clone(),
size: metadata.len(),
modified_date: match metadata.modified() {
Ok(t) => match t.duration_since(UNIX_EPOCH) {
Ok(d) => d.as_secs(),
Err(_inspected) => {
warnings.push(fl!(
"core_file_modified_before_epoch",
generate_translation_hashmap(vec![("name", current_file_name.display().to_string())])
));
0
}
},
Err(e) => {
warnings.push(fl!(
"core_file_no_modification_date",
generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())])
));
0
}
},
hash: "".to_string(),
};
fe_result.push(fe);
}
}
}
(dir_result, warnings, fe_result)
})
.collect();
// Advance the frontier
folders_to_check.clear();
// Process collected data
for (segment, warnings, fe_result) in segments {
folders_to_check.extend(segment);
let result = DirTraversalBuilder::new()
.root_dirs(self.directories.included_directories.clone())
.group_by(|fe| fe.size)
.stop_receiver(stop_receiver)
.progress_sender(progress_sender)
.checking_method(self.check_method)
.max_stage(max_stage)
.directories(self.directories.clone())
.allowed_extensions(self.allowed_extensions.clone())
.excluded_items(self.excluded_items.clone())
.recursive_search(self.recursive_search)
.minimal_file_size(self.minimal_file_size)
.maximal_file_size(self.maximal_file_size)
.build()
.run();
match result {
DirTraversalResult::SuccessFiles {
start_time,
grouped_file_entries,
warnings,
} => {
self.files_with_identical_size = grouped_file_entries;
self.text_messages.warnings.extend(warnings);
for fe in fe_result {
self.files_with_identical_size.entry(fe.size).or_insert_with(Vec::new);
self.files_with_identical_size.get_mut(&fe.size).unwrap().push(fe);
// Create new BTreeMap without single size entries(files have not duplicates)
let mut old_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
mem::swap(&mut old_map, &mut self.files_with_identical_size);
for (size, vec) in old_map {
if vec.len() <= 1 {
continue;
}
let vector = if self.ignore_hard_links { filter_hard_links(&vec) } else { vec };
if vector.len() > 1 {
self.files_with_identical_size.insert(size, vector);
}
}
}
}
// End thread which send info to gui
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
// Reference - only use in size, because later hash will be counted differently
if self.use_reference_folders && self.check_method == CheckingMethod::Size {
let mut btree_map = Default::default();
mem::swap(&mut self.files_with_identical_size, &mut btree_map);
let reference_directories = self.directories.reference_directories.clone();
let vec = btree_map
.into_iter()
.filter_map(|(_size, vec_file_entry)| {
let mut files_from_referenced_folders = Vec::new();
let mut normal_files = Vec::new();
for file_entry in vec_file_entry {
if reference_directories.iter().any(|e| file_entry.path.starts_with(&e)) {
files_from_referenced_folders.push(file_entry);
} else {
normal_files.push(file_entry);
}
}
// Create new BTreeMap without single size entries(files have not duplicates)
let mut old_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
mem::swap(&mut old_map, &mut self.files_with_identical_size);
for (size, vec) in old_map {
if vec.len() <= 1 {
continue;
}
let vector = if self.ignore_hard_links { filter_hard_links(&vec) } else { vec };
if vector.len() > 1 {
self.files_with_identical_size.insert(size, vector);
}
}
// Reference - only use in size, because later hash will be counted differently
if self.use_reference_folders && self.check_method == CheckingMethod::Size {
let mut btree_map = Default::default();
mem::swap(&mut self.files_with_identical_size, &mut btree_map);
let reference_directories = self.directories.reference_directories.clone();
let vec = btree_map
.into_iter()
.filter_map(|(_size, vec_file_entry)| {
let mut files_from_referenced_folders = Vec::new();
let mut normal_files = Vec::new();
for file_entry in vec_file_entry {
if reference_directories.iter().any(|e| file_entry.path.starts_with(&e)) {
files_from_referenced_folders.push(file_entry);
} else {
normal_files.push(file_entry);
}
if files_from_referenced_folders.is_empty() || normal_files.is_empty() {
None
} else {
Some((files_from_referenced_folders.pop().unwrap(), normal_files))
}
})
.collect::<Vec<(FileEntry, Vec<FileEntry>)>>();
for (fe, vec_fe) in vec {
self.files_with_identical_size_referenced.insert(fe.size, (fe, vec_fe));
}
}
if files_from_referenced_folders.is_empty() || normal_files.is_empty() {
None
} else {
Some((files_from_referenced_folders.pop().unwrap(), normal_files))
if self.use_reference_folders {
for (size, (_fe, vector)) in &self.files_with_identical_size_referenced {
self.information.number_of_duplicated_files_by_size += vector.len();
self.information.number_of_groups_by_size += 1;
self.information.lost_space_by_size += (vector.len() as u64) * size;
}
})
.collect::<Vec<(FileEntry, Vec<FileEntry>)>>();
for (fe, vec_fe) in vec {
self.files_with_identical_size_referenced.insert(fe.size, (fe, vec_fe));
}
}
} else {
for (size, vector) in &self.files_with_identical_size {
self.information.number_of_duplicated_files_by_size += vector.len() - 1;
self.information.number_of_groups_by_size += 1;
self.information.lost_space_by_size += (vector.len() as u64 - 1) * size;
}
}
if self.use_reference_folders {
for (size, (_fe, vector)) in &self.files_with_identical_size_referenced {
self.information.number_of_duplicated_files_by_size += vector.len();
self.information.number_of_groups_by_size += 1;
self.information.lost_space_by_size += (vector.len() as u64) * size;
Common::print_time(start_time, SystemTime::now(), "check_files_name".to_string());
true
}
} else {
for (size, vector) in &self.files_with_identical_size {
self.information.number_of_duplicated_files_by_size += vector.len() - 1;
self.information.number_of_groups_by_size += 1;
self.information.lost_space_by_size += (vector.len() as u64 - 1) * size;
DirTraversalResult::SuccessFolders { .. } => {
unreachable!()
}
DirTraversalResult::Stopped => false,
}
Common::print_time(start_time, SystemTime::now(), "check_files_size".to_string());
true
}
/// The slowest checking type, which must be applied after checking for size
@ -832,8 +507,8 @@ impl DuplicateFinder {
checking_method,
current_stage: 1,
max_stage: 2,
files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
files_to_check,
entries_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
entries_to_check: files_to_check,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
@ -998,8 +673,8 @@ impl DuplicateFinder {
checking_method,
current_stage: 2,
max_stage: 2,
files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
files_to_check,
entries_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
entries_to_check: files_to_check,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
@ -1727,6 +1402,7 @@ pub fn load_hashes_from_file(text_messages: &mut Messages, delete_outdated_cache
}
},
hash: uuu[3].to_string(),
symlink_info: None,
};
hashmap_loaded_entries.entry(file_entry.size).or_insert_with(Vec::new);
hashmap_loaded_entries.get_mut(&file_entry.size).unwrap().push(file_entry);
@ -1769,7 +1445,7 @@ impl MyHasher for xxhash_rust::xxh3::Xxh3 {
#[cfg(test)]
mod tests {
use std::fs::{read_dir, File};
use std::fs::{read_dir, File, Metadata};
use std::io;
#[cfg(target_family = "windows")]
use std::os::fs::MetadataExt;

View file

@ -1,31 +1,19 @@
use std::fs::{File, Metadata};
use std::fs;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufWriter;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread::sleep;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use std::{fs, thread};
use std::time::SystemTime;
use crossbeam_channel::Receiver;
use rayon::prelude::*;
use crate::common::Common;
use crate::common_dir_traversal::{DirTraversalBuilder, DirTraversalResult, FileEntry, ProgressData};
use crate::common_directory::Directories;
use crate::common_extensions::Extensions;
use crate::common_items::ExcludedItems;
use crate::common_messages::Messages;
use crate::common_traits::*;
use crate::fl;
use crate::localizer::generate_translation_hashmap;
#[derive(Debug)]
pub struct ProgressData {
pub current_stage: u8,
pub max_stage: u8,
pub files_checked: usize,
}
#[derive(Eq, PartialEq, Clone, Debug)]
pub enum DeleteMethod {
@ -33,12 +21,6 @@ pub enum DeleteMethod {
Delete,
}
#[derive(Clone)]
pub struct FileEntry {
pub path: PathBuf,
pub modified_date: u64,
}
/// Info struck with helpful information's about results
#[derive(Default)]
pub struct Info {
@ -131,179 +113,38 @@ impl EmptyFiles {
/// Check files for any with size == 0
fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
let start_time: SystemTime = SystemTime::now();
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector
// Add root folders for finding
for id in &self.directories.included_directories {
folders_to_check.push(id.clone());
}
//// PROGRESS THREAD START
const LOOP_DURATION: u32 = 200; //in ms
let progress_thread_run = Arc::new(AtomicBool::new(true));
let atomic_file_counter = Arc::new(AtomicUsize::new(0));
let progress_thread_handle = if let Some(progress_sender) = progress_sender {
let progress_send = progress_sender.clone();
let progress_thread_run = progress_thread_run.clone();
let atomic_file_counter = atomic_file_counter.clone();
thread::spawn(move || loop {
progress_send
.unbounded_send(ProgressData {
current_stage: 0,
max_stage: 0,
files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
break;
let result = DirTraversalBuilder::new()
.root_dirs(self.directories.included_directories.clone())
.group_by(|_fe| ())
.stop_receiver(stop_receiver)
.progress_sender(progress_sender)
.minimal_file_size(0)
.maximal_file_size(0)
.directories(self.directories.clone())
.allowed_extensions(self.allowed_extensions.clone())
.excluded_items(self.excluded_items.clone())
.recursive_search(self.recursive_search)
.build()
.run();
match result {
DirTraversalResult::SuccessFiles {
start_time,
grouped_file_entries,
warnings,
} => {
if let Some(empty_files) = grouped_file_entries.get(&()) {
self.empty_files = empty_files.clone();
}
sleep(Duration::from_millis(LOOP_DURATION as u64));
})
} else {
thread::spawn(|| {})
};
//// PROGRESS THREAD END
while !folders_to_check.is_empty() {
if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
// End thread which send info to gui
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
return false;
}
let segments: Vec<_> = folders_to_check
.par_iter()
.map(|current_folder| {
let mut dir_result = vec![];
let mut warnings = vec![];
let mut fe_result = vec![];
// Read current dir childrens
let read_dir = match fs::read_dir(&current_folder) {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_open_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
return (dir_result, warnings, fe_result);
}
};
// Check every sub folder/file/link etc.
'dir: for entry in read_dir {
let entry_data = match entry {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_read_entry_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
continue 'dir;
}
};
let metadata: Metadata = match entry_data.metadata() {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_read_metadata_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
continue 'dir;
}
};
if metadata.is_dir() {
if !self.recursive_search {
continue 'dir;
}
let next_folder = current_folder.join(entry_data.file_name());
if self.directories.is_excluded(&next_folder) {
continue 'dir;
}
if self.excluded_items.is_excluded(&next_folder) {
continue 'dir;
}
dir_result.push(next_folder);
} else if metadata.is_file() {
atomic_file_counter.fetch_add(1, Ordering::Relaxed);
let file_name_lowercase: String = match entry_data.file_name().into_string() {
Ok(t) => t,
Err(_inspected) => {
warnings.push(fl!(
"core_file_not_utf8_name",
generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
));
continue 'dir;
}
}
.to_lowercase();
if !self.allowed_extensions.matches_filename(&file_name_lowercase) {
continue 'dir;
}
if metadata.len() == 0 {
let current_file_name = current_folder.join(entry_data.file_name());
if self.excluded_items.is_excluded(&current_file_name) {
continue 'dir;
}
let fe: FileEntry = FileEntry {
path: current_file_name.clone(),
modified_date: match metadata.modified() {
Ok(t) => match t.duration_since(UNIX_EPOCH) {
Ok(d) => d.as_secs(),
Err(_inspected) => {
warnings.push(fl!(
"core_file_modified_before_epoch",
generate_translation_hashmap(vec![("name", current_file_name.display().to_string())])
));
0
}
},
Err(e) => {
warnings.push(fl!(
"core_file_no_modification_date",
generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())])
));
0
}
},
};
fe_result.push(fe);
}
}
}
(dir_result, warnings, fe_result)
})
.collect();
// Advance the frontier
folders_to_check.clear();
// Process collected data
for (segment, warnings, fe_result) in segments {
folders_to_check.extend(segment);
self.information.number_of_empty_files = self.empty_files.len();
self.text_messages.warnings.extend(warnings);
for fe in fe_result {
self.empty_files.push(fe);
}
Common::print_time(start_time, SystemTime::now(), "check_files_name".to_string());
true
}
DirTraversalResult::SuccessFolders { .. } => {
unreachable!()
}
DirTraversalResult::Stopped => false,
}
self.information.number_of_empty_files = self.empty_files.len();
// End thread which send info to gui
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
Common::print_time(start_time, SystemTime::now(), "check_files_size".to_string());
true
}
/// Function to delete files, from filed Vector

View file

@ -1,31 +1,19 @@
use std::fs::{File, Metadata};
use std::fs;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufWriter;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread::sleep;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use std::{fs, thread};
use std::time::SystemTime;
use crossbeam_channel::Receiver;
use rayon::prelude::*;
use crate::common::Common;
use crate::common_dir_traversal::{Collect, DirTraversalBuilder, DirTraversalResult, ErrorType, FileEntry, ProgressData};
use crate::common_directory::Directories;
use crate::common_extensions::Extensions;
use crate::common_items::ExcludedItems;
use crate::common_messages::Messages;
use crate::common_traits::*;
use crate::fl;
use crate::localizer::generate_translation_hashmap;
#[derive(Debug)]
pub struct ProgressData {
pub current_stage: u8,
pub max_stage: u8,
pub files_checked: usize,
}
#[derive(Eq, PartialEq, Clone, Debug)]
pub enum DeleteMethod {
@ -33,22 +21,6 @@ pub enum DeleteMethod {
Delete,
}
const MAX_NUMBER_OF_SYMLINK_JUMPS: i32 = 20;
#[derive(Clone)]
pub enum ErrorType {
InfiniteRecursion,
NonExistentFile,
}
#[derive(Clone)]
pub struct FileEntry {
pub symlink_path: PathBuf,
pub destination_path: PathBuf,
pub type_of_error: ErrorType,
pub modified_date: u64,
}
/// Info struck with helpful information's about results
#[derive(Default)]
pub struct Info {
@ -140,220 +112,35 @@ impl InvalidSymlinks {
/// Check files for any with size == 0
fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
let start_time: SystemTime = SystemTime::now();
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector
// Add root folders for finding
for id in &self.directories.included_directories {
folders_to_check.push(id.clone());
}
//// PROGRESS THREAD START
const LOOP_DURATION: u32 = 200; //in ms
let progress_thread_run = Arc::new(AtomicBool::new(true));
let atomic_file_counter = Arc::new(AtomicUsize::new(0));
let progress_thread_handle = if let Some(progress_sender) = progress_sender {
let progress_send = progress_sender.clone();
let progress_thread_run = progress_thread_run.clone();
let atomic_file_counter = atomic_file_counter.clone();
thread::spawn(move || loop {
progress_send
.unbounded_send(ProgressData {
current_stage: 0,
max_stage: 0,
files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
break;
let result = DirTraversalBuilder::new()
.root_dirs(self.directories.included_directories.clone())
.group_by(|_fe| ())
.stop_receiver(stop_receiver)
.progress_sender(progress_sender)
.collect(Collect::InvalidSymlinks)
.directories(self.directories.clone())
.allowed_extensions(self.allowed_extensions.clone())
.excluded_items(self.excluded_items.clone())
.recursive_search(self.recursive_search)
.build()
.run();
match result {
DirTraversalResult::SuccessFiles {
start_time,
grouped_file_entries,
warnings,
} => {
if let Some(((), invalid_symlinks)) = grouped_file_entries.into_iter().next() {
self.invalid_symlinks = invalid_symlinks;
}
sleep(Duration::from_millis(LOOP_DURATION as u64));
})
} else {
thread::spawn(|| {})
};
//// PROGRESS THREAD END
while !folders_to_check.is_empty() {
if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
// End thread which send info to gui
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
return false;
}
let segments: Vec<_> = folders_to_check
.par_iter()
.map(|current_folder| {
let mut dir_result = vec![];
let mut warnings = vec![];
let mut fe_result = vec![];
// Read current dir childrens
let read_dir = match fs::read_dir(&current_folder) {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_open_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
return (dir_result, warnings, fe_result);
}
};
// Check every sub folder/file/link etc.
'dir: for entry in read_dir {
let entry_data = match entry {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_read_entry_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
continue 'dir;
}
};
let metadata: Metadata = match entry_data.metadata() {
Ok(t) => t,
Err(e) => {
warnings.push(fl!(
"core_cannot_read_metadata_dir",
generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
));
continue 'dir;
}
};
if metadata.is_dir() {
if !self.recursive_search {
continue 'dir;
}
let next_folder = current_folder.join(entry_data.file_name());
if self.directories.is_excluded(&next_folder) {
continue 'dir;
}
if self.excluded_items.is_excluded(&next_folder) {
continue 'dir;
}
dir_result.push(next_folder);
} else if metadata.is_file() {
atomic_file_counter.fetch_add(1, Ordering::Relaxed);
} else if metadata.file_type().is_symlink() {
atomic_file_counter.fetch_add(1, Ordering::Relaxed);
let file_name_lowercase: String = match entry_data.file_name().into_string() {
Ok(t) => t,
Err(_inspected) => {
warnings.push(fl!(
"core_file_not_utf8_name",
generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
));
continue 'dir;
}
}
.to_lowercase();
if !self.allowed_extensions.matches_filename(&file_name_lowercase) {
continue 'dir;
}
let current_file_name = current_folder.join(entry_data.file_name());
if self.excluded_items.is_excluded(&current_file_name) {
continue 'dir;
}
let mut destination_path = PathBuf::new();
let type_of_error;
match current_file_name.read_link() {
Ok(t) => {
destination_path.push(t);
let mut number_of_loop = 0;
let mut current_path = current_file_name.clone();
loop {
if number_of_loop == 0 && !current_path.exists() {
type_of_error = ErrorType::NonExistentFile;
break;
}
if number_of_loop == MAX_NUMBER_OF_SYMLINK_JUMPS {
type_of_error = ErrorType::InfiniteRecursion;
break;
}
current_path = match current_path.read_link() {
Ok(t) => t,
Err(_inspected) => {
// Looks that some next symlinks are broken, but we do nothing with it - TODO why they are broken
continue 'dir;
}
};
number_of_loop += 1;
}
}
Err(_inspected) => {
// Failed to load info about it
type_of_error = ErrorType::NonExistentFile;
}
}
// Creating new file entry
let fe: FileEntry = FileEntry {
symlink_path: current_file_name.clone(),
destination_path,
type_of_error,
modified_date: match metadata.modified() {
Ok(t) => match t.duration_since(UNIX_EPOCH) {
Ok(d) => d.as_secs(),
Err(_inspected) => {
warnings.push(fl!(
"core_file_modified_before_epoch",
generate_translation_hashmap(vec![("name", current_file_name.display().to_string())])
));
0
}
},
Err(e) => {
warnings.push(fl!(
"core_file_no_modification_date",
generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())])
));
0
}
},
};
// Adding files to Vector
fe_result.push(fe);
}
}
(dir_result, warnings, fe_result)
})
.collect();
// Advance the frontier
folders_to_check.clear();
// Process collected data
for (segment, warnings, fe_result) in segments {
folders_to_check.extend(segment);
self.information.number_of_invalid_symlinks = self.invalid_symlinks.len();
self.text_messages.warnings.extend(warnings);
for fe in fe_result {
self.invalid_symlinks.push(fe);
}
Common::print_time(start_time, SystemTime::now(), "check_files_name".to_string());
true
}
DirTraversalResult::SuccessFolders { .. } => unreachable!(),
DirTraversalResult::Stopped => false,
}
self.information.number_of_invalid_symlinks = self.invalid_symlinks.len();
// End thread which send info to gui
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
Common::print_time(start_time, SystemTime::now(), "check_files_size".to_string());
true
}
/// Function to delete files, from filed Vector
@ -363,8 +150,8 @@ impl InvalidSymlinks {
match self.delete_method {
DeleteMethod::Delete => {
for file_entry in &self.invalid_symlinks {
if fs::remove_file(file_entry.symlink_path.clone()).is_err() {
self.text_messages.warnings.push(file_entry.symlink_path.display().to_string());
if fs::remove_file(file_entry.path.clone()).is_err() {
self.text_messages.warnings.push(file_entry.path.display().to_string());
}
}
}
@ -443,9 +230,9 @@ impl SaveResults for InvalidSymlinks {
writeln!(
writer,
"{}\t\t{}\t\t{}",
file_entry.symlink_path.display(),
file_entry.destination_path.display(),
match file_entry.type_of_error {
file_entry.path.display(),
file_entry.symlink_info.clone().expect("invalid traversal result").destination_path.display(),
match file_entry.symlink_info.clone().expect("invalid traversal result").type_of_error {
ErrorType::InfiniteRecursion => "Infinite Recursion",
ErrorType::NonExistentFile => "Non Existent File",
}
@ -469,9 +256,9 @@ impl PrintResults for InvalidSymlinks {
for file_entry in self.invalid_symlinks.iter() {
println!(
"{}\t\t{}\t\t{}",
file_entry.symlink_path.display(),
file_entry.destination_path.display(),
match file_entry.type_of_error {
file_entry.path.display(),
file_entry.symlink_info.clone().expect("invalid traversal result").destination_path.display(),
match file_entry.symlink_info.clone().expect("invalid traversal result").type_of_error {
ErrorType::InfiniteRecursion => "Infinite Recursion",
ErrorType::NonExistentFile => "Non Existent File",
}

View file

@ -13,6 +13,7 @@ pub mod similar_videos;
pub mod temporary;
pub mod common;
pub mod common_dir_traversal;
pub mod common_directory;
pub mod common_extensions;
pub mod common_items;

View file

@ -8,7 +8,7 @@ use glib::Receiver;
use gtk::prelude::*;
use humansize::{file_size_opts as options, FileSize};
use czkawka_core::duplicate::CheckingMethod;
use czkawka_core::common_dir_traversal::CheckingMethod;
use czkawka_core::same_music::MusicSimilarity;
use czkawka_core::similar_images;
@ -1356,18 +1356,22 @@ pub fn connect_compute_results(gui_data: &GuiData, glib_stop_receiver: Receiver<
let mut vector = vector.clone();
vector.sort_by_key(|e| {
let t = split_path(e.symlink_path.as_path());
let t = split_path(e.path.as_path());
(t.0, t.1)
});
for file_entry in vector {
let (directory, file) = split_path(&file_entry.symlink_path);
let (directory, file) = split_path(&file_entry.path);
let symlink_info = file_entry.symlink_info.clone().expect("invalid traversal result");
let values: [(u32, &dyn ToValue); 7] = [
(ColumnsInvalidSymlinks::SelectionButton as u32, &false),
(ColumnsInvalidSymlinks::Name as u32, &file),
(ColumnsInvalidSymlinks::Path as u32, &directory),
(ColumnsInvalidSymlinks::DestinationPath as u32, &file_entry.destination_path.to_string_lossy().to_string()),
(ColumnsInvalidSymlinks::TypeOfError as u32, &get_text_from_invalid_symlink_cause(&file_entry.type_of_error)),
(ColumnsInvalidSymlinks::DestinationPath as u32, &symlink_info.destination_path.to_string_lossy().to_string()),
(
ColumnsInvalidSymlinks::TypeOfError as u32,
&get_text_from_invalid_symlink_cause(&symlink_info.type_of_error),
),
(
ColumnsInvalidSymlinks::Modification as u32,
&(NaiveDateTime::from_timestamp(file_entry.modified_date as i64, 0).to_string()),

View file

@ -7,6 +7,7 @@ use gtk::prelude::*;
use czkawka_core::big_file::BigFile;
use czkawka_core::broken_files::BrokenFiles;
use czkawka_core::common_dir_traversal;
use czkawka_core::duplicate::DuplicateFinder;
use czkawka_core::empty_files::EmptyFiles;
use czkawka_core::empty_folder::EmptyFolder;
@ -30,15 +31,15 @@ use crate::taskbar_progress::tbp_flags::TBPF_NOPROGRESS;
pub fn connect_button_search(
gui_data: &GuiData,
glib_stop_sender: Sender<Message>,
futures_sender_duplicate_files: futures::channel::mpsc::UnboundedSender<duplicate::ProgressData>,
futures_sender_empty_files: futures::channel::mpsc::UnboundedSender<empty_files::ProgressData>,
futures_sender_duplicate_files: futures::channel::mpsc::UnboundedSender<common_dir_traversal::ProgressData>,
futures_sender_empty_files: futures::channel::mpsc::UnboundedSender<common_dir_traversal::ProgressData>,
futures_sender_empty_folder: futures::channel::mpsc::UnboundedSender<empty_folder::ProgressData>,
futures_sender_big_file: futures::channel::mpsc::UnboundedSender<big_file::ProgressData>,
futures_sender_same_music: futures::channel::mpsc::UnboundedSender<same_music::ProgressData>,
futures_sender_similar_images: futures::channel::mpsc::UnboundedSender<similar_images::ProgressData>,
futures_sender_similar_videos: futures::channel::mpsc::UnboundedSender<similar_videos::ProgressData>,
futures_sender_temporary: futures::channel::mpsc::UnboundedSender<temporary::ProgressData>,
futures_sender_invalid_symlinks: futures::channel::mpsc::UnboundedSender<invalid_symlinks::ProgressData>,
futures_sender_invalid_symlinks: futures::channel::mpsc::UnboundedSender<common_dir_traversal::ProgressData>,
futures_sender_broken_files: futures::channel::mpsc::UnboundedSender<broken_files::ProgressData>,
) {
let combo_box_image_hash_size = gui_data.main_notebook.combo_box_image_hash_size.clone();

View file

@ -1,6 +1,6 @@
use gtk::prelude::*;
use czkawka_core::duplicate::CheckingMethod;
use czkawka_core::common_dir_traversal::CheckingMethod;
use crate::gui_data::GuiData;
use crate::help_combo_box::DUPLICATES_CHECK_METHOD_COMBO_BOX;

View file

@ -1,7 +1,7 @@
use futures::StreamExt;
use gtk::prelude::*;
use czkawka_core::{big_file, broken_files, duplicate, empty_files, empty_folder, invalid_symlinks, same_music, similar_images, similar_videos, temporary};
use czkawka_core::{big_file, broken_files, common_dir_traversal, empty_folder, same_music, similar_images, similar_videos, temporary};
use crate::fl;
use crate::gui_data::GuiData;
@ -11,15 +11,15 @@ use crate::taskbar_progress::tbp_flags::TBPF_INDETERMINATE;
#[allow(clippy::too_many_arguments)]
pub fn connect_progress_window(
gui_data: &GuiData,
mut futures_receiver_duplicate_files: futures::channel::mpsc::UnboundedReceiver<duplicate::ProgressData>,
mut futures_receiver_empty_files: futures::channel::mpsc::UnboundedReceiver<empty_files::ProgressData>,
mut futures_receiver_duplicate_files: futures::channel::mpsc::UnboundedReceiver<common_dir_traversal::ProgressData>,
mut futures_receiver_empty_files: futures::channel::mpsc::UnboundedReceiver<common_dir_traversal::ProgressData>,
mut futures_receiver_empty_folder: futures::channel::mpsc::UnboundedReceiver<empty_folder::ProgressData>,
mut futures_receiver_big_files: futures::channel::mpsc::UnboundedReceiver<big_file::ProgressData>,
mut futures_receiver_same_music: futures::channel::mpsc::UnboundedReceiver<same_music::ProgressData>,
mut futures_receiver_similar_images: futures::channel::mpsc::UnboundedReceiver<similar_images::ProgressData>,
mut futures_receiver_similar_videos: futures::channel::mpsc::UnboundedReceiver<similar_videos::ProgressData>,
mut futures_receiver_temporary: futures::channel::mpsc::UnboundedReceiver<temporary::ProgressData>,
mut futures_receiver_invalid_symlinks: futures::channel::mpsc::UnboundedReceiver<invalid_symlinks::ProgressData>,
mut futures_receiver_invalid_symlinks: futures::channel::mpsc::UnboundedReceiver<common_dir_traversal::ProgressData>,
mut futures_receiver_broken_files: futures::channel::mpsc::UnboundedReceiver<broken_files::ProgressData>,
) {
let main_context = glib::MainContext::default();
@ -35,7 +35,7 @@ pub fn connect_progress_window(
let future = async move {
while let Some(item) = futures_receiver_duplicate_files.next().await {
match item.checking_method {
duplicate::CheckingMethod::Hash => {
common_dir_traversal::CheckingMethod::Hash => {
label_stage.show();
match item.current_stage {
// Checking Size
@ -45,7 +45,7 @@ pub fn connect_progress_window(
progress_bar_all_stages.set_fraction(0 as f64);
label_stage.set_text(&fl!(
"progress_scanning_size",
generate_translation_hashmap(vec![("file_number", item.files_checked.to_string())])
generate_translation_hashmap(vec![("file_number", item.entries_checked.to_string())])
));
taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE);
}
@ -53,12 +53,13 @@ pub fn connect_progress_window(
1 => {
progress_bar_current_stage.show();
// progress_bar_all_stages.show();
if item.files_to_check != 0 {
progress_bar_all_stages.set_fraction((1f64 + (item.files_checked) as f64 / item.files_to_check as f64) / (item.max_stage + 1) as f64);
progress_bar_current_stage.set_fraction((item.files_checked) as f64 / item.files_to_check as f64);
taskbar_state
.borrow()
.set_progress_value((item.files_to_check + item.files_checked) as u64, item.files_to_check as u64 * (item.max_stage + 1) as u64);
if item.entries_to_check != 0 {
progress_bar_all_stages.set_fraction((1f64 + (item.entries_checked) as f64 / item.entries_to_check as f64) / (item.max_stage + 1) as f64);
progress_bar_current_stage.set_fraction((item.entries_checked) as f64 / item.entries_to_check as f64);
taskbar_state.borrow().set_progress_value(
(item.entries_to_check + item.entries_checked) as u64,
item.entries_to_check as u64 * (item.max_stage + 1) as u64,
);
} else {
progress_bar_all_stages.set_fraction((1f64) / (item.max_stage + 1) as f64);
progress_bar_current_stage.set_fraction(0f64);
@ -67,17 +68,17 @@ pub fn connect_progress_window(
label_stage.set_text(&fl!(
"progress_analyzed_partial_hash",
generate_translation_hashmap(vec![("file_checked", item.files_checked.to_string()), ("all_files", item.files_to_check.to_string())])
generate_translation_hashmap(vec![("file_checked", item.entries_checked.to_string()), ("all_files", item.entries_to_check.to_string())])
));
}
// Hash - normal hash
2 => {
if item.files_to_check != 0 {
progress_bar_all_stages.set_fraction((2f64 + (item.files_checked) as f64 / item.files_to_check as f64) / (item.max_stage + 1) as f64);
progress_bar_current_stage.set_fraction((item.files_checked) as f64 / item.files_to_check as f64);
if item.entries_to_check != 0 {
progress_bar_all_stages.set_fraction((2f64 + (item.entries_checked) as f64 / item.entries_to_check as f64) / (item.max_stage + 1) as f64);
progress_bar_current_stage.set_fraction((item.entries_checked) as f64 / item.entries_to_check as f64);
taskbar_state.borrow().set_progress_value(
(2 * item.files_to_check + item.files_checked) as u64,
item.files_to_check as u64 * (item.max_stage + 1) as u64,
(2 * item.entries_to_check + item.entries_checked) as u64,
item.entries_to_check as u64 * (item.max_stage + 1) as u64,
);
} else {
progress_bar_all_stages.set_fraction((2f64) / (item.max_stage + 1) as f64);
@ -87,7 +88,7 @@ pub fn connect_progress_window(
label_stage.set_text(&fl!(
"progress_analyzed_full_hash",
generate_translation_hashmap(vec![("file_checked", item.files_checked.to_string()), ("all_files", item.files_to_check.to_string())])
generate_translation_hashmap(vec![("file_checked", item.entries_checked.to_string()), ("all_files", item.entries_to_check.to_string())])
));
}
_ => {
@ -95,27 +96,27 @@ pub fn connect_progress_window(
}
}
}
duplicate::CheckingMethod::Name => {
common_dir_traversal::CheckingMethod::Name => {
label_stage.show();
grid_progress_stages.hide();
label_stage.set_text(&fl!(
"progress_scanning_name",
generate_translation_hashmap(vec![("file_number", item.files_checked.to_string())])
generate_translation_hashmap(vec![("file_number", item.entries_checked.to_string())])
));
taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE);
}
duplicate::CheckingMethod::Size => {
common_dir_traversal::CheckingMethod::Size => {
label_stage.show();
grid_progress_stages.hide();
label_stage.set_text(&fl!(
"progress_scanning_size",
generate_translation_hashmap(vec![("file_number", item.files_checked.to_string())])
generate_translation_hashmap(vec![("file_number", item.entries_checked.to_string())])
));
taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE);
}
duplicate::CheckingMethod::None => {
common_dir_traversal::CheckingMethod::None => {
panic!();
}
};
@ -131,7 +132,7 @@ pub fn connect_progress_window(
while let Some(item) = futures_receiver_empty_files.next().await {
label_stage.set_text(&fl!(
"progress_scanning_general_file",
generate_translation_hashmap(vec![("file_number", item.files_checked.to_string())])
generate_translation_hashmap(vec![("file_number", item.entries_checked.to_string())])
));
taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE);
}
@ -359,7 +360,7 @@ pub fn connect_progress_window(
while let Some(item) = futures_receiver_invalid_symlinks.next().await {
label_stage.set_text(&fl!(
"progress_scanning_general_file",
generate_translation_hashmap(vec![("file_number", item.files_checked.to_string())])
generate_translation_hashmap(vec![("file_number", item.entries_checked.to_string())])
));
taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE);
}

View file

@ -104,7 +104,7 @@ pub fn connect_settings(gui_data: &GuiData) {
for use_prehash in [true, false] {
for type_of_hash in [HashType::Xxh3, HashType::Blake3, HashType::Crc32].iter() {
if let Some(cache_entries) = czkawka_core::duplicate::load_hashes_from_file(&mut messages, true, type_of_hash, use_prehash) {
let mut hashmap_to_save: BTreeMap<String, czkawka_core::duplicate::FileEntry> = Default::default();
let mut hashmap_to_save: BTreeMap<String, czkawka_core::common_dir_traversal::FileEntry> = Default::default();
for (_, vec_file_entry) in cache_entries {
for file_entry in vec_file_entry {
hashmap_to_save.insert(file_entry.path.to_string_lossy().to_string(), file_entry);

View file

@ -1,4 +1,4 @@
use czkawka_core::duplicate::CheckingMethod;
use czkawka_core::common_dir_traversal::CheckingMethod;
use gtk::prelude::*;
use gtk::{EventControllerKey, TreeView};

View file

@ -1,6 +1,7 @@
use img_hash::{FilterType, HashAlg};
use czkawka_core::duplicate::{CheckingMethod, HashType};
use czkawka_core::common_dir_traversal::CheckingMethod;
use czkawka_core::duplicate::HashType;
pub struct HashTypeStruct {
pub eng_name: &'static str,

View file

@ -15,7 +15,7 @@ use czkawka_core::same_music::SameMusic;
use czkawka_core::similar_images::SimilarImages;
use czkawka_core::similar_videos::SimilarVideos;
use czkawka_core::temporary::Temporary;
use czkawka_core::{fl, invalid_symlinks};
use czkawka_core::{common_dir_traversal, fl};
use crate::notebook_enums::{NotebookMainEnum, NotebookUpperEnum, NUMBER_OF_NOTEBOOK_MAIN_TABS};
@ -448,10 +448,10 @@ pub fn hide_all_buttons(buttons_array: &[Widget]) {
}
}
pub fn get_text_from_invalid_symlink_cause(error: &invalid_symlinks::ErrorType) -> String {
pub fn get_text_from_invalid_symlink_cause(error: &common_dir_traversal::ErrorType) -> String {
match error {
invalid_symlinks::ErrorType::InfiniteRecursion => fl!("invalid_symlink_infinite_recursion"),
invalid_symlinks::ErrorType::NonExistentFile => fl!("invalid_symlink_non_existent_destination"),
common_dir_traversal::ErrorType::InfiniteRecursion => fl!("invalid_symlink_infinite_recursion"),
common_dir_traversal::ErrorType::NonExistentFile => fl!("invalid_symlink_non_existent_destination"),
}
}

View file

@ -85,12 +85,12 @@ fn main() {
// Futures progress report
let (futures_sender_duplicate_files, futures_receiver_duplicate_files): (
futures::channel::mpsc::UnboundedSender<duplicate::ProgressData>,
futures::channel::mpsc::UnboundedReceiver<duplicate::ProgressData>,
futures::channel::mpsc::UnboundedSender<common_dir_traversal::ProgressData>,
futures::channel::mpsc::UnboundedReceiver<common_dir_traversal::ProgressData>,
) = futures::channel::mpsc::unbounded();
let (futures_sender_empty_files, futures_receiver_empty_files): (
futures::channel::mpsc::UnboundedSender<empty_files::ProgressData>,
futures::channel::mpsc::UnboundedReceiver<empty_files::ProgressData>,
futures::channel::mpsc::UnboundedSender<common_dir_traversal::ProgressData>,
futures::channel::mpsc::UnboundedReceiver<common_dir_traversal::ProgressData>,
) = futures::channel::mpsc::unbounded();
let (futures_sender_empty_folder, futures_receiver_empty_folder): (
futures::channel::mpsc::UnboundedSender<empty_folder::ProgressData>,
@ -117,8 +117,8 @@ fn main() {
futures::channel::mpsc::UnboundedReceiver<temporary::ProgressData>,
) = futures::channel::mpsc::unbounded();
let (futures_sender_invalid_symlinks, futures_receiver_invalid_symlinks): (
futures::channel::mpsc::UnboundedSender<invalid_symlinks::ProgressData>,
futures::channel::mpsc::UnboundedReceiver<invalid_symlinks::ProgressData>,
futures::channel::mpsc::UnboundedSender<common_dir_traversal::ProgressData>,
futures::channel::mpsc::UnboundedReceiver<common_dir_traversal::ProgressData>,
) = futures::channel::mpsc::unbounded();
let (futures_sender_broken_files, futures_receiver_broken_files): (
futures::channel::mpsc::UnboundedSender<broken_files::ProgressData>,