Microoptimizations
This commit is contained in:
parent
4f6fe076a7
commit
f4f2df3a0e
9 changed files with 60 additions and 79 deletions
|
@ -5,6 +5,7 @@
|
|||
|
||||
### CLI
|
||||
- Providing full static rust binary with [Eyra](https://github.com/sunfishcode/eyra) - [#1102](https://github.com/qarmin/czkawka/pull/1102)
|
||||
- Fixed duplicated `-c` argument, now saving as compact json is handled via `-C` - ????
|
||||
|
||||
### Krokiet GUI
|
||||
- Initial release of new gui - [#1102](https://github.com/qarmin/czkawka/pull/1102)
|
||||
|
|
|
@ -68,13 +68,9 @@ impl BigFile {
|
|||
|
||||
#[fun_time(message = "look_for_big_files", level = "debug")]
|
||||
fn look_for_big_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&Sender<ProgressData>>) -> bool {
|
||||
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2);
|
||||
let mut old_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
|
||||
|
||||
// Add root folders for finding
|
||||
for id in &self.common_data.directories.included_directories {
|
||||
folders_to_check.push(id.clone());
|
||||
}
|
||||
let mut folders_to_check: Vec<PathBuf> = self.common_data.directories.included_directories.clone();
|
||||
|
||||
let (progress_thread_handle, progress_thread_run, atomic_counter, _check_was_stopped) =
|
||||
prepare_thread_handler_common(progress_sender, 0, 0, 0, CheckingMethod::None, self.common_data.tool_type);
|
||||
|
@ -87,13 +83,13 @@ impl BigFile {
|
|||
}
|
||||
|
||||
let segments: Vec<_> = folders_to_check
|
||||
.par_iter()
|
||||
.into_par_iter()
|
||||
.map(|current_folder| {
|
||||
let mut dir_result = vec![];
|
||||
let mut warnings = vec![];
|
||||
let mut fe_result = vec![];
|
||||
|
||||
let Some(read_dir) = common_read_dir(current_folder, &mut warnings) else {
|
||||
let Some(read_dir) = common_read_dir(&current_folder, &mut warnings) else {
|
||||
return (dir_result, warnings, fe_result);
|
||||
};
|
||||
|
||||
|
@ -110,22 +106,22 @@ impl BigFile {
|
|||
check_folder_children(
|
||||
&mut dir_result,
|
||||
&mut warnings,
|
||||
current_folder,
|
||||
&current_folder,
|
||||
&entry_data,
|
||||
self.common_data.recursive_search,
|
||||
&self.common_data.directories,
|
||||
&self.common_data.excluded_items,
|
||||
);
|
||||
} else if file_type.is_file() {
|
||||
self.collect_file_entry(&atomic_counter, &entry_data, &mut fe_result, &mut warnings, current_folder);
|
||||
self.collect_file_entry(&atomic_counter, &entry_data, &mut fe_result, &mut warnings, &current_folder);
|
||||
}
|
||||
}
|
||||
(dir_result, warnings, fe_result)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Advance the frontier
|
||||
folders_to_check.clear();
|
||||
let required_size = segments.iter().map(|(segment, _, _)| segment.len()).sum::<usize>();
|
||||
folders_to_check = Vec::with_capacity(required_size);
|
||||
|
||||
// Process collected data
|
||||
for (segment, warnings, fe_result) in segments {
|
||||
|
|
|
@ -108,12 +108,7 @@ impl BrokenFiles {
|
|||
|
||||
#[fun_time(message = "check_files", level = "debug")]
|
||||
fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&Sender<ProgressData>>) -> bool {
|
||||
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2);
|
||||
|
||||
// Add root folders for finding
|
||||
for id in &self.common_data.directories.included_directories {
|
||||
folders_to_check.push(id.clone());
|
||||
}
|
||||
let mut folders_to_check: Vec<PathBuf> = self.common_data.directories.included_directories.clone();
|
||||
|
||||
let (progress_thread_handle, progress_thread_run, atomic_counter, _check_was_stopped) =
|
||||
prepare_thread_handler_common(progress_sender, 0, 1, 0, CheckingMethod::None, self.common_data.tool_type);
|
||||
|
@ -126,13 +121,13 @@ impl BrokenFiles {
|
|||
}
|
||||
|
||||
let segments: Vec<_> = folders_to_check
|
||||
.par_iter()
|
||||
.into_par_iter()
|
||||
.map(|current_folder| {
|
||||
let mut dir_result = vec![];
|
||||
let mut warnings = vec![];
|
||||
let mut fe_result = vec![];
|
||||
|
||||
let Some(read_dir) = common_read_dir(current_folder, &mut warnings) else {
|
||||
let Some(read_dir) = common_read_dir(&current_folder, &mut warnings) else {
|
||||
return (dir_result, warnings, fe_result);
|
||||
};
|
||||
|
||||
|
@ -149,14 +144,14 @@ impl BrokenFiles {
|
|||
check_folder_children(
|
||||
&mut dir_result,
|
||||
&mut warnings,
|
||||
current_folder,
|
||||
&current_folder,
|
||||
&entry_data,
|
||||
self.common_data.recursive_search,
|
||||
&self.common_data.directories,
|
||||
&self.common_data.excluded_items,
|
||||
);
|
||||
} else if file_type.is_file() {
|
||||
if let Some(file_entry) = self.get_file_entry(&atomic_counter, &entry_data, &mut warnings, current_folder) {
|
||||
if let Some(file_entry) = self.get_file_entry(&atomic_counter, &entry_data, &mut warnings, &current_folder) {
|
||||
fe_result.push((file_entry.path.to_string_lossy().to_string(), file_entry));
|
||||
}
|
||||
}
|
||||
|
@ -166,8 +161,8 @@ impl BrokenFiles {
|
|||
.collect();
|
||||
debug!("check_files - collected files");
|
||||
|
||||
// Advance the frontier
|
||||
folders_to_check.clear();
|
||||
let required_size = segments.iter().map(|(segment, _, _)| segment.len()).sum::<usize>();
|
||||
folders_to_check = Vec::with_capacity(required_size);
|
||||
|
||||
// Process collected data
|
||||
for (segment, warnings, fe_result) in segments {
|
||||
|
@ -206,8 +201,8 @@ impl BrokenFiles {
|
|||
};
|
||||
|
||||
let fe: FileEntry = FileEntry {
|
||||
path: current_file_name.clone(),
|
||||
modified_date: get_modified_time(&metadata, warnings, &current_file_name, false),
|
||||
path: current_file_name,
|
||||
size: metadata.len(),
|
||||
type_of_file,
|
||||
error_string: String::new(),
|
||||
|
@ -329,7 +324,7 @@ impl BrokenFiles {
|
|||
|
||||
for (name, file_entry) in files_to_check {
|
||||
if let Some(cached_file_entry) = loaded_hash_map.get(&name) {
|
||||
records_already_cached.insert(name.clone(), cached_file_entry.clone());
|
||||
records_already_cached.insert(name, cached_file_entry.clone());
|
||||
} else {
|
||||
non_cached_files_to_check.insert(name, file_entry);
|
||||
}
|
||||
|
|
|
@ -347,7 +347,6 @@ where
|
|||
let mut folder_entries: HashMap<PathBuf, FolderEntry> = HashMap::new();
|
||||
|
||||
// Add root folders into result (only for empty folder collection)
|
||||
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2);
|
||||
if self.collect == Collect::EmptyFolders {
|
||||
for dir in &self.root_dirs {
|
||||
folder_entries.insert(
|
||||
|
@ -361,7 +360,7 @@ where
|
|||
}
|
||||
}
|
||||
// Add root folders for finding
|
||||
folders_to_check.extend(self.root_dirs);
|
||||
let mut folders_to_check: Vec<PathBuf> = self.root_dirs.clone();
|
||||
|
||||
let (progress_thread_handle, progress_thread_run, atomic_counter, _check_was_stopped) =
|
||||
prepare_thread_handler_common(self.progress_sender, 0, self.max_stage, 0, self.checking_method, self.tool_type);
|
||||
|
@ -385,7 +384,7 @@ where
|
|||
}
|
||||
|
||||
let segments: Vec<_> = folders_to_check
|
||||
.par_iter()
|
||||
.into_par_iter()
|
||||
.map(|current_folder| {
|
||||
let mut dir_result = vec![];
|
||||
let mut warnings = vec![];
|
||||
|
@ -393,27 +392,27 @@ where
|
|||
let mut set_as_not_empty_folder_list = vec![];
|
||||
let mut folder_entries_list = vec![];
|
||||
|
||||
let Some(read_dir) = common_read_dir(current_folder, &mut warnings) else {
|
||||
set_as_not_empty_folder_list.push(current_folder.clone());
|
||||
let Some(read_dir) = common_read_dir(&current_folder, &mut warnings) else {
|
||||
set_as_not_empty_folder_list.push(current_folder);
|
||||
return (dir_result, warnings, fe_result, set_as_not_empty_folder_list, folder_entries_list);
|
||||
};
|
||||
|
||||
let mut counter = 0;
|
||||
// Check every sub folder/file/link etc.
|
||||
'dir: for entry in read_dir {
|
||||
let Some(entry_data) = common_get_entry_data(&entry, &mut warnings, current_folder) else {
|
||||
let Some(entry_data) = common_get_entry_data(&entry, &mut warnings, &current_folder) else {
|
||||
continue;
|
||||
};
|
||||
let Ok(file_type) = entry_data.file_type() else { continue };
|
||||
|
||||
match (entry_type(file_type), collect) {
|
||||
(EntryType::Dir, Collect::Files | Collect::InvalidSymlinks) => {
|
||||
process_dir_in_file_symlink_mode(recursive_search, current_folder, entry_data, &directories, &mut dir_result, &mut warnings, &excluded_items);
|
||||
process_dir_in_file_symlink_mode(recursive_search, &current_folder, entry_data, &directories, &mut dir_result, &mut warnings, &excluded_items);
|
||||
}
|
||||
(EntryType::Dir, Collect::EmptyFolders) => {
|
||||
counter += 1;
|
||||
process_dir_in_dir_mode(
|
||||
current_folder,
|
||||
&current_folder,
|
||||
entry_data,
|
||||
&directories,
|
||||
&mut dir_result,
|
||||
|
@ -430,7 +429,7 @@ where
|
|||
&mut warnings,
|
||||
&mut fe_result,
|
||||
&allowed_extensions,
|
||||
current_folder,
|
||||
&current_folder,
|
||||
&directories,
|
||||
&excluded_items,
|
||||
minimal_file_size,
|
||||
|
@ -440,7 +439,7 @@ where
|
|||
(EntryType::File | EntryType::Symlink, Collect::EmptyFolders) => {
|
||||
#[cfg(target_family = "unix")]
|
||||
if directories.exclude_other_filesystems() {
|
||||
match directories.is_on_other_filesystems(current_folder) {
|
||||
match directories.is_on_other_filesystems(&current_folder) {
|
||||
Ok(true) => continue 'dir,
|
||||
Err(e) => warnings.push(e.to_string()),
|
||||
_ => (),
|
||||
|
@ -459,7 +458,7 @@ where
|
|||
&mut warnings,
|
||||
&mut fe_result,
|
||||
&allowed_extensions,
|
||||
current_folder,
|
||||
&current_folder,
|
||||
&directories,
|
||||
&excluded_items,
|
||||
);
|
||||
|
@ -477,8 +476,8 @@ where
|
|||
})
|
||||
.collect();
|
||||
|
||||
// Advance the frontier
|
||||
folders_to_check.clear();
|
||||
let required_size = segments.iter().map(|(segment, _, _, _, _)| segment.len()).sum::<usize>();
|
||||
folders_to_check = Vec::with_capacity(required_size);
|
||||
|
||||
// Process collected data
|
||||
for (segment, warnings, fe_result, set_as_not_empty_folder_list, fe_list) in segments {
|
||||
|
@ -554,9 +553,9 @@ fn process_file_in_file_mode(
|
|||
if (minimal_file_size..=maximal_file_size).contains(&metadata.len()) {
|
||||
// Creating new file entry
|
||||
let fe: FileEntry = FileEntry {
|
||||
path: current_file_name.clone(),
|
||||
size: metadata.len(),
|
||||
modified_date: get_modified_time(&metadata, warnings, &current_file_name, false),
|
||||
path: current_file_name,
|
||||
hash: String::new(),
|
||||
symlink_info: None,
|
||||
};
|
||||
|
@ -708,8 +707,8 @@ fn process_symlink_in_symlink_mode(
|
|||
|
||||
// Creating new file entry
|
||||
let fe: FileEntry = FileEntry {
|
||||
path: current_file_name.clone(),
|
||||
modified_date: get_modified_time(&metadata, warnings, &current_file_name, false),
|
||||
path: current_file_name,
|
||||
size: 0,
|
||||
hash: String::new(),
|
||||
symlink_info: Some(SymlinkInfo { destination_path, type_of_error }),
|
||||
|
|
|
@ -103,7 +103,7 @@ impl DuplicateFinder {
|
|||
ignore_hard_links: true,
|
||||
hash_type: HashType::Blake3,
|
||||
use_prehash_cache: true,
|
||||
minimal_cache_file_size: 1024 * 256, // By default cache only >= 256 KB files
|
||||
minimal_cache_file_size: 1024 * 3256, // By default cache only >= 256 KB files
|
||||
minimal_prehash_cache_file_size: 0,
|
||||
case_sensitive_name_comparison: false,
|
||||
}
|
||||
|
@ -522,7 +522,7 @@ impl DuplicateFinder {
|
|||
.map(|(size, vec_file_entry)| {
|
||||
let mut hashmap_with_hash: BTreeMap<String, Vec<FileEntry>> = Default::default();
|
||||
let mut errors: Vec<String> = Vec::new();
|
||||
let mut buffer = [0u8; 1024 * 2];
|
||||
let mut buffer = [0u8; 1024 * 32];
|
||||
|
||||
atomic_counter.fetch_add(vec_file_entry.len(), Ordering::Relaxed);
|
||||
if check_if_stop_received(stop_receiver) {
|
||||
|
|
|
@ -242,7 +242,7 @@ impl SameMusic {
|
|||
debug!("load_cache - Starting to check for differences");
|
||||
for (name, file_entry) in mem::take(&mut self.music_to_check) {
|
||||
if let Some(cached_file_entry) = loaded_hash_map.get(&name) {
|
||||
records_already_cached.insert(name.clone(), cached_file_entry.clone());
|
||||
records_already_cached.insert(name, cached_file_entry.clone());
|
||||
} else {
|
||||
non_cached_files_to_check.insert(name, file_entry);
|
||||
}
|
||||
|
@ -622,7 +622,7 @@ impl SameMusic {
|
|||
music_entries.push(entry.clone());
|
||||
}
|
||||
used_paths.insert(f_string);
|
||||
music_entries.push(f_entry.clone());
|
||||
music_entries.push(f_entry);
|
||||
duplicated_music_entries.push(music_entries);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -146,7 +146,7 @@ impl SimilarImages {
|
|||
|
||||
#[fun_time(message = "check_for_similar_images", level = "debug")]
|
||||
fn check_for_similar_images(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&Sender<ProgressData>>) -> bool {
|
||||
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2);
|
||||
let mut folders_to_check: Vec<PathBuf> = self.common_data.directories.included_directories.clone();
|
||||
|
||||
if !self.common_data.allowed_extensions.using_custom_extensions() {
|
||||
self.common_data.allowed_extensions.extend_allowed_extensions(IMAGE_RS_SIMILAR_IMAGES_EXTENSIONS);
|
||||
|
@ -177,13 +177,13 @@ impl SimilarImages {
|
|||
}
|
||||
|
||||
let segments: Vec<_> = folders_to_check
|
||||
.par_iter()
|
||||
.into_par_iter()
|
||||
.map(|current_folder| {
|
||||
let mut dir_result = vec![];
|
||||
let mut warnings = vec![];
|
||||
let mut fe_result = vec![];
|
||||
|
||||
let Some(read_dir) = common_read_dir(current_folder, &mut warnings) else {
|
||||
let Some(read_dir) = common_read_dir(&current_folder, &mut warnings) else {
|
||||
return (dir_result, warnings, fe_result);
|
||||
};
|
||||
|
||||
|
@ -198,7 +198,7 @@ impl SimilarImages {
|
|||
check_folder_children(
|
||||
&mut dir_result,
|
||||
&mut warnings,
|
||||
current_folder,
|
||||
&current_folder,
|
||||
&entry_data,
|
||||
self.common_data.recursive_search,
|
||||
&self.common_data.directories,
|
||||
|
@ -206,15 +206,15 @@ impl SimilarImages {
|
|||
);
|
||||
} else if file_type.is_file() {
|
||||
atomic_counter.fetch_add(1, Ordering::Relaxed);
|
||||
self.add_file_entry(current_folder, &entry_data, &mut fe_result, &mut warnings);
|
||||
self.add_file_entry(&current_folder, &entry_data, &mut fe_result, &mut warnings);
|
||||
}
|
||||
}
|
||||
(dir_result, warnings, fe_result)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Advance the frontier
|
||||
folders_to_check.clear();
|
||||
let required_size = segments.iter().map(|(segment, _, _)| segment.len()).sum::<usize>();
|
||||
folders_to_check = Vec::with_capacity(required_size);
|
||||
|
||||
// Process collected data
|
||||
for (segment, warnings, fe_result) in segments {
|
||||
|
@ -281,7 +281,7 @@ impl SimilarImages {
|
|||
debug!("hash_images-load_cache - starting calculating diff");
|
||||
for (name, file_entry) in mem::take(&mut self.images_to_check) {
|
||||
if let Some(cached_file_entry) = loaded_hash_map.get(&name) {
|
||||
records_already_cached.insert(name.clone(), cached_file_entry.clone());
|
||||
records_already_cached.insert(name, cached_file_entry.clone());
|
||||
} else {
|
||||
non_cached_files_to_check.insert(name, file_entry);
|
||||
}
|
||||
|
|
|
@ -130,7 +130,7 @@ impl SimilarVideos {
|
|||
|
||||
#[fun_time(message = "check_for_similar_videos", level = "debug")]
|
||||
fn check_for_similar_videos(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&Sender<ProgressData>>) -> bool {
|
||||
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2);
|
||||
let mut folders_to_check: Vec<PathBuf> = self.common_data.directories.included_directories.clone();
|
||||
|
||||
if !self.common_data.allowed_extensions.using_custom_extensions() {
|
||||
self.common_data.allowed_extensions.extend_allowed_extensions(VIDEO_FILES_EXTENSIONS);
|
||||
|
@ -141,11 +141,6 @@ impl SimilarVideos {
|
|||
}
|
||||
}
|
||||
|
||||
// Add root folders for finding
|
||||
for id in &self.common_data.directories.included_directories {
|
||||
folders_to_check.push(id.clone());
|
||||
}
|
||||
|
||||
let (progress_thread_handle, progress_thread_run, atomic_counter, _check_was_stopped) =
|
||||
prepare_thread_handler_common(progress_sender, 0, 1, 0, CheckingMethod::None, self.common_data.tool_type);
|
||||
|
||||
|
@ -156,13 +151,13 @@ impl SimilarVideos {
|
|||
}
|
||||
|
||||
let segments: Vec<_> = folders_to_check
|
||||
.par_iter()
|
||||
.into_par_iter()
|
||||
.map(|current_folder| {
|
||||
let mut dir_result = vec![];
|
||||
let mut warnings = vec![];
|
||||
let mut fe_result = vec![];
|
||||
|
||||
let Some(read_dir) = common_read_dir(current_folder, &mut warnings) else {
|
||||
let Some(read_dir) = common_read_dir(&current_folder, &mut warnings) else {
|
||||
return (dir_result, warnings, fe_result);
|
||||
};
|
||||
|
||||
|
@ -179,7 +174,7 @@ impl SimilarVideos {
|
|||
check_folder_children(
|
||||
&mut dir_result,
|
||||
&mut warnings,
|
||||
current_folder,
|
||||
&current_folder,
|
||||
&entry_data,
|
||||
self.common_data.recursive_search,
|
||||
&self.common_data.directories,
|
||||
|
@ -187,15 +182,15 @@ impl SimilarVideos {
|
|||
);
|
||||
} else if file_type.is_file() {
|
||||
atomic_counter.fetch_add(1, Ordering::Relaxed);
|
||||
self.add_video_file_entry(&entry_data, &mut fe_result, &mut warnings, current_folder);
|
||||
self.add_video_file_entry(&entry_data, &mut fe_result, &mut warnings, &current_folder);
|
||||
}
|
||||
}
|
||||
(dir_result, warnings, fe_result)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Advance the frontier
|
||||
folders_to_check.clear();
|
||||
let required_size = segments.iter().map(|(segment, _, _)| segment.len()).sum::<usize>();
|
||||
folders_to_check = Vec::with_capacity(required_size);
|
||||
|
||||
// Process collected data
|
||||
for (segment, warnings, fe_result) in segments {
|
||||
|
@ -230,9 +225,9 @@ impl SimilarVideos {
|
|||
// Checking files
|
||||
if (self.common_data.minimal_file_size..=self.common_data.maximal_file_size).contains(&metadata.len()) {
|
||||
let fe: FileEntry = FileEntry {
|
||||
path: current_file_name.clone(),
|
||||
size: metadata.len(),
|
||||
modified_date: get_modified_time(&metadata, warnings, &current_file_name, false),
|
||||
path: current_file_name,
|
||||
vhash: Default::default(),
|
||||
error: String::new(),
|
||||
};
|
||||
|
@ -255,7 +250,7 @@ impl SimilarVideos {
|
|||
|
||||
for (name, file_entry) in mem::take(&mut self.videos_to_check) {
|
||||
if let Some(cached_file_entry) = loaded_hash_map.get(&name) {
|
||||
records_already_cached.insert(name.clone(), cached_file_entry.clone());
|
||||
records_already_cached.insert(name, cached_file_entry.clone());
|
||||
} else {
|
||||
non_cached_files_to_check.insert(name, file_entry);
|
||||
}
|
||||
|
|
|
@ -71,12 +71,7 @@ impl Temporary {
|
|||
|
||||
#[fun_time(message = "check_files", level = "debug")]
|
||||
fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&Sender<ProgressData>>) -> bool {
|
||||
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2);
|
||||
|
||||
// Add root folders for finding
|
||||
for id in &self.common_data.directories.included_directories {
|
||||
folders_to_check.push(id.clone());
|
||||
}
|
||||
let mut folders_to_check: Vec<PathBuf> = self.common_data.directories.included_directories.clone();
|
||||
|
||||
let (progress_thread_handle, progress_thread_run, atomic_counter, _check_was_stopped) =
|
||||
prepare_thread_handler_common(progress_sender, 0, 0, 0, CheckingMethod::None, self.common_data.tool_type);
|
||||
|
@ -88,13 +83,13 @@ impl Temporary {
|
|||
}
|
||||
|
||||
let segments: Vec<_> = folders_to_check
|
||||
.par_iter()
|
||||
.into_par_iter()
|
||||
.map(|current_folder| {
|
||||
let mut dir_result = vec![];
|
||||
let mut warnings = vec![];
|
||||
let mut fe_result = vec![];
|
||||
|
||||
let Some(read_dir) = common_read_dir(current_folder, &mut warnings) else {
|
||||
let Some(read_dir) = common_read_dir(&current_folder, &mut warnings) else {
|
||||
return (dir_result, warnings, fe_result);
|
||||
};
|
||||
|
||||
|
@ -111,14 +106,14 @@ impl Temporary {
|
|||
check_folder_children(
|
||||
&mut dir_result,
|
||||
&mut warnings,
|
||||
current_folder,
|
||||
&current_folder,
|
||||
&entry_data,
|
||||
self.common_data.recursive_search,
|
||||
&self.common_data.directories,
|
||||
&self.common_data.excluded_items,
|
||||
);
|
||||
} else if file_type.is_file() {
|
||||
if let Some(file_entry) = self.get_file_entry(&atomic_counter, &entry_data, &mut warnings, current_folder) {
|
||||
if let Some(file_entry) = self.get_file_entry(&atomic_counter, &entry_data, &mut warnings, &current_folder) {
|
||||
fe_result.push(file_entry);
|
||||
}
|
||||
}
|
||||
|
@ -127,8 +122,8 @@ impl Temporary {
|
|||
})
|
||||
.collect();
|
||||
|
||||
// Advance the frontier
|
||||
folders_to_check.clear();
|
||||
let required_size = segments.iter().map(|(segment, _, _)| segment.len()).sum::<usize>();
|
||||
folders_to_check = Vec::with_capacity(required_size);
|
||||
|
||||
// Process collected data
|
||||
for (segment, warnings, fe_result) in segments {
|
||||
|
@ -164,8 +159,8 @@ impl Temporary {
|
|||
|
||||
// Creating new file entry
|
||||
Some(FileEntry {
|
||||
path: current_file_name.clone(),
|
||||
modified_date: get_modified_time(&metadata, warnings, &current_file_name, false),
|
||||
path: current_file_name,
|
||||
})
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in a new issue