
Implement finding duplicates by size/name (#956)

* Implementing Size+Name method

* Partial hashing

* Move hashing into different functions

* Update

* Add some code

* Split code into parts

* Entry size

* Simplify code

* Bottom Buttons

* Bottom Buttons

* Confusion

* Libheif

* Simplified sorting

* Revert libheif change
Rafał Mikrut 2023-04-05 08:08:43 +02:00 committed by GitHub
parent de4edba380
commit 5272309341
24 changed files with 2397 additions and 2042 deletions
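The heart of the new method is a composite grouping key: files become duplicate candidates only when both their size and their file name match. A minimal standalone sketch of that grouping (illustrative names, not the exact czkawka API):

use std::collections::BTreeMap;
use std::path::PathBuf;

// Group paths by (size, lowercased file name); groups that keep more than
// one entry are duplicate candidates (contents may still differ, no hashing here).
fn group_by_size_name(files: Vec<(PathBuf, u64)>) -> BTreeMap<(u64, String), Vec<PathBuf>> {
    let mut groups: BTreeMap<(u64, String), Vec<PathBuf>> = BTreeMap::new();
    for (path, size) in files {
        if let Some(name) = path.file_name() {
            groups.entry((size, name.to_string_lossy().to_lowercase())).or_default().push(path);
        }
    }
    groups.retain(|_, group| group.len() > 1); // drop files without duplicates
    groups
}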

Cargo.lock (generated)

File diff suppressed because it is too large.


@@ -10,7 +10,7 @@ homepage = "https://github.com/qarmin/czkawka"
repository = "https://github.com/qarmin/czkawka"
[dependencies]
clap = { version = "4.1", features = ["derive"] }
clap = { version = "4.2", features = ["derive"] }
# For enum types
image_hasher = "1.1.2"


@@ -584,6 +584,7 @@ fn parse_checking_method(src: &str) -> Result<CheckingMethod, &'static str> {
match src.to_ascii_lowercase().as_str() {
"name" => Ok(CheckingMethod::Name),
"size" => Ok(CheckingMethod::Size),
"size_name" => Ok(CheckingMethod::SizeName),
"hash" => Ok(CheckingMethod::Hash),
_ => Err("Couldn't parse the search method (allowed: NAME, SIZE, HASH)"),
}
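A quick illustration of the parser above; matches! is used so the example does not have to assume CheckingMethod implements PartialEq:

// The input is lowercased first, so any casing of "size_name" selects the new method.
assert!(matches!(parse_checking_method("SIZE_NAME"), Ok(CheckingMethod::SizeName)));
assert!(matches!(parse_checking_method("hash"), Ok(CheckingMethod::Hash)));
assert!(parse_checking_method("unknown").is_err());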


@@ -20,16 +20,16 @@ directories-next = "2.0.0"
# Needed by similar images
image_hasher = "1.1.2"
bk-tree = "0.4.0"
image = "0.24.5"
bk-tree = "0.5.0"
image = "0.24.6"
hamming = "0.1.3"
# Needed by same music
bitflags = "1.3.2"
lofty = "0.11.0"
bitflags = "2.0.2"
lofty = "0.12.0"
# Futures - needed by async progress sender
futures = "0.3.26"
futures = "0.3.28"
# Needed by broken files
zip = { version = "0.6.4", features = ["aes-crypto", "bzip2", "deflate", "time"], default-features = false }
@@ -41,7 +41,7 @@ blake3 = "1.3.3"
crc32fast = "1.3.2"
xxhash-rust = { version = "0.8.6", features = ["xxh3"] }
tempfile = "3.4.0"
tempfile = "3.5.0"
# Video Duplicates
vid_dup_finder_lib = "0.1.1"
@@ -54,8 +54,8 @@ serde_json = "1.0"
# Language
i18n-embed = { version = "0.13.8", features = ["fluent-system", "desktop-requester"] }
i18n-embed-fl = "0.6.5"
rust-embed = "6.6.0"
i18n-embed-fl = "0.6.6"
rust-embed = "6.6.1"
once_cell = "1.17.1"
# Raw image files
@@ -69,10 +69,10 @@ infer = "0.13.0"
num_cpus = "1.15.0"
# Heif/Heic
libheif-rs = { version = "0.18.0", optional = true }
libheif-rs = { version = "0.18.0", optional = true } # Do not upgrade for now, since Ubuntu 22.04 does not work with newer versions
anyhow = { version = "1.0", optional = true }
state="0.5.3"
state = "0.5.3"
[features]
default = []


@@ -59,6 +59,7 @@ pub enum TypeOfFile {
}
bitflags! {
#[derive(PartialEq, Copy, Clone)]
pub struct CheckedTypes : u32 {
const NONE = 0;
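The new #[derive(...)] line is required by the bitflags 1.x to 2.x bump elsewhere in this commit: the 2.x macro no longer implements common traits automatically, so they must be listed explicitly inside it. A minimal standalone example of the 2.x form (flag names and values are illustrative):

use bitflags::bitflags;

bitflags! {
    #[derive(PartialEq, Copy, Clone)]
    pub struct Example: u32 {
        const NONE = 0;
        const A = 0b01;
        const B = 0b10;
    }
}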


@@ -10,6 +10,8 @@ use anyhow::Result;
use directories_next::ProjectDirs;
use image::{DynamicImage, ImageBuffer, Rgb};
use imagepipe::{ImageSource, Pipeline};
// #[cfg(feature = "heif")]
// use libheif_rs::LibHeif;
#[cfg(feature = "heif")]
use libheif_rs::{ColorSpace, HeifContext, RgbChroma};
@@ -126,8 +128,10 @@ pub fn open_cache_folder(cache_file_name: &str, save_to_cache: bool, use_json: b
#[cfg(feature = "heif")]
pub fn get_dynamic_image_from_heic(path: &str) -> Result<DynamicImage> {
// let libheif = LibHeif::new();
let im = HeifContext::read_from_file(path)?;
let handle = im.primary_image_handle()?;
// let image = libheif.decode(&handle, ColorSpace::Rgb(RgbChroma::Rgb), None)?; // Enable when using libheif 0.19
let image = handle.decode(ColorSpace::Rgb(RgbChroma::Rgb), None)?;
let width = image.width();
let height = image.height();


@@ -30,6 +30,7 @@ pub struct ProgressData {
pub enum CheckingMethod {
None,
Name,
SizeName,
Size,
Hash,
}


@@ -11,7 +11,7 @@ use std::os::unix::fs::MetadataExt;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread::sleep;
use std::thread::{sleep, JoinHandle};
use std::time::{Duration, SystemTime};
use std::{fs, mem, thread};
@@ -67,6 +67,8 @@ pub struct Info {
pub number_of_duplicated_files_by_hash: usize,
pub number_of_groups_by_name: usize,
pub number_of_duplicated_files_by_name: usize,
pub number_of_groups_by_size_name: usize,
pub number_of_duplicated_files_by_size_name: usize,
pub lost_space_by_size: u64,
pub lost_space_by_hash: u64,
}
@@ -81,11 +83,13 @@ impl Info {
pub struct DuplicateFinder {
text_messages: Messages,
information: Info,
files_with_identical_names: BTreeMap<String, Vec<FileEntry>>, // File Size, File Entry
files_with_identical_size: BTreeMap<u64, Vec<FileEntry>>, // File Size, File Entry
files_with_identical_hashes: BTreeMap<u64, Vec<Vec<FileEntry>>>, // File Size, next grouped by file size, next grouped by hash
files_with_identical_names_referenced: BTreeMap<String, (FileEntry, Vec<FileEntry>)>, // File Size, File Entry
files_with_identical_size_referenced: BTreeMap<u64, (FileEntry, Vec<FileEntry>)>, // File Size, File Entry
files_with_identical_names: BTreeMap<String, Vec<FileEntry>>, // File Name, File Entries
files_with_identical_size_names: BTreeMap<(u64, String), Vec<FileEntry>>, // File (Size, Name), File Entry
files_with_identical_size: BTreeMap<u64, Vec<FileEntry>>, // File Size, File Entry
files_with_identical_hashes: BTreeMap<u64, Vec<Vec<FileEntry>>>, // File Size, next grouped by file size, next grouped by hash
files_with_identical_names_referenced: BTreeMap<String, (FileEntry, Vec<FileEntry>)>, // File Name, (Original, Duplicates)
files_with_identical_size_names_referenced: BTreeMap<(u64, String), (FileEntry, Vec<FileEntry>)>, // File (Size, Name), File Entry
files_with_identical_size_referenced: BTreeMap<u64, (FileEntry, Vec<FileEntry>)>, // File Size, File Entry
files_with_identical_hashes_referenced: BTreeMap<u64, Vec<(FileEntry, Vec<FileEntry>)>>, // File Size, next grouped by file size, next grouped by hash
directories: Directories,
allowed_extensions: Extensions,
@@ -116,8 +120,10 @@ impl DuplicateFinder {
information: Info::new(),
files_with_identical_names: Default::default(),
files_with_identical_size: Default::default(),
files_with_identical_size_names: Default::default(),
files_with_identical_hashes: Default::default(),
files_with_identical_names_referenced: Default::default(),
files_with_identical_size_names_referenced: Default::default(),
files_with_identical_size_referenced: Default::default(),
files_with_identical_hashes_referenced: Default::default(),
recursive_search: true,
@@ -148,24 +154,30 @@ impl DuplicateFinder {
match self.check_method {
CheckingMethod::Name => {
if !self.check_files_name(stop_receiver, progress_sender) {
self.stopped_search = true;
self.stopped_search = !self.check_files_name(stop_receiver, progress_sender); // TODO restore this to name
if self.stopped_search {
return;
}
}
CheckingMethod::SizeName => {
self.stopped_search = !self.check_files_size_name(stop_receiver, progress_sender);
if self.stopped_search {
return;
}
}
CheckingMethod::Size => {
if !self.check_files_size(stop_receiver, progress_sender) {
self.stopped_search = true;
self.stopped_search = !self.check_files_size(stop_receiver, progress_sender);
if self.stopped_search {
return;
}
}
CheckingMethod::Hash => {
if !self.check_files_size(stop_receiver, progress_sender) {
self.stopped_search = true;
self.stopped_search = !self.check_files_size(stop_receiver, progress_sender);
if self.stopped_search {
return;
}
if !self.check_files_hash(stop_receiver, progress_sender) {
self.stopped_search = true;
self.stopped_search = !self.check_files_hash(stop_receiver, progress_sender);
if self.stopped_search {
return;
}
}
@@ -221,6 +233,11 @@ impl DuplicateFinder {
&self.files_with_identical_size
}
#[must_use]
pub const fn get_files_sorted_by_size_name(&self) -> &BTreeMap<(u64, String), Vec<FileEntry>> {
&self.files_with_identical_size_names
}
#[must_use]
pub const fn get_files_sorted_by_hash(&self) -> &BTreeMap<u64, Vec<Vec<FileEntry>>> {
&self.files_with_identical_hashes
@@ -319,6 +336,11 @@ impl DuplicateFinder {
&self.files_with_identical_size_referenced
}
#[must_use]
pub fn get_files_with_identical_size_names_referenced(&self) -> &BTreeMap<(u64, String), (FileEntry, Vec<FileEntry>)> {
&self.files_with_identical_size_names_referenced
}
fn check_files_name(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
let group_by_func = if self.case_sensitive_name_comparison {
|fe: &FileEntry| fe.path.file_name().unwrap().to_string_lossy().to_string()
@@ -388,18 +410,7 @@ impl DuplicateFinder {
self.files_with_identical_names_referenced.insert(fe.path.to_string_lossy().to_string(), (fe, vec_fe));
}
}
if self.use_reference_folders {
for (_fe, vector) in self.files_with_identical_names_referenced.values() {
self.information.number_of_duplicated_files_by_name += vector.len();
self.information.number_of_groups_by_name += 1;
}
} else {
for vector in self.files_with_identical_names.values() {
self.information.number_of_duplicated_files_by_name += vector.len() - 1;
self.information.number_of_groups_by_name += 1;
}
}
self.calculate_name_stats();
Common::print_time(start_time, SystemTime::now(), "check_files_name");
true
@@ -411,6 +422,118 @@ impl DuplicateFinder {
}
}
fn calculate_name_stats(&mut self) {
if self.use_reference_folders {
for (_fe, vector) in self.files_with_identical_names_referenced.values() {
self.information.number_of_duplicated_files_by_name += vector.len();
self.information.number_of_groups_by_name += 1;
}
} else {
for vector in self.files_with_identical_names.values() {
self.information.number_of_duplicated_files_by_name += vector.len() - 1;
self.information.number_of_groups_by_name += 1;
}
}
}
fn check_files_size_name(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
let group_by_func = if self.case_sensitive_name_comparison {
|fe: &FileEntry| (fe.size, fe.path.file_name().unwrap().to_string_lossy().to_string())
} else {
|fe: &FileEntry| (fe.size, fe.path.file_name().unwrap().to_string_lossy().to_lowercase())
};
let result = DirTraversalBuilder::new()
.root_dirs(self.directories.included_directories.clone())
.group_by(group_by_func)
.stop_receiver(stop_receiver)
.progress_sender(progress_sender)
.checking_method(CheckingMethod::Name)
.directories(self.directories.clone())
.allowed_extensions(self.allowed_extensions.clone())
.excluded_items(self.excluded_items.clone())
.recursive_search(self.recursive_search)
.minimal_file_size(self.minimal_file_size)
.maximal_file_size(self.maximal_file_size)
.build()
.run();
match result {
DirTraversalResult::SuccessFiles {
start_time,
grouped_file_entries,
warnings,
} => {
self.files_with_identical_size_names = grouped_file_entries;
self.text_messages.warnings.extend(warnings);
// Create a new BTreeMap without single-entry groups (files that have no duplicates)
let mut new_map: BTreeMap<(u64, String), Vec<FileEntry>> = Default::default();
for (name_size, vector) in &self.files_with_identical_size_names {
if vector.len() > 1 {
new_map.insert(name_size.clone(), vector.clone());
}
}
self.files_with_identical_size_names = new_map;
// References - only used when checking by size, because hashes are counted differently later
if self.use_reference_folders {
let mut btree_map = Default::default();
mem::swap(&mut self.files_with_identical_size_names, &mut btree_map);
let reference_directories = self.directories.reference_directories.clone();
let vec = btree_map
.into_iter()
.filter_map(|(_size, vec_file_entry)| {
let mut files_from_referenced_folders = Vec::new();
let mut normal_files = Vec::new();
for file_entry in vec_file_entry {
if reference_directories.iter().any(|e| file_entry.path.starts_with(e)) {
files_from_referenced_folders.push(file_entry);
} else {
normal_files.push(file_entry);
}
}
if files_from_referenced_folders.is_empty() || normal_files.is_empty() {
None
} else {
Some((files_from_referenced_folders.pop().unwrap(), normal_files))
}
})
.collect::<Vec<(FileEntry, Vec<FileEntry>)>>();
for (fe, vec_fe) in vec {
self.files_with_identical_size_names_referenced
.insert((fe.size, fe.path.to_string_lossy().to_string()), (fe, vec_fe));
}
}
self.calculate_size_name_stats();
Common::print_time(start_time, SystemTime::now(), "check_files_size_name");
true
}
DirTraversalResult::SuccessFolders { .. } => {
unreachable!()
}
DirTraversalResult::Stopped => false,
}
}
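How the two grouping keys above differ on a concrete path (standalone sketch):

use std::path::PathBuf;

fn main() {
    let path = PathBuf::from("/photos/Holiday.JPG");
    let size = 1024u64;
    // The case-sensitive key keeps the original casing, so Holiday.JPG != holiday.jpg.
    let key_cs = (size, path.file_name().unwrap().to_string_lossy().to_string());
    assert_eq!(key_cs.1, "Holiday.JPG");
    // The case-insensitive key lowercases the name, so both spellings share one group.
    let key_ci = (size, path.file_name().unwrap().to_string_lossy().to_lowercase());
    assert_eq!(key_ci.1, "holiday.jpg");
}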
fn calculate_size_name_stats(&mut self) {
if self.use_reference_folders {
for ((size, _name), (_fe, vector)) in &self.files_with_identical_size_names_referenced {
self.information.number_of_duplicated_files_by_size_name += vector.len();
self.information.number_of_groups_by_size_name += 1;
self.information.lost_space_by_size += (vector.len() as u64) * size;
}
} else {
for ((size, _name), vector) in &self.files_with_identical_size_names {
self.information.number_of_duplicated_files_by_size_name += vector.len() - 1;
self.information.number_of_groups_by_size_name += 1;
self.information.lost_space_by_size += (vector.len() as u64 - 1) * size;
}
}
}
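A worked example of the accounting above: a non-reference group holding three 4 MiB copies counts as one group, 3 - 1 = 2 duplicated files, and (3 - 1) * 4 MiB = 8 MiB of lost space, because one copy is always kept. In reference mode the kept copy lives outside the vector, so all vector.len() entries count as duplicates and the lost space is vector.len() * size.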
/// Reads file sizes and groups files into buckets (one per size).
/// Buckets that contain only one file are removed, since such files have no duplicates.
fn check_files_size(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
@@ -459,49 +582,8 @@ impl DuplicateFinder {
}
}
// Reference - only use in size, because later hash will be counted differently
if self.use_reference_folders && self.check_method == CheckingMethod::Size {
let mut btree_map = Default::default();
mem::swap(&mut self.files_with_identical_size, &mut btree_map);
let reference_directories = self.directories.reference_directories.clone();
let vec = btree_map
.into_iter()
.filter_map(|(_size, vec_file_entry)| {
let mut files_from_referenced_folders = Vec::new();
let mut normal_files = Vec::new();
for file_entry in vec_file_entry {
if reference_directories.iter().any(|e| file_entry.path.starts_with(e)) {
files_from_referenced_folders.push(file_entry);
} else {
normal_files.push(file_entry);
}
}
if files_from_referenced_folders.is_empty() || normal_files.is_empty() {
None
} else {
Some((files_from_referenced_folders.pop().unwrap(), normal_files))
}
})
.collect::<Vec<(FileEntry, Vec<FileEntry>)>>();
for (fe, vec_fe) in vec {
self.files_with_identical_size_referenced.insert(fe.size, (fe, vec_fe));
}
}
if self.use_reference_folders {
for (size, (_fe, vector)) in &self.files_with_identical_size_referenced {
self.information.number_of_duplicated_files_by_size += vector.len();
self.information.number_of_groups_by_size += 1;
self.information.lost_space_by_size += (vector.len() as u64) * size;
}
} else {
for (size, vector) in &self.files_with_identical_size {
self.information.number_of_duplicated_files_by_size += vector.len() - 1;
self.information.number_of_groups_by_size += 1;
self.information.lost_space_by_size += (vector.len() as u64 - 1) * size;
}
}
self.filter_reference_folders_by_size();
self.calculate_size_stats();
Common::print_time(start_time, SystemTime::now(), "check_files_size");
true
@@ -513,35 +595,78 @@ impl DuplicateFinder {
}
}
/// The slowest checking type, which must be applied after checking for size
fn check_files_hash(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
assert_eq!(self.check_method, CheckingMethod::Hash);
fn calculate_size_stats(&mut self) {
if self.use_reference_folders {
for (size, (_fe, vector)) in &self.files_with_identical_size_referenced {
self.information.number_of_duplicated_files_by_size += vector.len();
self.information.number_of_groups_by_size += 1;
self.information.lost_space_by_size += (vector.len() as u64) * size;
}
} else {
for (size, vector) in &self.files_with_identical_size {
self.information.number_of_duplicated_files_by_size += vector.len() - 1;
self.information.number_of_groups_by_size += 1;
self.information.lost_space_by_size += (vector.len() as u64 - 1) * size;
}
}
}
let check_type = Arc::new(self.hash_type);
/// This step checks for references, but only when checking by size.
/// This is needed because reference folders are later matched by hash, not by size.
fn filter_reference_folders_by_size(&mut self) {
if self.use_reference_folders && self.check_method == CheckingMethod::Size {
let mut btree_map = Default::default();
mem::swap(&mut self.files_with_identical_size, &mut btree_map);
let reference_directories = self.directories.reference_directories.clone();
let vec = btree_map
.into_iter()
.filter_map(|(_size, vec_file_entry)| {
let mut files_from_referenced_folders = Vec::new();
let mut normal_files = Vec::new();
for file_entry in vec_file_entry {
if reference_directories.iter().any(|e| file_entry.path.starts_with(e)) {
files_from_referenced_folders.push(file_entry);
} else {
normal_files.push(file_entry);
}
}
let start_time: SystemTime = SystemTime::now();
let check_was_stopped = AtomicBool::new(false); // Used for breaking from GUI and ending check thread
let mut pre_checked_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
if files_from_referenced_folders.is_empty() || normal_files.is_empty() {
None
} else {
Some((files_from_referenced_folders.pop().unwrap(), normal_files))
}
})
.collect::<Vec<(FileEntry, Vec<FileEntry>)>>();
for (fe, vec_fe) in vec {
self.files_with_identical_size_referenced.insert(fe.size, (fe, vec_fe));
}
}
}
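The referenced/normal split used above (and in check_files_size_name) can be modeled compactly with Iterator::partition; a standalone sketch, not the project's exact code:

use std::path::PathBuf;

// Left: files under any reference directory; right: everything else.
// A group survives only when both sides are non-empty; one reference file
// is then popped as the original for the remaining duplicates.
fn split_by_reference(files: Vec<PathBuf>, reference_dirs: &[PathBuf]) -> (Vec<PathBuf>, Vec<PathBuf>) {
    files.into_iter().partition(|f| reference_dirs.iter().any(|d| f.starts_with(d)))
}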
//// PROGRESS THREAD START
let progress_thread_run = Arc::new(AtomicBool::new(true));
let atomic_file_counter = Arc::new(AtomicUsize::new(0));
let progress_thread_handle = if let Some(progress_sender) = progress_sender {
// TODO Generalize this if possible with different tools
fn prepare_hash_thread_handler(
&self,
progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>,
progress_thread_run: Arc<AtomicBool>,
atomic_counter: Arc<AtomicUsize>,
current_stage: u8,
max_stage: u8,
max_value: usize,
) -> JoinHandle<()> {
if let Some(progress_sender) = progress_sender {
let progress_send = progress_sender.clone();
let progress_thread_run = progress_thread_run.clone();
let atomic_file_counter = atomic_file_counter.clone();
let files_to_check = self.files_with_identical_size.values().map(Vec::len).sum();
let progress_thread_run = progress_thread_run;
let atomic_counter = atomic_counter;
let checking_method = self.check_method;
thread::spawn(move || loop {
progress_send
.unbounded_send(ProgressData {
checking_method,
current_stage: 1,
max_stage: 2,
entries_checked: atomic_file_counter.load(Ordering::Relaxed),
entries_to_check: files_to_check,
current_stage,
max_stage,
entries_checked: atomic_counter.load(Ordering::Relaxed),
entries_to_check: max_value,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
@@ -551,166 +676,174 @@ impl DuplicateFinder {
})
} else {
thread::spawn(|| {})
};
}
}
//// PROGRESS THREAD END
fn prehashing(
&mut self,
stop_receiver: Option<&Receiver<()>>,
progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>,
pre_checked_map: &mut BTreeMap<u64, Vec<FileEntry>>,
) -> Option<()> {
let start_time: SystemTime = SystemTime::now();
let check_type = self.hash_type;
let check_was_stopped = AtomicBool::new(false); // Used for breaking from GUI and ending check thread
///////////////////////////////////////////////////////////////////////////// PREHASHING START
{
let loaded_hash_map;
let mut records_already_cached: BTreeMap<u64, Vec<FileEntry>> = Default::default();
let mut non_cached_files_to_check: BTreeMap<u64, Vec<FileEntry>> = Default::default();
let progress_thread_run = Arc::new(AtomicBool::new(true));
let atomic_file_counter = Arc::new(AtomicUsize::new(0));
let progress_thread_handle = self.prepare_hash_thread_handler(
progress_sender,
progress_thread_run.clone(),
atomic_file_counter.clone(),
1,
2,
self.files_with_identical_size.values().map(Vec::len).sum(),
);
// Cache algorithm
// - Load data from cache
// - Convert from BT<u64,Vec<FileEntry>> to BT<String,FileEntry>
// - Save to proper values
if self.use_prehash_cache {
loaded_hash_map = match load_hashes_from_file(&mut self.text_messages, self.delete_outdated_cache, &self.hash_type, true) {
Some(t) => t,
None => Default::default(),
};
let loaded_hash_map;
let mut records_already_cached: BTreeMap<u64, Vec<FileEntry>> = Default::default();
let mut non_cached_files_to_check: BTreeMap<u64, Vec<FileEntry>> = Default::default();
let mut loaded_hash_map2: BTreeMap<String, FileEntry> = Default::default();
for vec_file_entry in loaded_hash_map.values() {
for file_entry in vec_file_entry {
loaded_hash_map2.insert(file_entry.path.to_string_lossy().to_string(), file_entry.clone());
// Cache algorithm
// - Load data from cache
// - Convert from BT<u64,Vec<FileEntry>> to BT<String,FileEntry>
// - Save to proper values
if self.use_prehash_cache {
loaded_hash_map = match load_hashes_from_file(&mut self.text_messages, self.delete_outdated_cache, &self.hash_type, true) {
Some(t) => t,
None => Default::default(),
};
let mut loaded_hash_map2: BTreeMap<String, FileEntry> = Default::default();
for vec_file_entry in loaded_hash_map.values() {
for file_entry in vec_file_entry {
loaded_hash_map2.insert(file_entry.path.to_string_lossy().to_string(), file_entry.clone());
}
}
#[allow(clippy::if_same_then_else)]
for vec_file_entry in self.files_with_identical_size.values() {
for file_entry in vec_file_entry {
let name = file_entry.path.to_string_lossy().to_string();
if !loaded_hash_map2.contains_key(&name) {
// Loaded cache data doesn't contain info about the current file
non_cached_files_to_check.entry(file_entry.size).or_insert_with(Vec::new).push(file_entry.clone());
} else if file_entry.size != loaded_hash_map2.get(&name).unwrap().size || file_entry.modified_date != loaded_hash_map2.get(&name).unwrap().modified_date {
// If the size or modification date changed, the file is clearly different
non_cached_files_to_check.entry(file_entry.size).or_insert_with(Vec::new).push(file_entry.clone());
} else {
// Checking may be skipped when an entry with the same size and modification date already exists
records_already_cached.entry(file_entry.size).or_insert_with(Vec::new).push(file_entry.clone());
}
}
}
} else {
loaded_hash_map = Default::default();
mem::swap(&mut self.files_with_identical_size, &mut non_cached_files_to_check);
}
#[allow(clippy::if_same_then_else)]
for vec_file_entry in self.files_with_identical_size.values() {
for file_entry in vec_file_entry {
let name = file_entry.path.to_string_lossy().to_string();
if !loaded_hash_map2.contains_key(&name) {
// Loaded cache data doesn't contain info about the current file
non_cached_files_to_check.entry(file_entry.size).or_insert_with(Vec::new).push(file_entry.clone());
} else if file_entry.size != loaded_hash_map2.get(&name).unwrap().size || file_entry.modified_date != loaded_hash_map2.get(&name).unwrap().modified_date {
// If the size or modification date changed, the file is clearly different
non_cached_files_to_check.entry(file_entry.size).or_insert_with(Vec::new).push(file_entry.clone());
} else {
// Checking may be skipped when an entry with the same size and modification date already exists
records_already_cached.entry(file_entry.size).or_insert_with(Vec::new).push(file_entry.clone());
#[allow(clippy::type_complexity)]
let pre_hash_results: Vec<(u64, BTreeMap<String, Vec<FileEntry>>, Vec<String>)> = non_cached_files_to_check
.par_iter()
.map(|(size, vec_file_entry)| {
let mut hashmap_with_hash: BTreeMap<String, Vec<FileEntry>> = Default::default();
let mut errors: Vec<String> = Vec::new();
let mut buffer = [0u8; 1024 * 2];
atomic_file_counter.fetch_add(vec_file_entry.len(), Ordering::Relaxed);
for file_entry in vec_file_entry {
if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
check_was_stopped.store(true, Ordering::Relaxed);
return None;
}
match hash_calculation(&mut buffer, file_entry, &check_type, 0) {
Ok(hash_string) => {
hashmap_with_hash.entry(hash_string.clone()).or_insert_with(Vec::new).push(file_entry.clone());
}
Err(s) => errors.push(s),
}
}
} else {
loaded_hash_map = Default::default();
mem::swap(&mut self.files_with_identical_size, &mut non_cached_files_to_check);
Some((*size, hashmap_with_hash, errors))
})
.while_some()
.collect();
// End the thread which sends info to the GUI
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
// Check if the user aborted the search (only from GUI)
if check_was_stopped.load(Ordering::Relaxed) {
return None;
}
// Add data from cache
for (size, vec_file_entry) in &records_already_cached {
pre_checked_map.entry(*size).or_insert_with(Vec::new).append(&mut vec_file_entry.clone());
}
// Check results
for (size, hash_map, errors) in &pre_hash_results {
self.text_messages.warnings.append(&mut errors.clone());
for vec_file_entry in hash_map.values() {
if vec_file_entry.len() > 1 {
pre_checked_map.entry(*size).or_insert_with(Vec::new).append(&mut vec_file_entry.clone());
}
}
}
#[allow(clippy::type_complexity)]
let pre_hash_results: Vec<(u64, BTreeMap<String, Vec<FileEntry>>, Vec<String>)> = non_cached_files_to_check
.par_iter()
.map(|(size, vec_file_entry)| {
let mut hashmap_with_hash: BTreeMap<String, Vec<FileEntry>> = Default::default();
let mut errors: Vec<String> = Vec::new();
let mut buffer = [0u8; 1024 * 2];
if self.use_prehash_cache {
// All results = records already cached + computed results
let mut save_cache_to_hashmap: BTreeMap<String, FileEntry> = Default::default();
atomic_file_counter.fetch_add(vec_file_entry.len(), Ordering::Relaxed);
for (size, vec_file_entry) in loaded_hash_map {
if size >= self.minimal_prehash_cache_file_size {
for file_entry in vec_file_entry {
if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
check_was_stopped.store(true, Ordering::Relaxed);
return None;
}
match hash_calculation(&mut buffer, file_entry, &check_type, 0) {
Ok(hash_string) => {
hashmap_with_hash.entry(hash_string.clone()).or_insert_with(Vec::new).push(file_entry.clone());
}
Err(s) => errors.push(s),
}
}
Some((*size, hashmap_with_hash, errors))
})
.while_some()
.collect();
// End the thread which sends info to the GUI
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
// Check if the user aborted the search (only from GUI)
if check_was_stopped.load(Ordering::Relaxed) {
return false;
}
// Add data from cache
for (size, vec_file_entry) in &records_already_cached {
pre_checked_map.entry(*size).or_insert_with(Vec::new).append(&mut vec_file_entry.clone());
}
// Check results
for (size, hash_map, errors) in &pre_hash_results {
self.text_messages.warnings.append(&mut errors.clone());
for vec_file_entry in hash_map.values() {
if vec_file_entry.len() > 1 {
pre_checked_map.entry(*size).or_insert_with(Vec::new).append(&mut vec_file_entry.clone());
save_cache_to_hashmap.insert(file_entry.path.to_string_lossy().to_string(), file_entry.clone());
}
}
}
if self.use_prehash_cache {
// All results = records already cached + computed results
let mut save_cache_to_hashmap: BTreeMap<String, FileEntry> = Default::default();
for (size, vec_file_entry) in loaded_hash_map {
if size >= self.minimal_prehash_cache_file_size {
for (size, hash_map, _errors) in &pre_hash_results {
if *size >= self.minimal_prehash_cache_file_size {
for vec_file_entry in hash_map.values() {
for file_entry in vec_file_entry {
save_cache_to_hashmap.insert(file_entry.path.to_string_lossy().to_string(), file_entry.clone());
}
}
}
for (size, hash_map, _errors) in &pre_hash_results {
if *size >= self.minimal_prehash_cache_file_size {
for vec_file_entry in hash_map.values() {
for file_entry in vec_file_entry {
save_cache_to_hashmap.insert(file_entry.path.to_string_lossy().to_string(), file_entry.clone());
}
}
}
}
save_hashes_to_file(&save_cache_to_hashmap, &mut self.text_messages, &self.hash_type, true, self.minimal_prehash_cache_file_size);
}
save_hashes_to_file(&save_cache_to_hashmap, &mut self.text_messages, &self.hash_type, true, self.minimal_prehash_cache_file_size);
}
///////////////////////////////////////////////////////////////////////////// PREHASHING END
Common::print_time(start_time, SystemTime::now(), "check_files_hash - prehash");
Some(())
}
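The 2 KiB buffer above is what makes this pass a pre-hash: only the first couple of kilobytes of each file are hashed, which is enough to split most same-size non-duplicates before the expensive full pass. A self-contained sketch of the idea using the blake3 crate from the dependency list (function name and prefix handling are illustrative):

use std::fs::File;
use std::io::Read;
use std::path::Path;

fn partial_hash(path: &Path) -> std::io::Result<String> {
    let mut file = File::open(path)?;
    let mut buffer = [0u8; 1024 * 2]; // same prefix size as the pre-hash buffer above
    let read = file.read(&mut buffer)?; // files shorter than 2 KiB are hashed whole
    Ok(blake3::hash(&buffer[..read]).to_hex().to_string())
}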
fn full_hashing(
&mut self,
stop_receiver: Option<&Receiver<()>>,
progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>,
mut pre_checked_map: BTreeMap<u64, Vec<FileEntry>>,
) -> Option<()> {
let check_was_stopped = AtomicBool::new(false); // Used for breaking from GUI and ending check thread
let check_type = self.hash_type;
let start_time: SystemTime = SystemTime::now();
/////////////////////////
//// PROGRESS THREAD START
let progress_thread_run = Arc::new(AtomicBool::new(true));
let atomic_file_counter = Arc::new(AtomicUsize::new(0));
let progress_thread_handle = if let Some(progress_sender) = progress_sender {
let progress_send = progress_sender.clone();
let progress_thread_run = progress_thread_run.clone();
let atomic_file_counter = atomic_file_counter.clone();
let files_to_check = pre_checked_map.values().map(Vec::len).sum();
let checking_method = self.check_method;
thread::spawn(move || loop {
progress_send
.unbounded_send(ProgressData {
checking_method,
current_stage: 2,
max_stage: 2,
entries_checked: atomic_file_counter.load(Ordering::Relaxed),
entries_to_check: files_to_check,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
break;
}
sleep(Duration::from_millis(LOOP_DURATION as u64));
})
} else {
thread::spawn(|| {})
};
let progress_thread_handle = self.prepare_hash_thread_handler(
progress_sender,
progress_thread_run.clone(),
atomic_file_counter.clone(),
2,
2,
pre_checked_map.values().map(Vec::len).sum(),
);
//// PROGRESS THREAD END
@@ -828,7 +961,7 @@ impl DuplicateFinder {
// Break if stop was clicked after saving to cache
if check_was_stopped.load(Ordering::Relaxed) {
return false;
return None;
}
for (size, hash_map, mut errors) in full_hash_results {
@@ -840,9 +973,11 @@ impl DuplicateFinder {
}
}
}
Common::print_time(start_time, SystemTime::now(), "delete_files");
Some(())
}
///////////////////////////////////////////////////////////////////////////// HASHING END
fn hash_reference_folders(&mut self) {
// References - only used when checking by size, because hashes are counted differently later
if self.use_reference_folders {
let mut btree_map = Default::default();
@@ -897,8 +1032,24 @@ impl DuplicateFinder {
}
}
}
}
Common::print_time(start_time, SystemTime::now(), "check_files_hash - full hash");
/// The slowest checking type, which must be applied after checking for size
fn check_files_hash(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
assert_eq!(self.check_method, CheckingMethod::Hash);
let mut pre_checked_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
let ret = self.prehashing(stop_receiver, progress_sender, &mut pre_checked_map);
if ret.is_none() {
return false;
}
let ret = self.full_hashing(stop_receiver, progress_sender, pre_checked_map);
if ret.is_none() {
return false;
}
self.hash_reference_folders();
// Clean unused data
self.files_with_identical_size = Default::default();
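Since both stages now return Option<()>, the explicit is_none() checks above could also be collapsed with the ? operator; a generic runnable sketch of the pattern (stage names are placeholders):

fn prehash_stage() -> Option<()> { Some(()) }
fn full_hash_stage() -> Option<()> { Some(()) }

fn pipeline() -> bool {
    // `?` propagates a stop (None) from either stage; callers still get a bool.
    (|| -> Option<()> {
        prehash_stage()?;
        full_hash_stage()?;
        Some(())
    })()
    .is_some()
}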
@@ -920,6 +1071,11 @@ impl DuplicateFinder {
let _tuple: (u64, usize, usize) = delete_files(vector, &self.delete_method, &mut self.text_messages, self.dryrun);
}
}
CheckingMethod::SizeName => {
for vector in self.files_with_identical_size_names.values() {
let _tuple: (u64, usize, usize) = delete_files(vector, &self.delete_method, &mut self.text_messages, self.dryrun);
}
}
CheckingMethod::Hash => {
for vector_vectors in self.files_with_identical_hashes.values() {
for vector in vector_vectors.iter() {
@@ -1053,6 +1209,30 @@ impl SaveResults for DuplicateFinder {
write!(writer, "Not found any files with same names.").unwrap();
}
}
CheckingMethod::SizeName => {
if !self.files_with_identical_names.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same size and names-------------------------------------------------"
)
.unwrap();
writeln!(
writer,
"Found {} files in {} groups with same size and name(may have different content)",
self.information.number_of_duplicated_files_by_size_name, self.information.number_of_groups_by_size_name,
)
.unwrap();
for ((size, name), vector) in self.files_with_identical_size_names.iter().rev() {
writeln!(writer, "Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len()).unwrap();
for j in vector {
writeln!(writer, "{}", j.path.display()).unwrap();
}
writeln!(writer).unwrap();
}
} else {
write!(writer, "Not found any files with same size and names.").unwrap();
}
}
CheckingMethod::Size => {
if !self.files_with_identical_size.is_empty() {
writeln!(
@@ -1137,6 +1317,20 @@ impl PrintResults for DuplicateFinder {
println!();
}
}
CheckingMethod::SizeName => {
for i in &self.files_with_identical_size_names {
number_of_files += i.1.len() as u64;
number_of_groups += 1;
}
println!("Found {number_of_files} files in {number_of_groups} groups with same size and name(may have different content)",);
for ((size, name), vector) in &self.files_with_identical_size_names {
println!("Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len());
for j in vector {
println!("{}", j.path.display());
}
println!();
}
}
CheckingMethod::Hash => {
for vector in self.files_with_identical_hashes.values() {
for j in vector {


@@ -31,6 +31,7 @@ pub enum DeleteMethod {
}
bitflags! {
#[derive(PartialEq, Copy, Clone, Debug)]
pub struct MusicSimilarity : u32 {
const NONE = 0;


@@ -965,7 +965,7 @@ impl SimilarImages {
let mut found = false;
for vec_file_entry in collected_similar_images.values() {
if vec_file_entry.is_empty() {
println!("Empty Element {vec_file_entry:?}");
println!("Empty group");
found = true;
continue;
}
@@ -984,7 +984,7 @@
}
}
}
assert!(!found, "Found Invalid entries");
assert!(!found, "Found Invalid entries, verify errors before"); // TODO crashes with empty result with reference folder, verify why
}
self.similar_vectors = collected_similar_images.into_values().collect();


@@ -10,29 +10,29 @@ homepage = "https://github.com/qarmin/czkawka"
repository = "https://github.com/qarmin/czkawka"
[dependencies]
gdk4 = "0.6.2"
glib = "0.17.2"
gdk4 = "0.6.3"
glib = "0.17.5"
humansize = "2.1.3"
chrono = "0.4.23"
chrono = "0.4.24"
# Used for sending stop signal across threads
crossbeam-channel = "0.5.7"
# To get information about progress
futures = "0.3.26"
futures = "0.3.28"
# For saving/loading config files to specific directories
directories-next = "2.0.0"
# For opening files
open = "3.2.0"
open = "4.0.1"
# To get image preview
image = "0.24.5"
image = "0.24.6"
# To be able to use custom select
regex = "1.7.1"
regex = "1.7.3"
# To get image_hasher types
image_hasher = "1.1.2"
@@ -45,15 +45,15 @@ fs_extra = "1.3.0"
# Language
i18n-embed = { version = "0.13.8", features = ["fluent-system", "desktop-requester"] }
i18n-embed-fl = "0.6.5"
rust-embed = "6.6.0"
i18n-embed-fl = "0.6.6"
rust-embed = "6.6.1"
once_cell = "1.17.1"
[target.'cfg(windows)'.dependencies]
winapi = { version = "0.3.9", features = ["combaseapi", "objbase", "shobjidl_core", "windef", "winerror", "wtypesbase", "winuser"] }
[dependencies.gtk4]
version = "0.6.2"
version = "0.6.4"
default-features = false
features = ["v4_6"]


@@ -28,6 +28,7 @@ duplicate_case_sensitive_name_tooltip =
Disabling this option will group names without checking whether each letter has the same case, e.g. żoŁD <-> Żołd
duplicate_mode_size_name_combo_box = Size and Name
duplicate_mode_name_combo_box = Name
duplicate_mode_size_combo_box = Size
duplicate_mode_hash_combo_box = Hash
@@ -447,6 +448,7 @@ progress_scanning_music_tags_end = Comparing tags of {$file_checked}/{$all_files
progress_scanning_music_tags = Reading tags of {$file_checked}/{$all_files} music file
progress_scanning_empty_folders = Scanning {$folder_number} folder
progress_scanning_size = Scanning size of {$file_number} file
progress_scanning_size_name = Scanning name and size of {$file_number} file
progress_scanning_name = Scanning name of {$file_number} file
progress_analyzed_partial_hash = Analyzed partial hash of {$file_checked}/{$all_files} files
progress_analyzed_full_hash = Analyzed full hash of {$file_checked}/{$all_files} files

File diff suppressed because it is too large.


@@ -395,13 +395,13 @@ fn generate_cache_for_results(vector_with_path: Vec<(String, String, TreePath)>)
#[allow(clippy::never_loop)]
loop {
let Some(pixbuf_big) = resize_pixbuf_dimension(&pixbuf, (BIG_PREVIEW_SIZE, BIG_PREVIEW_SIZE), InterpType::Bilinear) else{
println!("Failed to resize image {full_path}.");
break;
let Some(pixbuf_big) = resize_pixbuf_dimension(&pixbuf, (BIG_PREVIEW_SIZE, BIG_PREVIEW_SIZE), InterpType::Bilinear) else {
println!("Failed to resize image {full_path}.");
break;
};
let Some(pixbuf_small) = resize_pixbuf_dimension(&pixbuf_big, (SMALL_PREVIEW_SIZE, SMALL_PREVIEW_SIZE), InterpType::Bilinear) else {
println!("Failed to resize image {full_path}.");
break;
let Some(pixbuf_small) = resize_pixbuf_dimension(&pixbuf_big, (SMALL_PREVIEW_SIZE, SMALL_PREVIEW_SIZE), InterpType::Bilinear) else {
println!("Failed to resize image {full_path}.");
break;
};
big_img.set_from_pixbuf(Some(&pixbuf_big));


@@ -1,9 +1,10 @@
use gtk4::prelude::*;
use crate::gui_structs::gui_data::GuiData;
use crate::gui_structs::gui_popovers_sort::GuiSortPopovers;
use crate::help_functions::PopoverTypes;
use crate::notebook_enums::{to_notebook_main_enum, NotebookMainEnum};
use crate::notebook_info::NOTEBOOKS_INFO;
use gtk4::prelude::*;
pub fn connect_button_sort(gui_data: &GuiData) {
let popovers_sort = gui_data.popovers_sort.clone();


@@ -21,7 +21,7 @@ pub fn connect_duplicate_combo_box(gui_data: &GuiData) {
label_duplicate_hash_type.set_visible(false);
}
if DUPLICATES_CHECK_METHOD_COMBO_BOX[chosen_index as usize].check_method == CheckingMethod::Name {
if [CheckingMethod::Name, CheckingMethod::SizeName].contains(&DUPLICATES_CHECK_METHOD_COMBO_BOX[chosen_index as usize].check_method) {
check_button_duplicate_case_sensitive_name.set_visible(true);
} else {
check_button_duplicate_case_sensitive_name.set_visible(false);
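An equivalent form of the visibility toggle above that avoids the temporary array and the if/else (a sketch reusing the identifiers from the surrounding function):

let method = DUPLICATES_CHECK_METHOD_COMBO_BOX[chosen_index as usize].check_method;
check_button_duplicate_case_sensitive_name.set_visible(matches!(method, CheckingMethod::Name | CheckingMethod::SizeName));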


@@ -1,6 +1,7 @@
use std::fmt::Debug;
use gtk4::prelude::*;
use gtk4::{ListStore, TreeIter};
use std::fmt::Debug;
use crate::gui_structs::gui_data::GuiData;
use crate::help_functions::*;
@@ -118,10 +119,11 @@ pub fn connect_popover_sort(gui_data: &GuiData) {
#[cfg(test)]
mod test {
use crate::connect_things::connect_popovers_sort::{popover_sort_general, sort_iters};
use gtk4::prelude::*;
use gtk4::{Popover, TreeView};
use crate::connect_things::connect_popovers_sort::{popover_sort_general, sort_iters};
#[gtk4::test]
fn test_sort_iters() {
let columns_types: &[glib::types::Type] = &[glib::types::Type::U32, glib::types::Type::STRING];


@@ -109,6 +109,16 @@ pub fn connect_progress_window(
));
taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE);
}
common_dir_traversal::CheckingMethod::SizeName => {
label_stage.show();
grid_progress_stages.hide();
label_stage.set_text(&flg!(
"progress_scanning_size_name",
generate_translation_hashmap(vec![("file_number", item.entries_checked.to_string())])
));
taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE);
}
common_dir_traversal::CheckingMethod::Size => {
label_stage.show();
grid_progress_stages.hide();


@@ -549,9 +549,8 @@ impl GuiMainNotebook {
CheckingMethod::Hash => flg!("duplicate_mode_hash_combo_box"),
CheckingMethod::Size => flg!("duplicate_mode_size_combo_box"),
CheckingMethod::Name => flg!("duplicate_mode_name_combo_box"),
_ => {
panic!()
}
CheckingMethod::SizeName => flg!("duplicate_mode_size_name_combo_box"),
CheckingMethod::None => panic!(),
};
self.combo_box_duplicate_check_method.append_text(&text);
}


@@ -29,7 +29,7 @@ pub struct CheckMethodStruct {
pub check_method: CheckingMethod,
}
pub const DUPLICATES_CHECK_METHOD_COMBO_BOX: [CheckMethodStruct; 3] = [
pub const DUPLICATES_CHECK_METHOD_COMBO_BOX: [CheckMethodStruct; 4] = [
CheckMethodStruct {
eng_name: "Hash",
check_method: CheckingMethod::Hash,
@@ -42,6 +42,10 @@ pub const DUPLICATES_CHECK_METHOD_COMBO_BOX: [CheckMethodStruct; 3] = [
eng_name: "Name",
check_method: CheckingMethod::Name,
},
CheckMethodStruct {
eng_name: "Size and Name",
check_method: CheckingMethod::SizeName,
},
];
#[derive(Copy, Clone)]


@@ -244,8 +244,8 @@ pub fn get_string_from_list_store(tree_view: &TreeView, column_full_path: i32, c
let mut string_vector: Vec<String> = Vec::new();
let Some(tree_iter) = list_store.iter_first() else {
return string_vector;
let Some(tree_iter) = list_store.iter_first() else {
return string_vector;
};
match column_selection {
Some(column_selection) => loop {


@@ -1,5 +1,4 @@
use std::cell::RefCell;
use std::path::Path;
use std::rc::Rc;


@@ -1,6 +1,6 @@
use crate::help_functions::{
ColumnsBadExtensions, ColumnsBigFiles, ColumnsBrokenFiles, ColumnsDuplicates, ColumnsEmptyFiles, ColumnsEmptyFolders, ColumnsInvalidSymlinks, ColumnsSameMusic,
ColumnsSimilarImages, ColumnsSimilarVideos, ColumnsTemporaryFiles, PopoverTypes,
BottomButtonsEnum, ColumnsBadExtensions, ColumnsBigFiles, ColumnsBrokenFiles, ColumnsDuplicates, ColumnsEmptyFiles, ColumnsEmptyFolders, ColumnsInvalidSymlinks,
ColumnsSameMusic, ColumnsSimilarImages, ColumnsSimilarVideos, ColumnsTemporaryFiles, PopoverTypes,
};
use crate::notebook_enums::{NotebookMainEnum, NUMBER_OF_NOTEBOOK_MAIN_TABS};
@@ -17,6 +17,7 @@ pub struct NotebookObject {
pub column_size_as_bytes: Option<i32>,
pub column_modification_as_secs: Option<i32>,
pub columns_types: &'static [glib::types::Type],
pub bottom_buttons: &'static [BottomButtonsEnum],
}
pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
@@ -52,6 +53,15 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::BOOL, // IsHeader
glib::types::Type::STRING, // TextColor
],
bottom_buttons: &[
BottomButtonsEnum::Save,
BottomButtonsEnum::Delete,
BottomButtonsEnum::Select,
BottomButtonsEnum::Sort,
BottomButtonsEnum::Symlink,
BottomButtonsEnum::Hardlink,
BottomButtonsEnum::Move,
],
},
NotebookObject {
notebook_type: NotebookMainEnum::EmptyDirectories,
@@ -72,6 +82,7 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::STRING, // Modification
glib::types::Type::U64, // ModificationAsSecs
],
bottom_buttons: &[BottomButtonsEnum::Save, BottomButtonsEnum::Delete, BottomButtonsEnum::Select, BottomButtonsEnum::Move],
},
NotebookObject {
notebook_type: NotebookMainEnum::BigFiles,
@@ -94,6 +105,7 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::U64, // SizeAsBytes
glib::types::Type::U64, // ModificationAsSecs
],
bottom_buttons: &[BottomButtonsEnum::Save, BottomButtonsEnum::Delete, BottomButtonsEnum::Select, BottomButtonsEnum::Move],
},
NotebookObject {
notebook_type: NotebookMainEnum::EmptyFiles,
@@ -114,6 +126,7 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::STRING, // Modification
glib::types::Type::U64, // ModificationAsSecs
],
bottom_buttons: &[BottomButtonsEnum::Save, BottomButtonsEnum::Delete, BottomButtonsEnum::Select, BottomButtonsEnum::Move],
},
NotebookObject {
notebook_type: NotebookMainEnum::Temporary,
@@ -134,6 +147,7 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::STRING, // Modification
glib::types::Type::U64, // ModificationAsSecs
],
bottom_buttons: &[BottomButtonsEnum::Save, BottomButtonsEnum::Delete, BottomButtonsEnum::Select, BottomButtonsEnum::Move],
},
NotebookObject {
notebook_type: NotebookMainEnum::SimilarImages,
@@ -162,6 +176,16 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::BOOL, // IsHeader
glib::types::Type::STRING, // TextColor
],
bottom_buttons: &[
BottomButtonsEnum::Save,
BottomButtonsEnum::Delete,
BottomButtonsEnum::Select,
BottomButtonsEnum::Sort,
BottomButtonsEnum::Symlink,
BottomButtonsEnum::Hardlink,
BottomButtonsEnum::Move,
BottomButtonsEnum::Compare,
],
},
NotebookObject {
notebook_type: NotebookMainEnum::SimilarVideos,
@@ -188,6 +212,15 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::BOOL, // IsHeader
glib::types::Type::STRING, // TextColor
],
bottom_buttons: &[
BottomButtonsEnum::Save,
BottomButtonsEnum::Delete,
BottomButtonsEnum::Select,
BottomButtonsEnum::Sort,
BottomButtonsEnum::Symlink,
BottomButtonsEnum::Hardlink,
BottomButtonsEnum::Move,
],
},
NotebookObject {
notebook_type: NotebookMainEnum::SameMusic,
@@ -221,6 +254,15 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::BOOL, // IsHeader
glib::types::Type::STRING, // TextColor
],
bottom_buttons: &[
BottomButtonsEnum::Save,
BottomButtonsEnum::Delete,
BottomButtonsEnum::Select,
BottomButtonsEnum::Sort,
BottomButtonsEnum::Symlink,
BottomButtonsEnum::Hardlink,
BottomButtonsEnum::Move,
],
},
NotebookObject {
notebook_type: NotebookMainEnum::Symlinks,
@@ -243,6 +285,7 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::STRING, // Modification
glib::types::Type::U64, // ModificationAsSecs
],
bottom_buttons: &[BottomButtonsEnum::Save, BottomButtonsEnum::Delete, BottomButtonsEnum::Select, BottomButtonsEnum::Move],
},
NotebookObject {
notebook_type: NotebookMainEnum::BrokenFiles,
@@ -264,6 +307,7 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::STRING, // Modification
glib::types::Type::U64, // ModificationAsSecs
],
bottom_buttons: &[BottomButtonsEnum::Save, BottomButtonsEnum::Delete, BottomButtonsEnum::Select, BottomButtonsEnum::Move],
},
NotebookObject {
notebook_type: NotebookMainEnum::BadExtensions,
@@ -286,5 +330,6 @@ pub static NOTEBOOKS_INFO: [NotebookObject; NUMBER_OF_NOTEBOOK_MAIN_TABS] = [
glib::types::Type::STRING, // Modification
glib::types::Type::U64, // ModificationAsSecs
],
bottom_buttons: &[BottomButtonsEnum::Save, BottomButtonsEnum::Delete, BottomButtonsEnum::Select, BottomButtonsEnum::Move],
},
];


@@ -5,11 +5,11 @@ use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use std::{env, fs};
use czkawka_core::common::get_default_number_of_threads;
use directories_next::ProjectDirs;
use gtk4::prelude::*;
use gtk4::{ComboBoxText, ScrolledWindow, TextView, TreeView};
use czkawka_core::common::get_default_number_of_threads;
use czkawka_core::common_dir_traversal::CheckingMethod;
use czkawka_core::similar_images::SIMILAR_VALUES;
@@ -938,7 +938,7 @@ pub fn load_configuration(
main_notebook.label_duplicate_hash_type.set_visible(false);
}
if DUPLICATES_CHECK_METHOD_COMBO_BOX[combo_chosen_index as usize].check_method == CheckingMethod::Name {
if [CheckingMethod::Name, CheckingMethod::SizeName].contains(&DUPLICATES_CHECK_METHOD_COMBO_BOX[combo_chosen_index as usize].check_method) {
main_notebook.check_button_duplicate_case_sensitive_name.set_visible(true);
} else {
main_notebook.check_button_duplicate_case_sensitive_name.set_visible(false);