diff --git a/README.md b/README.md
index 1101873..4188f55 100644
--- a/README.md
+++ b/README.md
@@ -130,7 +130,7 @@ DupeGuru after selecting files, froze at 45% for ~15 minutes, so I just kill it.
 I used Mprof for checking memory usage FSlint and Dupeguru, for Czkawka I used Heaptrack.
 To not get Dupeguru crash I checked smaller directory with 217986 files and 41883 folders.
 
-| App| Idle Ram | Max Operational Ram Usage | Stabilized after search usage |
+| App| Idle Ram | Max Operational Ram Usage | Stabilized after search |
 |:----------:|:-------------:|:-------------:|:-------------:|
 | FSlint 2.4.7 | 54 MB | 120 MB | 117 MB |
 | Czkawka 1.2.2 | 8 MB | 42 MB | 41 MB |
@@ -159,7 +159,7 @@ So still is a big room for improvements.
 | Language | Rust| Python | Python/Objective C |
 | OS | Linux, Windows, Mac(only CLI) | Linux | Linux, Windows, Mac|
 | Framework | GTK 3 (Gtk-rs)| GTK 2 (PyGTK) | Qt 5 (PyQt)/Cocoa |
-| Ram Usage | Low | Medium | |
+| Ram Usage | Low | Medium | Very High |
 | Duplicate finder | X | X | X |
 | Empty files | X | X | |
 | Empty folders | X | X | |
diff --git a/czkawka_cli/src/commands.rs b/czkawka_cli/src/commands.rs
index e48faa9..d94661e 100644
--- a/czkawka_cli/src/commands.rs
+++ b/czkawka_cli/src/commands.rs
@@ -16,7 +16,7 @@ pub enum Commands {
         minimal_file_size: u64,
         #[structopt(flatten)]
         allowed_extensions: AllowedExtensions,
-        #[structopt(short, long, default_value = "HASH", parse(try_from_str = parse_checking_method), help = "Search method (SIZE, HASH, HASHMB)", long_help = "Methods to search files.\nSIZE - The fastest method, checking by the file's size,\nHASHMB - More accurate but slower, checking by the hash of the file's first mibibyte or\nHASH - The slowest method, checking by the hash of the entire file")]
+        #[structopt(short, long, default_value = "HASH", parse(try_from_str = parse_checking_method), help = "Search method (NAME, SIZE, HASH, HASHMB)", long_help = "Methods to search files.\nNAME - Fast but rarely usable,\nSIZE - Fast but not accurate, checking by the file's size,\nHASHMB - More accurate but slower, checking by the hash of the file's first mebibyte or\nHASH - The slowest method, checking by the hash of the entire file")]
         search_method: CheckingMethod,
         #[structopt(short = "D", long, default_value = "NONE", parse(try_from_str = parse_delete_method), help = "Delete method (AEN, AEO, ON, OO)", long_help = "Methods to delete the files.\nAEN - All files except the newest,\nAEO - All files except the oldest,\nON - Only 1 file, the newest,\nOO - Only 1 file, the oldest\nNONE - not delete files")]
         delete_method: DeleteMethod,
@@ -83,7 +83,7 @@ pub enum Commands {
         #[structopt(flatten)]
         not_recursive: NotRecursive,
     },
-    #[structopt(name = "ima", about = "Finds similar images", help_message = HELP_MESSAGE, after_help = "EXAMPLE:\n czkawka ima -d /home/rafal/ -E */.git */tmp* *Pulpit -f results.txt")]
+    #[structopt(name = "image", about = "Finds similar images", help_message = HELP_MESSAGE, after_help = "EXAMPLE:\n czkawka image -d /home/rafal/ -E */.git */tmp* *Pulpit -f results.txt")]
     SimilarImages {
         #[structopt(flatten)]
         directories: Directories,
@@ -153,10 +153,11 @@ impl FileToSave {
 
 fn parse_checking_method(src: &str) -> Result<CheckingMethod, &'static str> {
     match src.to_ascii_lowercase().as_str() {
+        "name" => Ok(CheckingMethod::Name),
         "size" => Ok(CheckingMethod::Size),
         "hash" => Ok(CheckingMethod::Hash),
         "hashmb" => Ok(CheckingMethod::HashMB),
-        _ => Err("Couldn't parse the search method (allowed: SIZE, HASH, HASHMB)"),
+        _ => Err("Couldn't parse the search method (allowed: NAME, SIZE, HASH, HASHMB)"),
     }
 }
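For context, the `parse(try_from_str = ...)` hook used above only needs a plain function returning a `Result`. A minimal, self-contained sketch of the same case-insensitive parsing pattern (the enum and the `main` harness here are illustrative, not the crate's full definitions):

```rust
// Standalone sketch of the parser that structopt wires in via
// `parse(try_from_str = parse_checking_method)`.
#[derive(Debug, PartialEq)]
enum CheckingMethod {
    Name,
    Size,
    Hash,
    HashMB,
}

fn parse_checking_method(src: &str) -> Result<CheckingMethod, &'static str> {
    // Lowercase once so "HASH", "Hash" and "hash" all parse the same way.
    match src.to_ascii_lowercase().as_str() {
        "name" => Ok(CheckingMethod::Name),
        "size" => Ok(CheckingMethod::Size),
        "hash" => Ok(CheckingMethod::Hash),
        "hashmb" => Ok(CheckingMethod::HashMB),
        _ => Err("Couldn't parse the search method (allowed: NAME, SIZE, HASH, HASHMB)"),
    }
}

fn main() {
    assert_eq!(parse_checking_method("HashMB"), Ok(CheckingMethod::HashMB));
    assert!(parse_checking_method("md5").is_err());
}
```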
@@ -205,4 +206,5 @@ EXAMPLES:
     {bin} empty-folders -d /home/rafal/rr /home/gateway -f results.txt
     {bin} big -d /home/rafal/ /home/piszczal -e /home/rafal/Roman -n 25 -x VIDEO -f results.txt
     {bin} empty-files -d /home/rafal /home/szczekacz -e /home/rafal/Pulpit -R -f results.txt
-    {bin} temp -d /home/rafal/ -E */.git */tmp* *Pulpit -f results.txt -D"#;
+    {bin} temp -d /home/rafal/ -E */.git */tmp* *Pulpit -f results.txt -D
+    {bin} image -d /home/rafal -e /home/rafal/Pulpit -f results.txt"#;
diff --git a/czkawka_core/src/duplicate.rs b/czkawka_core/src/duplicate.rs
index d018319..37d61d1 100644
--- a/czkawka_core/src/duplicate.rs
+++ b/czkawka_core/src/duplicate.rs
@@ -19,11 +19,17 @@ const HASH_MB_LIMIT_BYTES: u64 = 1024 * 1024; // 1MB
 #[derive(PartialEq, Eq, Clone, Debug)]
 pub enum CheckingMethod {
     None,
+    Name,
     Size,
     Hash,
     HashMB,
 }
 
+#[derive(PartialEq, Eq, Clone, Debug)]
+pub enum HashType {
+    Blake3,
+}
+
 #[derive(Eq, PartialEq, Clone, Debug)]
 pub enum DeleteMethod {
     None,
@@ -51,8 +57,10 @@ pub struct Info {
     pub number_of_duplicated_files_by_size: usize,
     pub number_of_groups_by_hash: usize,
     pub number_of_duplicated_files_by_hash: usize,
-    pub number_of_duplicated_files_after_pre_hash: usize,
     pub number_of_groups_after_pre_hash: usize,
+    pub number_of_duplicated_files_after_pre_hash: usize,
+    pub number_of_groups_by_name: usize,
+    pub number_of_duplicated_files_by_name: usize,
     pub lost_space_by_size: u64,
     pub lost_space_after_pre_hash: u64,
     pub lost_space_by_hash: u64,
@@ -72,6 +80,7 @@ impl Info {
 pub struct DuplicateFinder {
     text_messages: Messages,
     information: Info,
+    files_with_identical_names: BTreeMap<String, Vec<FileEntry>>, // File Name, File Entry
     files_with_identical_size: BTreeMap<u64, Vec<FileEntry>>, // File Size, File Entry
     files_with_identical_hashes: BTreeMap<u64, Vec<Vec<FileEntry>>>, // File Size, File Entry
     directories: Directories,
@@ -81,6 +90,7 @@ pub struct DuplicateFinder {
     minimal_file_size: u64,
     check_method: CheckingMethod,
     delete_method: DeleteMethod,
+    hash_type: HashType,
     stopped_search: bool,
 }
 
@@ -89,6 +99,7 @@ impl DuplicateFinder {
         Self {
             text_messages: Messages::new(),
             information: Info::new(),
+            files_with_identical_names: Default::default(),
             files_with_identical_size: Default::default(),
             files_with_identical_hashes: Default::default(),
             recursive_search: true,
@@ -99,20 +110,38 @@ impl DuplicateFinder {
             directories: Directories::new(),
             excluded_items: ExcludedItems::new(),
             stopped_search: false,
+            hash_type: HashType::Blake3,
         }
     }
 
     pub fn find_duplicates(&mut self, rx: Option<&Receiver<()>>) {
         self.directories.optimize_directories(self.recursive_search, &mut self.text_messages);
-        if !self.check_files_size(rx) {
-            self.stopped_search = true;
-            return;
-        }
-        #[allow(clippy::collapsible_if)]
-        if self.check_method == CheckingMethod::Hash || self.check_method == CheckingMethod::HashMB {
-            if !self.check_files_hash(rx) {
-                self.stopped_search = true;
-                return;
+
+        match self.check_method {
+            CheckingMethod::Name => {
+                if !self.check_files_name(rx) {
+                    self.stopped_search = true;
+                    return;
+                }
+            }
+            CheckingMethod::Size => {
+                if !self.check_files_size(rx) {
+                    self.stopped_search = true;
+                    return;
+                }
+            }
+            CheckingMethod::HashMB | CheckingMethod::Hash => {
+                if !self.check_files_size(rx) {
+                    self.stopped_search = true;
+                    return;
+                }
+                if !self.check_files_hash(rx) {
+                    self.stopped_search = true;
+                    return;
+                }
+            }
+            CheckingMethod::None => {
+                panic!();
             }
         }
         self.delete_files();
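The new `HashType` enum above has a single variant so far. The hashing behind HASH and HASHMB, including the one-mebibyte prehash cutoff, can be sketched roughly as below, assuming the `blake3` crate that the `Blake3` variant presumably refers to; `hash_file` is an illustrative helper, not the function in `duplicate.rs`:

```rust
// Sketch of the two hash depths: HASH reads the whole file, HASHMB stops
// after the first mebibyte, matching HASH_MB_LIMIT_BYTES in the patch.
use std::fs::File;
use std::io::Read;

const HASH_MB_LIMIT_BYTES: u64 = 1024 * 1024; // 1 MiB, as in duplicate.rs

fn hash_file(path: &str, first_mib_only: bool) -> std::io::Result<String> {
    let mut file = File::open(path)?;
    let mut hasher = blake3::Hasher::new();
    let mut buffer = [0u8; 16384];
    let mut read_so_far: u64 = 0;
    loop {
        let n = file.read(&mut buffer)?;
        if n == 0 {
            break;
        }
        hasher.update(&buffer[..n]);
        read_so_far += n as u64;
        // HASHMB stops after the first mebibyte; HASH reads to the end.
        if first_mib_only && read_so_far >= HASH_MB_LIMIT_BYTES {
            break;
        }
    }
    Ok(hasher.finalize().to_hex().to_string())
}

fn main() -> std::io::Result<()> {
    println!("{}", hash_file("Cargo.toml", false)?);
    Ok(())
}
```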
@@ -127,6 +156,10 @@ impl DuplicateFinder {
         self.stopped_search
     }
 
+    pub const fn get_files_sorted_by_names(&self) -> &BTreeMap<String, Vec<FileEntry>> {
+        &self.files_with_identical_names
+    }
+
     pub const fn get_files_sorted_by_size(&self) -> &BTreeMap<u64, Vec<FileEntry>> {
         &self.files_with_identical_size
     }
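The `check_files_name` hunk below walks directories iteratively rather than recursively. Stripped to its skeleton (a hypothetical `collect_files` helper, std only), the technique is an explicit stack of pending folders, so deep trees cannot overflow the call stack and unreadable directories are skipped rather than fatal:

```rust
use std::fs;
use std::path::PathBuf;

fn collect_files(roots: &[PathBuf]) -> Vec<PathBuf> {
    // Vec<PathBuf> acts as an explicit work stack instead of recursion.
    let mut folders_to_check: Vec<PathBuf> = roots.to_vec();
    let mut files = Vec::new();
    while let Some(current_folder) = folders_to_check.pop() {
        let read_dir = match fs::read_dir(&current_folder) {
            Ok(t) => t,
            Err(_) => continue, // e.g. permission denied; the real code records a warning
        };
        for entry in read_dir.flatten() {
            match entry.metadata() {
                Ok(m) if m.is_dir() => folders_to_check.push(entry.path()),
                Ok(m) if m.is_file() => files.push(entry.path()),
                _ => {} // symlinks and unreadable entries are ignored
            }
        }
    }
    files
}

fn main() {
    let found = collect_files(&[PathBuf::from(".")]);
    println!("{} files found", found.len());
}
```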
@@ -177,10 +210,146 @@ impl DuplicateFinder {
         self.excluded_items.set_excluded_items(excluded_items, &mut self.text_messages);
     }
 
+    fn check_files_name(&mut self, rx: Option<&Receiver<()>>) -> bool {
+        // TODO maybe add multithreading checking files
+        let start_time: SystemTime = SystemTime::now();
+        let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // This should be small enough to not see too big a difference and big enough to store most paths without needing to resize the vector
+
+        // Add root folders for finding
+        for id in &self.directories.included_directories {
+            folders_to_check.push(id.clone());
+        }
+        self.information.number_of_checked_folders += folders_to_check.len();
+
+        while !folders_to_check.is_empty() {
+            if rx.is_some() && rx.unwrap().try_recv().is_ok() {
+                return false;
+            }
+            let current_folder = folders_to_check.pop().unwrap();
+
+            // Read current dir; if permissions are denied, just go to the next one
+            let read_dir = match fs::read_dir(&current_folder) {
+                Ok(t) => t,
+                Err(_) => {
+                    self.text_messages.warnings.push(format!("Cannot open dir {}", current_folder.display()));
+                    continue;
+                } // Permissions denied
+            };
+
+            // Check every sub folder/file/link etc.
+            'dir: for entry in read_dir {
+                let entry_data = match entry {
+                    Ok(t) => t,
+                    Err(_) => {
+                        self.text_messages.warnings.push(format!("Cannot read entry in dir {}", current_folder.display()));
+                        continue 'dir;
+                    } // Permissions denied
+                };
+                let metadata: Metadata = match entry_data.metadata() {
+                    Ok(t) => t,
+                    Err(_) => {
+                        self.text_messages.warnings.push(format!("Cannot read metadata in dir {}", current_folder.display()));
+                        continue 'dir;
+                    } // Permissions denied
+                };
+                if metadata.is_dir() {
+                    self.information.number_of_checked_folders += 1;
+
+                    if !self.recursive_search {
+                        continue 'dir;
+                    }
+
+                    let next_folder = current_folder.join(entry_data.file_name());
+                    if self.directories.is_excluded(&next_folder) {
+                        continue 'dir;
+                    }
+
+                    if self.excluded_items.is_excluded(&next_folder) {
+                        continue 'dir;
+                    }
+
+                    folders_to_check.push(next_folder);
+                } else if metadata.is_file() {
+                    // let mut have_valid_extension: bool;
+                    let file_name_lowercase: String = match entry_data.file_name().into_string() {
+                        Ok(t) => t,
+                        Err(_) => continue 'dir,
+                    }
+                    .to_lowercase();
+
+                    // Checking allowed extensions
+                    if !self.allowed_extensions.file_extensions.is_empty() {
+                        let allowed = self.allowed_extensions.file_extensions.iter().any(|e| file_name_lowercase.ends_with((".".to_string() + e.to_lowercase().as_str()).as_str()));
+                        if !allowed {
+                            // Not an allowed extension, ignore it.
+                            self.information.number_of_ignored_files += 1;
+                            continue 'dir;
+                        }
+                    }
+                    // Checking files
+                    if metadata.len() >= self.minimal_file_size {
+                        let current_file_name = current_folder.join(entry_data.file_name());
+                        if self.excluded_items.is_excluded(&current_file_name) {
+                            continue 'dir;
+                        }
+
+                        // Creating new file entry
+                        let fe: FileEntry = FileEntry {
+                            path: current_file_name.clone(),
+                            size: metadata.len(),
+                            modified_date: match metadata.modified() {
+                                Ok(t) => match t.duration_since(UNIX_EPOCH) {
+                                    Ok(d) => d.as_secs(),
+                                    Err(_) => {
+                                        self.text_messages.warnings.push(format!("File {} seems to be modified before the Unix Epoch.", current_file_name.display()));
+                                        0
+                                    }
+                                },
+                                Err(_) => {
+                                    self.text_messages.warnings.push(format!("Unable to get modification date from file {}", current_file_name.display()));
+                                    continue 'dir;
+                                } // Permissions denied
+                            },
+                        };
+
+                        // Adding files to BTreeMap
+                        self.files_with_identical_names.entry(entry_data.file_name().to_string_lossy().to_string()).or_insert_with(Vec::new);
+                        self.files_with_identical_names.get_mut(&entry_data.file_name().to_string_lossy().to_string()).unwrap().push(fe);
+
+                        self.information.number_of_checked_files += 1;
+                    } else {
+                        // This is probably a symbolic link, so we are free to ignore it
+                        self.information.number_of_ignored_files += 1;
+                    }
+                } else {
+                    // This is probably a symbolic link, so we are free to ignore it
+                    self.information.number_of_ignored_things += 1;
+                }
+            }
+        }
+
+        // Create a new BTreeMap without single-entry groups (files without duplicates)
+        let mut new_map: BTreeMap<String, Vec<FileEntry>> = Default::default();
+
+        self.information.number_of_duplicated_files_by_name = 0;
+
+        for (name, vector) in &self.files_with_identical_names {
+            if vector.len() > 1 {
+                self.information.number_of_duplicated_files_by_name += vector.len() - 1;
+                self.information.number_of_groups_by_name += 1;
+                new_map.insert(name.clone(), vector.clone());
+            }
+        }
+        self.files_with_identical_names = new_map;
+
+        Common::print_time(start_time, SystemTime::now(), "check_files_name".to_string());
+        true
+    }
+
     /// Read file length and puts it to different boxes(each for different lengths)
     /// If in box is only 1 result, then it is removed
     fn check_files_size(&mut self, rx: Option<&Receiver<()>>) -> bool {
-        // TODO maybe add multithreading checking for file hash
+        // TODO maybe add multithreading checking files
         let start_time: SystemTime = SystemTime::now();
         let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector
@@ -318,6 +487,10 @@ impl DuplicateFinder {
     /// The slowest checking type, which must be applied after checking for size
     fn check_files_hash(&mut self, rx: Option<&Receiver<()>>) -> bool {
+        if self.hash_type != HashType::Blake3 {
+            panic!(); // TODO Add more hash types
+        }
+
         let start_time: SystemTime = SystemTime::now();
         let mut file_handler: File;
         let mut hashmap_with_hash: HashMap<String, Vec<FileEntry>>;
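The grouping and pruning at the end of `check_files_name` boils down to a two-pass `BTreeMap` idiom: bucket every path by file name, then keep only buckets with at least two entries. A compact, std-only sketch (with a hypothetical `group_by_name` helper):

```rust
use std::collections::BTreeMap;
use std::path::PathBuf;

fn group_by_name(paths: Vec<PathBuf>) -> BTreeMap<String, Vec<PathBuf>> {
    let mut groups: BTreeMap<String, Vec<PathBuf>> = BTreeMap::new();
    for path in paths {
        // Entries without a final component (e.g. "/") are skipped.
        let name = match path.file_name() {
            Some(n) => n.to_string_lossy().to_string(),
            None => continue,
        };
        groups.entry(name).or_insert_with(Vec::new).push(path);
    }
    // Keep only real duplicates: groups that contain at least two files.
    groups.into_iter().filter(|(_, v)| v.len() > 1).collect()
}

fn main() {
    let paths = vec![PathBuf::from("a/x.txt"), PathBuf::from("b/x.txt"), PathBuf::from("c/y.txt")];
    let groups = group_by_name(paths);
    assert_eq!(groups.len(), 1); // only "x.txt" appears twice
}
```

One small simplification over the patch: `entry(...).or_insert_with(Vec::new)` already returns a `&mut Vec`, so the separate `get_mut` lookup the patch performs afterwards is avoidable.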
@@ -442,7 +615,19 @@ impl DuplicateFinder {
     fn delete_files(&mut self) {
         let start_time: SystemTime = SystemTime::now();
+        if self.delete_method == DeleteMethod::None {
+            return;
+        }
+
         match self.check_method {
+            CheckingMethod::Name => {
+                for vector in self.files_with_identical_names.values() {
+                    let tuple: (u64, usize, usize) = delete_files(vector, &self.delete_method, &mut self.text_messages.warnings);
+                    self.information.gained_space += tuple.0;
+                    self.information.number_of_removed_files += tuple.1;
+                    self.information.number_of_failed_to_remove_files += tuple.2;
+                }
+            }
             CheckingMethod::Hash | CheckingMethod::HashMB => {
                 for vector_vectors in self.files_with_identical_hashes.values() {
                     for vector in vector_vectors.iter() {
@@ -507,6 +692,10 @@ impl DebugPrint for DuplicateFinder {
             "Number of duplicated files by hash(in groups) - {} ({})",
             self.information.number_of_duplicated_files_by_hash, self.information.number_of_groups_by_hash
         );
+        println!(
+            "Number of duplicated files by name(in groups) - {} ({})",
+            self.information.number_of_duplicated_files_by_name, self.information.number_of_groups_by_name
+        );
         println!("Lost space by size - {} ({} bytes)", self.information.lost_space_by_size.file_size(options::BINARY).unwrap(), self.information.lost_space_by_size);
         println!(
             "Lost space after pre hash - {} ({} bytes)",
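The `delete_files` helper invoked per group above implements the CLI's AEN/AEO policies by comparing `modified_date`. A self-contained sketch of just the selection logic, with struct and enum trimmed to what the comparison needs (names and the `files_to_delete` helper are illustrative):

```rust
#[derive(Debug, Clone)]
struct FileEntry {
    path: std::path::PathBuf,
    modified_date: u64,
}

enum DeleteMethod {
    AllExceptNewest, // AEN
    AllExceptOldest, // AEO
}

fn files_to_delete(group: &[FileEntry], method: &DeleteMethod) -> Vec<FileEntry> {
    // Pick the one file to keep, then everything else is a deletion candidate.
    let keep = match method {
        DeleteMethod::AllExceptNewest => group.iter().max_by_key(|f| f.modified_date),
        DeleteMethod::AllExceptOldest => group.iter().min_by_key(|f| f.modified_date),
    };
    let keep_path = keep.map(|f| f.path.clone());
    group.iter().filter(|f| Some(&f.path) != keep_path.as_ref()).cloned().collect()
}

fn main() {
    let group = vec![
        FileEntry { path: "old.txt".into(), modified_date: 100 },
        FileEntry { path: "new.txt".into(), modified_date: 200 },
    ];
    let doomed = files_to_delete(&group, &DeleteMethod::AllExceptNewest);
    assert_eq!(doomed.len(), 1);
    assert_eq!(doomed[0].path, std::path::PathBuf::from("old.txt"));
}
```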
{} groups which takes {}.", + self.information.number_of_duplicated_files_by_size, + self.information.number_of_groups_by_size, + self.information.lost_space_by_size.file_size(options::BINARY).unwrap() + ) + .unwrap(); + for (size, vector) in self.files_with_identical_size.iter().rev() { + write!(file, "\n---- Size {} ({}) - {} files \n", size.file_size(options::BINARY).unwrap(), size, vector.len()).unwrap(); for file_entry in vector { writeln!(file, "{}", file_entry.path.display()).unwrap(); } } + } else { + write!(file, "Not found any duplicates.").unwrap(); } } - } else { - write!(file, "Not found any duplicates.").unwrap(); + CheckingMethod::Hash | CheckingMethod::HashMB => { + if !self.files_with_identical_hashes.is_empty() { + writeln!(file, "-------------------------------------------------Files with same hashes-------------------------------------------------").unwrap(); + writeln!( + file, + "Found {} duplicated files which in {} groups which takes {}.", + self.information.number_of_duplicated_files_by_hash, + self.information.number_of_groups_by_hash, + self.information.lost_space_by_hash.file_size(options::BINARY).unwrap() + ) + .unwrap(); + for (size, vectors_vector) in self.files_with_identical_hashes.iter().rev() { + for vector in vectors_vector { + writeln!(file, "\n---- Size {} ({}) - {} files", size.file_size(options::BINARY).unwrap(), size, vector.len()).unwrap(); + for file_entry in vector { + writeln!(file, "{}", file_entry.path.display()).unwrap(); + } + } + } + } else { + write!(file, "Not found any duplicates.").unwrap(); + } + } + CheckingMethod::None => { + panic!(); + } } Common::print_time(start_time, SystemTime::now(), "save_results_to_file".to_string()); true @@ -621,6 +839,20 @@ impl PrintResults for DuplicateFinder { let mut number_of_groups: u64 = 0; match self.check_method { + CheckingMethod::Name => { + for i in &self.files_with_identical_names { + number_of_files += i.1.len() as u64; + number_of_groups += 1; + } + println!("Found {} files in {} groups with same name(may have different content)", number_of_files, number_of_groups,); + for (name, vector) in &self.files_with_identical_names { + println!("Name - {} - {} files ", name, vector.len()); + for j in vector { + println!("{}", j.path.display()); + } + println!(); + } + } CheckingMethod::Hash | CheckingMethod::HashMB => { for (_size, vector) in self.files_with_identical_hashes.iter() { for j in vector { diff --git a/czkawka_gui/czkawka.glade b/czkawka_gui/czkawka.glade index 83d6c77..7292651 100644 --- a/czkawka_gui/czkawka.glade +++ b/czkawka_gui/czkawka.glade @@ -744,6 +744,21 @@ Author: Rafał Mikrut True False + + + Name(very fast) + True + True + False + True + radio_button_hash + + + False + True + 0 + + Size(very fast) @@ -756,7 +771,7 @@ Author: Rafał Mikrut False True - 0 + 1 @@ -771,7 +786,7 @@ Author: Rafał Mikrut False True - 1 + 2 @@ -786,7 +801,7 @@ Author: Rafał Mikrut False True - 2 + 3 @@ -796,6 +811,43 @@ Author: Rafał Mikrut 1 + + + True + False + + + True + False + Hash type: + + + False + True + 0 + + + + + Blake3 + True + True + False + True + + + False + True + 1 + + + + + False + True + 2 + + True @@ -808,7 +860,7 @@ Author: Rafał Mikrut True True - 2 + 3 diff --git a/czkawka_gui/src/main.rs b/czkawka_gui/src/main.rs index 636f9e0..2596953 100644 --- a/czkawka_gui/src/main.rs +++ b/czkawka_gui/src/main.rs @@ -165,6 +165,7 @@ fn main() { let check_button_recursive: gtk::CheckButton = builder.get_object("check_button_recursive").unwrap(); //// Radio Buttons + let 
diff --git a/czkawka_gui/czkawka.glade b/czkawka_gui/czkawka.glade
index 83d6c77..7292651 100644
--- a/czkawka_gui/czkawka.glade
+++ b/czkawka_gui/czkawka.glade
[GTK Builder XML hunks; only the property values are recoverable. The changes add a "Name(very fast)" radio button (id radio_button_name, grouped with radio_button_hash) ahead of the existing "Size(very fast)" button, shift the packing positions of the Size/HashMB/Hash buttons from 0/1/2 to 1/2/3, and add a new box at position 2 containing a "Hash type:" label and a "Blake3" radio button.]
diff --git a/czkawka_gui/src/main.rs b/czkawka_gui/src/main.rs
index 636f9e0..2596953 100644
--- a/czkawka_gui/src/main.rs
+++ b/czkawka_gui/src/main.rs
@@ -165,6 +165,7 @@ fn main() {
     let check_button_recursive: gtk::CheckButton = builder.get_object("check_button_recursive").unwrap();
 
     //// Radio Buttons
+    let radio_button_name: gtk::RadioButton = builder.get_object("radio_button_name").unwrap();
     let radio_button_size: gtk::RadioButton = builder.get_object("radio_button_size").unwrap();
     let radio_button_hashmb: gtk::RadioButton = builder.get_object("radio_button_hashmb").unwrap();
     let radio_button_hash: gtk::RadioButton = builder.get_object("radio_button_hash").unwrap();
@@ -499,7 +500,9 @@ fn main() {
             match notebook_main_children_names.get(notebook_main.get_current_page().unwrap() as usize).unwrap().as_str() {
                 "notebook_main_duplicate_finder_label" => {
                     let check_method;
-                    if radio_button_size.get_active() {
+                    if radio_button_name.get_active() {
+                        check_method = duplicate::CheckingMethod::Name;
+                    } else if radio_button_size.get_active() {
                         check_method = duplicate::CheckingMethod::Size;
                     } else if radio_button_hashmb.get_active() {
                         check_method = duplicate::CheckingMethod::HashMB;
@@ -1542,15 +1545,23 @@ fn main() {
                     let duplicates_group: usize;
 
                     match df.get_check_method() {
+                        CheckingMethod::Name => {
+                            duplicates_number = information.number_of_duplicated_files_by_name;
+                            duplicates_size = 0;
+                            duplicates_group = information.number_of_groups_by_name;
+                            entry_info.set_text(format!("Found {} files in {} groups with the same name.", duplicates_number, duplicates_group).as_str());
+                        }
                         CheckingMethod::Hash | CheckingMethod::HashMB => {
                            duplicates_number = information.number_of_duplicated_files_by_hash;
                            duplicates_size = information.lost_space_by_hash;
                            duplicates_group = information.number_of_groups_by_hash;
+                            entry_info.set_text(format!("Found {} duplicate files in {} groups which take {}.", duplicates_number, duplicates_group, duplicates_size.file_size(options::BINARY).unwrap()).as_str());
                         }
                         CheckingMethod::Size => {
                            duplicates_number = information.number_of_duplicated_files_by_size;
                            duplicates_size = information.lost_space_by_size;
                            duplicates_group = information.number_of_groups_by_size;
+                            entry_info.set_text(format!("Found {} duplicate files in {} groups which take {}.", duplicates_number, duplicates_group, duplicates_size.file_size(options::BINARY).unwrap()).as_str());
                         }
                         CheckingMethod::None => {
                             panic!();
@@ -1577,6 +1588,33 @@ fn main() {
                     let col_indices = [0, 1, 2, 3, 4, 5];
 
                     match df.get_check_method() {
+                        CheckingMethod::Name => {
+                            let btreemap = df.get_files_sorted_by_names();
+
+                            for (name, vector) in btreemap.iter().rev() {
+                                let values: [&dyn ToValue; 6] = [
+                                    &name,
+                                    &(format!("{} results", vector.len())),
+                                    &"".to_string(), // No text in the third column
+                                    &(0), // Not used here
+                                    &(HEADER_ROW_COLOR.to_string()),
+                                    &(TEXT_COLOR.to_string()),
+                                ];
+                                list_store.set(&list_store.append(), &col_indices, &values);
+                                for entry in vector {
+                                    let (directory, file) = split_path(&entry.path);
+                                    let values: [&dyn ToValue; 6] = [
+                                        &file,
+                                        &directory,
+                                        &(format!("{} - ({})", NaiveDateTime::from_timestamp(entry.modified_date as i64, 0).to_string(), entry.size.file_size(options::BINARY).unwrap())),
+                                        &(entry.modified_date),
+                                        &(MAIN_ROW_COLOR.to_string()),
+                                        &(TEXT_COLOR.to_string()),
+                                    ];
+                                    list_store.set(&list_store.append(), &col_indices, &values);
+                                }
+                            }
+                        }
                         CheckingMethod::Hash | CheckingMethod::HashMB => {
                             let btreemap = df.get_files_sorted_by_hash();
 
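The per-file caption built for the GUI rows above combines a timestamp and a human-readable size. As an isolated sketch, assuming the `chrono` and `humansize` (1.x) crates whose calls (`NaiveDateTime::from_timestamp`, `file_size(options::BINARY)`) already appear in the hunk; `row_caption` is an illustrative helper:

```rust
use chrono::NaiveDateTime;
use humansize::{file_size_opts as options, FileSize};

fn row_caption(modified_date: u64, size: u64) -> String {
    // "2020-09-13 12:26:40 - (1 MiB)", matching the format string in the patch.
    format!(
        "{} - ({})",
        NaiveDateTime::from_timestamp(modified_date as i64, 0),
        size.file_size(options::BINARY).unwrap()
    )
}

fn main() {
    // A 1 MiB file modified 1.6 billion seconds after the Unix epoch.
    println!("{}", row_caption(1_600_000_000, 1024 * 1024));
}
```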
@@ -1645,7 +1683,7 @@ fn main() {
                     {
                         *shared_duplication_state.borrow_mut() = df;
 
-                        if duplicates_size > 0 {
+                        if duplicates_number > 0 {
                             *shared_buttons.borrow_mut().get_mut("duplicate").unwrap().get_mut("save").unwrap() = true;
                             *shared_buttons.borrow_mut().get_mut("duplicate").unwrap().get_mut("delete").unwrap() = true;
                             *shared_buttons.borrow_mut().get_mut("duplicate").unwrap().get_mut("select").unwrap() = true;
diff --git a/misc/.idea/workspace.xml b/misc/.idea/workspace.xml
index bc56e2f..3127014 100644
--- a/misc/.idea/workspace.xml
+++ b/misc/.idea/workspace.xml
[IDE workspace metadata churn; the XML markup is not recoverable and carries no substantive change.]