From 17e45b959fc8de2f27598482d608fdbf9dcf24bf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rafa=C5=82=20Mikrut?= <41945903+qarmin@users.noreply.github.com>
Date: Wed, 19 Jan 2022 19:30:16 +0100
Subject: [PATCH] Latest fixes (#592)

---
 Cargo.lock                        | 20 ++++++++++----------
 czkawka_gui/src/saving_loading.rs |  2 ++
 instructions/Instruction.md       | 11 ++++-------
 3 files changed, 16 insertions(+), 17 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 172dd53..60b4098 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1039,9 +1039,9 @@ dependencies = [
 
 [[package]]
 name = "getrandom"
-version = "0.2.3"
+version = "0.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
+checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c"
 dependencies = [
  "cfg-if 1.0.0",
  "libc",
@@ -1522,9 +1522,9 @@ checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125"
 
 [[package]]
 name = "libloading"
-version = "0.7.2"
+version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "afe203d669ec979b7128619bae5a63b7b42e9203c1b29146079ee05e2f604b52"
+checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
 dependencies = [
  "cfg-if 1.0.0",
  "winapi",
@@ -2458,9 +2458,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.74"
+version = "1.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee2bb9cd061c5865d345bb02ca49fcef1391741b672b54a0bf7b679badec3142"
+checksum = "c059c05b48c5c0067d4b4b2b4f0732dd65feb52daf7e0ea09cd87e7dadc1af79"
 dependencies = [
  "itoa",
  "ryu",
@@ -2517,9 +2517,9 @@ dependencies = [
 
 [[package]]
 name = "smallvec"
-version = "1.7.0"
+version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309"
+checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
 
 [[package]]
 name = "stdweb"
@@ -2553,9 +2553,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
 
 [[package]]
 name = "structopt"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40b9788f4202aa75c240ecc9c15c65185e6a39ccdeb0fd5d008b98825464c87c"
+checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10"
 dependencies = [
  "clap",
  "lazy_static",
diff --git a/czkawka_gui/src/saving_loading.rs b/czkawka_gui/src/saving_loading.rs
index 8fda222..8f7e958 100644
--- a/czkawka_gui/src/saving_loading.rs
+++ b/czkawka_gui/src/saving_loading.rs
@@ -822,6 +822,7 @@ pub fn load_configuration(
 
         let index = main_notebook.combo_box_image_hash_size.active().unwrap() as usize;
         main_notebook.scale_similarity_similar_images.set_range(0_f64, SIMILAR_VALUES[index][5] as f64);
+        main_notebook.scale_similarity_similar_images.set_fill_level(SIMILAR_VALUES[index][5] as f64);
         main_notebook.scale_similarity_similar_images.set_value(similar_images_similarity as f64);
     } else {
 
@@ -923,6 +924,7 @@ pub fn reset_configuration(manual_clearing: bool, upper_notebook: &GuiUpperNoteb
 
     main_notebook.combo_box_image_hash_size.set_active(Some(0));
     main_notebook.scale_similarity_similar_images.set_range(0_f64, SIMILAR_VALUES[0][5] as f64); // DEFAULT FOR MAX of 8
+    main_notebook.scale_similarity_similar_images.set_fill_level(SIMILAR_VALUES[0][5] as f64);
     main_notebook.entry_big_files_number.set_text(DEFAULT_NUMBER_OF_BIGGEST_FILES);
     main_notebook.scale_similarity_similar_images.set_value(DEFAULT_SIMILAR_IMAGES_SIMILARITY as f64);
 
diff --git a/instructions/Instruction.md b/instructions/Instruction.md
index 70901bd..8ff6683 100644
--- a/instructions/Instruction.md
+++ b/instructions/Instruction.md
@@ -127,16 +127,13 @@ Duplicate Finder allows you to search for files and group them according to a pr
 - Checking the hash - After leaving files that have the same beginning in groups, you should now check the whole contents of the file to make sure they
   are identical.
 
-- **By hashmb** - Works the same way as via hash, only in the last phase it does not calculate the hash of the whole file but only of its first
-  megabyte. It is perfect for quick search of possible duplicate files.
-
 ### Empty Files
 Searching for empty files is easy and fast, because we only need to check the file metadata and its length.
 
 ### Empty Directories
 At the beginning, a special entry is created for each directory containing - the parent path (only if it is not a folder directly selected by the
 user) and a flag to indicate whether the given directory is empty (at the beginning each one is
-set to be potentionally empty).
+set to be potentially empty).
 
 First, user-defined folders are put into the pool of folders to be checked.
 
@@ -158,7 +155,7 @@ For each file inside the given path its size is read and then after sorting the
 ### Temporary Files
 Searching for temporary files only involves comparing their extensions with a previously prepared list.
 
-Currently files with these extensions are considered temporary files -
+Currently, files with these extensions are considered temporary files -
 ```
 ["#", "thumbs.db", ".bak", "~", ".tmp", ".temp", ".ds_store", ".crdownload", ".part", ".cache", ".dmp", ".download", ".partial"]
 ```
@@ -166,7 +163,7 @@ Currently files with these extensions are considered temporary files -
 This only removes the most basic temporary files, for more I suggest to use BleachBit.
 
 ### Invalid Symlinks
-To find invalid symlinks we must first find symlnks.
+To find invalid symlinks we must first find symlinks.
 After searching for them you should check at which element it points to and if it does not exist, add this symlinks into the list of invalid
 symlinks, pointing to a non-existent path.
 
@@ -234,7 +231,7 @@ Tool works similar as Similar Images.
 To work require `FFmpeg`, so it will show an error when it is not found in OS.
 Also only checks files which are longer than 30s.
 
-For now it is limiting to check video files with almost equal length.
+For now, it is limiting to check video files with almost equal length.
 
 At first, it collects video files by extension (`mp4`, `mpv`, `avi` etc.).
 Next each file is hashed. Implementation is hidden in library but looks that generate 10 images from this video and hash them with help of perceptual hash.
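
Note (not part of the patch): both hunks in czkawka_gui/src/saving_loading.rs move the scale's fill level together with its range maximum. Below is a minimal gtk-rs (GTK 3) sketch of that pattern; the standalone setup, the variable `max_similarity`, and the literal values are hypothetical stand-ins for `SIMILAR_VALUES[index][5]` and the real GUI wiring, not czkawka's actual code.

```
use gtk::prelude::*;

fn main() {
    gtk::init().expect("Failed to initialize GTK");

    // Hypothetical stand-in for SIMILAR_VALUES[index][5]: the maximum
    // similarity allowed for the currently selected image hash size.
    let max_similarity = 40.0_f64;

    let scale = gtk::Scale::with_range(gtk::Orientation::Horizontal, 0.0, max_similarity, 1.0);

    // Raise the range and the fill level together; when GtkRange's
    // restrict-to-fill-level behaviour is active, a stale (smaller) fill
    // level would otherwise keep the slider from reaching the new maximum.
    scale.set_range(0.0, max_similarity);
    scale.set_fill_level(max_similarity);
    scale.set_show_fill_level(true);

    // Finally restore the saved or default similarity value.
    scale.set_value(15.0);
}
```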
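
A similarly hedged sketch for the Invalid Symlinks description quoted above: find a symlink, read what it points to, and report it when the target no longer exists. The function name, the relative-path handling, and the example path are illustrative assumptions, not czkawka's implementation, and the sketch makes no attempt to detect symlink loops.

```
use std::fs;
use std::path::{Path, PathBuf};

// Returns the dangling target if `link` is a symlink whose target is missing.
fn broken_symlink_target(link: &Path) -> Option<PathBuf> {
    // read_link fails (and we return None) if `link` is not a symlink at all.
    let target = fs::read_link(link).ok()?;

    // Resolve a relative target against the directory containing the symlink.
    let resolved = if target.is_absolute() {
        target.clone()
    } else {
        link.parent().map(|p| p.join(&target)).unwrap_or_else(|| target.clone())
    };

    // exists() follows symlinks, so a missing resolved path means the link is invalid.
    if resolved.exists() {
        None
    } else {
        Some(target)
    }
}

fn main() {
    // "some_link" is a placeholder path used only for this example.
    if let Some(missing) = broken_symlink_target(Path::new("some_link")) {
        println!("invalid symlink: points to non-existent {:?}", missing);
    }
}
```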