diff --git a/.gitignore b/.gitignore index d7dab46..830c5ec 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,5 @@ flatpak/ /report ci_tester/target ci_tester/Cargo.lock -czkawka_slint_gui/Cargo.lock \ No newline at end of file +czkawka_slint_gui/Cargo.lock +czkawka_slint_gui/target \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index c0e0d1d..ea98583 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -27,9 +27,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] @@ -103,29 +103,12 @@ version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" -[[package]] -name = "arbitrary" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db55d72333851e17d572bec876e390cd3b11eb1ef53ae821dd9f3b653d2b4569" - [[package]] name = "arc-swap" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" -[[package]] -name = "arg_enum_proc_macro" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.38", -] - [[package]] name = "arrayref" version = "0.3.7" @@ -137,9 +120,6 @@ name = "arrayvec" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" -dependencies = [ - "serde", -] [[package]] name = "async-trait" @@ -167,31 +147,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" -[[package]] -name = "av1-grain" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f6ca6f0c18c02c2fbfc119df551b8aeb8a385f6d5980f1475ba0255f1e97f1e" -dependencies = [ - "anyhow", - "arrayvec", - "itertools", - "log", - "nom", - "num-rational", - "serde", - "v_frame", -] - -[[package]] -name = "avif-serialize" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "876c75a42f6364451a033496a14c44bffe41f5f4a8236f697391f11024e596d2" -dependencies = [ - "arrayvec", -] - [[package]] name = "base64" version = "0.21.4" @@ -231,12 +186,6 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" -[[package]] -name = "bitstream-io" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82704769cb85a22df2c54d6bdd6a158b7931d256cf3248a07d6ecbe9d58b31d7" - [[package]] name = "bk-tree" version = "0.5.0" @@ -284,15 +233,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "built" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b9c056b9ed43aee5e064b683aa1ec783e19c6acec7559e3ae931b7490472fbe" -dependencies = [ - "cargo-lock", -] - [[package]] name = "bumpalo" version = "3.14.0" @@ -357,18 +297,6 @@ dependencies = [ "system-deps", ] 
-[[package]] -name = "cargo-lock" -version = "8.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "031718ddb8f78aa5def78a09e90defe30151d1f6c672f937af4dd916429ed996" -dependencies = [ - "semver", - "serde", - "toml 0.5.11", - "url", -] - [[package]] name = "cbc" version = "0.1.2" @@ -384,7 +312,6 @@ version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ - "jobserver", "libc", ] @@ -758,9 +685,12 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" +dependencies = [ + "powerfmt", +] [[package]] name = "digest" @@ -945,9 +875,9 @@ checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" [[package]] name = "flate2" -version = "1.0.27" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "miniz_oxide", @@ -1035,9 +965,9 @@ checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" [[package]] name = "fun_time" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9af29f347b6ae4821c45565e1238231caf5b57a951bd011222752ba0f5a47eae" +checksum = "783fdd8402ab5955698d66b0ba307bb2d3b3bfb911a4dfe7bd6bbc7f140be6a5" dependencies = [ "fun_time_derive", "log", @@ -1045,11 +975,12 @@ dependencies = [ [[package]] name = "fun_time_derive" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e992455767376a16164ee4cc69bd799530c1c41c701bdd8c8a541ec6095c078e" +checksum = "a3d89b974360ce9cd00ef9dd26353bc6f1ec9ff12d95e741ad83b36385f3a736" dependencies = [ "darling", + "log", "proc-macro2", "quote", "syn 1.0.109", @@ -1647,8 +1578,6 @@ dependencies = [ "num-traits", "png", "qoi", - "ravif", - "rgb", "tiff", ] @@ -1685,12 +1614,6 @@ dependencies = [ "serde_yaml", ] -[[package]] -name = "imgref" -version = "1.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2cf49df1085dcfb171460e4592597b84abe50d900fb83efb6e41b20fefd6c2c" - [[package]] name = "indexmap" version = "1.9.3" @@ -1739,17 +1662,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "interpolate_name" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.38", -] - [[package]] name = "intl-memoizer" version = "0.5.1" @@ -1812,15 +1724,6 @@ version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" -[[package]] -name = "jobserver" -version = "0.1.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" -dependencies = [ - "libc", -] - [[package]] name = "jpeg-decoder" version = "0.3.0" @@ -1857,16 +1760,6 @@ version = "0.2.149" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" -[[package]] -name = "libfuzzer-sys" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcf184a4b6b274f82a5df6b357da6055d3e82272327bba281c28bbba6f1664ef" -dependencies = [ - "arbitrary", - "cc", -] - [[package]] name = "libheif-rs" version = "0.18.0" @@ -1976,15 +1869,6 @@ dependencies = [ "tracing-subscriber", ] -[[package]] -name = "loop9" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81a837f917de41d61ab531ba255d1913208d02325cab0d6a66a706e0dbaa699d" -dependencies = [ - "imgref", -] - [[package]] name = "malloc_buf" version = "0.0.6" @@ -2003,16 +1887,6 @@ dependencies = [ "regex-automata 0.1.10", ] -[[package]] -name = "maybe-rayon" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" -dependencies = [ - "cfg-if", - "rayon", -] - [[package]] name = "md5" version = "0.7.0" @@ -2050,12 +1924,6 @@ dependencies = [ "unicase", ] -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - [[package]] name = "miniz_oxide" version = "0.7.1" @@ -2075,37 +1943,6 @@ dependencies = [ "linked-hash-map", ] -[[package]] -name = "nasm-rs" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe4d98d0065f4b1daf164b3eafb11974c94662e5e2396cf03f32d0bb5c17da51" -dependencies = [ - "rayon", -] - -[[package]] -name = "new_debug_unreachable" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "noop_proc_macro" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" - [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -2116,17 +1953,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "num-bigint" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - [[package]] name = "num-complex" version = "0.3.1" @@ -2145,28 +1971,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-derive" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "num-derive" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e6a0fd4f737c707bd9086cc16c925f294943eb62eb71499e9fd4cf71f8b9f4e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.38", -] - [[package]] name = "num-integer" version = "0.1.45" @@ -2184,7 +1988,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" dependencies = [ "autocfg", - "num-bigint", "num-integer", "num-traits", ] @@ -2426,6 +2229,12 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -2493,12 +2302,6 @@ dependencies = [ "bytemuck", ] -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - [[package]] name = "quote" version = "1.0.33" @@ -2538,59 +2341,6 @@ dependencies = [ "getrandom", ] -[[package]] -name = "rav1e" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16c383692a5e7abd9f6d1eddb1a5e0269f859392387883361bb09e5555852ec1" -dependencies = [ - "arbitrary", - "arg_enum_proc_macro", - "arrayvec", - "av1-grain", - "bitstream-io", - "built", - "cc", - "cfg-if", - "interpolate_name", - "itertools", - "libc", - "libfuzzer-sys", - "log", - "maybe-rayon", - "nasm-rs", - "new_debug_unreachable", - "noop_proc_macro", - "num-derive 0.3.3", - "num-traits", - "once_cell", - "paste", - "rand", - "rand_chacha", - "rust_hawktracer", - "rustc_version", - "simd_helpers", - "system-deps", - "thiserror", - "v_frame", - "wasm-bindgen", -] - -[[package]] -name = "ravif" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "badc69028460108fa7e32d4aec2b0c980710d7a31a896864002c8c1fc61516ee" -dependencies = [ - "avif-serialize", - "imgref", - "loop9", - "quick-error", - "rav1e", - "rayon", - "rgb", -] - [[package]] name = "rawloader" version = "0.37.1" @@ -2666,14 +2416,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.9.6" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff" +checksum = "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.9", - "regex-syntax 0.7.5", + "regex-automata 0.4.1", + "regex-syntax 0.8.1", ] [[package]] @@ -2687,13 +2437,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.9" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" +checksum = "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.5", + "regex-syntax 0.8.1", ] [[package]] @@ -2704,18 +2454,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" - -[[package]] -name = "rgb" -version = "0.8.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20ec2d3e3fc7a92ced357df9cebd5a10b6fb2aa1ee797bf7e9ce2f17dffc8f59" -dependencies = [ - "bytemuck", -] +checksum = "56d84fdd47036b038fc80dd333d10b6aab10d5d31f4a366e20014def75328d33" [[package]] name = "rubato" @@ -2763,28 +2504,6 @@ dependencies = [ "walkdir", ] -[[package]] -name = "rust_hawktracer" -version = "0.7.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3480a29b927f66c6e06527be7f49ef4d291a01d694ec1fe85b0de71d6b02ac1" -dependencies = [ - "rust_hawktracer_normal_macro", - "rust_hawktracer_proc_macro", -] - -[[package]] -name = "rust_hawktracer_normal_macro" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a570059949e1dcdc6f35228fa389f54c2c84dfe0c94c05022baacd56eacd2e9" - -[[package]] -name = "rust_hawktracer_proc_macro" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb626abdbed5e93f031baae60d72032f56bc964e11ac2ff65f2ba3ed98d6d3e1" - [[package]] name = "rustc-hash" version = "1.1.0" @@ -2849,9 +2568,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.17" +version = "0.38.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f25469e9ae0f3d0047ca8b93fc56843f38e6774f0914a107ff8b41be8be8e0b7" +checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" dependencies = [ "bitflags 2.4.0", "errno", @@ -2911,27 +2630,24 @@ checksum = "1ef965a420fe14fdac7dd018862966a4c14094f900e1650bbc71ddd7d580c8af" [[package]] name = "semver" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad977052201c6de01a8ef2aa3378c4bd23217a056337d1d6da40468d267a4fb0" -dependencies = [ - "serde", -] +checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "serde" -version = "1.0.188" +version = "1.0.189" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" +checksum = "8e422a44e74ad4001bdc8eede9a4570ab52f71190e9c076d14369f38b9200537" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.188" +version = "1.0.189" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" +checksum = "1e48d1f918009ce3145511378cf68d613e3b3d9137d67272562080d68a2b32d5" dependencies = [ "proc-macro2", "quote", @@ -3007,15 +2723,6 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" -[[package]] -name = "simd_helpers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95890f873bec569a0362c235787f3aca6e1e887302ba4840839bcc6459c42da6" -dependencies = [ - "quote", -] - [[package]] name = "slab" version = "0.4.9" @@ -3389,14 +3096,15 @@ dependencies = [ [[package]] name = "time" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe" +checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" dependencies = [ "deranged", "itoa", "libc", "num_threads", + "powerfmt", "serde", "time-core", "time-macros", @@ -3497,11 +3205,10 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "ee2ef2af84856a50c1d430afce2fdded0a4ec7eda868db86409b4543df0797f9" dependencies = [ - "cfg-if", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -3509,9 +3216,9 @@ dependencies = [ 
 [[package]]
 name = "tracing-attributes"
-version = "0.1.26"
+version = "0.1.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3520,9 +3227,9 @@ dependencies = [

 [[package]]
 name = "tracing-core"
-version = "0.1.31"
+version = "0.1.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
+checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
 dependencies = [
  "once_cell",
  "valuable",
@@ -3685,19 +3392,6 @@ version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"

-[[package]]
-name = "v_frame"
-version = "0.3.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85db69f33d00031c1b07f7292e56317d5aa9475bdbd3d27ef18f3633438a697e"
-dependencies = [
- "cfg-if",
- "noop_proc_macro",
- "num-derive 0.4.0",
- "num-traits",
- "rust_hawktracer",
-]
-
 [[package]]
 name = "valuable"
 version = "0.1.0"
@@ -3983,9 +3677,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

 [[package]]
 name = "winnow"
-version = "0.5.16"
+version = "0.5.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "037711d82167854aff2018dfd193aa0fef5370f456732f0d5a0c59b0f1b4b907"
+checksum = "a3b801d0e0a6726477cc207f60162da452f3a95adb368399bef20a946e06f65c"
 dependencies = [
  "memchr",
 ]
diff --git a/Changelog.md b/Changelog.md
index 7ebc7de..563a76a 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -1,6 +1,6 @@
 ## Version 6.1.0 - ?
-- BREAKING CHANGE - Changed cache saving method, deduplicated, optimized and simplified procedure(all files needs to be hashed again) - [#1072](https://github.com/qarmin/czkawka/pull/1072)
-- Remove up to 170ms of delay after ending scan - [#1070](https://github.com/qarmin/czkawka/pull/1070)
+- BREAKING CHANGE - Changed cache saving method, deduplicated, optimized and simplified procedure (all files need to be hashed again) - [#1072](https://github.com/qarmin/czkawka/pull/1072), [#1086](https://github.com/qarmin/czkawka/pull/1086)
+- Remove up to 340ms of delay when waiting for results - [#1070](https://github.com/qarmin/czkawka/pull/1070)
 - Added logger with useful info when debugging app (level can be adjusted via e.g. `RUST_LOG=debug` env) - [#1072](https://github.com/qarmin/czkawka/pull/1072), [#1070](https://github.com/qarmin/czkawka/pull/1070)
 - Core code cleanup - [#1072](https://github.com/qarmin/czkawka/pull/1072), [#1070](https://github.com/qarmin/czkawka/pull/1070), [#1082](https://github.com/qarmin/czkawka/pull/1082)
 - Updated list of bad extensions and support for finding invalid jar files - [#1070](https://github.com/qarmin/czkawka/pull/1070)
@@ -10,6 +10,10 @@
 - Added exporting results into json file format - [#1083](https://github.com/qarmin/czkawka/pull/1083)
 - Added new test/regression suite for CI - [#1083](https://github.com/qarmin/czkawka/pull/1083)
 - Added ability to use relative paths - [#1083](https://github.com/qarmin/czkawka/pull/1083)
+- Allowed removing similar images/videos/music from CLI - [#1087](https://github.com/qarmin/czkawka/pull/1087)
+- Added info about saving/loading items to cache in duplicate and music mode - [#1091](https://github.com/qarmin/czkawka/pull/1091)
+- Fixed number of files to check in duplicate mode - [#1091](https://github.com/qarmin/czkawka/pull/1091)
+- Added support for qoi image format - [e92a](https://github.com/qarmin/czkawka/commit/e92a8a65de9bd1250be482dbce06959125554849)
 - Fixed stability problem, that could remove invalid file in CLI - [#1083](https://github.com/qarmin/czkawka/pull/1083)
 - Fixed problem with invalid cache loading - [#0000]
 - Fix Windows gui crashes by using gtk 4.6 instead 4.8 or 4.10 - [#992](https://github.com/qarmin/czkawka/pull/992)
diff --git a/czkawka_core/Cargo.toml b/czkawka_core/Cargo.toml
index 47cdd3c..f61bcb6 100644
--- a/czkawka_core/Cargo.toml
+++ b/czkawka_core/Cargo.toml
@@ -21,7 +21,7 @@ directories-next = "2.0"
 # Needed by similar images
 image_hasher = "1.2"
 bk-tree = "0.5"
-image = { version = "0.24", default-features = false, features = ["gif", "jpeg", "ico", "png", "pnm", "tga", "tiff", "webp", "bmp", "hdr", "dxt", "dds", "farbfeld", "openexr", "qoi", "avif"] }
+image = { version = "0.24", default-features = false, features = ["gif", "jpeg", "ico", "png", "pnm", "tga", "tiff", "webp", "bmp", "hdr", "dxt", "dds", "farbfeld", "openexr", "qoi"] }
 hamming = "0.1"

 # Needed by same music
diff --git a/czkawka_core/src/bad_extensions.rs b/czkawka_core/src/bad_extensions.rs
index 49f9441..c2bc3b6 100644
--- a/czkawka_core/src/bad_extensions.rs
+++ b/czkawka_core/src/bad_extensions.rs
@@ -13,7 +13,7 @@ use mime_guess::get_mime_extensions;
 use rayon::prelude::*;
 use serde::Serialize;

-use crate::common::{prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads};
+use crate::common::{check_if_stop_received, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads};
 use crate::common_dir_traversal::{CheckingMethod, DirTraversalBuilder, DirTraversalResult, FileEntry, ProgressData, ToolType};
 use crate::common_tool::{CommonData, CommonToolData};
 use crate::common_traits::*;
@@ -194,7 +194,7 @@ impl BadExtensions {
         }
     }

-    #[fun_time(message = "find_bad_extensions_files")]
+    #[fun_time(message = "find_bad_extensions_files", level = "info")]
     pub fn find_bad_extensions_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender<ProgressData>>) {
         self.optimize_dirs_before_start();
         if !self.check_files(stop_receiver, progress_sender) {
@@ -208,7 +208,7 @@ impl BadExtensions {
         self.debug_print();
     }

-    #[fun_time(message = "check_files")]
+    #[fun_time(message = "check_files", level = "debug")]
     fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender:
Option<&UnboundedSender>) -> bool { let result = DirTraversalBuilder::new() .root_dirs(self.common_data.directories.included_directories.clone()) @@ -238,7 +238,7 @@ impl BadExtensions { } } - #[fun_time(message = "look_for_bad_extensions_files")] + #[fun_time(message = "look_for_bad_extensions_files", level = "debug")] fn look_for_bad_extensions_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let (progress_thread_handle, progress_thread_run, atomic_counter, check_was_stopped) = prepare_thread_handler_common(progress_sender, 1, 1, self.files_to_check.len(), CheckingMethod::None, self.get_cd().tool_type); @@ -270,7 +270,7 @@ impl BadExtensions { true } - #[fun_time(message = "verify_extensions")] + #[fun_time(message = "verify_extensions", level = "debug")] fn verify_extensions( &self, files_to_check: Vec, @@ -283,7 +283,7 @@ impl BadExtensions { .into_par_iter() .map(|file_entry| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } diff --git a/czkawka_core/src/big_file.rs b/czkawka_core/src/big_file.rs index 62c6107..58ac2fa 100644 --- a/czkawka_core/src/big_file.rs +++ b/czkawka_core/src/big_file.rs @@ -14,7 +14,7 @@ use log::debug; use rayon::prelude::*; use serde::{Deserialize, Serialize}; -use crate::common::{check_folder_children, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, split_path}; +use crate::common::{check_folder_children, check_if_stop_received, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, split_path}; use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType}; use crate::common_tool::{CommonData, CommonToolData, DeleteMethod}; use crate::common_traits::{DebugPrint, PrintResults}; @@ -56,7 +56,7 @@ impl BigFile { } } - #[fun_time(message = "find_big_files")] + #[fun_time(message = "find_big_files", level = "info")] pub fn find_big_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); if !self.look_for_big_files(stop_receiver, progress_sender) { @@ -67,7 +67,7 @@ impl BigFile { self.debug_print(); } - #[fun_time(message = "look_for_big_files")] + #[fun_time(message = "look_for_big_files", level = "debug")] fn look_for_big_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let mut folders_to_check: Vec = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector let mut old_map: BTreeMap> = Default::default(); @@ -82,7 +82,7 @@ impl BigFile { debug!("Starting to search for big files"); while !folders_to_check.is_empty() { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -181,7 +181,7 @@ impl BigFile { fe_result.push((fe.size, fe)); } - #[fun_time(message = "extract_n_biggest_files")] + #[fun_time(message = "extract_n_biggest_files", level = "debug")] pub fn extract_n_biggest_files(&mut self, old_map: BTreeMap>) { let iter: Box>; if self.search_mode == 
SearchMode::SmallestFiles { diff --git a/czkawka_core/src/broken_files.rs b/czkawka_core/src/broken_files.rs index 63ec060..31f5259 100644 --- a/czkawka_core/src/broken_files.rs +++ b/czkawka_core/src/broken_files.rs @@ -19,7 +19,7 @@ use rayon::prelude::*; use serde::{Deserialize, Serialize}; use crate::common::{ - check_folder_children, create_crash_message, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, AUDIO_FILES_EXTENSIONS, + check_folder_children, check_if_stop_received, create_crash_message, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, AUDIO_FILES_EXTENSIONS, IMAGE_RS_BROKEN_FILES_EXTENSIONS, PDF_FILES_EXTENSIONS, ZIP_FILES_EXTENSIONS, }; use crate::common_cache::{get_broken_files_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; @@ -92,7 +92,7 @@ impl BrokenFiles { } } - #[fun_time(message = "find_broken_files")] + #[fun_time(message = "find_broken_files", level = "info")] pub fn find_broken_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); if !self.check_files(stop_receiver, progress_sender) { @@ -107,7 +107,7 @@ impl BrokenFiles { self.debug_print(); } - #[fun_time(message = "check_files")] + #[fun_time(message = "check_files", level = "debug")] fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let mut folders_to_check: Vec = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector @@ -121,7 +121,7 @@ impl BrokenFiles { debug!("check_files - starting to collect files"); while !folders_to_check.is_empty() { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -319,7 +319,7 @@ impl BrokenFiles { } } - #[fun_time(message = "load_cache")] + #[fun_time(message = "load_cache", level = "debug")] fn load_cache(&mut self) -> (BTreeMap, BTreeMap, BTreeMap) { let loaded_hash_map; @@ -346,7 +346,7 @@ impl BrokenFiles { (loaded_hash_map, records_already_cached, non_cached_files_to_check) } - #[fun_time(message = "look_for_broken_files")] + #[fun_time(message = "look_for_broken_files", level = "debug")] fn look_for_broken_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.load_cache(); @@ -358,7 +358,7 @@ impl BrokenFiles { .into_par_iter() .map(|(_, file_entry)| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { return None; } @@ -396,7 +396,7 @@ impl BrokenFiles { true } - #[fun_time(message = "save_to_cache")] + #[fun_time(message = "save_to_cache", level = "debug")] fn save_to_cache(&mut self, vec_file_entry: &[FileEntry], loaded_hash_map: BTreeMap) { if self.common_data.use_cache { // Must save all results to file, old loaded from file with all currently counted results @@ -414,7 +414,7 @@ impl BrokenFiles { } } - #[fun_time(message = "delete_files")] + #[fun_time(message = "delete_files", level = "debug")] fn delete_files(&mut self) { match self.common_data.delete_method { DeleteMethod::Delete => { diff --git 
a/czkawka_core/src/common.rs b/czkawka_core/src/common.rs index b844e42..7a715fe 100644 --- a/czkawka_core/src/common.rs +++ b/czkawka_core/src/common.rs @@ -84,12 +84,10 @@ pub const RAW_IMAGE_EXTENSIONS: &[&str] = &[ ".cr2", ".ari", ]; pub const IMAGE_RS_EXTENSIONS: &[&str] = &[ - ".jpg", ".jpeg", ".png", ".bmp", ".tiff", ".tif", ".tga", ".ff", ".jif", ".jfi", ".webp", ".gif", ".ico", ".exr", ".qoi", ".avif", + ".jpg", ".jpeg", ".png", ".bmp", ".tiff", ".tif", ".tga", ".ff", ".jif", ".jfi", ".webp", ".gif", ".ico", ".exr", ".qoi", ]; -pub const IMAGE_RS_SIMILAR_IMAGES_EXTENSIONS: &[&str] = &[ - ".jpg", ".jpeg", ".png", ".tiff", ".tif", ".tga", ".ff", ".jif", ".jfi", ".bmp", ".webp", ".exr", ".qoi", ".avif", -]; +pub const IMAGE_RS_SIMILAR_IMAGES_EXTENSIONS: &[&str] = &[".jpg", ".jpeg", ".png", ".tiff", ".tif", ".tga", ".ff", ".jif", ".jfi", ".bmp", ".webp", ".exr", ".qoi"]; pub const IMAGE_RS_BROKEN_FILES_EXTENSIONS: &[&str] = &[ ".jpg", ".jpeg", ".png", ".tiff", ".tif", ".tga", ".ff", ".jif", ".jfi", ".gif", ".bmp", ".ico", ".jfif", ".jpe", ".pnz", ".dib", ".webp", ".exr", @@ -505,7 +503,17 @@ pub fn prepare_thread_handler_common( (progress_thread_sender, progress_thread_run, atomic_counter, check_was_stopped) } -#[fun_time(message = "send_info_and_wait_for_ending_all_threads")] +#[inline] +pub fn check_if_stop_received(stop_receiver: Option<&crossbeam_channel::Receiver<()>>) -> bool { + if let Some(stop_receiver) = stop_receiver { + if stop_receiver.try_recv().is_ok() { + return true; + } + } + false +} + +#[fun_time(message = "send_info_and_wait_for_ending_all_threads", level = "debug")] pub fn send_info_and_wait_for_ending_all_threads(progress_thread_run: &Arc, progress_thread_handle: JoinHandle<()>) { progress_thread_run.store(false, Ordering::Relaxed); progress_thread_handle.join().unwrap(); diff --git a/czkawka_core/src/common_cache.rs b/czkawka_core/src/common_cache.rs index 6cce94d..a5f73fb 100644 --- a/czkawka_core/src/common_cache.rs +++ b/czkawka_core/src/common_cache.rs @@ -41,7 +41,7 @@ pub fn get_duplicate_cache_file(type_of_hash: &HashType, is_prehash: bool) -> St format!("cache_duplicates_{type_of_hash:?}{prehash_str}_61.bin") } -#[fun_time(message = "save_cache_to_file_generalized")] +#[fun_time(message = "save_cache_to_file_generalized", level = "debug")] pub fn save_cache_to_file_generalized(cache_file_name: &str, hashmap: &BTreeMap, save_also_as_json: bool, minimum_file_size: u64) -> Messages where T: Serialize + ResultEntry + Sized + Send + Sync, @@ -78,13 +78,14 @@ where } text_messages.messages.push(format!("Properly saved to file {} cache entries.", hashmap.len())); + debug!("Properly saved to file {} cache entries.", hashmap.len()); } else { debug!("Failed to save cache to file {cache_file_name} because not exists"); } text_messages } -#[fun_time(message = "load_cache_from_file_generalized_by_path")] +#[fun_time(message = "load_cache_from_file_generalized_by_path", level = "debug")] pub fn load_cache_from_file_generalized_by_path(cache_file_name: &str, delete_outdated_cache: bool, used_files: &BTreeMap) -> (Messages, Option>) where for<'a> T: Deserialize<'a> + ResultEntry + Sized + Send + Sync + Clone, @@ -104,7 +105,7 @@ where (text_messages, Some(map_loaded_entries)) } -#[fun_time(message = "load_cache_from_file_generalized_by_size")] +#[fun_time(message = "load_cache_from_file_generalized_by_size", level = "debug")] pub fn load_cache_from_file_generalized_by_size( cache_file_name: &str, delete_outdated_cache: bool, @@ -135,7 +136,7 @@ where (text_messages, 
Some(map_loaded_entries)) } -#[fun_time(message = "load_cache_from_file_generalized_by_path_from_size")] +#[fun_time(message = "load_cache_from_file_generalized_by_path_from_size", level = "debug")] pub fn load_cache_from_file_generalized_by_path_from_size( cache_file_name: &str, delete_outdated_cache: bool, @@ -166,7 +167,7 @@ where (text_messages, Some(map_loaded_entries)) } -#[fun_time(message = "load_cache_from_file_generalized")] +#[fun_time(message = "load_cache_from_file_generalized", level = "debug")] fn load_cache_from_file_generalized(cache_file_name: &str, delete_outdated_cache: bool, used_files: &BTreeMap) -> (Messages, Option>) where for<'a> T: Deserialize<'a> + ResultEntry + Sized + Send + Sync + Clone, diff --git a/czkawka_core/src/common_dir_traversal.rs b/czkawka_core/src/common_dir_traversal.rs index 8c052db..40db1b1 100644 --- a/czkawka_core/src/common_dir_traversal.rs +++ b/czkawka_core/src/common_dir_traversal.rs @@ -12,7 +12,7 @@ use log::debug; use rayon::prelude::*; use serde::{Deserialize, Serialize}; -use crate::common::{prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads}; +use crate::common::{check_if_stop_received, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads}; use crate::common_directory::Directories; use crate::common_extensions::Extensions; use crate::common_items::ExcludedItems; @@ -340,7 +340,7 @@ where F: Fn(&FileEntry) -> T, T: Ord + PartialOrd, { - #[fun_time(message = "run(collecting files/dirs)")] + #[fun_time(message = "run(collecting files/dirs)", level = "debug")] pub fn run(self) -> DirTraversalResult { let mut all_warnings = vec![]; let mut grouped_file_entries: BTreeMap> = BTreeMap::new(); @@ -379,7 +379,7 @@ where } = self; while !folders_to_check.is_empty() { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return DirTraversalResult::Stopped; } diff --git a/czkawka_core/src/common_traits.rs b/czkawka_core/src/common_traits.rs index 1397ced..13a3da5 100644 --- a/czkawka_core/src/common_traits.rs +++ b/czkawka_core/src/common_traits.rs @@ -11,7 +11,7 @@ pub trait DebugPrint { pub trait PrintResults { fn write_results(&self, writer: &mut T) -> std::io::Result<()>; - #[fun_time(message = "print_results_to_output")] + #[fun_time(message = "print_results_to_output", level = "debug")] fn print_results_to_output(&self) { let stdout = std::io::stdout(); let mut handle = stdout.lock(); @@ -19,7 +19,7 @@ pub trait PrintResults { handle.flush().unwrap(); } - #[fun_time(message = "print_results_to_file")] + #[fun_time(message = "print_results_to_file", level = "debug")] fn print_results_to_file(&self, file_name: &str) -> std::io::Result<()> { let file_name: String = match file_name { "" => "results.txt".to_string(), @@ -43,7 +43,7 @@ pub trait PrintResults { } } - #[fun_time(message = "save_results_to_file_as_json_pretty")] + #[fun_time(message = "save_results_to_file_as_json_pretty", level = "debug")] fn save_results_to_file_as_json_pretty(&self, file_name: &str, item_to_serialize: &T) -> std::io::Result<()> { let file_handler = File::create(file_name)?; let mut writer = BufWriter::new(file_handler); @@ -51,7 +51,7 @@ pub trait PrintResults { Ok(()) } - #[fun_time(message = "save_results_to_file_as_json_compact")] + #[fun_time(message = "save_results_to_file_as_json_compact", level = "debug")] fn save_results_to_file_as_json_compact(&self, 
file_name: &str, item_to_serialize: &T) -> std::io::Result<()> { let file_handler = File::create(file_name)?; let mut writer = BufWriter::new(file_handler); diff --git a/czkawka_core/src/duplicate.rs b/czkawka_core/src/duplicate.rs index 5b1d4dd..ba38137 100644 --- a/czkawka_core/src/duplicate.rs +++ b/czkawka_core/src/duplicate.rs @@ -18,7 +18,7 @@ use log::debug; use rayon::prelude::*; use xxhash_rust::xxh3::Xxh3; -use crate::common::{delete_files_custom, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads}; +use crate::common::{check_if_stop_received, delete_files_custom, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads}; use crate::common_cache::{get_duplicate_cache_file, load_cache_from_file_generalized_by_size, save_cache_to_file_generalized}; use crate::common_dir_traversal::{CheckingMethod, DirTraversalBuilder, DirTraversalResult, FileEntry, ProgressData, ToolType}; use crate::common_tool::{CommonData, CommonToolData, DeleteMethod}; @@ -34,6 +34,7 @@ pub enum HashType { Xxh3, } +const MAX_STAGE: u8 = 5; impl HashType { fn hasher(self: &HashType) -> Box { match self { @@ -109,7 +110,7 @@ impl DuplicateFinder { } } - #[fun_time(message = "find_duplicates")] + #[fun_time(message = "find_duplicates", level = "info")] pub fn find_duplicates(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); self.common_data.use_reference_folders = !self.common_data.directories.reference_directories.is_empty(); @@ -149,7 +150,7 @@ impl DuplicateFinder { self.debug_print(); } - #[fun_time(message = "check_files_name")] + #[fun_time(message = "check_files_name", level = "debug")] fn check_files_name(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let group_by_func = if self.case_sensitive_name_comparison { |fe: &FileEntry| fe.path.file_name().unwrap().to_string_lossy().to_string() @@ -224,7 +225,7 @@ impl DuplicateFinder { } } - #[fun_time(message = "check_files_size_name")] + #[fun_time(message = "check_files_size_name", level = "debug")] fn check_files_size_name(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let group_by_func = if self.case_sensitive_name_comparison { |fe: &FileEntry| (fe.size, fe.path.file_name().unwrap().to_string_lossy().to_string()) @@ -301,11 +302,11 @@ impl DuplicateFinder { } } - #[fun_time(message = "check_files_size")] + #[fun_time(message = "check_files_size", level = "debug")] fn check_files_size(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let max_stage = match self.check_method { CheckingMethod::Size => 0, - CheckingMethod::Hash => 2, + CheckingMethod::Hash => MAX_STAGE, _ => panic!(), }; let result = DirTraversalBuilder::new() @@ -376,7 +377,7 @@ impl DuplicateFinder { } } - #[fun_time(message = "filter_reference_folders_by_size")] + #[fun_time(message = "filter_reference_folders_by_size", level = "debug")] fn filter_reference_folders_by_size(&mut self) { if self.common_data.use_reference_folders && self.check_method == CheckingMethod::Size { let vec = mem::take(&mut self.files_with_identical_size) @@ -399,7 +400,7 @@ impl DuplicateFinder { } } - #[fun_time(message = "prehash_load_cache_at_start")] + #[fun_time(message = "prehash_load_cache_at_start", level = "debug")] fn prehash_load_cache_at_start(&mut self) -> (BTreeMap>, BTreeMap>, BTreeMap>) { // Cache algorithm // - Load data from 
cache @@ -452,7 +453,7 @@ impl DuplicateFinder { (loaded_hash_map, records_already_cached, non_cached_files_to_check) } - #[fun_time(message = "prehash_save_cache_at_exit")] + #[fun_time(message = "prehash_save_cache_at_exit", level = "debug")] fn prehash_save_cache_at_exit(&mut self, loaded_hash_map: BTreeMap>, pre_hash_results: &Vec<(u64, BTreeMap>, Vec)>) { if self.use_prehash_cache { // All results = records already cached + computed results @@ -486,7 +487,7 @@ impl DuplicateFinder { } } - #[fun_time(message = "prehashing")] + #[fun_time(message = "prehashing", level = "debug")] fn prehashing( &mut self, stop_receiver: Option<&Receiver<()>>, @@ -494,17 +495,24 @@ impl DuplicateFinder { pre_checked_map: &mut BTreeMap>, ) -> Option<()> { let check_type = self.hash_type; + let (progress_thread_handle, progress_thread_run, _atomic_counter, _check_was_stopped) = + prepare_thread_handler_common(progress_sender, 1, MAX_STAGE, 0, self.check_method, self.common_data.tool_type); + + let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.prehash_load_cache_at_start(); + + send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + if check_if_stop_received(stop_receiver) { + return None; + } let (progress_thread_handle, progress_thread_run, atomic_counter, check_was_stopped) = prepare_thread_handler_common( progress_sender, - 1, 2, - self.files_with_identical_size.values().map(Vec::len).sum(), + MAX_STAGE, + non_cached_files_to_check.values().map(Vec::len).sum(), self.check_method, self.common_data.tool_type, ); - let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.prehash_load_cache_at_start(); - debug!("Starting calculating prehash"); #[allow(clippy::type_complexity)] let pre_hash_results: Vec<(u64, BTreeMap>, Vec)> = non_cached_files_to_check @@ -515,7 +523,7 @@ impl DuplicateFinder { let mut buffer = [0u8; 1024 * 2]; atomic_counter.fetch_add(vec_file_entry.len(), Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -536,10 +544,9 @@ impl DuplicateFinder { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); - // Check if user aborted search(only from GUI) - if check_was_stopped.load(Ordering::Relaxed) { - return None; - } + // Saving into cache + let (progress_thread_handle, progress_thread_run, _atomic_counter, _check_was_stopped) = + prepare_thread_handler_common(progress_sender, 3, MAX_STAGE, 0, self.check_method, self.common_data.tool_type); // Add data from cache for (size, vec_file_entry) in &records_already_cached { @@ -560,10 +567,15 @@ impl DuplicateFinder { self.prehash_save_cache_at_exit(loaded_hash_map, &pre_hash_results); + send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + if check_was_stopped.load(Ordering::Relaxed) || check_if_stop_received(stop_receiver) { + return None; + } + Some(()) } - #[fun_time(message = "full_hashing_load_cache_at_start")] + #[fun_time(message = "full_hashing_load_cache_at_start", level = "debug")] fn full_hashing_load_cache_at_start( &mut self, mut pre_checked_map: BTreeMap>, @@ -614,7 +626,7 @@ impl DuplicateFinder { (loaded_hash_map, records_already_cached, non_cached_files_to_check) } - #[fun_time(message = "full_hashing_save_cache_at_exit")] + #[fun_time(message = "full_hashing_save_cache_at_exit", level = "debug")] fn 
full_hashing_save_cache_at_exit( &mut self, records_already_cached: BTreeMap>, @@ -666,28 +678,33 @@ impl DuplicateFinder { self.get_text_messages_mut().extend_with_another_messages(messages); } - #[fun_time(message = "full_hashing")] + #[fun_time(message = "full_hashing", level = "debug")] fn full_hashing( &mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>, pre_checked_map: BTreeMap>, ) -> Option<()> { - let check_type = self.hash_type; + let (progress_thread_handle, progress_thread_run, _atomic_counter, _check_was_stopped) = + prepare_thread_handler_common(progress_sender, 4, MAX_STAGE, 0, self.check_method, self.common_data.tool_type); + + let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.full_hashing_load_cache_at_start(pre_checked_map); + + send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + if check_if_stop_received(stop_receiver) { + return None; + } let (progress_thread_handle, progress_thread_run, atomic_counter, check_was_stopped) = prepare_thread_handler_common( progress_sender, - 2, - 2, - pre_checked_map.values().map(Vec::len).sum(), + 5, + MAX_STAGE, + non_cached_files_to_check.values().map(Vec::len).sum(), self.check_method, self.common_data.tool_type, ); - ///////////////////////////////////////////////////////////////////////////// HASHING START - - let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.full_hashing_load_cache_at_start(pre_checked_map); - + let check_type = self.hash_type; debug!("Starting full hashing of {} files", non_cached_files_to_check.values().map(Vec::len).sum::()); let mut full_hash_results: Vec<(u64, BTreeMap>, Vec)> = non_cached_files_to_check .into_par_iter() @@ -698,7 +715,7 @@ impl DuplicateFinder { atomic_counter.fetch_add(vec_file_entry.len(), Ordering::Relaxed); for mut file_entry in vec_file_entry { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -717,15 +734,15 @@ impl DuplicateFinder { .collect(); debug!("Finished full hashing"); + // Even if clicked stop, save items to cache and show results + send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + let (progress_thread_handle, progress_thread_run, _atomic_counter, _check_was_stopped) = + prepare_thread_handler_common(progress_sender, 6, MAX_STAGE, 0, self.check_method, self.common_data.tool_type); + self.full_hashing_save_cache_at_exit(records_already_cached, &mut full_hash_results, loaded_hash_map); send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); - // Break if stop was clicked after saving to cache - if check_was_stopped.load(Ordering::Relaxed) { - return None; - } - for (size, hash_map, mut errors) in full_hash_results { self.common_data.text_messages.warnings.append(&mut errors); for (_hash, vec_file_entry) in hash_map { @@ -738,7 +755,7 @@ impl DuplicateFinder { Some(()) } - #[fun_time(message = "hash_reference_folders")] + #[fun_time(message = "hash_reference_folders", level = "debug")] fn hash_reference_folders(&mut self) { // Reference - only use in size, because later hash will be counted differently if self.common_data.use_reference_folders { @@ -787,7 +804,7 @@ impl DuplicateFinder { } } - #[fun_time(message = "check_files_hash")] + #[fun_time(message = "check_files_hash", level = "debug")] fn check_files_hash(&mut self, 
stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { assert_eq!(self.check_method, CheckingMethod::Hash); @@ -810,7 +827,7 @@ impl DuplicateFinder { true } - #[fun_time(message = "delete_files")] + #[fun_time(message = "delete_files", level = "debug")] fn delete_files(&mut self) { if self.common_data.delete_method == DeleteMethod::None { return; diff --git a/czkawka_core/src/empty_files.rs b/czkawka_core/src/empty_files.rs index df9587c..a4acedf 100644 --- a/czkawka_core/src/empty_files.rs +++ b/czkawka_core/src/empty_files.rs @@ -40,7 +40,7 @@ impl EmptyFiles { } } - #[fun_time(message = "find_empty_files")] + #[fun_time(message = "find_empty_files", level = "info")] pub fn find_empty_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); if !self.check_files(stop_receiver, progress_sender) { @@ -51,7 +51,7 @@ impl EmptyFiles { self.debug_print(); } - #[fun_time(message = "check_files")] + #[fun_time(message = "check_files", level = "debug")] fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let result = DirTraversalBuilder::new() .root_dirs(self.common_data.directories.included_directories.clone()) @@ -84,7 +84,7 @@ impl EmptyFiles { } } - #[fun_time(message = "delete_files")] + #[fun_time(message = "delete_files", level = "debug")] fn delete_files(&mut self) { match self.common_data.delete_method { DeleteMethod::Delete => { diff --git a/czkawka_core/src/empty_folder.rs b/czkawka_core/src/empty_folder.rs index 2a814db..ed5fa99 100644 --- a/czkawka_core/src/empty_folder.rs +++ b/czkawka_core/src/empty_folder.rs @@ -41,7 +41,7 @@ impl EmptyFolder { &self.information } - #[fun_time(message = "find_empty_folders")] + #[fun_time(message = "find_empty_folders", level = "info")] pub fn find_empty_folders(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); if !self.check_for_empty_folders(stop_receiver, progress_sender) { @@ -73,7 +73,7 @@ impl EmptyFolder { self.information.number_of_empty_folders = self.empty_folder_list.len(); } - #[fun_time(message = "check_for_empty_folders")] + #[fun_time(message = "check_for_empty_folders", level = "debug")] fn check_for_empty_folders(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let result = DirTraversalBuilder::new() .root_dirs(self.common_data.directories.included_directories.clone()) @@ -106,7 +106,7 @@ impl EmptyFolder { } } - // #[fun_time(message = "delete_files")] + #[fun_time(message = "delete_files", level = "debug")] fn delete_files(&mut self) { if self.get_delete_method() == DeleteMethod::None { return; diff --git a/czkawka_core/src/invalid_symlinks.rs b/czkawka_core/src/invalid_symlinks.rs index a51708f..fd57ae6 100644 --- a/czkawka_core/src/invalid_symlinks.rs +++ b/czkawka_core/src/invalid_symlinks.rs @@ -30,7 +30,7 @@ impl InvalidSymlinks { } } - #[fun_time(message = "find_invalid_links")] + #[fun_time(message = "find_invalid_links", level = "info")] pub fn find_invalid_links(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); if !self.check_files(stop_receiver, progress_sender) { @@ -41,7 +41,7 @@ impl InvalidSymlinks { self.debug_print(); } - #[fun_time(message = "check_files")] + #[fun_time(message = "check_files", level = "debug")] fn 
check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let result = DirTraversalBuilder::new() .root_dirs(self.common_data.directories.included_directories.clone()) @@ -69,7 +69,7 @@ impl InvalidSymlinks { } } - #[fun_time(message = "delete_files")] + #[fun_time(message = "delete_files", level = "debug")] fn delete_files(&mut self) { match self.common_data.delete_method { DeleteMethod::Delete => { diff --git a/czkawka_core/src/same_music.rs b/czkawka_core/src/same_music.rs index ceb9f19..84e339d 100644 --- a/czkawka_core/src/same_music.rs +++ b/czkawka_core/src/same_music.rs @@ -26,7 +26,8 @@ use symphonia::core::meta::MetadataOptions; use symphonia::core::probe::Hint; use crate::common::{ - create_crash_message, delete_files_custom, filter_reference_folders_generic, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, AUDIO_FILES_EXTENSIONS, + check_if_stop_received, create_crash_message, delete_files_custom, filter_reference_folders_generic, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, + AUDIO_FILES_EXTENSIONS, }; use crate::common_cache::{get_similar_music_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; use crate::common_dir_traversal::{CheckingMethod, DirTraversalBuilder, DirTraversalResult, FileEntry, ProgressData, ToolType}; @@ -63,6 +64,9 @@ pub struct MusicEntry { pub bitrate: u32, } +const MAX_STAGE_TAGS: u8 = 4; +const MAX_STAGE_CONTENT: u8 = 5; + impl ResultEntry for MusicEntry { fn get_path(&self) -> &Path { &self.path @@ -132,7 +136,7 @@ impl SameMusic { } } - #[fun_time(message = "print_results")] + #[fun_time(message = "find_same_music", level = "info")] pub fn find_same_music(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); self.common_data.use_reference_folders = !self.common_data.directories.reference_directories.is_empty(); @@ -171,7 +175,7 @@ impl SameMusic { self.debug_print(); } - #[fun_time(message = "check_files")] + #[fun_time(message = "check_files", level = "debug")] fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { if !self.common_data.allowed_extensions.using_custom_extensions() { self.common_data.allowed_extensions.extend_allowed_extensions(AUDIO_FILES_EXTENSIONS); @@ -182,6 +186,12 @@ impl SameMusic { } } + let max_stage = match self.check_type { + CheckingMethod::AudioTags => MAX_STAGE_TAGS, + CheckingMethod::AudioContent => MAX_STAGE_CONTENT, + _ => panic!(), + }; + let result = DirTraversalBuilder::new() .root_dirs(self.common_data.directories.included_directories.clone()) .group_by(|_fe| ()) @@ -193,7 +203,7 @@ impl SameMusic { .allowed_extensions(self.common_data.allowed_extensions.clone()) .excluded_items(self.common_data.excluded_items.clone()) .recursive_search(self.common_data.recursive_search) - .max_stage(2) + .max_stage(max_stage) .build() .run(); @@ -215,7 +225,7 @@ impl SameMusic { } } - #[fun_time(message = "load_cache")] + #[fun_time(message = "load_cache", level = "debug")] fn load_cache(&mut self, checking_tags: bool) -> (BTreeMap, BTreeMap, BTreeMap) { let loaded_hash_map; @@ -250,7 +260,7 @@ impl SameMusic { (loaded_hash_map, records_already_cached, non_cached_files_to_check) } - #[fun_time(message = "save_cache")] + #[fun_time(message = "save_cache", level = "debug")] fn save_cache(&mut self, vec_file_entry: Vec, loaded_hash_map: BTreeMap, 
checking_tags: bool) { if !self.common_data.use_cache { return; @@ -266,12 +276,26 @@ impl SameMusic { self.get_text_messages_mut().extend_with_another_messages(messages); } - #[fun_time(message = "calculate_fingerprint")] + #[fun_time(message = "calculate_fingerprint", level = "debug")] fn calculate_fingerprint(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { + let (progress_thread_handle, progress_thread_run, _atomic_counter, _check_was_stopped) = + prepare_thread_handler_common(progress_sender, 1, MAX_STAGE_CONTENT, 0, self.check_type, self.common_data.tool_type); + let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.load_cache(false); - let (progress_thread_handle, progress_thread_run, atomic_counter, check_was_stopped) = - prepare_thread_handler_common(progress_sender, 1, 3, non_cached_files_to_check.len(), self.check_type, self.common_data.tool_type); + send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + if check_if_stop_received(stop_receiver) { + return false; + } + + let (progress_thread_handle, progress_thread_run, atomic_counter, check_was_stopped) = prepare_thread_handler_common( + progress_sender, + 2, + MAX_STAGE_CONTENT, + non_cached_files_to_check.len(), + self.check_type, + self.common_data.tool_type, + ); let configuration = &self.hash_preset_config; debug!("calculate_fingerprint - starting fingerprinting"); @@ -279,7 +303,7 @@ impl SameMusic { .into_par_iter() .map(|(path, mut music_entry)| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -298,6 +322,8 @@ impl SameMusic { debug!("calculate_fingerprint - ended fingerprinting"); send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + let (progress_thread_handle, progress_thread_run, _atomic_counter, _check_was_stopped) = + prepare_thread_handler_common(progress_sender, 3, MAX_STAGE_CONTENT, 0, self.check_type, self.common_data.tool_type); // Just connect loaded results with already calculated vec_file_entry.extend(records_already_cached.into_values()); @@ -307,18 +333,33 @@ impl SameMusic { self.save_cache(vec_file_entry, loaded_hash_map, false); // Break if stop was clicked after saving to cache - if check_was_stopped.load(Ordering::Relaxed) { + send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + if check_was_stopped.load(Ordering::Relaxed) || check_if_stop_received(stop_receiver) { return false; } true } - #[fun_time(message = "read_tags")] + #[fun_time(message = "read_tags", level = "debug")] fn read_tags(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { + let (progress_thread_handle, progress_thread_run, _atomic_counter, _check_was_stopped) = + prepare_thread_handler_common(progress_sender, 1, MAX_STAGE_TAGS, 0, self.check_type, self.common_data.tool_type); + let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.load_cache(true); - let (progress_thread_handle, progress_thread_run, atomic_counter, check_was_stopped) = - prepare_thread_handler_common(progress_sender, 1, 2, non_cached_files_to_check.len(), self.check_type, self.common_data.tool_type); + send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + if 
check_if_stop_received(stop_receiver) { + return false; + } + + let (progress_thread_handle, progress_thread_run, atomic_counter, check_was_stopped) = prepare_thread_handler_common( + progress_sender, + 2, + MAX_STAGE_TAGS, + non_cached_files_to_check.len(), + self.check_type, + self.common_data.tool_type, + ); debug!("read_tags - starting reading tags"); // Clean for duplicate files @@ -326,7 +367,7 @@ impl SameMusic { .into_par_iter() .map(|(path, mut music_entry)| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -343,6 +384,8 @@ impl SameMusic { debug!("read_tags - ended reading tags"); send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); + let (progress_thread_handle, progress_thread_run, _atomic_counter, _check_was_stopped) = + prepare_thread_handler_common(progress_sender, 3, MAX_STAGE_TAGS, 0, self.check_type, self.common_data.tool_type); // Just connect loaded results with already calculated vec_file_entry.extend(records_already_cached.into_values()); @@ -352,6 +395,7 @@ impl SameMusic { self.save_cache(vec_file_entry, loaded_hash_map, true); // Break if stop was clicked after saving to cache + send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); if check_was_stopped.load(Ordering::Relaxed) { return false; } @@ -359,16 +403,16 @@ impl SameMusic { true } - #[fun_time(message = "check_for_duplicate_tags")] + #[fun_time(message = "check_for_duplicate_tags", level = "debug")] fn check_for_duplicate_tags(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let (progress_thread_handle, progress_thread_run, atomic_counter, _check_was_stopped) = - prepare_thread_handler_common(progress_sender, 2, 2, self.music_to_check.len(), self.check_type, self.common_data.tool_type); + prepare_thread_handler_common(progress_sender, 4, MAX_STAGE_TAGS, self.music_to_check.len(), self.check_type, self.common_data.tool_type); let mut old_duplicates: Vec> = vec![self.music_entries.clone()]; let mut new_duplicates: Vec> = Vec::new(); if (self.music_similarity & MusicSimilarity::TRACK_TITLE) == MusicSimilarity::TRACK_TITLE { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -376,7 +420,7 @@ impl SameMusic { old_duplicates = self.check_music_item(old_duplicates, &atomic_counter, |fe| &fe.track_title, self.approximate_comparison); } if (self.music_similarity & MusicSimilarity::TRACK_ARTIST) == MusicSimilarity::TRACK_ARTIST { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -384,7 +428,7 @@ impl SameMusic { old_duplicates = self.check_music_item(old_duplicates, &atomic_counter, |fe| &fe.track_artist, self.approximate_comparison); } if (self.music_similarity & MusicSimilarity::YEAR) == MusicSimilarity::YEAR { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -392,7 +436,7 @@ impl 
SameMusic { old_duplicates = self.check_music_item(old_duplicates, &atomic_counter, |fe| &fe.year, false); } if (self.music_similarity & MusicSimilarity::LENGTH) == MusicSimilarity::LENGTH { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -400,7 +444,7 @@ impl SameMusic { old_duplicates = self.check_music_item(old_duplicates, &atomic_counter, |fe| &fe.length, false); } if (self.music_similarity & MusicSimilarity::GENRE) == MusicSimilarity::GENRE { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -408,7 +452,7 @@ impl SameMusic { old_duplicates = self.check_music_item(old_duplicates, &atomic_counter, |fe| &fe.genre, false); } if (self.music_similarity & MusicSimilarity::BITRATE) == MusicSimilarity::BITRATE { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -458,11 +502,11 @@ impl SameMusic { true } - #[fun_time(message = "read_tags_to_files_similar_by_content")] + #[fun_time(message = "read_tags_to_files_similar_by_content", level = "debug")] fn read_tags_to_files_similar_by_content(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let groups_to_check = max(self.duplicated_music_entries.len(), self.duplicated_music_entries_referenced.len()); let (progress_thread_handle, progress_thread_run, atomic_counter, check_was_stopped) = - prepare_thread_handler_common(progress_sender, 3, 3, groups_to_check, self.check_type, self.common_data.tool_type); + prepare_thread_handler_common(progress_sender, 5, MAX_STAGE_CONTENT, groups_to_check, self.check_type, self.common_data.tool_type); if !self.duplicated_music_entries.is_empty() { let _: Vec<_> = self @@ -470,7 +514,7 @@ impl SameMusic { .par_iter_mut() .map(|vec_me| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -488,7 +532,7 @@ impl SameMusic { .par_iter_mut() .map(|(me_o, vec_me)| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -525,7 +569,7 @@ impl SameMusic { (base_files, files_to_compare) } - #[fun_time(message = "compare_fingerprints")] + #[fun_time(message = "compare_fingerprints", level = "debug")] fn compare_fingerprints( &mut self, stop_receiver: Option<&Receiver<()>>, @@ -543,7 +587,7 @@ impl SameMusic { for f_entry in base_files { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { return None; } @@ -584,7 +628,7 @@ impl SameMusic { Some(duplicated_music_entries) } - #[fun_time(message = "check_for_duplicate_fingerprints")] + #[fun_time(message = "check_for_duplicate_fingerprints", level = "debug")] fn check_for_duplicate_fingerprints(&mut self, stop_receiver: 
Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let (base_files, files_to_compare) = self.split_fingerprints_to_check(); let (progress_thread_handle, progress_thread_run, atomic_counter, _check_was_stopped) = @@ -621,7 +665,7 @@ impl SameMusic { true } - #[fun_time(message = "check_music_item")] + #[fun_time(message = "check_music_item", level = "debug")] fn check_music_item( &self, old_duplicates: Vec>, @@ -653,7 +697,7 @@ impl SameMusic { new_duplicates } - #[fun_time(message = "delete_files")] + #[fun_time(message = "delete_files", level = "debug")] fn delete_files(&mut self) { if self.common_data.delete_method == DeleteMethod::None { return; @@ -887,7 +931,7 @@ impl Default for SameMusic { } impl DebugPrint for SameMusic { - #[fun_time(message = "debug_print")] + #[fun_time(message = "debug_print", level = "debug")] fn debug_print(&self) { if !cfg!(debug_assertions) { return; diff --git a/czkawka_core/src/similar_images.rs b/czkawka_core/src/similar_images.rs index 288e8b2..8055f9d 100644 --- a/czkawka_core/src/similar_images.rs +++ b/czkawka_core/src/similar_images.rs @@ -20,8 +20,8 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "heif")] use crate::common::get_dynamic_image_from_heic; use crate::common::{ - check_folder_children, create_crash_message, delete_files_custom, get_dynamic_image_from_raw_image, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, - HEIC_EXTENSIONS, IMAGE_RS_SIMILAR_IMAGES_EXTENSIONS, RAW_IMAGE_EXTENSIONS, + check_folder_children, check_if_stop_received, create_crash_message, delete_files_custom, get_dynamic_image_from_raw_image, prepare_thread_handler_common, + send_info_and_wait_for_ending_all_threads, HEIC_EXTENSIONS, IMAGE_RS_SIMILAR_IMAGES_EXTENSIONS, RAW_IMAGE_EXTENSIONS, }; use crate::common_cache::{get_similar_images_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType}; @@ -124,7 +124,7 @@ impl SimilarImages { } } - #[fun_time(message = "find_similar_images")] + #[fun_time(message = "find_similar_images", level = "info")] pub fn find_similar_images(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); self.common_data.use_reference_folders = !self.common_data.directories.reference_directories.is_empty(); @@ -144,7 +144,7 @@ impl SimilarImages { self.debug_print(); } - #[fun_time(message = "check_for_similar_images")] + #[fun_time(message = "check_for_similar_images", level = "debug")] fn check_for_similar_images(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let mut folders_to_check: Vec = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector @@ -171,7 +171,7 @@ impl SimilarImages { prepare_thread_handler_common(progress_sender, 0, 2, 0, CheckingMethod::None, self.common_data.tool_type); while !folders_to_check.is_empty() { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -258,7 +258,7 @@ impl SimilarImages { } } - #[fun_time(message = "hash_images_load_cache")] + #[fun_time(message = 
"hash_images_load_cache", level = "debug")] fn hash_images_load_cache(&mut self) -> (BTreeMap, BTreeMap, BTreeMap) { let loaded_hash_map; @@ -303,7 +303,7 @@ impl SimilarImages { // - Join already read hashes with hashes which were read from file // - Join all hashes and save it to file - #[fun_time(message = "hash_images")] + #[fun_time(message = "hash_images", level = "debug")] fn hash_images(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.hash_images_load_cache(); @@ -315,7 +315,7 @@ impl SimilarImages { .into_par_iter() .map(|(_s, file_entry)| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -352,7 +352,7 @@ impl SimilarImages { true } - #[fun_time(message = "save_to_cache")] + #[fun_time(message = "save_to_cache", level = "debug")] fn save_to_cache(&mut self, vec_file_entry: Vec<(FileEntry, ImHash)>, loaded_hash_map: BTreeMap) { if self.common_data.use_cache { // Must save all results to file, old loaded from file with all currently counted results @@ -443,7 +443,7 @@ impl SimilarImages { } // Split hashes at 2 parts, base hashes and hashes to compare, 3 argument is set of hashes with multiple images - #[fun_time(message = "split_hashes")] + #[fun_time(message = "split_hashes", level = "debug")] fn split_hashes(&mut self, all_hashed_images: &HashMap>) -> (Vec, HashSet) { let hashes_with_multiple_images: HashSet = all_hashed_images .iter() @@ -485,7 +485,7 @@ impl SimilarImages { (base_hashes, hashes_with_multiple_images) } - #[fun_time(message = "collect_hash_compare_result")] + #[fun_time(message = "collect_hash_compare_result", level = "debug")] fn collect_hash_compare_result( &self, hashes_parents: HashMap, @@ -545,7 +545,7 @@ impl SimilarImages { } } - #[fun_time(message = "compare_hashes_with_non_zero_tolerance")] + #[fun_time(message = "compare_hashes_with_non_zero_tolerance", level = "debug")] fn compare_hashes_with_non_zero_tolerance( &mut self, all_hashed_images: &HashMap>, @@ -572,7 +572,7 @@ impl SimilarImages { .map(|hash_to_check| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -621,7 +621,7 @@ impl SimilarImages { true } - #[fun_time(message = "connect_results")] + #[fun_time(message = "connect_results", level = "debug")] fn connect_results( &self, partial_results: Vec<(&ImHash, Vec<(u32, &ImHash)>)>, @@ -683,7 +683,7 @@ impl SimilarImages { } } - #[fun_time(message = "find_similar_hashes")] + #[fun_time(message = "find_similar_hashes", level = "debug")] fn find_similar_hashes(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { if self.image_hashes.is_empty() { return true; @@ -736,7 +736,7 @@ impl SimilarImages { true } - #[fun_time(message = "exclude_items_with_same_size")] + #[fun_time(message = "exclude_items_with_same_size", level = "debug")] fn exclude_items_with_same_size(&mut self) { if self.exclude_images_with_same_size { for vec_file_entry in mem::take(&mut self.similar_vectors) { @@ -755,7 +755,7 @@ impl SimilarImages { } } - #[fun_time(message = "remove_multiple_records_from_reference_folders")] + 
#[fun_time(message = "remove_multiple_records_from_reference_folders", level = "debug")] fn remove_multiple_records_from_reference_folders(&mut self) { if self.common_data.use_reference_folders { self.similar_referenced_vectors = mem::take(&mut self.similar_vectors) diff --git a/czkawka_core/src/similar_videos.rs b/czkawka_core/src/similar_videos.rs index 6a63197..d76bd2f 100644 --- a/czkawka_core/src/similar_videos.rs +++ b/czkawka_core/src/similar_videos.rs @@ -15,7 +15,9 @@ use serde::{Deserialize, Serialize}; use vid_dup_finder_lib::HashCreationErrorKind::DetermineVideo; use vid_dup_finder_lib::{NormalizedTolerance, VideoHash}; -use crate::common::{check_folder_children, delete_files_custom, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, VIDEO_FILES_EXTENSIONS}; +use crate::common::{ + check_folder_children, check_if_stop_received, delete_files_custom, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, VIDEO_FILES_EXTENSIONS, +}; use crate::common_cache::{get_similar_videos_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType}; use crate::common_tool::{CommonData, CommonToolData, DeleteMethod}; @@ -100,7 +102,7 @@ impl SimilarVideos { } } - #[fun_time(message = "find_similar_videos")] + #[fun_time(message = "find_similar_videos", level = "info")] pub fn find_similar_videos(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { if !check_if_ffmpeg_is_installed() { self.common_data.text_messages.errors.push(flc!("core_ffmpeg_not_found")); @@ -127,7 +129,7 @@ impl SimilarVideos { self.debug_print(); } - #[fun_time(message = "check_for_similar_videos")] + #[fun_time(message = "check_for_similar_videos", level = "debug")] fn check_for_similar_videos(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let mut folders_to_check: Vec = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector @@ -149,7 +151,7 @@ impl SimilarVideos { prepare_thread_handler_common(progress_sender, 0, 1, 0, CheckingMethod::None, self.common_data.tool_type); while !folders_to_check.is_empty() { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -237,7 +239,7 @@ impl SimilarVideos { } } - #[fun_time(message = "load_cache_at_start")] + #[fun_time(message = "load_cache_at_start", level = "debug")] fn load_cache_at_start(&mut self) -> (BTreeMap, BTreeMap, BTreeMap) { let loaded_hash_map; let mut records_already_cached: BTreeMap = Default::default(); @@ -263,7 +265,7 @@ impl SimilarVideos { (loaded_hash_map, records_already_cached, non_cached_files_to_check) } - #[fun_time(message = "sort_videos")] + #[fun_time(message = "sort_videos", level = "debug")] fn sort_videos(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let (loaded_hash_map, records_already_cached, non_cached_files_to_check) = self.load_cache_at_start(); @@ -274,7 +276,7 @@ impl SimilarVideos { .par_iter() .map(|file_entry| { atomic_counter.fetch_add(1, Ordering::Relaxed); - if stop_receiver.is_some() 
&& stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { check_was_stopped.store(true, Ordering::Relaxed); return None; } @@ -343,7 +345,7 @@ impl SimilarVideos { true } - #[fun_time(message = "save_cache")] + #[fun_time(message = "save_cache", level = "debug")] fn save_cache(&mut self, vec_file_entry: Vec, loaded_hash_map: BTreeMap) { if self.common_data.use_cache { // Must save all results to file, old loaded from file with all currently counted results @@ -357,7 +359,7 @@ impl SimilarVideos { } } - #[fun_time(message = "match_groups_of_videos")] + #[fun_time(message = "match_groups_of_videos", level = "debug")] fn match_groups_of_videos(&mut self, vector_of_hashes: Vec, hashmap_with_file_entries: &HashMap) { let match_group = vid_dup_finder_lib::search(vector_of_hashes, NormalizedTolerance::new(self.tolerance as f64 / 100.0f64)); let mut collected_similar_videos: Vec> = Default::default(); @@ -383,7 +385,7 @@ impl SimilarVideos { self.similar_vectors = collected_similar_videos; } - #[fun_time(message = "remove_from_reference_folders")] + #[fun_time(message = "remove_from_reference_folders", level = "debug")] fn remove_from_reference_folders(&mut self) { if self.common_data.use_reference_folders { self.similar_referenced_vectors = mem::take(&mut self.similar_vectors) @@ -420,7 +422,7 @@ impl Default for SimilarVideos { } impl DebugPrint for SimilarVideos { - #[fun_time(message = "debug_print")] + #[fun_time(message = "debug_print", level = "debug")] fn debug_print(&self) { if !cfg!(debug_assertions) { return; diff --git a/czkawka_core/src/temporary.rs b/czkawka_core/src/temporary.rs index fb4c035..0785303 100644 --- a/czkawka_core/src/temporary.rs +++ b/czkawka_core/src/temporary.rs @@ -12,7 +12,7 @@ use futures::channel::mpsc::UnboundedSender; use rayon::prelude::*; use serde::Serialize; -use crate::common::{check_folder_children, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads}; +use crate::common::{check_folder_children, check_if_stop_received, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads}; use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType}; use crate::common_tool::{CommonData, CommonToolData, DeleteMethod}; use crate::common_traits::*; @@ -59,7 +59,7 @@ impl Temporary { } } - #[fun_time(message = "find_temporary_files")] + #[fun_time(message = "find_temporary_files", level = "info")] pub fn find_temporary_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) { self.optimize_dirs_before_start(); if !self.check_files(stop_receiver, progress_sender) { @@ -70,7 +70,7 @@ impl Temporary { self.debug_print(); } - #[fun_time(message = "check_files")] + #[fun_time(message = "check_files", level = "debug")] fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&UnboundedSender>) -> bool { let mut folders_to_check: Vec = Vec::with_capacity(1024 * 2); // This should be small enough too not see to big difference and big enough to store most of paths without needing to resize vector @@ -83,7 +83,7 @@ impl Temporary { prepare_thread_handler_common(progress_sender, 0, 0, 0, CheckingMethod::None, self.common_data.tool_type); while !folders_to_check.is_empty() { - if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() { + if check_if_stop_received(stop_receiver) { 
send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); return false; } @@ -172,7 +172,7 @@ impl Temporary { }) } - #[fun_time(message = "delete_files")] + #[fun_time(message = "delete_files", level = "debug")] fn delete_files(&mut self) { match self.common_data.delete_method { DeleteMethod::Delete => { diff --git a/czkawka_gui/i18n/en/czkawka_gui.ftl b/czkawka_gui/i18n/en/czkawka_gui.ftl index d7d90ee..70d7780 100644 --- a/czkawka_gui/i18n/en/czkawka_gui.ftl +++ b/czkawka_gui/i18n/en/czkawka_gui.ftl @@ -471,6 +471,12 @@ progress_scanning_size_name = Scanning name and size of {$file_number} file progress_scanning_name = Scanning name of {$file_number} file progress_analyzed_partial_hash = Analyzed partial hash of {$file_checked}/{$all_files} files progress_analyzed_full_hash = Analyzed full hash of {$file_checked}/{$all_files} files +progress_prehash_cache_loading = Loading prehash cache +progress_prehash_cache_saving = Saving prehash cache +progress_hash_cache_loading = Loading hash cache +progress_hash_cache_saving = Saving hash cache +progress_cache_loading = Loading cache +progress_cache_saving = Saving cache progress_current_stage = Current Stage:{" "} progress_all_stages = All Stages:{" "} diff --git a/czkawka_gui/src/compute_results.rs b/czkawka_gui/src/compute_results.rs index f1b24f5..3bb4c12 100644 --- a/czkawka_gui/src/compute_results.rs +++ b/czkawka_gui/src/compute_results.rs @@ -4,6 +4,7 @@ use std::path::PathBuf; use std::rc::Rc; use chrono::NaiveDateTime; +use fun_time::fun_time; use glib::Receiver; use gtk4::prelude::*; use gtk4::{Entry, ListStore, TextView, TreeView, Widget}; @@ -229,6 +230,7 @@ pub fn connect_compute_results(gui_data: &GuiData, glib_stop_receiver: Receiver< }); } +#[fun_time(message = "computer_bad_extensions", level = "debug")] fn computer_bad_extensions( be: BadExtensions, entry_info: &Entry, @@ -302,6 +304,7 @@ fn computer_bad_extensions( } } +#[fun_time(message = "computer_broken_files", level = "debug")] fn computer_broken_files( br: BrokenFiles, entry_info: &Entry, @@ -375,6 +378,7 @@ fn computer_broken_files( } } +#[fun_time(message = "computer_invalid_symlinks", level = "debug")] fn computer_invalid_symlinks( ifs: InvalidSymlinks, entry_info: &Entry, @@ -446,6 +450,7 @@ fn computer_invalid_symlinks( } } +#[fun_time(message = "computer_same_music", level = "debug")] fn computer_same_music( mf: SameMusic, entry_info: &Entry, @@ -620,6 +625,7 @@ fn computer_same_music( } } +#[fun_time(message = "computer_similar_videos", level = "debug")] fn computer_similar_videos( ff: SimilarVideos, entry_info: &Entry, @@ -721,6 +727,7 @@ fn computer_similar_videos( } } +#[fun_time(message = "computer_similar_images", level = "debug")] fn computer_similar_images( sf: SimilarImages, entry_info: &Entry, @@ -853,6 +860,7 @@ fn computer_similar_images( } } +#[fun_time(message = "computer_temporary_files", level = "debug")] fn computer_temporary_files( tf: Temporary, entry_info: &Entry, @@ -924,6 +932,7 @@ fn computer_temporary_files( } } +#[fun_time(message = "computer_big_files", level = "debug")] fn computer_big_files( bf: BigFile, entry_info: &Entry, @@ -991,6 +1000,7 @@ fn computer_big_files( } } +#[fun_time(message = "computer_empty_files", level = "debug")] fn computer_empty_files( vf: EmptyFiles, entry_info: &Entry, @@ -1057,6 +1067,7 @@ fn computer_empty_files( } } +#[fun_time(message = "computer_empty_folders", level = "debug")] fn computer_empty_folders( ef: EmptyFolder, entry_info: &Entry, @@ -1128,6 +1139,7 @@ fn 
computer_empty_folders( } } +#[fun_time(message = "computer_duplicate_finder", level = "debug")] fn computer_duplicate_finder( df: DuplicateFinder, entry_info: &Entry, diff --git a/czkawka_gui/src/connect_things/connect_button_search.rs b/czkawka_gui/src/connect_things/connect_button_search.rs index e6f4eb7..4468eb6 100644 --- a/czkawka_gui/src/connect_things/connect_button_search.rs +++ b/czkawka_gui/src/connect_things/connect_button_search.rs @@ -794,7 +794,7 @@ fn bad_extensions_search( }); } -#[fun_time(message = "clean_tree_view")] +#[fun_time(message = "clean_tree_view", level = "debug")] fn clean_tree_view(tree_view: >k4::TreeView) { let list_store = get_list_store(tree_view); list_store.clear(); diff --git a/czkawka_gui/src/connect_things/connect_progress_window.rs b/czkawka_gui/src/connect_things/connect_progress_window.rs index 584ec83..955ad6b 100644 --- a/czkawka_gui/src/connect_things/connect_progress_window.rs +++ b/czkawka_gui/src/connect_things/connect_progress_window.rs @@ -84,7 +84,13 @@ fn process_bar_same_music(gui_data: &GuiData, item: &ProgressData) { label_stage.set_text(&flg!("progress_scanning_general_file", file_number_tm(item))); taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE); } + // Loading cache 1 => { + progress_bar_current_stage.hide(); + common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); + label_stage.set_text(&flg!("progress_cache_loading")); + } + 2 => { progress_bar_current_stage.show(); common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); @@ -94,7 +100,14 @@ fn process_bar_same_music(gui_data: &GuiData, item: &ProgressData) { _ => panic!(), } } - 2 => { + // Saving cache + 3 => { + progress_bar_current_stage.hide(); + common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); + label_stage.set_text(&flg!("progress_cache_saving")); + } + 4 => { + progress_bar_current_stage.show(); common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); match item.checking_method { @@ -103,7 +116,8 @@ fn process_bar_same_music(gui_data: &GuiData, item: &ProgressData) { _ => panic!(), } } - 3 => { + 5 => { + progress_bar_current_stage.show(); common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); if item.checking_method == CheckingMethod::AudioContent { @@ -235,21 +249,42 @@ fn process_bar_duplicates(gui_data: &GuiData, item: &ProgressData) { // Checking Size 0 => { progress_bar_current_stage.hide(); - // progress_bar_all_stages.hide(); - progress_bar_all_stages.set_fraction(0 as f64); + progress_bar_all_stages.set_fraction(0f64); label_stage.set_text(&flg!("progress_scanning_size", file_number_tm(item))); taskbar_state.borrow().set_progress_state(TBPF_INDETERMINATE); } + // Loading cache + 1 | 4 => { + progress_bar_current_stage.hide(); + common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); + + if item.current_stage == 1 { + label_stage.set_text(&flg!("progress_prehash_cache_loading")); + } else { + label_stage.set_text(&flg!("progress_hash_cache_loading")); + } + } + // Saving cache + 3 | 6 => { + progress_bar_current_stage.hide(); + common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); + + if item.current_stage == 3 { + label_stage.set_text(&flg!("progress_prehash_cache_saving")); + } else { + label_stage.set_text(&flg!("progress_hash_cache_saving")); + } + } // Hash - first 1KB file 
- 1 => { + 2 => { progress_bar_current_stage.show(); - // progress_bar_all_stages.show(); common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); label_stage.set_text(&flg!("progress_analyzed_partial_hash", progress_ratio_tm(item))); } // Hash - normal hash - 2 => { + 5 => { + progress_bar_current_stage.show(); common_set_data(item, &progress_bar_all_stages, &progress_bar_current_stage, &taskbar_state); label_stage.set_text(&flg!("progress_analyzed_full_hash", progress_ratio_tm(item))); } @@ -285,14 +320,18 @@ fn process_bar_duplicates(gui_data: &GuiData, item: &ProgressData) { fn common_set_data(item: &ProgressData, progress_bar_all_stages: &ProgressBar, progress_bar_current_stage: &ProgressBar, taskbar_state: &Rc>) { if item.entries_to_check != 0 { - progress_bar_all_stages.set_fraction((item.current_stage as f64 + (item.entries_checked) as f64 / item.entries_to_check as f64) / (item.max_stage + 1) as f64); + let all_stages = (item.current_stage as f64 + (item.entries_checked) as f64 / item.entries_to_check as f64) / (item.max_stage + 1) as f64; + let all_stages = if all_stages > 0.99 { 0.99 } else { all_stages }; + progress_bar_all_stages.set_fraction(all_stages); progress_bar_current_stage.set_fraction((item.entries_checked) as f64 / item.entries_to_check as f64); taskbar_state.borrow().set_progress_value( ((item.current_stage as usize) * item.entries_to_check + item.entries_checked) as u64, item.entries_to_check as u64 * (item.max_stage + 1) as u64, ); } else { - progress_bar_all_stages.set_fraction((item.current_stage as f64) / (item.max_stage + 1) as f64); + let all_stages = (item.current_stage as f64) / (item.max_stage + 1) as f64; + let all_stages = if all_stages > 0.99 { 0.99 } else { all_stages }; + progress_bar_all_stages.set_fraction(all_stages); progress_bar_current_stage.set_fraction(0f64); taskbar_state.borrow().set_progress_value(item.current_stage as u64, 1 + item.max_stage as u64); } diff --git a/instructions/Instruction.md b/instructions/Instruction.md index 6ddde62..8205124 100644 --- a/instructions/Instruction.md +++ b/instructions/Instruction.md @@ -201,7 +201,7 @@ Then, for each selected tag by which we want to search for duplicates, we perfor ### Similar Images It is a tool for finding similar images that differ e.g. in watermark, size etc. -The tool first collects images with specific extensions that can be checked - `[".jpg", ".jpeg", ".png", ".bmp", ".tiff", ".tif", ".pnm", ".tga", ".ff", ".gif", ".jif", ".jfi", ".ico", ".webp", ".avif"]`. +The tool first collects images with specific extensions that can be checked - `[".jpg", ".jpeg", ".png", ".bmp", ".tiff", ".tif", ".pnm", ".tga", ".ff", ".gif", ".jif", ".jfi", ".ico", ".webp"]`. Next cached data is loaded from file to prevent hashing twice the same file. The cache which points to non-existing data, by default is deleted automatically.
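
Throughout this patch the inline stop checks (`stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok()`) are replaced by a shared `check_if_stop_received` helper imported from `crate::common`. A minimal sketch of what such a helper looks like, assuming the `crossbeam_channel::Receiver<()>` stop channel used elsewhere in czkawka_core — the real definition lives in `czkawka_core/src/common.rs` and may differ in detail:

```rust
use crossbeam_channel::Receiver;

/// Non-blocking check for a stop request; mirrors the inline expression
/// removed throughout this patch:
/// `stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok()`
pub fn check_if_stop_received(stop_receiver: Option<&Receiver<()>>) -> bool {
    if let Some(stop_receiver) = stop_receiver {
        stop_receiver.try_recv().is_ok()
    } else {
        false
    }
}

// Typical use inside a scan or hashing loop:
// if check_if_stop_received(stop_receiver) { return false; }
```

Centralising the check keeps the cancellation semantics (a non-blocking `try_recv` on an optional channel) in one place, so every scan stage can bail out with a single call instead of repeating the unwrap-based expression.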