
Add json support to gui (#1083)

* Add json support to gui

* Cli support

* Tests

* Zip fixed

* Always printing

* Zip

* Bound

* Improved CI

* More tests

* Maybe

* Different duplicate delete
Rafał Mikrut 2023-10-12 18:48:46 +02:00 committed by GitHub
parent 9b57382e39
commit 0462324607
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
24 changed files with 908 additions and 451 deletions

View file

@@ -38,73 +38,12 @@ jobs:
path: target/release/czkawka_cli
if: ${{ matrix.type == 'release' }}
# Duplicate finder checks included and excluded directories
# Others are just check delete files number
- name: Linux Regression Test
run: |
wget https://github.com/qarmin/czkawka/releases/download/1.1.0/TestSuite.zip
unzip TestSuite.zip -d TestSuite
python3 misc/check_results.py TestSuite 15 8
target/release/czkawka_cli dup -d "$(pwd)/TestSuite" -D aen -m 1024
python3 misc/check_results.py TestSuite 7 8
rm -rf TestSuite
unzip TestSuite.zip -d TestSuite
target/release/czkawka_cli dup -d "$(pwd)/TestSuite" -D aen -m 1024
python3 misc/check_results.py TestSuite 7 8
rm -rf TestSuite
unzip TestSuite.zip -d TestSuite
target/release/czkawka_cli dup -d "$(pwd)/TestSuite" -x TEXT -D aeo -m 1024
python3 misc/check_results.py TestSuite 14 8
rm -rf TestSuite
unzip TestSuite.zip -d TestSuite
target/release/czkawka_cli dup -d "$(pwd)/TestSuite" -e "$(pwd)/TestSuite/SubFolder" -D aeo -m 1024
python3 misc/check_results.py TestSuite 13 8
rm -rf TestSuite
unzip TestSuite.zip -d TestSuite
target/release/czkawka_cli dup -d "$(pwd)/TestSuite" -m 1500 -D aeo
python3 misc/check_results.py TestSuite 8 8
rm -rf TestSuite
unzip TestSuite.zip -d TestSuite
target/release/czkawka_cli dup -d "$(pwd)/TestSuite" -R -m 1024
python3 misc/check_results.py TestSuite 15 8
target/release/czkawka_cli dup -d "$(pwd)/TestSuite" -R -D aeo -m 1024
python3 misc/check_results.py TestSuite 13 8
target/release/czkawka_cli big -d "$(pwd)/TestSuite"
rm -rf TestSuite
unzip TestSuite.zip -d TestSuite
target/release/czkawka_cli empty-files -d "$(pwd)/TestSuite"
python3 misc/check_results.py TestSuite 15 8
target/release/czkawka_cli empty-files -d "$(pwd)/TestSuite" -D
python3 misc/check_results.py TestSuite 13 8
rm -rf TestSuite
unzip TestSuite.zip -d TestSuite
target/release/czkawka_cli empty-folders -d "$(pwd)/TestSuite"
python3 misc/check_results.py TestSuite 15 8
target/release/czkawka_cli empty-folders -d "$(pwd)/TestSuite" -D
python3 misc/check_results.py TestSuite 15 2
rm -rf TestSuite
unzip TestSuite.zip -d TestSuite
target/release/czkawka_cli temp -d "$(pwd)/TestSuite"
python3 misc/check_results.py TestSuite 15 8
target/release/czkawka_cli temp -d "$(pwd)/TestSuite" -D
python3 misc/check_results.py TestSuite 14 8
wget https://github.com/qarmin/czkawka/releases/download/6.0.0/TestFiles.zip
cd ci_tester
cargo build --release
cd ..
ci_tester/target/release/ci_tester target/release/czkawka_cli
if: ${{ matrix.type == 'release' }}

.gitignore (vendored, 3 lines changed)
View file

@@ -13,3 +13,6 @@ flatpak/
*.profdata
/lcov_report*
/report
ci_tester/target
ci_tester/Cargo.lock
czkawka_slint_gui/Cargo.lock

View file

@@ -3,9 +3,9 @@ members = [
"czkawka_core",
"czkawka_cli",
"czkawka_gui",
# "czkawka_slint_gui",
]
exclude = [
"ci_tester",
"czkawka_slint_gui"
]
resolver = "2"

ci_tester/Cargo.toml (new file, 11 lines)
View file

@@ -0,0 +1,11 @@
[package]
name = "ci_tester"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
state = "0.6.0"
handsome_logger = "0.8.0"
log = "0.4.20"

ci_tester/src/main.rs (new file, 233 lines)
View file

@@ -0,0 +1,233 @@
use log::info;
use std::collections::BTreeSet;
use std::fs;
use std::process::Command;
use std::process::Stdio;
#[derive(Default, Clone, Debug)]
struct CollectedFiles {
files: BTreeSet<String>,
folders: BTreeSet<String>,
symlinks: BTreeSet<String>,
}
static CZKAWKA_PATH: state::InitCell<String> = state::InitCell::new();
static COLLECTED_FILES: state::InitCell<CollectedFiles> = state::InitCell::new();
const ATTEMPTS: u32 = 10;
// App runs - ./ci_tester PATH_TO_CZKAWKA
fn main() {
handsome_logger::init().unwrap();
let args: Vec<String> = std::env::args().collect();
let path_to_czkawka = args[1].clone();
CZKAWKA_PATH.set(path_to_czkawka);
remove_test_dir();
run_with_good_status(&["ls"], false);
unzip_files();
let all_files = collect_all_files_and_dirs("TestFiles").unwrap();
COLLECTED_FILES.set(all_files);
remove_test_dir();
for _ in 0..ATTEMPTS {
test_empty_files();
test_smallest_files();
test_biggest_files();
test_empty_folders();
test_temporary_files();
test_symlinks_files();
test_remove_duplicates_one_oldest();
test_remove_duplicates_one_newest();
test_remove_duplicates_all_expect_newest();
test_remove_duplicates_all_expect_oldest();
}
println!("Completed checking");
}
fn test_remove_duplicates_all_expect_oldest() {
info!("test_remove_duplicates_all_expect_oldest");
run_test(
&["dup", "-d", "TestFiles", "-D", "AEO"],
vec!["Images/A1.jpg", "Images/A5.jpg", "Music/M1.mp3", "Music/M2.mp3", "Videos/V1.mp4", "Videos/V5.mp4"],
vec![],
vec![],
);
}
fn test_remove_duplicates_all_expect_newest() {
info!("test_remove_duplicates_all_expect_newest");
run_test(
&["dup", "-d", "TestFiles", "-D", "AEN"],
vec!["Images/A2.jpg", "Images/A5.jpg", "Music/M1.mp3", "Music/M5.mp3", "Videos/V1.mp4", "Videos/V2.mp4"],
vec![],
vec![],
);
}
fn test_remove_duplicates_one_newest() {
info!("test_remove_duplicates_one_newest");
run_test(
&["dup", "-d", "TestFiles", "-D", "ON"],
vec!["Images/A1.jpg", "Music/M2.mp3", "Videos/V5.mp4"],
vec![],
vec![],
);
}
fn test_remove_duplicates_one_oldest() {
info!("test_remove_duplicates_one_oldest");
run_test(
&["dup", "-d", "TestFiles", "-D", "OO"],
vec!["Images/A2.jpg", "Music/M5.mp3", "Videos/V2.mp4"],
vec![],
vec![],
);
}
fn test_symlinks_files() {
info!("test_symlinks_files");
run_test(&["symlinks", "-d", "TestFiles", "-D"], vec![], vec![], vec!["Symlinks/EmptyFiles"]);
}
fn test_temporary_files() {
info!("test_temporary_files");
run_test(&["temp", "-d", "TestFiles", "-D"], vec!["Temporary/Boczze.cache"], vec![], vec![]);
}
fn test_empty_folders() {
info!("test_empty_folders");
run_test(
&["empty-folders", "-d", "TestFiles", "-D"],
vec![],
vec!["EmptyFolders/One", "EmptyFolders/Two", "EmptyFolders/Two/TwoInside"],
vec![],
);
}
fn test_biggest_files() {
info!("test_biggest_files");
run_test(
&["big", "-d", "TestFiles", "-n", "6", "-D"],
vec!["Music/M3.flac", "Music/M4.mp3", "Videos/V2.mp4", "Videos/V3.webm", "Videos/V1.mp4", "Videos/V5.mp4"],
vec![],
vec![],
);
}
fn test_smallest_files() {
info!("test_smallest_files");
run_test(
&["big", "-d", "TestFiles", "-J", "-n", "5", "-D"],
vec!["Broken/Br.jpg", "Broken/Br.mp3", "Broken/Br.pdf", "Broken/Br.zip", "EmptyFolders/ThreeButNot/KEKEKE"],
vec![],
vec![],
);
}
fn test_empty_files() {
info!("test_empty_files");
run_test(&["empty-files", "-d", "TestFiles", "-D"], vec!["EmptyFile"], vec![], vec![]);
}
////////////////////////////////////
////////////////////////////////////
/////////HELPER FUNCTIONS///////////
////////////////////////////////////
////////////////////////////////////
fn run_test(arguments: &[&str], expected_files_differences: Vec<&'static str>, expected_folders_differences: Vec<&'static str>, expected_symlinks_differences: Vec<&'static str>) {
unzip_files();
// Add path_to_czkawka to arguments
let mut all_arguments = vec![];
all_arguments.push(CZKAWKA_PATH.get().as_str());
all_arguments.extend_from_slice(arguments);
run_with_good_status(&all_arguments, true);
file_folder_diffs(
COLLECTED_FILES.get(),
expected_files_differences,
expected_folders_differences,
expected_symlinks_differences,
);
remove_test_dir();
}
fn unzip_files() {
run_with_good_status(&["unzip", "-X", "TestFiles.zip", "-d", "TestFiles"], false);
}
fn remove_test_dir() {
let _ = fs::remove_dir_all("TestFiles");
}
fn run_with_good_status(str_command: &[&str], print_messages: bool) {
let mut command = Command::new(str_command[0]);
let mut com = command.args(&str_command[1..]);
if !print_messages {
com = com.stderr(Stdio::piped()).stdout(Stdio::piped());
}
let status = com.spawn().expect("failed to execute process").wait().unwrap();
assert!(status.success());
}
fn file_folder_diffs(
all_files: &CollectedFiles,
mut expected_files_differences: Vec<&'static str>,
mut expected_folders_differences: Vec<&'static str>,
mut expected_symlinks_differences: Vec<&'static str>,
) {
let current_files = collect_all_files_and_dirs("TestFiles").unwrap();
let mut diff_files = all_files
.files
.difference(&current_files.files)
.map(|e| e.strip_prefix("TestFiles/").unwrap().to_string())
.collect::<Vec<_>>();
let mut diff_folders = all_files
.folders
.difference(&current_files.folders)
.map(|e| e.strip_prefix("TestFiles/").unwrap().to_string())
.collect::<Vec<_>>();
let mut diff_symlinks = all_files
.symlinks
.difference(&current_files.symlinks)
.map(|e| e.strip_prefix("TestFiles/").unwrap().to_string())
.collect::<Vec<_>>();
expected_symlinks_differences.sort();
expected_folders_differences.sort();
expected_files_differences.sort();
diff_files.sort();
diff_folders.sort();
diff_symlinks.sort();
assert_eq!(diff_files, expected_files_differences);
assert_eq!(diff_folders, expected_folders_differences);
assert_eq!(diff_symlinks, expected_symlinks_differences);
}
fn collect_all_files_and_dirs(dir: &str) -> std::io::Result<CollectedFiles> {
let mut files = BTreeSet::new();
let mut folders = BTreeSet::new();
let mut symlinks = BTreeSet::new();
let mut folders_to_check = vec![dir.to_string()];
while !folders_to_check.is_empty() {
let folder = folders_to_check.pop().unwrap();
let rd = fs::read_dir(folder)?;
for entry in rd {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
folders.insert(path.display().to_string());
folders_to_check.push(path.display().to_string());
} else if path.is_symlink() {
symlinks.insert(path.display().to_string());
} else if path.is_file() {
files.insert(path.display().to_string());
} else {
panic!("Unknown type of file {:?}", path);
}
}
}
folders.remove(dir);
// println!("Found {} files, {} folders and {} symlinks", files.len(), folders.len(), symlinks.len());
Ok(CollectedFiles { files, folders, symlinks })
}
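The heart of this harness is file_folder_diffs: it snapshots the unpacked TestFiles tree once up front, reruns each tool, and compares the post-run state against that snapshot. The core step is a BTreeSet difference; a minimal self-contained sketch (paths are illustrative):

    use std::collections::BTreeSet;

    fn main() {
        let before: BTreeSet<String> = ["TestFiles/Images/A1.jpg", "TestFiles/Images/A2.jpg"]
            .into_iter()
            .map(String::from)
            .collect();
        let after: BTreeSet<String> = ["TestFiles/Images/A2.jpg"]
            .into_iter()
            .map(String::from)
            .collect();

        // Same operation as file_folder_diffs: whatever existed before the run but is
        // gone afterwards, with the "TestFiles/" prefix stripped before comparison
        let deleted: Vec<String> = before
            .difference(&after)
            .map(|e| e.strip_prefix("TestFiles/").unwrap().to_string())
            .collect();
        assert_eq!(deleted, ["Images/A1.jpg"]);
    }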

View file

@@ -152,6 +152,10 @@ pub struct DuplicatesArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[clap(flatten)]
pub case_sensitive_name_comparison: CaseSensitiveNameComparison,
@@ -178,6 +182,10 @@ pub struct EmptyFoldersArgs {
pub delete_folders: bool,
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[cfg(target_family = "unix")]
#[clap(flatten)]
pub exclude_other_filesystems: ExcludeOtherFilesystems,
@@ -202,6 +210,10 @@ pub struct BiggestFilesArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[clap(short = 'J', long, help = "Finds the smallest files instead of the biggest")]
pub smallest_mode: bool,
@@ -227,6 +239,10 @@ pub struct EmptyFilesArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[cfg(target_family = "unix")]
#[clap(flatten)]
@@ -248,6 +264,10 @@ pub struct TemporaryArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[cfg(target_family = "unix")]
#[clap(flatten)]
@@ -294,6 +314,10 @@ pub struct SimilarImagesArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[cfg(target_family = "unix")]
#[clap(flatten)]
@@ -348,6 +372,10 @@ pub struct SameMusicArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[cfg(target_family = "unix")]
#[clap(flatten)]
@@ -389,6 +417,10 @@ pub struct InvalidSymlinksArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[cfg(target_family = "unix")]
#[clap(flatten)]
@@ -412,6 +444,10 @@ pub struct BrokenFilesArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[cfg(target_family = "unix")]
#[clap(flatten)]
@@ -433,6 +469,10 @@ pub struct SimilarVideosArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub allowed_extensions: AllowedExtensions,
#[clap(flatten)]
pub not_recursive: NotRecursive,
@@ -483,6 +523,10 @@ pub struct BadExtensionsArgs {
#[clap(flatten)]
pub file_to_save: FileToSave,
#[clap(flatten)]
pub json_compact_file_to_save: JsonCompactFileToSave,
#[clap(flatten)]
pub json_pretty_file_to_save: JsonPrettyFileToSave,
#[clap(flatten)]
pub not_recursive: NotRecursive,
#[cfg(target_family = "unix")]
#[clap(flatten)]
@@ -555,10 +599,22 @@ pub struct ExcludeOtherFilesystems {
#[derive(Debug, clap::Args)]
pub struct FileToSave {
#[clap(short, long, value_name = "file-name", help = "Saves the results into the file")]
#[clap(short, long, value_name = "file-name", help = "Saves the results into the formatted txt file")]
pub file_to_save: Option<PathBuf>,
}
#[derive(Debug, clap::Args)]
pub struct JsonCompactFileToSave {
#[clap(short, long, value_name = "json-file-name", help = "Saves the results into the compact json file")]
pub compact_file_to_save: Option<PathBuf>,
}
#[derive(Debug, clap::Args)]
pub struct JsonPrettyFileToSave {
#[clap(short, long, value_name = "pretty-json-file-name", help = "Saves the results into the pretty json file")]
pub pretty_file_to_save: Option<PathBuf>,
}
#[derive(Debug, clap::Args)]
pub struct AllowHardLinks {
#[clap(short = 'L', long, help = "Do not ignore hard links")]
@@ -586,6 +642,24 @@ impl FileToSave {
None
}
}
impl JsonCompactFileToSave {
pub fn file_name(&self) -> Option<&str> {
if let Some(file_name) = &self.compact_file_to_save {
return file_name.to_str();
}
None
}
}
impl JsonPrettyFileToSave {
pub fn file_name(&self) -> Option<&str> {
if let Some(file_name) = &self.pretty_file_to_save {
return file_name.to_str();
}
None
}
}
fn parse_hash_type(src: &str) -> Result<HashType, &'static str> {
match src.to_ascii_lowercase().as_str() {
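Both new arg groups follow the existing FileToSave pattern: a one-field struct pulled into each subcommand via #[clap(flatten)]. A self-contained sketch of how the flattened groups surface as flags, assuming the clap 4 derive API used in this crate (the long flag names are clap's kebab-case defaults; this is not czkawka's actual Cli definition):

    use clap::Parser;
    use std::path::PathBuf;

    #[derive(Debug, clap::Args)]
    struct JsonCompactFileToSave {
        #[clap(short, long, value_name = "json-file-name", help = "Saves the results into the compact json file")]
        compact_file_to_save: Option<PathBuf>,
    }

    #[derive(Debug, clap::Args)]
    struct JsonPrettyFileToSave {
        #[clap(short, long, value_name = "pretty-json-file-name", help = "Saves the results into the pretty json file")]
        pretty_file_to_save: Option<PathBuf>,
    }

    #[derive(Parser)]
    struct Cli {
        #[clap(flatten)]
        compact: JsonCompactFileToSave,
        #[clap(flatten)]
        pretty: JsonPrettyFileToSave,
    }

    fn main() {
        // Long flag names are derived from the field names
        let cli = Cli::parse_from(["czkawka_cli", "--compact-file-to-save", "out.json", "--pretty-file-to-save", "out_pretty.json"]);
        assert_eq!(cli.compact.compact_file_to_save, Some(PathBuf::from("out.json")));
        assert_eq!(cli.pretty.pretty_file_to_save, Some(PathBuf::from("out_pretty.json")));
    }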

View file

@@ -69,6 +69,8 @@ fn duplicates(duplicates: DuplicatesArgs) {
delete_method,
hash_type,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
#[cfg(target_family = "unix")]
exclude_other_filesystems,
@@ -79,37 +81,33 @@
set_number_of_threads(thread_number.thread_number);
let mut df = DuplicateFinder::new();
let mut item = DuplicateFinder::new();
df.set_included_directory(directories.directories);
df.set_excluded_directory(excluded_directories.excluded_directories);
df.set_excluded_items(excluded_items.excluded_items);
df.set_minimal_file_size(minimal_file_size);
df.set_maximal_file_size(maximal_file_size);
df.set_minimal_cache_file_size(minimal_cached_file_size);
df.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
df.set_check_method(search_method);
df.set_delete_method(delete_method);
df.set_hash_type(hash_type);
df.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_minimal_file_size(minimal_file_size);
item.set_maximal_file_size(maximal_file_size);
item.set_minimal_cache_file_size(minimal_cached_file_size);
item.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
item.set_check_method(search_method);
item.set_delete_method(delete_method);
item.set_hash_type(hash_type);
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
df.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
df.set_ignore_hard_links(!allow_hard_links.allow_hard_links);
df.set_dryrun(dryrun.dryrun);
df.set_case_sensitive_name_comparison(case_sensitive_name_comparison.case_sensitive_name_comparison);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_ignore_hard_links(!allow_hard_links.allow_hard_links);
item.set_dryrun(dryrun.dryrun);
item.set_case_sensitive_name_comparison(case_sensitive_name_comparison.case_sensitive_name_comparison);
df.find_duplicates(None, None);
item.find_duplicates(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = df.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
df.print_results_to_output();
item.print_results_to_output();
}
df.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn empty_folders(empty_folders: EmptyFoldersArgs) {
@@ -118,6 +116,8 @@ fn empty_folders(empty_folders: EmptyFoldersArgs) {
directories,
delete_folders,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
excluded_directories,
excluded_items,
#[cfg(target_family = "unix")]
@@ -126,27 +126,23 @@
set_number_of_threads(thread_number.thread_number);
let mut ef = EmptyFolder::new();
let mut item = EmptyFolder::new();
ef.set_included_directory(directories.directories);
ef.set_excluded_directory(excluded_directories.excluded_directories);
ef.set_excluded_items(excluded_items.excluded_items);
ef.set_delete_folder(delete_folders);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_delete_folder(delete_folders);
#[cfg(target_family = "unix")]
ef.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
ef.find_empty_folders(None, None);
item.find_empty_folders(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = ef.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
ef.print_results_to_output();
item.print_results_to_output();
}
ef.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn biggest_files(biggest_files: BiggestFilesArgs) {
@@ -158,6 +154,8 @@ fn biggest_files(biggest_files: BiggestFilesArgs) {
allowed_extensions,
number_of_files,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
#[cfg(target_family = "unix")]
exclude_other_filesystems,
@@ -167,35 +165,31 @@
set_number_of_threads(thread_number.thread_number);
let mut bf = BigFile::new();
let mut item = BigFile::new();
bf.set_included_directory(directories.directories);
bf.set_excluded_directory(excluded_directories.excluded_directories);
bf.set_excluded_items(excluded_items.excluded_items);
bf.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
bf.set_number_of_files_to_check(number_of_files);
bf.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
item.set_number_of_files_to_check(number_of_files);
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
bf.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
if delete_files {
bf.set_delete_method(DeleteMethod::Delete);
item.set_delete_method(DeleteMethod::Delete);
}
if smallest_mode {
bf.set_search_mode(SearchMode::SmallestFiles);
item.set_search_mode(SearchMode::SmallestFiles);
}
bf.find_big_files(None, None);
item.find_big_files(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = bf.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
bf.print_results_to_output();
item.print_results_to_output();
}
bf.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn empty_files(empty_files: EmptyFilesArgs) {
@@ -207,6 +201,8 @@ fn empty_files(empty_files: EmptyFilesArgs) {
allowed_extensions,
delete_files,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
#[cfg(target_family = "unix")]
exclude_other_filesystems,
@@ -214,32 +210,28 @@
set_number_of_threads(thread_number.thread_number);
let mut ef = EmptyFiles::new();
let mut item = EmptyFiles::new();
ef.set_included_directory(directories.directories);
ef.set_excluded_directory(excluded_directories.excluded_directories);
ef.set_excluded_items(excluded_items.excluded_items);
ef.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
ef.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
ef.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
if delete_files {
ef.set_delete_method(DeleteMethod::Delete);
item.set_delete_method(DeleteMethod::Delete);
}
ef.find_empty_files(None, None);
item.find_empty_files(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = ef.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
ef.print_results_to_output();
item.print_results_to_output();
}
ef.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn temporary(temporary: TemporaryArgs) {
@@ -252,36 +244,34 @@
exclude_other_filesystems,
delete_files,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
} = temporary;
set_number_of_threads(thread_number.thread_number);
let mut tf = Temporary::new();
let mut item = Temporary::new();
tf.set_included_directory(directories.directories);
tf.set_excluded_directory(excluded_directories.excluded_directories);
tf.set_excluded_items(excluded_items.excluded_items);
tf.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
tf.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
if delete_files {
tf.set_delete_method(DeleteMethod::Delete);
item.set_delete_method(DeleteMethod::Delete);
}
tf.find_temporary_files(None, None);
item.find_temporary_files(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = tf.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
tf.print_results_to_output();
item.print_results_to_output();
}
tf.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn similar_images(similar_images: SimilarImagesArgs) {
@@ -291,6 +281,8 @@ fn similar_images(similar_images: SimilarImagesArgs) {
excluded_directories,
excluded_items,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
minimal_file_size,
maximal_file_size,
similarity_preset,
@@ -304,34 +296,30 @@
set_number_of_threads(thread_number.thread_number);
let mut sf = SimilarImages::new();
let mut item = SimilarImages::new();
sf.set_included_directory(directories.directories);
sf.set_excluded_directory(excluded_directories.excluded_directories);
sf.set_excluded_items(excluded_items.excluded_items);
sf.set_minimal_file_size(minimal_file_size);
sf.set_maximal_file_size(maximal_file_size);
sf.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_minimal_file_size(minimal_file_size);
item.set_maximal_file_size(maximal_file_size);
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
sf.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
sf.set_image_filter(image_filter);
sf.set_hash_alg(hash_alg);
sf.set_hash_size(hash_size);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_image_filter(image_filter);
item.set_hash_alg(hash_alg);
item.set_hash_size(hash_size);
sf.set_similarity(return_similarity_from_similarity_preset(&similarity_preset, hash_size));
item.set_similarity(return_similarity_from_similarity_preset(&similarity_preset, hash_size));
sf.find_similar_images(None, None);
item.find_similar_images(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = sf.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
sf.print_results_to_output();
item.print_results_to_output();
}
sf.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn same_music(same_music: SameMusicArgs) {
@@ -342,6 +330,8 @@ fn same_music(same_music: SameMusicArgs) {
excluded_items,
// delete_files,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
#[cfg(target_family = "unix")]
exclude_other_filesystems,
@@ -352,34 +342,30 @@
set_number_of_threads(thread_number.thread_number);
let mut mf = SameMusic::new();
let mut item = SameMusic::new();
mf.set_included_directory(directories.directories);
mf.set_excluded_directory(excluded_directories.excluded_directories);
mf.set_excluded_items(excluded_items.excluded_items);
mf.set_minimal_file_size(minimal_file_size);
mf.set_maximal_file_size(maximal_file_size);
mf.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_minimal_file_size(minimal_file_size);
item.set_maximal_file_size(maximal_file_size);
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
mf.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
mf.set_music_similarity(music_similarity);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_music_similarity(music_similarity);
// if delete_files {
// // TODO mf.set_delete_method(same_music::DeleteMethod::Delete);
// // TODO item.set_delete_method(same_music::DeleteMethod::Delete);
// }
mf.find_same_music(None, None);
item.find_same_music(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = mf.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
mf.print_results_to_output();
item.print_results_to_output();
}
mf.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn invalid_symlinks(invalid_symlinks: InvalidSymlinksArgs) {
@@ -390,6 +376,8 @@ fn invalid_symlinks(invalid_symlinks: InvalidSymlinksArgs) {
excluded_items,
allowed_extensions,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
#[cfg(target_family = "unix")]
exclude_other_filesystems,
@@ -398,31 +386,27 @@
set_number_of_threads(thread_number.thread_number);
let mut ifs = InvalidSymlinks::new();
let mut item = InvalidSymlinks::new();
ifs.set_included_directory(directories.directories);
ifs.set_excluded_directory(excluded_directories.excluded_directories);
ifs.set_excluded_items(excluded_items.excluded_items);
ifs.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
ifs.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
ifs.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
if delete_files {
ifs.set_delete_method(DeleteMethod::Delete);
item.set_delete_method(DeleteMethod::Delete);
}
ifs.find_invalid_links(None, None);
item.find_invalid_links(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = ifs.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
ifs.print_results_to_output();
item.print_results_to_output();
}
ifs.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn broken_files(broken_files: BrokenFilesArgs) {
@@ -434,6 +418,8 @@ fn broken_files(broken_files: BrokenFilesArgs) {
allowed_extensions,
delete_files,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
#[cfg(target_family = "unix")]
exclude_other_filesystems,
@@ -441,32 +427,28 @@
set_number_of_threads(thread_number.thread_number);
let mut br = BrokenFiles::new();
let mut item = BrokenFiles::new();
br.set_included_directory(directories.directories);
br.set_excluded_directory(excluded_directories.excluded_directories);
br.set_excluded_items(excluded_items.excluded_items);
br.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
br.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
br.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
if delete_files {
br.set_delete_method(DeleteMethod::Delete);
item.set_delete_method(DeleteMethod::Delete);
}
br.find_broken_files(None, None);
item.find_broken_files(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = br.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
br.print_results_to_output();
item.print_results_to_output();
}
br.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn similar_videos(similar_videos: SimilarVideosArgs) {
@@ -476,6 +458,8 @@ fn similar_videos(similar_videos: SimilarVideosArgs) {
excluded_directories,
excluded_items,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
#[cfg(target_family = "unix")]
exclude_other_filesystems,
@@ -487,31 +471,27 @@
set_number_of_threads(thread_number.thread_number);
let mut vr = SimilarVideos::new();
let mut item = SimilarVideos::new();
vr.set_included_directory(directories.directories);
vr.set_excluded_directory(excluded_directories.excluded_directories);
vr.set_excluded_items(excluded_items.excluded_items);
vr.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
vr.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
vr.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
vr.set_minimal_file_size(minimal_file_size);
vr.set_maximal_file_size(maximal_file_size);
vr.set_tolerance(tolerance);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_minimal_file_size(minimal_file_size);
item.set_maximal_file_size(maximal_file_size);
item.set_tolerance(tolerance);
vr.find_similar_videos(None, None);
item.find_similar_videos(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = vr.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
vr.print_results_to_output();
item.print_results_to_output();
}
vr.get_text_messages().print_messages();
item.get_text_messages().print_messages();
}
fn bad_extensions(bad_extensions: BadExtensionsArgs) {
@@ -521,6 +501,8 @@ fn bad_extensions(bad_extensions: BadExtensionsArgs) {
excluded_directories,
excluded_items,
file_to_save,
json_compact_file_to_save,
json_pretty_file_to_save,
not_recursive,
#[cfg(target_family = "unix")]
exclude_other_filesystems,
@@ -529,26 +511,40 @@
set_number_of_threads(thread_number.thread_number);
let mut be = BadExtensions::new();
let mut item = BadExtensions::new();
be.set_included_directory(directories.directories);
be.set_excluded_directory(excluded_directories.excluded_directories);
be.set_excluded_items(excluded_items.excluded_items);
be.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
be.set_recursive_search(!not_recursive.not_recursive);
item.set_included_directory(directories.directories);
item.set_excluded_directory(excluded_directories.excluded_directories);
item.set_excluded_items(excluded_items.excluded_items);
item.set_allowed_extensions(allowed_extensions.allowed_extensions.join(","));
item.set_recursive_search(!not_recursive.not_recursive);
#[cfg(target_family = "unix")]
be.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
item.set_exclude_other_filesystems(exclude_other_filesystems.exclude_other_filesystems);
be.find_bad_extensions_files(None, None);
item.find_bad_extensions_files(None, None);
if let Some(file_name) = file_to_save.file_name() {
if let Err(e) = be.print_results_to_file(file_name) {
save_results_to_files(file_to_save.file_name(), json_compact_file_to_save.file_name(), json_pretty_file_to_save.file_name(), &item);
if !cfg!(debug_assertions) {
item.print_results_to_output();
}
item.get_text_messages().print_messages();
}
fn save_results_to_files<T: PrintResults>(txt_file_name: Option<&str>, compact_json_file_name: Option<&str>, pretty_json_file_name: Option<&str>, item: &T) {
if let Some(file_name) = txt_file_name {
if let Err(e) = item.print_results_to_file(file_name) {
error!("Failed to save results to file {e}");
}
}
if !cfg!(debug_assertions) {
be.print_results_to_output();
if let Some(file_name) = compact_json_file_name {
if let Err(e) = item.save_results_to_file_as_json(file_name, false) {
error!("Failed to save compact json results to file {e}");
}
}
if let Some(file_name) = pretty_json_file_name {
if let Err(e) = item.save_results_to_file_as_json(file_name, true) {
error!("Failed to save pretty json results to file {e}");
}
}
be.get_text_messages().print_messages();
}
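With saving centralized in save_results_to_files, the only per-tool boilerplate left is the gating of stdout printing, which relies on cfg!(debug_assertions) being a compile-time constant:

    fn main() {
        // cfg!(debug_assertions) evaluates to true in `cargo build` and false in
        // `cargo build --release`, so the CLI prints results only from release binaries
        if !cfg!(debug_assertions) {
            println!("results would be printed here");
        }
    }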

View file

@@ -11,6 +11,7 @@ use futures::channel::mpsc::UnboundedSender;
use log::debug;
use mime_guess::get_mime_extensions;
use rayon::prelude::*;
use serde::Serialize;
use crate::common::{prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads};
use crate::common_dir_traversal::{CheckingMethod, DirTraversalBuilder, DirTraversalResult, FileEntry, ProgressData, ToolType};
@@ -158,7 +159,7 @@ const WORKAROUNDS: &[(&str, &str)] = &[
("exe", "xls"), // Not sure why xls is not recognized
];
#[derive(Clone)]
#[derive(Clone, Serialize)]
pub struct BadFileEntry {
pub path: PathBuf,
pub modified_date: u64,
@@ -426,6 +427,10 @@ impl PrintResults for BadExtensions {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.bad_extensions_files, pretty_print)
}
}
impl BadExtensions {
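One detail worth knowing when deriving Serialize on entries like BadFileEntry: serde serializes PathBuf as a plain JSON string and returns an error for non-UTF-8 paths. A minimal check (field subset is illustrative; assumes serde's derive feature and serde_json):

    use serde::Serialize;
    use std::path::PathBuf;

    #[derive(Serialize)]
    struct BadFileEntry {
        path: PathBuf,
        modified_date: u64,
    }

    fn main() {
        let entry = BadFileEntry { path: PathBuf::from("/tmp/a.txt"), modified_date: 0 };
        // Prints: {"path":"/tmp/a.txt","modified_date":0}
        println!("{}", serde_json::to_string(&entry).unwrap());
    }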

View file

@@ -12,13 +12,14 @@ use futures::channel::mpsc::UnboundedSender;
use humansize::{format_size, BINARY};
use log::debug;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use crate::common::{check_folder_children, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, split_path};
use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType};
use crate::common_tool::{CommonData, CommonToolData, DeleteMethod};
use crate::common_traits::{DebugPrint, PrintResults};
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FileEntry {
pub path: PathBuf,
pub size: u64,
@@ -39,7 +40,7 @@ pub struct Info {
pub struct BigFile {
common_data: CommonToolData,
information: Info,
big_files: Vec<(u64, FileEntry)>,
big_files: Vec<FileEntry>,
number_of_files_to_check: usize,
search_mode: SearchMode,
}
@@ -189,7 +190,7 @@ impl BigFile {
iter = Box::new(old_map.into_iter().rev());
}
for (size, mut vector) in iter {
for (_size, mut vector) in iter {
if self.information.number_of_real_files < self.number_of_files_to_check {
if vector.len() > 1 {
vector.sort_unstable_by_key(|e| {
@@ -199,7 +200,7 @@ }
}
for file in vector {
if self.information.number_of_real_files < self.number_of_files_to_check {
self.big_files.push((size, file));
self.big_files.push(file);
self.information.number_of_real_files += 1;
} else {
break;
@@ -214,7 +215,7 @@
fn delete_files(&mut self) {
match self.common_data.delete_method {
DeleteMethod::Delete => {
for (_, file_entry) in &self.big_files {
for file_entry in &self.big_files {
if fs::remove_file(&file_entry.path).is_err() {
self.common_data.text_messages.warnings.push(file_entry.path.display().to_string());
}
@@ -262,8 +263,8 @@ impl PrintResults for BigFile {
} else {
writeln!(writer, "{} the smallest files.\n\n", self.information.number_of_real_files)?;
}
for (size, file_entry) in &self.big_files {
writeln!(writer, "{} ({}) - {}", format_size(*size, BINARY), size, file_entry.path.display())?;
for file_entry in &self.big_files {
writeln!(writer, "{} ({}) - {}", format_size(file_entry.size, BINARY), file_entry.size, file_entry.path.display())?;
}
} else {
write!(writer, "Not found any files.").unwrap();
@@ -271,6 +272,10 @@
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.big_files, pretty_print)
}
}
impl CommonData for BigFile {
@@ -287,7 +292,7 @@ impl BigFile {
self.search_mode = search_mode;
}
pub const fn get_big_files(&self) -> &Vec<(u64, FileEntry)> {
pub const fn get_big_files(&self) -> &Vec<FileEntry> {
&self.big_files
}
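The rewritten txt output prints each size twice, human-readable plus raw bytes, now both taken from file_entry.size. With humansize's BINARY formatting that pairing renders roughly like:

    use humansize::{format_size, BINARY};

    fn main() {
        let size: u64 = 1_572_864;
        // Mirrors the `"{} ({})"` line in print_results_to_writer;
        // prints something like: 1.5 MiB (1572864)
        println!("{} ({})", format_size(size, BINARY), size);
    }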

View file

@@ -481,6 +481,10 @@ impl PrintResults for BrokenFiles {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.broken_files, pretty_print)
}
}
fn check_extension_availability(file_name_lowercase: &str) -> TypeOfFile {

View file

@@ -37,7 +37,7 @@ impl Directories {
let directories: Vec<PathBuf> = included_directory;
let mut checked_directories: Vec<PathBuf> = Vec::new();
for directory in directories {
for mut directory in directories {
if directory.to_string_lossy().contains('*') {
messages.warnings.push(flc!(
"core_directory_wildcard_no_supported",
@@ -46,23 +46,6 @@
continue;
}
#[cfg(not(target_family = "windows"))]
if directory.is_relative() {
messages.warnings.push(flc!(
"core_directory_relative_path",
generate_translation_hashmap(vec![("path", directory.display().to_string())])
));
continue;
}
#[cfg(target_family = "windows")]
if directory.is_relative() && !directory.starts_with("\\") {
messages.warnings.push(flc!(
"core_directory_relative_path",
generate_translation_hashmap(vec![("path", directory.display().to_string())])
));
continue;
}
if !directory.exists() {
messages.warnings.push(flc!(
"core_directory_must_exists",
@@ -77,6 +60,20 @@
));
continue;
}
// If this is not a strange Windows path (starting with "\\"), try to canonicalize it
if !directory.starts_with("\\") {
let Ok(dir2) = directory.canonicalize() else {
messages.warnings.push(flc!(
"core_directory_must_exists",
generate_translation_hashmap(vec![("path", directory.display().to_string())])
));
continue;
};
directory = dir2;
}
checked_directories.push(directory);
}
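The removed relative-path rejection is subsumed by this canonicalization step: Path::canonicalize resolves a relative path against the current working directory, follows symlinks, and fails for a nonexistent path, which is why the core_directory_must_exists warning is reused on failure. A quick illustration:

    use std::path::Path;

    fn main() -> std::io::Result<()> {
        // "." is relative; canonicalize turns it into an absolute, symlink-free path
        // (and would return Err for a path that does not exist)
        let canonical = Path::new(".").canonicalize()?;
        assert!(canonical.is_absolute());
        println!("{}", canonical.display());
        Ok(())
    }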

View file

@@ -1,4 +1,5 @@
use fun_time::fun_time;
use serde::Serialize;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::path::Path;
@@ -31,6 +32,39 @@ pub trait PrintResults {
writer.flush()?;
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()>;
fn save_results_to_file_as_json_internal<T: Serialize>(&self, file_name: &str, item_to_serialize: &T, pretty_print: bool) -> std::io::Result<()> {
if pretty_print {
self.save_results_to_file_as_json_pretty(file_name, item_to_serialize)
} else {
self.save_results_to_file_as_json_compact(file_name, item_to_serialize)
}
}
#[fun_time(message = "save_results_to_file_as_json_pretty")]
fn save_results_to_file_as_json_pretty<T: Serialize>(&self, file_name: &str, item_to_serialize: &T) -> std::io::Result<()> {
let file_handler = File::create(file_name)?;
let mut writer = BufWriter::new(file_handler);
serde_json::to_writer_pretty(&mut writer, item_to_serialize)?;
Ok(())
}
#[fun_time(message = "save_results_to_file_as_json_compact")]
fn save_results_to_file_as_json_compact<T: Serialize>(&self, file_name: &str, item_to_serialize: &T) -> std::io::Result<()> {
let file_handler = File::create(file_name)?;
let mut writer = BufWriter::new(file_handler);
serde_json::to_writer(&mut writer, item_to_serialize)?;
Ok(())
}
fn save_all_in_one(&self, file_name: &str) -> std::io::Result<()> {
self.save_results_to_file_as_json(&format!("{file_name}_pretty.json"), true)?;
self.save_results_to_file_as_json(&format!("{file_name}_compact.json"), false)?;
self.print_results_to_file(&format!("{file_name}.txt"))?;
Ok(())
}
}
pub trait ResultEntry {
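The compact and pretty writers above differ only in which serde_json serializer they call. For a small illustrative type (to_string variants used here in place of the to_writer calls; the formatting is identical):

    use serde::Serialize;

    #[derive(Serialize)]
    struct Entry {
        path: String,
        size: u64,
    }

    fn main() {
        let results = vec![Entry { path: "a.txt".into(), size: 10 }];
        // Compact: [{"path":"a.txt","size":10}]
        println!("{}", serde_json::to_string(&results).unwrap());
        // Pretty: same data, indented over multiple lines
        println!("{}", serde_json::to_string_pretty(&results).unwrap());
    }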

View file

@@ -1,4 +1,5 @@
use std::collections::{BTreeMap, HashSet};
use std::fmt::Debug;
use std::fs::File;
use std::hash::Hasher;
use std::io::prelude::*;
@@ -813,31 +814,116 @@ impl DuplicateFinder {
match self.check_method {
CheckingMethod::Name => {
for vector in self.files_with_identical_names.values() {
let _tuple: (u64, usize, usize) = delete_files(vector, &self.common_data.delete_method, &mut self.common_data.text_messages, self.dryrun);
}
let vec_files = self.files_with_identical_names.values().collect::<Vec<_>>();
delete_files(&vec_files, &self.common_data.delete_method, &mut self.common_data.text_messages, self.dryrun);
}
CheckingMethod::SizeName => {
for vector in self.files_with_identical_size_names.values() {
let _tuple: (u64, usize, usize) = delete_files(vector, &self.common_data.delete_method, &mut self.common_data.text_messages, self.dryrun);
}
let vec_files = self.files_with_identical_size_names.values().collect::<Vec<_>>();
delete_files(&vec_files, &self.common_data.delete_method, &mut self.common_data.text_messages, self.dryrun);
}
CheckingMethod::Hash => {
for vector_vectors in self.files_with_identical_hashes.values() {
for vector in vector_vectors {
let _tuple: (u64, usize, usize) = delete_files(vector, &self.common_data.delete_method, &mut self.common_data.text_messages, self.dryrun);
}
for vec_files in self.files_with_identical_hashes.values() {
let vev: Vec<&Vec<FileEntry>> = vec_files.iter().collect::<Vec<_>>();
delete_files(&vev, &self.common_data.delete_method, &mut self.common_data.text_messages, self.dryrun);
}
}
CheckingMethod::Size => {
for vector in self.files_with_identical_size.values() {
let _tuple: (u64, usize, usize) = delete_files(vector, &self.common_data.delete_method, &mut self.common_data.text_messages, self.dryrun);
}
let vec_files = self.files_with_identical_size.values().collect::<Vec<_>>();
delete_files(&vec_files, &self.common_data.delete_method, &mut self.common_data.text_messages, self.dryrun);
}
_ => panic!(),
}
}
}
// Here we assume that each internal Vec<> contains at least 1 object
#[allow(clippy::ptr_arg)]
fn delete_files(items: &Vec<&Vec<FileEntry>>, delete_method: &DeleteMethod, text_messages: &mut Messages, dryrun: bool) -> (u64, usize, usize) {
let res = items
.iter()
.map(|values| {
let mut gained_space: u64 = 0;
let mut removed_files: usize = 0;
let mut failed_to_remove_files: usize = 0;
let mut infos = Vec::new();
let mut errors = Vec::new();
let mut all_values = (*values).clone();
let len = all_values.len();
// Sort from oldest to newest - from the smallest modified date to the largest
all_values.sort_unstable_by_key(ResultEntry::get_modified_date);
if delete_method == &DeleteMethod::HardLink {
let original_file = &all_values[0];
for file_entry in &all_values[1..] {
if dryrun {
infos.push(format!(
"Dryrun - would replace file {:?} with hard link to {:?}",
file_entry.get_path(),
original_file.get_path()
));
} else if let Err(e) = make_hard_link(original_file.get_path(), file_entry.get_path()) {
errors.push(format!(
"Cannot create hard link from {:?} to {:?} - {}",
file_entry.get_path(),
original_file.get_path(),
e
));
failed_to_remove_files += 1;
} else {
gained_space += file_entry.get_size();
removed_files += 1;
}
}
return (infos, errors, gained_space, removed_files, failed_to_remove_files);
}
let items = match delete_method {
DeleteMethod::Delete => &all_values,
DeleteMethod::AllExceptNewest => &all_values[..(len - 1)],
DeleteMethod::AllExceptOldest => &all_values[1..],
DeleteMethod::OneOldest => &all_values[..1],
DeleteMethod::OneNewest => &all_values[(len - 1)..],
DeleteMethod::HardLink | DeleteMethod::None => unreachable!("HardLink and None should be handled before"),
};
for i in items {
if dryrun {
infos.push(format!("Dryrun - would delete file: {:?}", i.get_path()));
} else {
if let Err(e) = std::fs::remove_file(i.get_path()) {
errors.push(format!("Cannot delete file: {:?} - {e}", i.get_path()));
failed_to_remove_files += 1;
} else {
removed_files += 1;
gained_space += i.get_size();
}
}
}
(infos, errors, gained_space, removed_files, failed_to_remove_files)
})
.collect::<Vec<_>>();
let mut gained_space = 0;
let mut removed_files = 0;
let mut failed_to_remove_files = 0;
for (infos, errors, gained_space_v, removed_files_v, failed_to_remove_files_v) in res {
text_messages.messages.extend(infos);
text_messages.errors.extend(errors);
gained_space += gained_space_v;
removed_files += removed_files_v;
failed_to_remove_files += failed_to_remove_files_v;
}
(gained_space, removed_files, failed_to_remove_files)
}
impl DuplicateFinder {
pub fn set_case_sensitive_name_comparison(&mut self, case_sensitive_name_comparison: bool) {
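Because all_values is sorted by modified date ascending before the match, each delete policy reduces to a slice of that vector. A quick boundary check with illustrative values:

    fn main() {
        // Sorted from oldest to newest, as delete_files guarantees
        let all_values = ["oldest", "middle", "newest"];
        let len = all_values.len();
        assert_eq!(&all_values[..(len - 1)], &["oldest", "middle"]); // AllExceptNewest
        assert_eq!(&all_values[1..], &["middle", "newest"]);         // AllExceptOldest
        assert_eq!(&all_values[..1], &["oldest"]);                   // OneOldest
        assert_eq!(&all_values[(len - 1)..], &["newest"]);           // OneNewest
    }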
@@ -989,6 +1075,24 @@ impl PrintResults for DuplicateFinder {
}
writeln!(writer)?;
}
} else if !self.files_with_identical_names_referenced.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same names in referenced folders-------------------------------------------------"
)?;
writeln!(
writer,
"Found {} files in {} groups with same name(may have different content)",
self.information.number_of_duplicated_files_by_name, self.information.number_of_groups_by_name,
)?;
for (name, (file_entry, vector)) in self.files_with_identical_names_referenced.iter().rev() {
writeln!(writer, "Name - {} - {} files ", name, vector.len())?;
writeln!(writer, "Reference file - {}", file_entry.path.display())?;
for j in vector {
writeln!(writer, "{}", j.path.display())?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any files with same names.")?;
}
@@ -1011,6 +1115,24 @@
}
writeln!(writer)?;
}
} else if !self.files_with_identical_names_referenced.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same size and names in referenced folders-------------------------------------------------"
)?;
writeln!(
writer,
"Found {} files in {} groups with same size and name(may have different content)",
self.information.number_of_duplicated_files_by_size_name, self.information.number_of_groups_by_size_name,
)?;
for ((size, name), (file_entry, vector)) in self.files_with_identical_size_names_referenced.iter().rev() {
writeln!(writer, "Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len())?;
writeln!(writer, "Reference file - {}", file_entry.path.display())?;
for j in vector {
writeln!(writer, "{}", j.path.display())?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any files with same size and names.")?;
}
@@ -1034,6 +1156,25 @@
writeln!(writer, "{}", file_entry.path.display())?;
}
}
} else if !self.files_with_identical_size_referenced.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same size in referenced folders-------------------------------------------------"
)?;
writeln!(
writer,
"Found {} duplicated files which in {} groups which takes {}.",
self.information.number_of_duplicated_files_by_size,
self.information.number_of_groups_by_size,
format_size(self.information.lost_space_by_size, BINARY)
)?;
for (size, (file_entry, vector)) in self.files_with_identical_size_referenced.iter().rev() {
writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
writeln!(writer, "Reference file - {}", file_entry.path.display())?;
for file_entry in vector {
writeln!(writer, "{}", file_entry.path.display())?;
}
}
} else {
write!(writer, "Not found any duplicates.")?;
}
@@ -1059,6 +1200,27 @@
}
}
}
} else if !self.files_with_identical_hashes_referenced.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same hashes in referenced folders-------------------------------------------------"
)?;
writeln!(
writer,
"Found {} duplicated files which in {} groups which takes {}.",
self.information.number_of_duplicated_files_by_hash,
self.information.number_of_groups_by_hash,
format_size(self.information.lost_space_by_hash, BINARY)
)?;
for (size, vectors_vector) in self.files_with_identical_hashes_referenced.iter().rev() {
for (file_entry, vector) in vectors_vector {
writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
writeln!(writer, "Reference file - {}", file_entry.path.display())?;
for file_entry in vector {
writeln!(writer, "{}", file_entry.path.display())?;
}
}
}
} else {
write!(writer, "Not found any duplicates.")?;
}
@@ -1068,71 +1230,26 @@
Ok(())
}
}
fn delete_files(vector: &[FileEntry], delete_method: &DeleteMethod, text_messages: &mut Messages, dryrun: bool) -> (u64, usize, usize) {
    assert!(vector.len() > 1, "Vector length must be bigger than 1(This should be done in previous steps).");
    let mut gained_space: u64 = 0;
    let mut removed_files: usize = 0;
    let mut failed_to_remove_files: usize = 0;
    let mut values = vector.iter().enumerate();
    let q_index = match delete_method {
        DeleteMethod::OneOldest | DeleteMethod::AllExceptNewest => values.max_by(|(_, l), (_, r)| l.modified_date.cmp(&r.modified_date)),
        DeleteMethod::OneNewest | DeleteMethod::AllExceptOldest | DeleteMethod::HardLink => values.min_by(|(_, l), (_, r)| l.modified_date.cmp(&r.modified_date)),
        DeleteMethod::None => values.next(),
        _ => unreachable!(),
    };
    let q_index = q_index.map_or(0, |t| t.0);
    let n = match delete_method {
        DeleteMethod::OneNewest | DeleteMethod::OneOldest => 1,
        DeleteMethod::AllExceptNewest | DeleteMethod::AllExceptOldest | DeleteMethod::None | DeleteMethod::HardLink => usize::MAX,
        _ => unreachable!(),
    };
    for (index, file) in vector.iter().enumerate() {
        if q_index == index {
            continue;
        }
        if removed_files + failed_to_remove_files >= n {
            break;
        }
        let r = match delete_method {
            DeleteMethod::OneOldest | DeleteMethod::OneNewest | DeleteMethod::AllExceptOldest | DeleteMethod::AllExceptNewest => {
                if dryrun {
                    Ok(Some(format!("Delete {}", file.path.display())))
                } else {
                    fs::remove_file(&file.path).map(|()| None)
                }
            }
            DeleteMethod::HardLink => {
                let src = &vector[q_index].path;
                if dryrun {
                    Ok(Some(format!("Replace file {} with hard link to {}", file.path.display(), src.display())))
                } else {
                    make_hard_link(src, &file.path).map(|()| None)
                }
            }
            DeleteMethod::None => Ok(None),
            _ => unreachable!(),
        };
        match r {
            Err(e) => {
                failed_to_remove_files += 1;
                text_messages.warnings.push(format!("Failed to remove {} ({})", file.path.display(), e));
            }
            Ok(Some(msg)) => {
                text_messages.messages.push(msg);
                removed_files += 1;
                gained_space += file.size;
            }
            Ok(None) => {
                removed_files += 1;
                gained_space += file.size;
            }
        }
    }
    (gained_space, removed_files, failed_to_remove_files)
}

fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> io::Result<()> {
    if self.get_use_reference() {
        match self.check_method {
            CheckingMethod::Name => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_names_referenced, pretty_print),
            CheckingMethod::SizeName => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_names_referenced, pretty_print),
            CheckingMethod::Size => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_referenced, pretty_print),
            CheckingMethod::Hash => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_hashes_referenced, pretty_print),
            _ => panic!(),
        }
    } else {
        match self.check_method {
            CheckingMethod::Name => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_names, pretty_print),
            CheckingMethod::SizeName => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_names, pretty_print),
            CheckingMethod::Size => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size, pretty_print),
            CheckingMethod::Hash => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_hashes, pretty_print),
            _ => panic!(),
        }
    }
}
#[cfg(target_family = "windows")]
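The shared save_results_to_file_as_json_internal helper that every implementation here delegates to is not shown in this diff. A minimal sketch of such a helper, assuming serde_json and a Serialize bound on the stored results (the signature and body are assumptions, not the confirmed implementation):

// Hypothetical sketch only - the real helper lives elsewhere in czkawka_core.
use std::fs::File;
use std::io::{self, BufWriter};
use serde::Serialize;

fn save_results_to_file_as_json_internal<T: Serialize + ?Sized>(file_name: &str, results: &T, pretty_print: bool) -> io::Result<()> {
    // Buffer writes so large result sets are not flushed entry by entry.
    let writer = BufWriter::new(File::create(file_name)?);
    if pretty_print {
        serde_json::to_writer_pretty(writer, results)?; // serde_json::Error converts into io::Error
    } else {
        serde_json::to_writer(writer, results)?;
    }
    Ok(())
}

With a helper of this shape, each tool's trait implementation only has to pick which collection to serialize (referenced vs. plain results) and forward it.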

View file

@ -141,6 +141,10 @@ impl PrintResults for EmptyFiles {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.empty_files, pretty_print)
}
}
impl EmptyFiles {

View file

@ -158,6 +158,10 @@ impl PrintResults for EmptyFolder {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.empty_folder_list.keys().collect::<Vec<_>>(), pretty_print)
}
}
impl CommonData for EmptyFolder {

View file

@ -127,6 +127,10 @@ impl PrintResults for InvalidSymlinks {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.invalid_symlinks, pretty_print)
}
}
impl CommonData for InvalidSymlinks {

View file

@ -924,10 +924,40 @@ impl PrintResults for SameMusic {
file_entry.genre,
file_entry.bitrate,
file_entry.path.display()
)
.unwrap();
)?;
}
writeln!(writer).unwrap();
writeln!(writer)?;
}
} else if !self.duplicated_music_entries_referenced.is_empty() {
writeln!(writer, "{} music files which have similar friends\n\n.", self.duplicated_music_entries_referenced.len())?;
for (file_entry, vec_file_entry) in &self.duplicated_music_entries_referenced {
writeln!(writer, "Found {} music files which have similar friends", vec_file_entry.len())?;
writeln!(writer)?;
writeln!(
writer,
"TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
file_entry.track_title,
file_entry.track_artist,
file_entry.year,
file_entry.length,
file_entry.genre,
file_entry.bitrate,
file_entry.path.display()
)?;
for file_entry in vec_file_entry {
writeln!(
writer,
"TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
file_entry.track_title,
file_entry.track_artist,
file_entry.year,
file_entry.length,
file_entry.genre,
file_entry.bitrate,
file_entry.path.display()
)?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any similar music files.")?;
@ -935,6 +965,14 @@ impl PrintResults for SameMusic {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
if self.get_use_reference() {
self.save_results_to_file_as_json_internal(file_name, &self.duplicated_music_entries_referenced, pretty_print)
} else {
self.save_results_to_file_as_json_internal(file_name, &self.duplicated_music_entries, pretty_print)
}
}
}
fn get_approximate_conversion(what: &mut String) {
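The *_referenced collections printed above are pairs of a reference entry and its group, and serde serializes Rust tuples as JSON arrays, so the JSON output for referenced results would be nested arrays rather than objects with named fields. A minimal, self-contained demonstration of that serde behavior (the Entry type is a stand-in, not czkawka's MusicEntry):

use serde::Serialize;

#[derive(Serialize)]
struct Entry {
    path: String,
}

fn main() {
    // Shaped like the (reference, group) pairs in the *_referenced collections above.
    let referenced: Vec<(Entry, Vec<Entry>)> = vec![(
        Entry { path: "ref.mp3".into() },
        vec![Entry { path: "dup.mp3".into() }],
    )];
    // Tuples become JSON arrays: [[{"path":"ref.mp3"},[{"path":"dup.mp3"}]]]
    println!("{}", serde_json::to_string(&referenced).unwrap());
}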

View file

@ -850,12 +850,46 @@ impl PrintResults for SimilarImages {
}
writeln!(writer)?;
}
} else if !self.similar_referenced_vectors.is_empty() {
writeln!(writer, "{} images which have similar friends\n\n", self.similar_referenced_vectors.len())?;
for (file_entry, vec_file_entry) in &self.similar_referenced_vectors {
writeln!(writer, "Found {} images which have similar friends", vec_file_entry.len())?;
writeln!(writer)?;
writeln!(
writer,
"{} - {} - {} - {}",
file_entry.path.display(),
file_entry.dimensions,
format_size(file_entry.size, BINARY),
get_string_from_similarity(&file_entry.similarity, self.hash_size)
)?;
for file_entry in vec_file_entry {
writeln!(
writer,
"{} - {} - {} - {}",
file_entry.path.display(),
file_entry.dimensions,
format_size(file_entry.size, BINARY),
get_string_from_similarity(&file_entry.similarity, self.hash_size)
)?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any similar images.")?;
}
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
if self.get_use_reference() {
self.save_results_to_file_as_json_internal(file_name, &self.similar_referenced_vectors, pretty_print)
} else {
self.save_results_to_file_as_json_internal(file_name, &self.similar_vectors, pretty_print)
}
}
}
pub fn get_string_from_similarity(similarity: &u32, hash_size: u8) -> String {

View file

@ -435,12 +435,32 @@ impl PrintResults for SimilarVideos {
}
writeln!(writer)?;
}
} else if !self.similar_referenced_vectors.is_empty() {
write!(writer, "{} videos which have similar friends\n\n", self.similar_referenced_vectors.len())?;
for (fe, struct_similar) in &self.similar_referenced_vectors {
writeln!(writer, "Found {} videos which have similar friends", struct_similar.len())?;
writeln!(writer)?;
writeln!(writer, "{} - {}", fe.path.display(), format_size(fe.size, BINARY))?;
for file_entry in struct_similar {
writeln!(writer, "{} - {}", file_entry.path.display(), format_size(file_entry.size, BINARY))?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any similar videos.")?;
}
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
if self.get_use_reference() {
self.save_results_to_file_as_json_internal(file_name, &self.similar_referenced_vectors, pretty_print)
} else {
self.save_results_to_file_as_json_internal(file_name, &self.similar_vectors, pretty_print)
}
}
}
pub fn check_if_ffmpeg_is_installed() -> bool {

View file

@ -10,6 +10,7 @@ use crossbeam_channel::Receiver;
use fun_time::fun_time;
use futures::channel::mpsc::UnboundedSender;
use rayon::prelude::*;
use serde::Serialize;
use crate::common::{check_folder_children, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads};
use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType};
@ -32,7 +33,7 @@ const TEMP_EXTENSIONS: &[&str] = &[
".partial",
];
#[derive(Clone)]
#[derive(Clone, Serialize)]
pub struct FileEntry {
pub path: PathBuf,
pub modified_date: u64,
@ -206,6 +207,10 @@ impl PrintResults for Temporary {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.temporary_files, pretty_print)
}
}
impl Default for Temporary {
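The newly derived Serialize on FileEntry is what lets the JSON helper accept the temporary-files list. A self-contained illustration, with the struct trimmed to the two fields visible in this hunk:

use std::path::PathBuf;
use serde::Serialize;

// Stand-in for the real FileEntry, reduced to the fields shown above.
#[derive(Clone, Serialize)]
pub struct FileEntry {
    pub path: PathBuf,
    pub modified_date: u64,
}

fn main() {
    let temporary_files = vec![FileEntry {
        path: PathBuf::from("/tmp/build.partial"),
        modified_date: 1_697_000_000,
    }];
    // PathBuf serializes as a plain string for valid UTF-8 paths.
    println!("{}", serde_json::to_string_pretty(&temporary_files).unwrap());
}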

View file

@ -538,7 +538,7 @@ move_files_title_dialog = Choose folder to which you want to move duplicated fil
move_files_choose_more_than_1_path = Only one path may be selected to be able to copy their duplicated files, selected {$path_number}.
move_stats = Properly moved {$num_files}/{$all_files} items
save_results_to_file = Saved results to file {$name}
save_results_to_file = Saved results to both txt and json files.
search_not_choosing_any_music = ERROR: You must select at least one checkbox with music searching types.
search_not_choosing_any_broken_files = ERROR: You must select at least one checkbox with type of checked broken files.

View file

@ -957,11 +957,11 @@ fn computer_big_files(
let vector = bf.get_big_files();
for (size, file_entry) in vector {
for file_entry in vector {
let (directory, file) = split_path(&file_entry.path);
let values: [(u32, &dyn ToValue); COLUMNS_NUMBER] = [
(ColumnsBigFiles::SelectionButton as u32, &false),
(ColumnsBigFiles::Size as u32, &(format_size(*size, BINARY))),
(ColumnsBigFiles::Size as u32, &(format_size(file_entry.size, BINARY))),
(ColumnsBigFiles::Name as u32, &file),
(ColumnsBigFiles::Path as u32, &directory),
(
@ -969,7 +969,7 @@ fn computer_big_files(
&(NaiveDateTime::from_timestamp_opt(file_entry.modified_date as i64, 0).unwrap().to_string()),
),
(ColumnsBigFiles::ModificationAsSecs as u32, &(file_entry.modified_date as i64)),
(ColumnsBigFiles::SizeAsBytes as u32, &(size)),
(ColumnsBigFiles::SizeAsBytes as u32, &(file_entry.size)),
];
list_store.set(&list_store.append(), &values);
}

View file

@ -2,14 +2,14 @@ use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
use czkawka_core::common_traits::PrintResults;
use gtk4::prelude::*;
use gtk4::{Button, Entry};
use czkawka_core::common_traits::PrintResults;
use crate::flg;
use crate::gui_structs::gui_data::GuiData;
use crate::help_functions::BottomButtonsEnum;
use crate::localizer_core::generate_translation_hashmap;
use crate::notebook_enums::*;
pub fn connect_button_save(gui_data: &GuiData) {
@ -30,64 +30,18 @@ pub fn connect_button_save(gui_data: &GuiData) {
let entry_info = gui_data.entry_info.clone();
let notebook_main = gui_data.main_notebook.notebook_main.clone();
buttons_save.connect_clicked(move |_| {
let file_name;
let result = match to_notebook_main_enum(notebook_main.current_page().unwrap()) {
NotebookMainEnum::Duplicate => {
file_name = "results_duplicates.txt";
shared_duplication_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::EmptyDirectories => {
file_name = "results_empty_folder.txt";
shared_empty_folders_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::EmptyFiles => {
file_name = "results_empty_files.txt";
shared_empty_files_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::Temporary => {
file_name = "results_temporary_files.txt";
shared_temporary_files_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::BigFiles => {
file_name = "results_big_files.txt";
shared_big_files_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::SimilarImages => {
file_name = "results_similar_images.txt";
shared_similar_images_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::SimilarVideos => {
file_name = "results_similar_videos.txt";
shared_similar_videos_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::SameMusic => {
file_name = "results_same_music.txt";
shared_same_music_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::Symlinks => {
file_name = "results_invalid_symlinks.txt";
shared_same_invalid_symlinks.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::BrokenFiles => {
file_name = "results_broken_files.txt";
shared_broken_files_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::BadExtensions => {
file_name = "results_bad_extensions.txt";
shared_bad_extensions_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::Duplicate => shared_duplication_state.borrow().save_all_in_one("results_duplicates"),
NotebookMainEnum::EmptyDirectories => shared_empty_folders_state.borrow().save_all_in_one("results_empty_directories"),
NotebookMainEnum::EmptyFiles => shared_empty_files_state.borrow().save_all_in_one("results_empty_files"),
NotebookMainEnum::Temporary => shared_temporary_files_state.borrow().save_all_in_one("results_temporary_files"),
NotebookMainEnum::BigFiles => shared_big_files_state.borrow().save_all_in_one("results_big_files"),
NotebookMainEnum::SimilarImages => shared_similar_images_state.borrow().save_all_in_one("results_similar_images"),
NotebookMainEnum::SimilarVideos => shared_similar_videos_state.borrow().save_all_in_one("results_similar_videos"),
NotebookMainEnum::SameMusic => shared_same_music_state.borrow().save_all_in_one("results_same_music"),
NotebookMainEnum::Symlinks => shared_same_invalid_symlinks.borrow().save_all_in_one("results_invalid_symlinks"),
NotebookMainEnum::BrokenFiles => shared_broken_files_state.borrow().save_all_in_one("results_broken_files"),
NotebookMainEnum::BadExtensions => shared_bad_extensions_state.borrow().save_all_in_one("results_bad_extensions"),
};
match result {
@ -99,7 +53,6 @@ pub fn connect_button_save(gui_data: &GuiData) {
}
post_save_things(
file_name,
&to_notebook_main_enum(notebook_main.current_page().unwrap()),
&shared_buttons,
&entry_info,
@ -109,13 +62,12 @@ pub fn connect_button_save(gui_data: &GuiData) {
}
fn post_save_things(
file_name: &str,
type_of_tab: &NotebookMainEnum,
shared_buttons: &Rc<RefCell<HashMap<NotebookMainEnum, HashMap<BottomButtonsEnum, bool>>>>,
entry_info: &Entry,
buttons_save: &Button,
) {
entry_info.set_text(flg!("save_results_to_file", generate_translation_hashmap(vec![("name", file_name.to_string())])).as_str());
entry_info.set_text(&flg!("save_results_to_file"));
// Set state
{
buttons_save.hide();
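save_all_in_one itself is not shown in this diff; given that the GUI now passes an extension-less base name and the status message reports both txt and json output, a plausible default method on PrintResults could look like this (a sketch under those assumptions, not the confirmed body):

// Assumed to be a provided method on the PrintResults trait.
fn save_all_in_one(&self, file_name: &str) -> std::io::Result<()> {
    self.print_results_to_file(&format!("{file_name}.txt"))?;
    // Pretty-printing is an assumption; the GUI may request compact JSON instead.
    self.save_results_to_file_as_json(&format!("{file_name}.json"), true)?;
    Ok(())
}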

View file

@ -1,22 +0,0 @@
import os
import sys
if len(sys.argv) != 4:
print("ERROR: Not provided 3 required arguments - " + str(sys.argv))
exit(1)
folder_name = sys.argv[1]
files_required = sys.argv[2]
directories_required = sys.argv[3]
file_count = sum(len(files) for _, _, files in os.walk(folder_name))
if str(file_count) != files_required:
print("Current files: " + str(file_count) + ", but required is: " + str(files_required))
print("This commit probably introduced regression, please recheck it.")
exit(1)
directory_count = sum(len(dire) for _, dire, files in os.walk(folder_name))
if str(directory_count) != directories_required:
print("Current directories: " + str(directory_count) + ", but required is: " + str(directories_required))
print("This commit probably introduced regression, please recheck it.")
exit(1)