2023-12-04 00:06:42 +13:00
#![ allow(unused_imports) ]
2024-02-15 05:45:25 +13:00
use std ::{ fs , thread } ;
2023-12-04 00:06:42 +13:00
// I don't wanna fight with unused imports in this file, so simply ignore it to avoid too much complexity
2023-12-08 07:38:41 +13:00
use std ::cmp ::Ordering ;
2020-10-15 05:41:37 +13:00
use std ::ffi ::OsString ;
2023-05-03 08:37:12 +12:00
use std ::fs ::{ DirEntry , File , OpenOptions } ;
2020-10-15 05:41:37 +13:00
use std ::path ::{ Path , PathBuf } ;
2023-12-08 07:38:41 +13:00
use std ::sync ::atomic ::{ AtomicBool , AtomicUsize } ;
use std ::sync ::{ atomic , Arc } ;
2023-05-03 08:37:12 +12:00
use std ::thread ::{ sleep , JoinHandle } ;
2023-12-04 00:06:42 +13:00
use std ::time ::{ Duration , Instant , SystemTime } ;
2020-09-01 05:37:30 +12:00
2022-07-25 06:48:02 +12:00
#[ cfg(feature = " heif " ) ]
use anyhow ::Result ;
2023-12-04 00:06:42 +13:00
use crossbeam_channel ::Sender ;
2022-06-01 03:52:55 +12:00
use directories_next ::ProjectDirs ;
2023-10-11 07:54:41 +13:00
use fun_time ::fun_time ;
2023-10-05 19:06:47 +13:00
use handsome_logger ::{ ColorChoice , ConfigBuilder , TerminalMode } ;
2022-06-01 03:52:55 +12:00
use image ::{ DynamicImage , ImageBuffer , Rgb } ;
use imagepipe ::{ ImageSource , Pipeline } ;
2022-06-09 07:42:51 +12:00
#[ cfg(feature = " heif " ) ]
2023-03-06 08:54:02 +13:00
use libheif_rs ::{ ColorSpace , HeifContext , RgbChroma } ;
2023-12-04 00:06:42 +13:00
#[ cfg(feature = " libraw " ) ]
use libraw ::Processor ;
use log ::{ debug , error , info , warn , LevelFilter , Record } ;
use rawloader ::RawLoader ;
use symphonia ::core ::conv ::IntoSample ;
2022-06-09 07:42:51 +12:00
2023-05-03 08:37:12 +12:00
// #[cfg(feature = "heif")]
// use libheif_rs::LibHeif;
2023-05-11 07:27:41 +12:00
use crate ::common_dir_traversal ::{ CheckingMethod , ProgressData , ToolType } ;
2023-05-03 08:37:12 +12:00
use crate ::common_directory ::Directories ;
2023-12-04 09:18:31 +13:00
use crate ::common_items ::{ ExcludedItems , SingleExcludedItem } ;
2023-10-14 07:33:17 +13:00
use crate ::common_messages ::Messages ;
use crate ::common_tool ::DeleteMethod ;
2023-05-08 06:54:05 +12:00
use crate ::common_traits ::ResultEntry ;
2023-10-14 07:33:17 +13:00
use crate ::duplicate ::make_hard_link ;
2023-10-11 07:54:41 +13:00
use crate ::CZKAWKA_VERSION ;
2023-05-03 08:37:12 +12:00
2023-06-10 08:11:47 +12:00
// User-configured worker-thread count (set via `set_number_of_threads`);
// `get_number_of_threads` falls back to all available threads when this is < 1.
static NUMBER_OF_THREADS: state::InitCell<usize> = state::InitCell::new();
// Cached hardware-thread count, filled on first call to `get_all_available_threads`.
static ALL_AVAILABLE_THREADS: state::InitCell<usize> = state::InitCell::new();

pub const DEFAULT_THREAD_SIZE: usize = 8 * 1024 * 1024; // 8 MB
// Stack size for rayon worker threads (used in `set_number_of_threads`).
pub const DEFAULT_WORKER_THREAD_SIZE: usize = 4 * 1024 * 1024; // 4 MB
2022-11-26 08:38:27 +13:00
pub fn get_number_of_threads ( ) -> usize {
let data = NUMBER_OF_THREADS . get ( ) ;
if * data > = 1 {
* data
} else {
2024-02-15 05:45:25 +13:00
get_all_available_threads ( )
2022-11-26 08:38:27 +13:00
}
}
2023-05-03 08:37:12 +12:00
2023-10-05 19:06:47 +13:00
fn filtering_messages ( record : & Record ) -> bool {
if let Some ( module_path ) = record . module_path ( ) {
2023-12-04 00:06:42 +13:00
module_path . starts_with ( " czkawka " ) | | module_path . starts_with ( " krokiet " )
2023-10-05 19:06:47 +13:00
} else {
true
}
}
/// Initializes the global terminal logger; `disabled_printing` silences all output.
/// Panics if a logger has already been installed.
pub fn setup_logger(disabled_printing: bool) {
    let log_level = match disabled_printing {
        true => LevelFilter::Off,
        false => LevelFilter::Info,
    };
    let config = ConfigBuilder::default()
        .set_level(log_level)
        .set_message_filtering(Some(filtering_messages))
        .build();
    handsome_logger::TermLogger::init(config, TerminalMode::Mixed, ColorChoice::Always).unwrap();
}
2024-02-15 05:45:25 +13:00
pub fn get_all_available_threads ( ) -> usize {
* ALL_AVAILABLE_THREADS . get_or_init ( | | {
let available_threads = thread ::available_parallelism ( ) . map ( std ::num ::NonZeroUsize ::get ) . unwrap_or ( 1 ) ;
ALL_AVAILABLE_THREADS . set ( available_threads ) ;
available_threads
} )
2023-12-04 00:06:42 +13:00
}
2023-10-11 07:54:41 +13:00
/// Logs app version, build mode, compiler version, OS details and CPU count,
/// plus warnings about slow build configurations.
pub fn print_version_mode() {
    // rustc version injected at build time — assumes the build script sets
    // RUST_VERSION_INTERNAL; compilation fails otherwise (env!).
    let rust_version = env!("RUST_VERSION_INTERNAL");
    let debug_release = if cfg!(debug_assertions) { "debug" } else { "release" };
    let processors = get_all_available_threads();
    let info = os_info::get();
    info!(
        "App version: {CZKAWKA_VERSION}, {debug_release} mode, rust {rust_version}, os {} {} [{} {}], {processors} cpu/threads",
        info.os_type(),
        info.version(),
        std::env::consts::ARCH,
        info.bitness(),
    );
    if cfg!(debug_assertions) {
        warn!("You are running debug version of app which is a lot of slower than release version.");
    }
    // USING_CRANELIFT is optional — only warn when the build opted into cranelift.
    if option_env!("USING_CRANELIFT").is_some() {
        warn!("You are running app with cranelift which is intended only for fast compilation, not runtime performance.");
    }
}
2022-11-26 08:38:27 +13:00
/// Configures the global thread pool to use every available hardware thread.
pub fn set_default_number_of_threads() {
    set_number_of_threads(get_all_available_threads());
}
2023-05-03 08:37:12 +12:00
2022-11-26 08:38:27 +13:00
pub fn set_number_of_threads ( thread_number : usize ) {
NUMBER_OF_THREADS . set ( thread_number ) ;
2023-12-04 00:06:42 +13:00
let additional_message = if thread_number = = 0 {
" (0 - means that all available threads will be used) "
} else {
" "
} ;
debug! ( " Number of threads set to {thread_number}{additional_message} " ) ;
rayon ::ThreadPoolBuilder ::new ( )
. num_threads ( get_number_of_threads ( ) )
. stack_size ( DEFAULT_WORKER_THREAD_SIZE )
. build_global ( )
. unwrap ( ) ;
2022-11-26 08:38:27 +13:00
}
2022-05-17 04:23:07 +12:00
/// Extensions of camera RAW formats handled by the raw-image loading path.
/// Fix: the original list contained `"kdc"` twice; the duplicate entry was removed.
pub const RAW_IMAGE_EXTENSIONS: &[&str] = &[
    "mrw", "arw", "srf", "sr2", "mef", "orf", "srw", "erf", "kdc", "dcs", "rw2", "raf", "dcr", "dng", "pef", "crw", "iiq", "3fr", "nrw", "nef", "mos", "cr2", "ari",
];
2023-12-17 11:21:09 +13:00
// Extensions decodable via the `image` crate.
pub const IMAGE_RS_EXTENSIONS: &[&str] = &["jpg", "jpeg", "png", "bmp", "tiff", "tif", "tga", "ff", "jif", "jfi", "webp", "gif", "ico", "exr", "qoi"];

// Subset used for similarity scanning — NOTE(review): omits gif/ico relative to the
// list above; confirm the omissions are intentional.
pub const IMAGE_RS_SIMILAR_IMAGES_EXTENSIONS: &[&str] = &["jpg", "jpeg", "png", "tiff", "tif", "tga", "ff", "jif", "jfi", "bmp", "webp", "exr", "qoi"];

// Extensions checked by the broken-files scanner.
pub const IMAGE_RS_BROKEN_FILES_EXTENSIONS: &[&str] = &[
    "jpg", "jpeg", "png", "tiff", "tif", "tga", "ff", "jif", "jfi", "gif", "bmp", "ico", "jfif", "jpe", "pnz", "dib", "webp", "exr",
];

// HEIF/HEIC container extensions (see `get_dynamic_image_from_heic`, gated on the `heif` feature).
pub const HEIC_EXTENSIONS: &[&str] = &["heif", "heifs", "heic", "heics", "avci", "avcs", "avifs"];

// Zip-compatible archive extensions.
pub const ZIP_FILES_EXTENSIONS: &[&str] = &["zip", "jar"];

pub const PDF_FILES_EXTENSIONS: &[&str] = &["pdf"];
2022-05-17 04:23:07 +12:00
/// Audio extensions handled by the audio tools.
/// Fix: the original list contained `"aiff"` twice; the duplicate entry was removed
/// (the distinct `"aif"`/`"aifc"` variants are kept).
pub const AUDIO_FILES_EXTENSIONS: &[&str] = &[
    "mp3", "flac", "wav", "ogg", "m4a", "aac", "aiff", "pcm", "aif", "aifc", "m3a", "mp2", "mp4a", "mp2a", "mpga", "wave", "weba", "wma", "oga",
];
// Video container extensions handled by the video tools.
pub const VIDEO_FILES_EXTENSIONS: &[&str] = &[
    "mp4", "mpv", "flv", "mp4a", "webm", "mpg", "mp2", "mpeg", "m4p", "m4v", "avi", "wmv", "qt", "mov", "swf", "mkv",
];

// Sleep interval of the progress-reporting thread between stop-flag checks
// (see `prepare_thread_handler_common`).
pub const LOOP_DURATION: u32 = 20; //ms
// Minimum interval between two ProgressData messages (see `prepare_thread_handler_common`).
pub const SEND_PROGRESS_DATA_TIME_BETWEEN: u32 = 200; //ms
2022-01-14 18:34:43 +13:00
2024-02-15 05:45:25 +13:00
pub fn remove_folder_if_contains_only_empty_folders ( path : impl AsRef < Path > , remove_to_trash : bool ) -> Result < ( ) , String > {
2023-12-04 00:06:42 +13:00
let path = path . as_ref ( ) ;
if ! path . is_dir ( ) {
2024-02-15 05:45:25 +13:00
return Err ( format! ( " Trying to remove folder {path:?} which is not a directory " , ) ) ;
2023-12-04 00:06:42 +13:00
}
let mut entries_to_check = Vec ::new ( ) ;
let Ok ( initial_entry ) = path . read_dir ( ) else {
2024-02-15 05:45:25 +13:00
return Err ( format! ( " Cannot read directory {path:?} " , ) ) ;
2023-12-04 00:06:42 +13:00
} ;
for entry in initial_entry {
if let Ok ( entry ) = entry {
entries_to_check . push ( entry ) ;
} else {
2024-02-15 05:45:25 +13:00
return Err ( format! ( " Cannot read entry from directory {path:?} " ) ) ;
2023-12-04 00:06:42 +13:00
}
}
loop {
let Some ( entry ) = entries_to_check . pop ( ) else {
break ;
} ;
2023-12-12 07:12:26 +13:00
let Some ( file_type ) = entry . file_type ( ) . ok ( ) else {
2024-02-15 05:45:25 +13:00
return Err ( format! ( " Folder contains file with unknown type {:?} inside {path:?} " , entry . path ( ) ) ) ;
2023-12-12 07:12:26 +13:00
} ;
if ! file_type . is_dir ( ) {
2024-02-15 05:45:25 +13:00
return Err ( format! ( " Folder contains file {:?} inside {path:?} " , entry . path ( ) , ) ) ;
2023-12-04 00:06:42 +13:00
}
let Ok ( internal_read_dir ) = entry . path ( ) . read_dir ( ) else {
2024-02-15 05:45:25 +13:00
return Err ( format! ( " Cannot read directory {:?} inside {path:?} " , entry . path ( ) ) ) ;
2023-12-04 00:06:42 +13:00
} ;
for internal_elements in internal_read_dir {
if let Ok ( internal_element ) = internal_elements {
entries_to_check . push ( internal_element ) ;
} else {
2024-02-15 05:45:25 +13:00
return Err ( format! ( " Cannot read entry from directory {:?} inside {path:?} " , entry . path ( ) ) ) ;
2023-12-04 00:06:42 +13:00
}
}
}
2024-02-15 05:45:25 +13:00
if remove_to_trash {
trash ::delete ( path ) . map_err ( | e | format! ( " Cannot move folder {path:?} to trash, reason {e} " ) )
} else {
fs ::remove_dir_all ( path ) . map_err ( | e | format! ( " Cannot remove directory {path:?} , reason {e} " ) )
}
2023-12-04 00:06:42 +13:00
}
2022-01-06 10:47:27 +13:00
/// Opens (or creates) the binary and optional JSON cache files inside the app's
/// per-user cache directory.
///
/// Returns `((bin_handle, bin_path), (json_handle, json_path))`; the handles are
/// `None` when that particular file was not opened. Returns `None` on any failure
/// (failures while saving also push a message into `warnings`).
/// `save_to_cache` opens the files write/truncate, otherwise they are opened read-only.
pub fn open_cache_folder(cache_file_name: &str, save_to_cache: bool, use_json: bool, warnings: &mut Vec<String>) -> Option<((Option<File>, PathBuf), (Option<File>, PathBuf))> {
    if let Some(proj_dirs) = ProjectDirs::from("pl", "Qarmin", "Czkawka") {
        let cache_dir = PathBuf::from(proj_dirs.cache_dir());
        let cache_file = cache_dir.join(cache_file_name);
        // JSON twin of the cache file, derived by swapping the ".bin" suffix.
        let cache_file_json = cache_dir.join(cache_file_name.replace(".bin", ".json"));

        let mut file_handler_default = None;
        let mut file_handler_json = None;

        if save_to_cache {
            // Ensure the cache directory exists and really is a directory.
            if cache_dir.exists() {
                if !cache_dir.is_dir() {
                    warnings.push(format!("Config dir {cache_dir:?} is a file!"));
                    return None;
                }
            } else if let Err(e) = fs::create_dir_all(&cache_dir) {
                warnings.push(format!("Cannot create config dir {cache_dir:?}, reason {e}"));
                return None;
            }

            file_handler_default = Some(match OpenOptions::new().truncate(true).write(true).create(true).open(&cache_file) {
                Ok(t) => t,
                Err(e) => {
                    warnings.push(format!("Cannot create or open cache file {cache_file:?}, reason {e}"));
                    return None;
                }
            });
            if use_json {
                file_handler_json = Some(match OpenOptions::new().truncate(true).write(true).create(true).open(&cache_file_json) {
                    Ok(t) => t,
                    Err(e) => {
                        warnings.push(format!("Cannot create or open cache file {cache_file_json:?}, reason {e}"));
                        return None;
                    }
                });
            }
        } else {
            // Loading: prefer the binary cache; fall back to JSON only when requested.
            if let Ok(t) = OpenOptions::new().read(true).open(&cache_file) {
                file_handler_default = Some(t);
            } else {
                if use_json {
                    file_handler_json = Some(OpenOptions::new().read(true).open(&cache_file_json).ok()?);
                } else {
                    // messages.push(format!("Cannot find or open cache file {cache_file:?}")); // No error or warning
                    return None;
                }
            }
        };
        return Some(((file_handler_default, cache_file), (file_handler_json, cache_file_json)));
    }
    None
}
2022-06-09 07:42:51 +12:00
/// Decodes a HEIC/HEIF file into an RGB `DynamicImage` using libheif.
///
/// # Errors
/// Propagates libheif read/decode errors; fails when the decoded planes cannot
/// be assembled into an image buffer.
#[cfg(feature = "heif")]
pub fn get_dynamic_image_from_heic(path: &str) -> Result<DynamicImage> {
    // let libheif = LibHeif::new();
    let im = HeifContext::read_from_file(path)?;
    let handle = im.primary_image_handle()?;
    // let image = libheif.decode(&handle, ColorSpace::Rgb(RgbChroma::Rgb), None)?; // Enable when using libheif 0.19
    let image = handle.decode(ColorSpace::Rgb(RgbChroma::Rgb), None)?;
    let width = image.width();
    let height = image.height();
    let planes = image.planes();
    // Interleaved RGB plane is expected for RgbChroma::Rgb output —
    // NOTE(review): `unwrap` assumes the decoder always provides it; confirm.
    let interleaved_plane = planes.interleaved.unwrap();
    ImageBuffer::from_raw(width, height, interleaved_plane.data.to_owned())
        .map(DynamicImage::ImageRgb8)
        .ok_or_else(|| anyhow::anyhow!("Failed to create image buffer"))
}
2023-12-04 00:06:42 +13:00
/// Decodes a RAW photo into a `DynamicImage` via libraw.
///
/// Returns `None` when the file cannot be read or decoded.
/// Fixes: the previous version `expect`ed on decode (panicking the whole scan on a
/// corrupted/unsupported file) and printed timing to stdout instead of the logger.
#[cfg(feature = "libraw")]
pub fn get_dynamic_image_from_raw_image(path: impl AsRef<Path>) -> Option<DynamicImage> {
    let buf = fs::read(path.as_ref()).ok()?;
    let processor = Processor::new();
    let start_timer = Instant::now();
    // A broken raw file must not crash the app — bail out with None instead of panicking.
    let processed = processor.process_8bit(&buf).ok()?;
    debug!("Processing took {:?}", start_timer.elapsed());
    let width = processed.width();
    let height = processed.height();
    let data = processed.to_vec();
    // Create the DynamicImage from the raw RGB buffer.
    let buffer = ImageBuffer::from_raw(width, height, data)?;
    Some(DynamicImage::ImageRgb8(buffer))
}
#[ cfg(not(feature = " libraw " )) ]
2022-01-01 10:34:24 +13:00
pub fn get_dynamic_image_from_raw_image ( path : impl AsRef < Path > + std ::fmt ::Debug ) -> Option < DynamicImage > {
2023-12-04 00:06:42 +13:00
let mut start_timer = Instant ::now ( ) ;
let mut times = Vec ::new ( ) ;
2022-01-01 10:34:24 +13:00
2023-12-04 00:06:42 +13:00
let loader = RawLoader ::new ( ) ;
let raw = loader . decode_file ( path . as_ref ( ) ) . ok ( ) ? ;
times . push ( ( " After decoding " , start_timer . elapsed ( ) ) ) ;
start_timer = Instant ::now ( ) ;
2022-01-01 10:34:24 +13:00
let source = ImageSource ::Raw ( raw ) ;
2023-12-04 00:06:42 +13:00
times . push ( ( " After creating source " , start_timer . elapsed ( ) ) ) ;
start_timer = Instant ::now ( ) ;
let mut pipeline = Pipeline ::new_from_source ( source ) . ok ( ) ? ;
times . push ( ( " After creating pipeline " , start_timer . elapsed ( ) ) ) ;
start_timer = Instant ::now ( ) ;
2022-01-01 10:34:24 +13:00
pipeline . run ( None ) ;
2023-12-04 00:06:42 +13:00
let image = pipeline . output_8bit ( None ) . ok ( ) ? ;
2022-01-01 10:34:24 +13:00
2023-12-04 00:06:42 +13:00
times . push ( ( " After creating image " , start_timer . elapsed ( ) ) ) ;
start_timer = Instant ::now ( ) ;
let image = ImageBuffer ::< Rgb < u8 > , Vec < u8 > > ::from_raw ( image . width as u32 , image . height as u32 , image . data ) ? ;
2022-01-01 10:34:24 +13:00
2023-12-04 00:06:42 +13:00
times . push ( ( " After creating image buffer " , start_timer . elapsed ( ) ) ) ;
start_timer = Instant ::now ( ) ;
2022-01-01 10:34:24 +13:00
// println!("Properly hashed {:?}", path);
2023-12-04 00:06:42 +13:00
let res = Some ( DynamicImage ::ImageRgb8 ( image ) ) ;
times . push ( ( " After creating dynamic image " , start_timer . elapsed ( ) ) ) ;
let str_timer = times . into_iter ( ) . map ( | ( name , time ) | format! ( " {name} : {time:?} " ) ) . collect ::< Vec < _ > > ( ) . join ( " , " ) ;
debug! ( " Loading raw image --- {str_timer} " ) ;
res
2022-01-01 10:34:24 +13:00
}
2022-06-05 07:20:21 +12:00
/// Splits a path into `(parent_directory, file_name)` as lossy strings.
/// Missing components (e.g. for root paths) become empty strings.
pub fn split_path(path: &Path) -> (String, String) {
    let Some(dir) = path.parent() else {
        return (String::new(), String::new());
    };
    let file = path.file_name().map(|f| f.to_string_lossy().into_owned()).unwrap_or_default();
    (dir.to_string_lossy().to_string(), file)
}
2023-12-08 07:38:41 +13:00
/// Orders two paths by parent directory first, then by file name —
/// used as a comparator for sorting result lists.
pub fn split_path_compare(path_a: &Path, path_b: &Path) -> Ordering {
    path_a
        .parent()
        .cmp(&path_b.parent())
        .then_with(|| path_a.file_name().cmp(&path_b.file_name()))
}
2022-07-28 17:29:50 +12:00
/// Builds the user-facing message shown when an external decoding library crashes,
/// asking the user to retest with the latest library version and report the bug
/// at `home_library_url`.
pub fn create_crash_message(library_name: &str, file_path: &str, home_library_url: &str) -> String {
    format!("{library_name} library crashed when opening \"{file_path}\", please check if this is fixed with the latest version of {library_name} and if it is not fixed, please report bug here - {home_library_url}")
}
2024-01-14 01:57:51 +13:00
/// Matches `directory_name` against a `*`-wildcard expression that was
/// pre-split on `*` into `expression_splits` (see `SingleExcludedItem`,
/// defined elsewhere). An expression with no splits matches everything.
pub fn regex_check(expression_item: &SingleExcludedItem, directory_name: &str) -> bool {
    if expression_item.expression_splits.is_empty() {
        return true;
    }

    // Early checking if directory contains all parts needed by expression
    for split in &expression_item.unique_extensions_splits {
        if !directory_name.contains(split) {
            return false;
        }
    }

    // `git*` shouldn't be true for `/gitsfafasfs`
    // NOTE(review): the `unwrap`s below assume the earlier `contains` pass guarantees
    // every element of `expression_splits` occurs in `directory_name` (i.e. that
    // `expression_splits` ⊆ `unique_extensions_splits`) — confirm in SingleExcludedItem.
    if !expression_item.expression.starts_with('*') && directory_name.find(&expression_item.expression_splits[0]).unwrap() > 0 {
        return false;
    }
    // `*home` shouldn't be true for `/homeowner`
    if !expression_item.expression.ends_with('*') && !directory_name.ends_with(expression_item.expression_splits.last().unwrap()) {
        return false;
    }
    // At the end we check if parts between * are correctly positioned
    // NOTE(review): the bookkeeping is subtle — `found_index` is relative to
    // `current_index`, and `current_index` advances from the previous match point
    // plus the current split length; preserved exactly as written.
    let mut last_split_point = directory_name.find(&expression_item.expression_splits[0]).unwrap();
    let mut current_index: usize = 0;
    let mut found_index: usize;
    for spl in &expression_item.expression_splits[1..] {
        found_index = match directory_name[current_index..].find(spl) {
            Some(t) => t,
            None => return false,
        };
        current_index = last_split_point + spl.len();
        last_split_point = found_index + current_index;
    }

    true
}
2020-10-15 05:41:37 +13:00
2023-12-04 09:18:31 +13:00
/// Normalizes a Windows-style path: `/` becomes `\`, the drive letter is
/// uppercased and the rest lowercased. Network paths (leading `\`) and
/// non-UTF-8 or multi-byte-prefixed paths are returned unchanged.
pub fn normalize_windows_path(path_to_change: impl AsRef<Path>) -> PathBuf {
    let original = path_to_change.as_ref();

    // Don't do anything, because network path may be case intensive
    if original.to_string_lossy().starts_with('\\') {
        return original.to_path_buf();
    }

    let Some(text) = original.to_str() else {
        return original.to_path_buf();
    };
    // Reject empty strings and first characters wider than one byte,
    // so the `[..1]` / `[1..]` slicing below cannot panic.
    if !text.is_char_boundary(1) {
        return original.to_path_buf();
    }

    let backslashed = text.replace('/', "\\");
    let mut normalized = OsString::new();
    if backslashed[1..].starts_with(':') {
        // Drive-letter path like `c:\...` — uppercase the drive, lowercase the rest.
        normalized.push(backslashed[..1].to_ascii_uppercase());
        normalized.push(backslashed[1..].to_ascii_lowercase());
    } else {
        normalized.push(backslashed.to_ascii_lowercase());
    }
    PathBuf::from(normalized)
}
2020-09-18 17:32:37 +12:00
2023-05-03 08:37:12 +12:00
/// Pushes `entry_data`'s path onto `dir_result` for further traversal, unless
/// recursion is disabled, the path is excluded, or (on unix, when enabled)
/// the path lives on another filesystem. Filesystem-check errors are collected
/// into `warnings` without skipping the entry.
pub fn check_folder_children(
    dir_result: &mut Vec<PathBuf>,
    warnings: &mut Vec<String>,
    entry_data: &DirEntry,
    recursive_search: bool,
    directories: &Directories,
    excluded_items: &ExcludedItems,
) {
    if !recursive_search {
        return;
    }

    let next_item = entry_data.path();
    if directories.is_excluded(&next_item) || excluded_items.is_excluded(&next_item) {
        return;
    }

    #[cfg(target_family = "unix")]
    if directories.exclude_other_filesystems() {
        match directories.is_on_other_filesystems(&next_item) {
            Ok(true) => return,
            Err(e) => warnings.push(e),
            _ => (),
        }
    }

    dir_result.push(next_item);
}
2023-10-14 07:33:17 +13:00
// Here we assume, that internal Vec<> have at least 1 object
#[ allow(clippy::ptr_arg) ]
pub fn delete_files_custom < T > ( items : & Vec < & Vec < T > > , delete_method : & DeleteMethod , text_messages : & mut Messages , dry_run : bool ) -> ( u64 , usize , usize )
where
T : ResultEntry + Clone ,
{
let res = items
. iter ( )
. map ( | values | {
let mut gained_space : u64 = 0 ;
let mut removed_files : usize = 0 ;
let mut failed_to_remove_files : usize = 0 ;
let mut infos = Vec ::new ( ) ;
let mut errors = Vec ::new ( ) ;
let mut all_values = ( * values ) . clone ( ) ;
let len = all_values . len ( ) ;
// Sorted from oldest to newest - from smallest value to bigger
all_values . sort_unstable_by_key ( ResultEntry ::get_modified_date ) ;
if delete_method = = & DeleteMethod ::HardLink {
let original_file = & all_values [ 0 ] ;
for file_entry in & all_values [ 1 .. ] {
if dry_run {
infos . push ( format! (
" dry_run - would create hardlink from {:?} to {:?} " ,
original_file . get_path ( ) ,
2024-01-14 01:57:51 +13:00
file_entry . get_path ( )
2023-10-14 07:33:17 +13:00
) ) ;
} else {
if dry_run {
infos . push ( format! ( " Replace file {:?} with hard link to {:?} " , original_file . get_path ( ) , file_entry . get_path ( ) ) ) ;
} else {
if let Err ( e ) = make_hard_link ( original_file . get_path ( ) , file_entry . get_path ( ) ) {
errors . push ( format! (
" Cannot create hard link from {:?} to {:?} - {} " ,
file_entry . get_path ( ) ,
original_file . get_path ( ) ,
e
) ) ;
failed_to_remove_files + = 1 ;
} else {
gained_space + = 1 ;
removed_files + = 1 ;
}
}
}
}
return ( infos , errors , gained_space , removed_files , failed_to_remove_files ) ;
}
let items = match delete_method {
DeleteMethod ::Delete = > & all_values ,
DeleteMethod ::AllExceptNewest = > & all_values [ .. ( len - 1 ) ] ,
DeleteMethod ::AllExceptOldest = > & all_values [ 1 .. ] ,
DeleteMethod ::OneOldest = > & all_values [ .. 1 ] ,
DeleteMethod ::OneNewest = > & all_values [ ( len - 1 ) .. ] ,
DeleteMethod ::HardLink | DeleteMethod ::None = > unreachable! ( " HardLink and None should be handled before " ) ,
} ;
for i in items {
if dry_run {
infos . push ( format! ( " dry_run - would delete file: {:?} " , i . get_path ( ) ) ) ;
} else {
2024-01-14 01:57:51 +13:00
if let Err ( e ) = fs ::remove_file ( i . get_path ( ) ) {
2023-10-14 07:33:17 +13:00
errors . push ( format! ( " Cannot delete file: {:?} - {e} " , i . get_path ( ) ) ) ;
failed_to_remove_files + = 1 ;
} else {
removed_files + = 1 ;
gained_space + = i . get_size ( ) ;
}
}
}
( infos , errors , gained_space , removed_files , failed_to_remove_files )
} )
. collect ::< Vec < _ > > ( ) ;
let mut gained_space = 0 ;
let mut removed_files = 0 ;
let mut failed_to_remove_files = 0 ;
for ( infos , errors , gained_space_v , removed_files_v , failed_to_remove_files_v ) in res {
text_messages . messages . extend ( infos ) ;
text_messages . errors . extend ( errors ) ;
gained_space + = gained_space_v ;
removed_files + = removed_files_v ;
failed_to_remove_files + = failed_to_remove_files_v ;
}
( gained_space , removed_files , failed_to_remove_files )
}
2023-05-08 06:54:05 +12:00
/// For each group, pairs one entry located in a reference directory with the
/// group's remaining normal entries. Groups lacking either a referenced entry
/// or a normal entry are dropped.
pub fn filter_reference_folders_generic<T>(entries_to_check: Vec<Vec<T>>, directories: &Directories) -> Vec<(T, Vec<T>)>
where
    T: ResultEntry,
{
    entries_to_check
        .into_iter()
        .filter_map(|vec_file_entry| {
            let (mut in_reference, normal_files): (Vec<_>, Vec<_>) =
                vec_file_entry.into_iter().partition(|e| directories.is_in_referenced_directory(e.get_path()));
            match in_reference.pop() {
                Some(reference_entry) if !normal_files.is_empty() => Some((reference_entry, normal_files)),
                _ => None,
            }
        })
        .collect::<Vec<(T, Vec<T>)>>()
}
2023-05-03 08:37:12 +12:00
/// Spawns the background thread that periodically sends `ProgressData` while a
/// tool is running.
///
/// Returns `(thread_handle, run_flag, progress_counter, stopped_flag)`:
/// the thread sends at most one message per `SEND_PROGRESS_DATA_TIME_BETWEEN` ms
/// and exits once `run_flag` is cleared (see
/// `send_info_and_wait_for_ending_all_threads`). When `progress_sender` is `None`
/// a dummy finished thread is returned so callers can join unconditionally.
pub fn prepare_thread_handler_common(
    progress_sender: Option<&Sender<ProgressData>>,
    current_stage: u8,
    max_stage: u8,
    max_value: usize,
    checking_method: CheckingMethod,
    tool_type: ToolType,
) -> (JoinHandle<()>, Arc<AtomicBool>, Arc<AtomicUsize>, AtomicBool) {
    assert_ne!(tool_type, ToolType::None, "ToolType::None should not exist");
    let progress_thread_run = Arc::new(AtomicBool::new(true));
    let atomic_counter = Arc::new(AtomicUsize::new(0));
    let check_was_stopped = AtomicBool::new(false);
    let progress_thread_sender = if let Some(progress_sender) = progress_sender {
        let progress_send = progress_sender.clone();
        let progress_thread_run = progress_thread_run.clone();
        let atomic_counter = atomic_counter.clone();
        thread::spawn(move || {
            // Use earlier time, to send immediately first message
            let mut time_since_last_send = SystemTime::now() - Duration::from_secs(10u64);

            loop {
                // Throttle: only send when the minimum interval has elapsed.
                if time_since_last_send.elapsed().unwrap().as_millis() > SEND_PROGRESS_DATA_TIME_BETWEEN as u128 {
                    progress_send
                        .send(ProgressData {
                            checking_method,
                            current_stage,
                            max_stage,
                            entries_checked: atomic_counter.load(atomic::Ordering::Relaxed),
                            entries_to_check: max_value,
                            tool_type,
                        })
                        .unwrap();
                    time_since_last_send = SystemTime::now();
                }

                // Stop when the owner clears the run flag.
                if !progress_thread_run.load(atomic::Ordering::Relaxed) {
                    break;
                }
                sleep(Duration::from_millis(LOOP_DURATION as u64));
            }
        })
    } else {
        // No sender: return an immediately-finishing thread so join() is always valid.
        thread::spawn(|| {})
    };
    (progress_thread_sender, progress_thread_run, atomic_counter, check_was_stopped)
}
2023-10-15 04:48:57 +13:00
/// Non-blocking check for a pending stop signal; consumes one message when present.
/// A `None` receiver means stopping is unsupported, so the result is always `false`.
#[inline]
pub fn check_if_stop_received(stop_receiver: Option<&crossbeam_channel::Receiver<()>>) -> bool {
    stop_receiver.map_or(false, |receiver| receiver.try_recv().is_ok())
}
/// Signals the progress thread (from `prepare_thread_handler_common`) to stop
/// and blocks until it finishes. Panics if the progress thread itself panicked.
#[fun_time(message = "send_info_and_wait_for_ending_all_threads", level = "debug")]
pub fn send_info_and_wait_for_ending_all_threads(progress_thread_run: &Arc<AtomicBool>, progress_thread_handle: JoinHandle<()>) {
    // Clearing the flag makes the progress loop break on its next iteration.
    progress_thread_run.store(false, atomic::Ordering::Relaxed);
    progress_thread_handle.join().unwrap();
}
2020-09-12 08:32:17 +12:00
#[cfg(test)]
mod test {
    use std::fs;
    use std::io::Write;
    use std::path::{Path, PathBuf};

    use tempfile::tempdir;

    use crate::common::{normalize_windows_path, regex_check, remove_folder_if_contains_only_empty_folders};
    use crate::common_items::new_excluded_item;

    // Verifies that removal succeeds only for trees made purely of directories.
    #[test]
    fn test_remove_folder_if_contains_only_empty_folders() {
        let dir = tempdir().unwrap();
        let sub_dir = dir.path().join("sub_dir");
        fs::create_dir(&sub_dir).unwrap();

        // Test with empty directory
        assert!(remove_folder_if_contains_only_empty_folders(&sub_dir, false).is_ok());
        assert!(!Path::new(&sub_dir).exists());

        // Test with directory containing an empty directory
        fs::create_dir(&sub_dir).unwrap();
        fs::create_dir(sub_dir.join("empty_sub_dir")).unwrap();
        assert!(remove_folder_if_contains_only_empty_folders(&sub_dir, false).is_ok());
        assert!(!Path::new(&sub_dir).exists());

        // Test with directory containing a file — must fail and leave the tree intact.
        fs::create_dir(&sub_dir).unwrap();
        let mut file = fs::File::create(sub_dir.join("file.txt")).unwrap();
        writeln!(file, "Hello, world!").unwrap();
        assert!(remove_folder_if_contains_only_empty_folders(&sub_dir, false).is_err());
        assert!(Path::new(&sub_dir).exists());
    }

    // Positive and negative cases for the `*`-wildcard matcher.
    #[test]
    fn test_regex() {
        assert!(regex_check(&new_excluded_item("*"), "/home/rafal"));
        assert!(regex_check(&new_excluded_item("*home*"), "/home/rafal"));
        assert!(regex_check(&new_excluded_item("*home"), "/home"));
        assert!(regex_check(&new_excluded_item("*home/"), "/home/"));
        assert!(regex_check(&new_excluded_item("*home/*"), "/home/"));
        assert!(regex_check(&new_excluded_item("*.git*"), "/home/.git"));
        assert!(regex_check(&new_excluded_item("*/home/rafal*rafal*rafal*rafal*"), "/home/rafal/rafalrafalrafal"));
        assert!(regex_check(&new_excluded_item("AAA"), "AAA"));
        assert!(regex_check(&new_excluded_item("AAA*"), "AAABDGG/QQPW*"));
        assert!(!regex_check(&new_excluded_item("*home"), "/home/"));
        assert!(!regex_check(&new_excluded_item("*home"), "/homefasfasfasfasf/"));
        assert!(!regex_check(&new_excluded_item("*home"), "/homefasfasfasfasf"));
        assert!(!regex_check(&new_excluded_item("rafal*afal*fal"), "rafal"));
        assert!(!regex_check(&new_excluded_item("rafal*a"), "rafal"));
        assert!(!regex_check(&new_excluded_item("AAAAAAAA****"), "/AAAAAAAAAAAAAAAAA"));
        assert!(!regex_check(&new_excluded_item("*.git/*"), "/home/.git"));
        assert!(!regex_check(&new_excluded_item("*home/*koc"), "/koc/home/"));
        assert!(!regex_check(&new_excluded_item("*home/"), "/home"));
        assert!(!regex_check(&new_excluded_item("*TTT"), "/GGG"));
        assert!(regex_check(
            &new_excluded_item("*/home/*/.local/share/containers"),
            "/var/home/roman/.local/share/containers"
        ));
        // Backslash-separated patterns are only meaningful on Windows.
        if cfg!(target_family = "windows") {
            assert!(regex_check(&new_excluded_item("*\\home"), "C:\\home"));
            assert!(regex_check(&new_excluded_item("*/home"), "C:\\home"));
        }
    }

    // Drive letter uppercased, rest lowercased, `/` -> `\`; network/odd paths untouched.
    #[test]
    fn test_windows_path() {
        assert_eq!(PathBuf::from("C:\\path.txt"), normalize_windows_path("c:/PATH.tXt"));
        assert_eq!(PathBuf::from("H:\\reka\\weza\\roman.txt"), normalize_windows_path("h:/RekA/Weza\\roMan.Txt"));
        assert_eq!(PathBuf::from("T:\\a"), normalize_windows_path("T:\\A"));
        assert_eq!(PathBuf::from("\\\\aBBa"), normalize_windows_path("\\\\aBBa"));
        assert_eq!(PathBuf::from("a"), normalize_windows_path("a"));
        assert_eq!(PathBuf::from(""), normalize_windows_path(""));
    }
}