2021-11-28 08:49:20 +13:00
use std ::collections ::BTreeMap ;
2023-05-02 19:17:39 +12:00
use std ::fs ::{ DirEntry , File , Metadata } ;
2021-01-13 08:06:12 +13:00
use std ::io ::prelude ::* ;
2021-11-28 08:49:20 +13:00
use std ::io ::{ BufReader , BufWriter } ;
2021-01-14 04:03:05 +13:00
use std ::path ::{ Path , PathBuf } ;
2021-11-28 08:49:20 +13:00
use std ::sync ::atomic ::{ AtomicBool , AtomicUsize , Ordering } ;
use std ::sync ::Arc ;
2023-05-02 06:37:58 +12:00
use std ::thread ::{ sleep , JoinHandle } ;
2023-05-02 08:13:35 +12:00
use std ::time ::{ Duration , SystemTime } ;
2021-12-30 01:43:38 +13:00
use std ::{ fs , mem , panic , thread } ;
2021-01-13 08:06:12 +13:00
2021-11-28 08:49:20 +13:00
use crossbeam_channel ::Receiver ;
2023-05-01 02:38:19 +12:00
use pdf ::file ::FileOptions ;
2023-02-19 22:21:14 +13:00
use pdf ::object ::ParseOptions ;
2022-05-17 04:23:07 +12:00
use pdf ::PdfError ;
use pdf ::PdfError ::Try ;
2021-11-28 08:49:20 +13:00
use rayon ::prelude ::* ;
2022-01-06 10:47:27 +13:00
use serde ::{ Deserialize , Serialize } ;
2021-11-28 08:49:20 +13:00
2023-05-02 19:17:39 +12:00
use crate ::common ::{ check_folder_children , create_crash_message , open_cache_folder , send_info_and_wait_for_ending_all_threads , Common , LOOP_DURATION , PDF_FILES_EXTENSIONS } ;
2022-05-17 04:23:07 +12:00
use crate ::common ::{ AUDIO_FILES_EXTENSIONS , IMAGE_RS_BROKEN_FILES_EXTENSIONS , ZIP_FILES_EXTENSIONS } ;
2023-05-02 08:13:35 +12:00
use crate ::common_dir_traversal ::{ common_get_entry_data_metadata , common_read_dir , get_lowercase_name , get_modified_time } ;
2021-01-13 08:06:12 +13:00
use crate ::common_directory ::Directories ;
use crate ::common_extensions ::Extensions ;
use crate ::common_items ::ExcludedItems ;
use crate ::common_messages ::Messages ;
use crate ::common_traits ::* ;
#[ derive(Debug) ]
pub struct ProgressData {
pub current_stage : u8 ,
pub max_stage : u8 ,
pub files_checked : usize ,
pub files_to_check : usize ,
}
/// What to do with the broken files that were found.
#[derive(Eq, PartialEq, Clone, Debug, Copy)]
pub enum DeleteMethod {
    // Keep all files untouched.
    None,
    // Remove the broken files from disk (see `BrokenFiles::delete_files`).
    Delete,
}
/// A single scanned file together with the result of its validity check.
#[derive(Clone, Serialize, Deserialize)]
pub struct FileEntry {
    pub path: PathBuf,
    // Last modification time — presumably seconds since the Unix epoch,
    // as produced by `get_modified_time`; confirm in common_dir_traversal.
    pub modified_date: u64,
    // File size in bytes (from `Metadata::len`).
    pub size: u64,
    // Category detected from the file extension.
    pub type_of_file: TypeOfFile,
    // Empty when the file opened correctly; otherwise a description of the failure.
    pub error_string: String,
}
/// Category of a file, decided from its file-name extension.
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
pub enum TypeOfFile {
    // Extension not recognized; also produced when a cache entry comes from
    // a different version and cannot be interpreted.
    Unknown = -1,
    Image = 0,
    ArchiveZip,
    Audio,
    PDF,
}
bitflags! {
    /// Bit mask selecting which file categories should be verified.
    #[derive(PartialEq, Copy, Clone)]
    pub struct CheckedTypes: u32 {
        const NONE = 0;
        const PDF = 0b1;
        const AUDIO = 0b10;
        const IMAGE = 0b100;
        const ARCHIVE = 0b1000;
    }
}
/// Summary statistics about the results of a scan.
#[derive(Default)]
pub struct Info {
    // Count of entries in `BrokenFiles::broken_files` after a scan.
    pub number_of_broken_files: usize,
}
2021-11-28 08:57:10 +13:00
2021-01-13 08:06:12 +13:00
impl Info {
2023-01-29 06:54:02 +13:00
#[ must_use ]
2021-01-13 08:06:12 +13:00
pub fn new ( ) -> Self {
Default ::default ( )
}
}
/// Tool which finds files that cannot be opened/parsed (images, archives, audio, PDF).
pub struct BrokenFiles {
    text_messages: Messages,
    information: Info,
    // Candidates collected during directory traversal, keyed by full path as a string.
    files_to_check: BTreeMap<String, FileEntry>,
    // Final results: entries whose check produced a non-empty `error_string`.
    broken_files: Vec<FileEntry>,
    directories: Directories,
    allowed_extensions: Extensions,
    excluded_items: ExcludedItems,
    recursive_search: bool,
    delete_method: DeleteMethod,
    // Set when the user aborted via the stop channel.
    stopped_search: bool,
    // Which file categories (PDF/audio/image/archive) are verified.
    checked_types: CheckedTypes,
    use_cache: bool,
    delete_outdated_cache: bool, // TODO add this to GUI
    save_also_as_json: bool,
}
impl BrokenFiles {
2023-01-29 06:54:02 +13:00
#[ must_use ]
2021-01-13 08:06:12 +13:00
pub fn new ( ) -> Self {
Self {
text_messages : Messages ::new ( ) ,
information : Info ::new ( ) ,
recursive_search : true ,
allowed_extensions : Extensions ::new ( ) ,
directories : Directories ::new ( ) ,
excluded_items : ExcludedItems ::new ( ) ,
2021-01-14 04:03:05 +13:00
files_to_check : Default ::default ( ) ,
2021-01-13 08:06:12 +13:00
delete_method : DeleteMethod ::None ,
stopped_search : false ,
2021-01-14 04:03:05 +13:00
broken_files : Default ::default ( ) ,
2021-03-05 00:09:53 +13:00
use_cache : true ,
2022-01-06 10:47:27 +13:00
delete_outdated_cache : true ,
save_also_as_json : false ,
2022-06-11 04:58:53 +12:00
checked_types : CheckedTypes ::PDF | CheckedTypes ::AUDIO | CheckedTypes ::IMAGE | CheckedTypes ::ARCHIVE ,
2021-01-13 08:06:12 +13:00
}
}
2021-04-14 18:23:59 +12:00
pub fn find_broken_files ( & mut self , stop_receiver : Option < & Receiver < ( ) > > , progress_sender : Option < & futures ::channel ::mpsc ::UnboundedSender < ProgressData > > ) {
2021-01-13 08:06:12 +13:00
self . directories . optimize_directories ( self . recursive_search , & mut self . text_messages ) ;
if ! self . check_files ( stop_receiver , progress_sender ) {
self . stopped_search = true ;
return ;
}
if ! self . look_for_broken_files ( stop_receiver , progress_sender ) {
self . stopped_search = true ;
return ;
}
self . delete_files ( ) ;
self . debug_print ( ) ;
}
/// Returns `true` when the last scan was aborted by the user.
#[must_use]
pub fn get_stopped_search(&self) -> bool {
    self.stopped_search
}

/// Returns the broken files found by the last scan.
#[must_use]
pub const fn get_broken_files(&self) -> &Vec<FileEntry> {
    &self.broken_files
}

/// Selects which file categories (PDF/audio/image/archive) are verified.
pub fn set_checked_types(&mut self, checked_types: CheckedTypes) {
    self.checked_types = checked_types;
}

/// Returns the collected errors/warnings/messages.
#[must_use]
pub const fn get_text_messages(&self) -> &Messages {
    &self.text_messages
}

/// Returns summary statistics of the last scan.
#[must_use]
pub const fn get_information(&self) -> &Info {
    &self.information
}

pub fn set_delete_method(&mut self, delete_method: DeleteMethod) {
    self.delete_method = delete_method;
}
pub fn set_save_also_as_json(&mut self, save_also_as_json: bool) {
    self.save_also_as_json = save_also_as_json;
}

pub fn set_use_cache(&mut self, use_cache: bool) {
    self.use_cache = use_cache;
}

pub fn set_recursive_search(&mut self, recursive_search: bool) {
    self.recursive_search = recursive_search;
}

/// On Unix, allows skipping directories that live on a different
/// filesystem than the scanned root.
#[cfg(target_family = "unix")]
pub fn set_exclude_other_filesystems(&mut self, exclude_other_filesystems: bool) {
    self.directories.set_exclude_other_filesystems(exclude_other_filesystems);
}

/// No-op outside Unix — the option is not supported there.
#[cfg(not(target_family = "unix"))]
pub fn set_exclude_other_filesystems(&mut self, _exclude_other_filesystems: bool) {}

// Returns the bool produced by `Directories::set_included_directory` —
// presumably `false` when the directory list was rejected; confirm there.
pub fn set_included_directory(&mut self, included_directory: Vec<PathBuf>) -> bool {
    self.directories.set_included_directory(included_directory, &mut self.text_messages)
}

pub fn set_excluded_directory(&mut self, excluded_directory: Vec<PathBuf>) {
    self.directories.set_excluded_directory(excluded_directory, &mut self.text_messages);
}

pub fn set_allowed_extensions(&mut self, allowed_extensions: String) {
    self.allowed_extensions.set_allowed_extensions(allowed_extensions, &mut self.text_messages);
}

pub fn set_excluded_items(&mut self, excluded_items: Vec<String>) {
    self.excluded_items.set_excluded_items(excluded_items, &mut self.text_messages);
}
/// Spawns a thread that periodically reports progress to `progress_sender`.
///
/// The thread sends a `ProgressData` snapshot, then sleeps `LOOP_DURATION` ms,
/// until `progress_thread_run` is flipped to `false`; the caller is expected to
/// flip that flag and join the returned handle (see
/// `send_info_and_wait_for_ending_all_threads`). When no sender is given, an
/// already-finished dummy thread is returned so the caller can join unconditionally.
pub fn prepare_thread_handler_broken_files(
    &self,
    progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>,
    progress_thread_run: &Arc<AtomicBool>,
    atomic_counter: &Arc<AtomicUsize>,
    current_stage: u8,
    max_stage: u8,
    max_value: usize,
) -> JoinHandle<()> {
    if let Some(progress_sender) = progress_sender {
        let progress_send = progress_sender.clone();
        let progress_thread_run = progress_thread_run.clone();
        let atomic_counter = atomic_counter.clone();
        thread::spawn(move || loop {
            progress_send
                .unbounded_send(ProgressData {
                    current_stage,
                    max_stage,
                    // Workers bump this counter; `Relaxed` is enough for a progress display.
                    files_checked: atomic_counter.load(Ordering::Relaxed),
                    files_to_check: max_value,
                })
                .unwrap();
            // One final snapshot is always sent before exiting the loop.
            if !progress_thread_run.load(Ordering::Relaxed) {
                break;
            }
            sleep(Duration::from_millis(LOOP_DURATION as u64));
        })
    } else {
        // No progress listener — return a no-op thread so callers can join it.
        thread::spawn(|| {})
    }
}
/// Collects candidate files from all included directories (breadth-first,
/// each frontier scanned in parallel with rayon).
///
/// Fills `self.files_to_check`; returns `false` when the user aborted via
/// `stop_receiver`, `true` otherwise.
fn check_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::UnboundedSender<ProgressData>>) -> bool {
    let start_time: SystemTime = SystemTime::now();
    let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // Small enough to be cheap, big enough to hold most paths without resizing

    // Add root folders for finding
    for id in &self.directories.included_directories {
        folders_to_check.push(id.clone());
    }

    let progress_thread_run = Arc::new(AtomicBool::new(true));
    let atomic_counter = Arc::new(AtomicUsize::new(0));
    // max_value is 0 — total file count is unknown during traversal.
    let progress_thread_handle = self.prepare_thread_handler_broken_files(progress_sender, &progress_thread_run, &atomic_counter, 0, 1, 0);

    while !folders_to_check.is_empty() {
        // Abort check once per frontier; the progress thread must be stopped before returning.
        if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
            send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle);
            return false;
        }

        // Each parallel task returns (subfolders found, warnings, file entries).
        let segments: Vec<_> = folders_to_check
            .par_iter()
            .map(|current_folder| {
                let mut dir_result = vec![];
                let mut warnings = vec![];
                let mut fe_result = vec![];

                let Some(read_dir) = common_read_dir(current_folder, &mut warnings) else {
                    return (dir_result, warnings, fe_result);
                };

                // Check every sub folder/file/link etc.
                for entry in read_dir {
                    let Some((entry_data, metadata)) = common_get_entry_data_metadata(&entry, &mut warnings, current_folder) else {
                        continue;
                    };

                    if metadata.is_dir() {
                        check_folder_children(
                            &mut dir_result,
                            &mut warnings,
                            current_folder,
                            entry_data,
                            self.recursive_search,
                            &self.directories,
                            &self.excluded_items,
                        );
                    } else if metadata.is_file() {
                        if let Some(file_entry) = self.get_file_entry(&metadata, &atomic_counter, entry_data, &mut warnings, current_folder) {
                            fe_result.push((file_entry.path.to_string_lossy().to_string(), file_entry));
                        }
                    }
                }
                (dir_result, warnings, fe_result)
            })
            .collect();

        // Advance the frontier
        folders_to_check.clear();

        // Process collected data
        for (segment, warnings, fe_result) in segments {
            folders_to_check.extend(segment);
            self.text_messages.warnings.extend(warnings);
            for (name, fe) in fe_result {
                self.files_to_check.insert(name, fe);
            }
        }
    }

    send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle);

    Common::print_time(start_time, SystemTime::now(), "check_files");
    true
}
fn get_file_entry (
& self ,
metadata : & Metadata ,
atomic_counter : & Arc < AtomicUsize > ,
entry_data : & DirEntry ,
warnings : & mut Vec < String > ,
current_folder : & Path ,
) -> Option < FileEntry > {
atomic_counter . fetch_add ( 1 , Ordering ::Relaxed ) ;
let Some ( file_name_lowercase ) = get_lowercase_name ( entry_data , warnings ) else {
return None ;
} ;
if ! self . allowed_extensions . matches_filename ( & file_name_lowercase ) {
return None ;
}
let type_of_file = check_extension_availability ( & file_name_lowercase ) ;
if type_of_file = = TypeOfFile ::Unknown {
return None ;
}
if ! check_extension_allowed ( & type_of_file , & self . checked_types ) {
return None ;
}
let current_file_name = current_folder . join ( entry_data . file_name ( ) ) ;
if self . excluded_items . is_excluded ( & current_file_name ) {
return None ;
}
let fe : FileEntry = FileEntry {
path : current_file_name . clone ( ) ,
modified_date : get_modified_time ( metadata , warnings , & current_file_name , false ) ,
size : metadata . len ( ) ,
type_of_file ,
error_string : String ::new ( ) ,
} ;
Some ( fe )
}
2021-04-14 18:23:59 +12:00
fn look_for_broken_files ( & mut self , stop_receiver : Option < & Receiver < ( ) > > , progress_sender : Option < & futures ::channel ::mpsc ::UnboundedSender < ProgressData > > ) -> bool {
2021-01-13 08:06:12 +13:00
let system_time = SystemTime ::now ( ) ;
2021-03-05 00:09:53 +13:00
let loaded_hash_map ;
2021-01-14 04:03:05 +13:00
2021-05-09 04:29:52 +12:00
let mut records_already_cached : BTreeMap < String , FileEntry > = Default ::default ( ) ;
let mut non_cached_files_to_check : BTreeMap < String , FileEntry > = Default ::default ( ) ;
2022-06-11 04:58:53 +12:00
let mut files_to_check = Default ::default ( ) ;
mem ::swap ( & mut self . files_to_check , & mut files_to_check ) ;
2021-03-05 00:09:53 +13:00
if self . use_cache {
2022-01-06 10:47:27 +13:00
loaded_hash_map = match load_cache_from_file ( & mut self . text_messages , self . delete_outdated_cache ) {
2021-03-05 00:09:53 +13:00
Some ( t ) = > t ,
None = > Default ::default ( ) ,
} ;
2022-06-11 04:58:53 +12:00
for ( name , file_entry ) in files_to_check {
let checked_extension = check_extension_allowed ( & file_entry . type_of_file , & self . checked_types ) ; // Only broken
2021-03-28 01:14:02 +13:00
#[ allow(clippy::if_same_then_else) ]
2022-06-11 04:58:53 +12:00
if checked_extension & & ! loaded_hash_map . contains_key ( & name ) {
// If loaded data doesn't contains current info
non_cached_files_to_check . insert ( name , file_entry . clone ( ) ) ;
} else if checked_extension & & file_entry . size ! = loaded_hash_map . get ( & name ) . unwrap ( ) . size
| | file_entry . modified_date ! = loaded_hash_map . get ( & name ) . unwrap ( ) . modified_date
{
2021-03-28 01:14:02 +13:00
// When size or modification date of image changed, then it is clear that is different image
2022-06-11 04:58:53 +12:00
non_cached_files_to_check . insert ( name , file_entry ) ;
2021-01-14 04:03:05 +13:00
} else {
2021-03-28 01:14:02 +13:00
// Checking may be omitted when already there is entry with same size and modification date
2022-06-11 04:58:53 +12:00
records_already_cached . insert ( name . clone ( ) , loaded_hash_map . get ( & name ) . unwrap ( ) . clone ( ) ) ;
2021-01-14 04:03:05 +13:00
}
}
2021-03-05 00:09:53 +13:00
} else {
loaded_hash_map = Default ::default ( ) ;
2022-06-11 04:58:53 +12:00
non_cached_files_to_check = files_to_check ;
2021-01-14 04:03:05 +13:00
}
2021-01-13 08:06:12 +13:00
let progress_thread_run = Arc ::new ( AtomicBool ::new ( true ) ) ;
2023-05-02 06:39:13 +12:00
let atomic_counter = Arc ::new ( AtomicUsize ::new ( 0 ) ) ;
let progress_thread_handle = self . prepare_thread_handler_broken_files ( progress_sender , & progress_thread_run , & atomic_counter , 1 , 1 , non_cached_files_to_check . len ( ) ) ;
2021-01-13 08:06:12 +13:00
2021-01-14 04:03:05 +13:00
let mut vec_file_entry : Vec < FileEntry > = non_cached_files_to_check
2021-12-30 01:43:38 +13:00
. into_par_iter ( )
. map ( | ( _ , mut file_entry ) | {
2023-05-02 06:39:13 +12:00
atomic_counter . fetch_add ( 1 , Ordering ::Relaxed ) ;
2021-01-13 08:06:12 +13:00
if stop_receiver . is_some ( ) & & stop_receiver . unwrap ( ) . try_recv ( ) . is_ok ( ) {
return None ;
}
2021-01-15 23:04:52 +13:00
match file_entry . type_of_file {
2021-01-14 04:03:05 +13:00
TypeOfFile ::Image = > {
2022-05-17 04:23:07 +12:00
let mut file_entry_clone = file_entry . clone ( ) ;
2021-12-30 01:43:38 +13:00
let result = panic ::catch_unwind ( | | {
2022-05-17 04:23:07 +12:00
if let Err ( e ) = image ::open ( & file_entry . path ) {
let error_string = e . to_string ( ) ;
// This error is a problem with image library, remove check when https://github.com/image-rs/jpeg-decoder/issues/130 will be fixed
if error_string . contains ( " spectral selection is not allowed in non-progressive scan " ) {
return Some ( None ) ;
2021-01-14 04:03:05 +13:00
}
2022-05-17 04:23:07 +12:00
file_entry . error_string = error_string ;
2021-12-30 01:43:38 +13:00
}
2022-05-17 04:23:07 +12:00
Some ( Some ( file_entry ) )
2021-12-30 01:43:38 +13:00
} ) ;
2022-05-17 04:23:07 +12:00
// If image crashed during opening, needs to be printed info about crashes thing
2021-12-30 01:43:38 +13:00
if let Ok ( image_result ) = result {
2022-05-17 04:23:07 +12:00
image_result
2021-12-30 01:43:38 +13:00
} else {
2022-08-19 03:16:09 +12:00
let message = create_crash_message ( " Image-rs " , & file_entry_clone . path . to_string_lossy ( ) , " https://github.com/Serial-ATA/lofty-rs " ) ;
2022-07-28 17:29:50 +12:00
println! ( " {message} " ) ;
file_entry_clone . error_string = message ;
2022-05-17 04:23:07 +12:00
Some ( Some ( file_entry_clone ) )
2021-01-14 04:03:05 +13:00
}
}
2022-06-01 03:52:55 +12:00
TypeOfFile ::ArchiveZip = > match File ::open ( & file_entry . path ) {
2022-05-17 04:23:07 +12:00
Ok ( file ) = > {
if let Err ( e ) = zip ::ZipArchive ::new ( file ) {
2021-12-30 01:43:38 +13:00
file_entry . error_string = e . to_string ( ) ;
2021-01-15 23:04:52 +13:00
}
2022-05-17 04:23:07 +12:00
Some ( Some ( file_entry ) )
2022-06-01 03:52:55 +12:00
}
2022-07-28 17:29:50 +12:00
Err ( _inspected ) = > Some ( None ) ,
2021-01-15 23:04:52 +13:00
} ,
2022-06-01 03:52:55 +12:00
TypeOfFile ::Audio = > match File ::open ( & file_entry . path ) {
2022-07-28 17:29:50 +12:00
Ok ( file ) = > {
let mut file_entry_clone = file_entry . clone ( ) ;
2022-05-17 04:23:07 +12:00
2022-07-28 17:29:50 +12:00
let result = panic ::catch_unwind ( | | {
if let Err ( e ) = audio_checker ::parse_audio_file ( file ) {
file_entry . error_string = e . to_string ( ) ;
2022-05-17 04:23:07 +12:00
}
2022-07-28 17:29:50 +12:00
Some ( Some ( file_entry ) )
} ) ;
if let Ok ( audio_result ) = result {
audio_result
} else {
2022-08-19 03:16:09 +12:00
let message = create_crash_message ( " Symphonia " , & file_entry_clone . path . to_string_lossy ( ) , " https://github.com/pdeljanov/Symphonia " ) ;
2022-07-28 17:29:50 +12:00
println! ( " {message} " ) ;
file_entry_clone . error_string = message ;
Some ( Some ( file_entry_clone ) )
2022-06-01 03:52:55 +12:00
}
2022-07-28 17:29:50 +12:00
}
2022-05-22 20:59:09 +12:00
Err ( _inspected ) = > Some ( None ) ,
2021-01-15 23:04:52 +13:00
} ,
2023-05-01 02:38:19 +12:00
TypeOfFile ::PDF = > {
let parser_options = ParseOptions ::tolerant ( ) ; // Only show as broken files with really big bugs
2023-02-19 22:21:14 +13:00
2023-05-01 02:38:19 +12:00
let mut file_entry_clone = file_entry . clone ( ) ;
let result = panic ::catch_unwind ( | | {
if let Err ( e ) = FileOptions ::cached ( ) . parse_options ( parser_options ) . open ( & file_entry . path ) {
if let PdfError ::Io { .. } = e {
return Some ( None ) ;
}
2023-02-19 22:21:14 +13:00
2023-05-01 02:38:19 +12:00
let mut error_string = e . to_string ( ) ;
// Workaround for strange error message https://github.com/qarmin/czkawka/issues/898
if error_string . starts_with ( " Try at " ) {
if let Some ( start_index ) = error_string . find ( " /pdf- " ) {
error_string = format! ( " Decoding error in pdf-rs library - {} " , & error_string [ start_index .. ] ) ;
2023-02-19 22:21:14 +13:00
}
2022-05-17 04:23:07 +12:00
}
2023-05-01 02:38:19 +12:00
file_entry . error_string = error_string ;
let error = unpack_pdf_error ( e ) ;
if let PdfError ::InvalidPassword = error {
return Some ( None ) ;
}
2022-06-01 03:52:55 +12:00
}
2023-05-01 02:38:19 +12:00
Some ( Some ( file_entry ) )
} ) ;
if let Ok ( pdf_result ) = result {
pdf_result
} else {
let message = create_crash_message ( " PDF-rs " , & file_entry_clone . path . to_string_lossy ( ) , " https://github.com/pdf-rs/pdf " ) ;
println! ( " {message} " ) ;
file_entry_clone . error_string = message ;
Some ( Some ( file_entry_clone ) )
2022-05-17 04:23:07 +12:00
}
2023-05-01 02:38:19 +12:00
}
2022-05-17 04:23:07 +12:00
2021-01-14 04:03:05 +13:00
// This means that cache read invalid value because maybe cache comes from different czkawka version
TypeOfFile ::Unknown = > Some ( None ) ,
2021-01-13 08:06:12 +13:00
}
} )
. while_some ( )
2023-01-29 06:54:02 +13:00
. filter ( Option ::is_some )
. map ( Option ::unwrap )
2021-01-13 08:06:12 +13:00
. collect ::< Vec < FileEntry > > ( ) ;
2023-05-02 19:17:39 +12:00
send_info_and_wait_for_ending_all_threads ( & progress_thread_run , progress_thread_handle ) ;
2021-01-13 08:06:12 +13:00
2021-01-14 04:03:05 +13:00
// Just connect loaded results with already calculated
for ( _name , file_entry ) in records_already_cached {
vec_file_entry . push ( file_entry . clone ( ) ) ;
}
2021-03-05 00:09:53 +13:00
if self . use_cache {
// Must save all results to file, old loaded from file with all currently counted results
2022-05-17 04:23:07 +12:00
let mut all_results : BTreeMap < String , FileEntry > = Default ::default ( ) ;
2021-01-14 04:03:05 +13:00
2022-05-10 05:40:35 +12:00
for file_entry in vec_file_entry . clone ( ) {
2021-03-05 00:09:53 +13:00
all_results . insert ( file_entry . path . to_string_lossy ( ) . to_string ( ) , file_entry ) ;
}
for ( _name , file_entry ) in loaded_hash_map {
all_results . insert ( file_entry . path . to_string_lossy ( ) . to_string ( ) , file_entry ) ;
}
2022-01-06 10:47:27 +13:00
save_cache_to_file ( & all_results , & mut self . text_messages , self . save_also_as_json ) ;
2021-01-14 04:03:05 +13:00
}
2022-05-10 05:40:35 +12:00
self . broken_files = vec_file_entry
. into_par_iter ( )
. filter_map ( | f | if f . error_string . is_empty ( ) { None } else { Some ( f ) } )
. collect ( ) ;
2021-01-14 04:03:05 +13:00
self . information . number_of_broken_files = self . broken_files . len ( ) ;
2023-01-29 06:54:02 +13:00
Common ::print_time ( system_time , SystemTime ::now ( ) , " sort_images - reading data from files in parallel " ) ;
2021-01-13 08:06:12 +13:00
2021-01-16 00:41:45 +13:00
// Clean unused data
2021-01-14 04:03:05 +13:00
self . files_to_check = Default ::default ( ) ;
2021-01-13 08:06:12 +13:00
true
}
/// Function to delete files, from filed Vector
fn delete_files ( & mut self ) {
let start_time : SystemTime = SystemTime ::now ( ) ;
match self . delete_method {
DeleteMethod ::Delete = > {
2023-01-29 06:54:02 +13:00
for file_entry in & self . broken_files {
2021-01-13 08:06:12 +13:00
if fs ::remove_file ( & file_entry . path ) . is_err ( ) {
self . text_messages . warnings . push ( file_entry . path . display ( ) . to_string ( ) ) ;
}
}
}
DeleteMethod ::None = > {
//Just do nothing
}
}
2023-01-29 06:54:02 +13:00
Common ::print_time ( start_time , SystemTime ::now ( ) , " delete_files " ) ;
2021-01-13 08:06:12 +13:00
}
}
impl Default for BrokenFiles {
    /// Equivalent to `BrokenFiles::new()`.
    fn default() -> Self {
        Self::new()
    }
}
impl DebugPrint for BrokenFiles {
    #[allow(dead_code)]
    #[allow(unreachable_code)]
    /// Debugging printing - only available on debug build
    fn debug_print(&self) {
        // Release builds return immediately and print nothing.
        #[cfg(not(debug_assertions))]
        {
            return;
        }
        println!("---------------DEBUG PRINT---------------");
        println!("### Information's");
        println!("Errors size - {}", self.text_messages.errors.len());
        println!("Warnings size - {}", self.text_messages.warnings.len());
        println!("Messages size - {}", self.text_messages.messages.len());
        println!("### Other");
        println!("Excluded items - {:?}", self.excluded_items.items);
        println!("Included directories - {:?}", self.directories.included_directories);
        println!("Excluded directories - {:?}", self.directories.excluded_directories);
        println!("Recursive search - {}", self.recursive_search);
        // Filesystem-skipping option only exists on Unix.
        #[cfg(target_family = "unix")]
        println!("Skip other filesystems - {}", self.directories.exclude_other_filesystems());
        println!("Delete Method - {:?}", self.delete_method);
        println!("-----------------------------------------");
    }
}
impl SaveResults for BrokenFiles {
    /// Writes a text report of all broken files to `file_name`
    /// ("results.txt" when the name is empty). Returns `false` on
    /// I/O errors, which are also recorded in `text_messages.errors`.
    fn save_results_to_file(&mut self, file_name: &str) -> bool {
        let start_time: SystemTime = SystemTime::now();
        let file_name: String = match file_name {
            "" => "results.txt".to_string(),
            k => k.to_string(),
        };

        let file_handler = match File::create(&file_name) {
            Ok(t) => t,
            Err(e) => {
                self.text_messages.errors.push(format!("Failed to create file {file_name}, reason {e}"));
                return false;
            }
        };
        let mut writer = BufWriter::new(file_handler);

        if let Err(e) = writeln!(
            writer,
            "Results of searching {:?} with excluded directories {:?} and excluded items {:?}",
            self.directories.included_directories, self.directories.excluded_directories, self.excluded_items.items
        ) {
            self.text_messages.errors.push(format!("Failed to save results to file {file_name}, reason {e}"));
            return false;
        }

        if !self.broken_files.is_empty() {
            writeln!(writer, "Found {} broken files.", self.information.number_of_broken_files).unwrap();
            for file_entry in &self.broken_files {
                writeln!(writer, "{} - {}", file_entry.path.display(), file_entry.error_string).unwrap();
            }
        } else {
            write!(writer, "Not found any broken files.").unwrap();
        }

        Common::print_time(start_time, SystemTime::now(), "save_results_to_file");
        true
    }
}
impl PrintResults for BrokenFiles {
    /// Prints every broken file with its error message.
    /// Only needed for CLI
    fn print_results(&self) {
        let start_time: SystemTime = SystemTime::now();
        println!("Found {} broken files.\n", self.information.number_of_broken_files);
        for file_entry in &self.broken_files {
            println!("{} - {}", file_entry.path.display(), file_entry.error_string);
        }
        Common::print_time(start_time, SystemTime::now(), "print_entries");
    }
}
/// Persists check results to the cache (binary via bincode, optionally JSON too).
///
/// Entries of 1024 bytes or less are dropped first — presumably because tiny
/// files are cheap to re-check and not worth the cache space; confirm intent.
fn save_cache_to_file(old_hashmap: &BTreeMap<String, FileEntry>, text_messages: &mut Messages, save_also_as_json: bool) {
    let mut hashmap: BTreeMap<String, FileEntry> = Default::default();
    for (path, fe) in old_hashmap {
        if fe.size > 1024 {
            hashmap.insert(path.clone(), fe.clone());
        }
    }
    let hashmap = &hashmap;

    if let Some(((file_handler, cache_file), (file_handler_json, cache_file_json))) = open_cache_folder(&get_cache_file(), true, save_also_as_json, &mut text_messages.warnings) {
        {
            let writer = BufWriter::new(file_handler.unwrap()); // Unwrap because cannot fail here
            if let Err(e) = bincode::serialize_into(writer, hashmap) {
                text_messages
                    .warnings
                    .push(format!("Cannot write data to cache file {}, reason {}", cache_file.display(), e));
                return;
            }
        }
        if save_also_as_json {
            if let Some(file_handler_json) = file_handler_json {
                let writer = BufWriter::new(file_handler_json);
                if let Err(e) = serde_json::to_writer(writer, hashmap) {
                    text_messages
                        .warnings
                        .push(format!("Cannot write data to cache file {}, reason {}", cache_file_json.display(), e));
                    return;
                }
            }
        }

        text_messages.messages.push(format!("Properly saved to file {} cache entries.", hashmap.len()));
    }
}
/// Loads cached check results, preferring the binary file and falling back to JSON.
///
/// When `delete_outdated_cache` is set, entries whose source path no longer
/// exists on disk are dropped. Returns `None` when no cache could be read.
fn load_cache_from_file(text_messages: &mut Messages, delete_outdated_cache: bool) -> Option<BTreeMap<String, FileEntry>> {
    if let Some(((file_handler, cache_file), (file_handler_json, cache_file_json))) = open_cache_folder(&get_cache_file(), false, true, &mut text_messages.warnings) {
        let mut hashmap_loaded_entries: BTreeMap<String, FileEntry>;
        if let Some(file_handler) = file_handler {
            let reader = BufReader::new(file_handler);
            hashmap_loaded_entries = match bincode::deserialize_from(reader) {
                Ok(t) => t,
                Err(e) => {
                    text_messages
                        .warnings
                        .push(format!("Failed to load data from cache file {}, reason {}", cache_file.display(), e));
                    return None;
                }
            };
        } else {
            let reader = BufReader::new(file_handler_json.unwrap()); // Unwrap cannot fail, because at least one file must be valid
            hashmap_loaded_entries = match serde_json::from_reader(reader) {
                Ok(t) => t,
                Err(e) => {
                    text_messages
                        .warnings
                        .push(format!("Failed to load data from cache file {}, reason {}", cache_file_json.display(), e));
                    return None;
                }
            };
        }

        // Don't load cache data if destination file not exists
        if delete_outdated_cache {
            hashmap_loaded_entries.retain(|src_path, _file_entry| Path::new(src_path).exists());
        }

        text_messages.messages.push(format!("Properly loaded {} cache entries.", hashmap_loaded_entries.len()));

        return Some(hashmap_loaded_entries);
    }
    None
}
/// Name of the on-disk cache file used by this tool.
/// (The literal previously carried stray padding spaces from formatting
/// corruption, which would have produced a wrong filename.)
fn get_cache_file() -> String {
    "cache_broken_files.bin".to_string()
}
2022-05-17 04:23:07 +12:00
fn check_extension_availability ( file_name_lowercase : & str ) -> TypeOfFile {
2022-01-01 10:34:24 +13:00
if IMAGE_RS_BROKEN_FILES_EXTENSIONS . iter ( ) . any ( | e | file_name_lowercase . ends_with ( e ) ) {
2021-01-14 04:03:05 +13:00
TypeOfFile ::Image
2022-01-01 10:34:24 +13:00
} else if ZIP_FILES_EXTENSIONS . iter ( ) . any ( | e | file_name_lowercase . ends_with ( e ) ) {
2021-03-28 01:14:02 +13:00
TypeOfFile ::ArchiveZip
2022-01-01 10:34:24 +13:00
} else if AUDIO_FILES_EXTENSIONS . iter ( ) . any ( | e | file_name_lowercase . ends_with ( e ) ) {
2022-05-17 04:23:07 +12:00
TypeOfFile ::Audio
} else if PDF_FILES_EXTENSIONS . iter ( ) . any ( | e | file_name_lowercase . ends_with ( e ) ) {
TypeOfFile ::PDF
2021-01-14 04:03:05 +13:00
} else {
TypeOfFile ::Unknown
}
}
2022-07-25 06:48:02 +12:00
2022-06-11 04:58:53 +12:00
fn check_extension_allowed ( type_of_file : & TypeOfFile , checked_types : & CheckedTypes ) -> bool {
( ( * type_of_file = = TypeOfFile ::Image ) & & ( ( * checked_types & CheckedTypes ::IMAGE ) = = CheckedTypes ::IMAGE ) )
| | ( ( * type_of_file = = TypeOfFile ::PDF ) & & ( ( * checked_types & CheckedTypes ::PDF ) = = CheckedTypes ::PDF ) )
| | ( ( * type_of_file = = TypeOfFile ::ArchiveZip ) & & ( ( * checked_types & CheckedTypes ::ARCHIVE ) = = CheckedTypes ::ARCHIVE ) )
| | ( ( * type_of_file = = TypeOfFile ::Audio ) & & ( ( * checked_types & CheckedTypes ::AUDIO ) = = CheckedTypes ::AUDIO ) )
}
2022-05-17 04:23:07 +12:00
fn unpack_pdf_error ( e : PdfError ) -> PdfError {
if let Try {
file : _ ,
line : _ ,
column : _ ,
2022-12-30 05:25:01 +13:00
context : _ ,
2022-05-17 04:23:07 +12:00
source ,
} = e
{
unpack_pdf_error ( * source )
} else {
e
}
}