Smaller code refactoring

parent 85432f1c18
commit 4fb2a0cc92
@@ -1,4 +1,5 @@
 ## Version 0.1.3
+- Big code refactoring
 
 ## Version 0.1.2 - 26.09.2020r
 - Add basic search empty folders in GTK GUI
@@ -72,6 +72,13 @@ impl BigFile {
             number_of_files_to_check: 50,
         }
     }
+
+    pub fn find_big_files(&mut self) {
+        self.optimize_directories();
+        self.look_for_big_files();
+        self.debug_print();
+    }
+
     pub fn get_text_messages(&self) -> &Messages {
         &self.text_messages
     }
@@ -80,11 +87,6 @@ impl BigFile {
         &self.information
     }
 
-    pub fn find_big_files(&mut self) {
-        self.optimize_directories();
-        self.look_for_big_files();
-        self.debug_print();
-    }
 
     pub fn set_recursive_search(&mut self, recursive_search: bool) {
         self.recursive_search = recursive_search;
@@ -172,8 +174,8 @@ impl BigFile {
                 // Checking allowed extensions
                 if !self.allowed_extensions.file_extensions.is_empty() {
                     have_valid_extension = false;
-                    for i in &self.allowed_extensions.file_extensions {
-                        if file_name_lowercase.ends_with((".".to_string() + i.to_lowercase().as_str()).as_str()) {
+                    for extension in &self.allowed_extensions.file_extensions {
+                        if file_name_lowercase.ends_with((".".to_string() + extension.to_lowercase().as_str()).as_str()) {
                             have_valid_extension = true;
                             break;
                         }
@@ -206,14 +208,14 @@ impl BigFile {
                         Ok(t) => t,
                         Err(_) => {
                             self.text_messages.warnings.push("Unable to get creation date from file ".to_string() + current_file_name.as_str());
-                            SystemTime::now()
+                            continue
                         } // Permissions Denied
                     },
                     modified_date: match metadata.modified() {
                         Ok(t) => t,
                         Err(_) => {
                             self.text_messages.warnings.push("Unable to get modification date from file ".to_string() + current_file_name.as_str());
-                            SystemTime::now()
+                            continue
                         } // Permissions Denied
                     },
                 };
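Note on the two changed lines above: when a file's creation or modification date cannot be read, the old code stored a fake SystemTime::now(); the new code records the warning and skips the file with continue. A minimal standalone sketch of that pattern (the warnings vector stands in for self.text_messages.warnings; not the project's exact code):

    use std::fs;
    use std::time::SystemTime;

    // Returns the modification date, or None (with a warning) if it cannot be read.
    fn modified_or_skip(path: &str, warnings: &mut Vec<String>) -> Option<SystemTime> {
        let metadata = fs::metadata(path).ok()?;
        match metadata.modified() {
            Ok(t) => Some(t),
            Err(_) => {
                // Old behaviour stored SystemTime::now() here; now the file is skipped instead.
                warnings.push(format!("Unable to get modification date from file {}", path));
                None
            }
        }
    }

    fn main() {
        let mut warnings = Vec::new();
        for path in ["Cargo.toml", "no_such_file"] {
            match modified_or_skip(path, &mut warnings) {
                Some(date) => println!("{}: {:?}", path, date),
                None => continue, // same role as the new `continue` in the hunk above
            }
        }
        println!("warnings: {:?}", warnings);
    }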
@@ -232,6 +234,9 @@ impl BigFile {
             }
         }
 
+
+
+        // Extract n biggest files to new TreeMap
         let mut new_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
 
         for (size, vector) in self.big_files.iter().rev() {
@@ -11,6 +11,7 @@ impl Extensions {
         Extensions { file_extensions: vec![] }
     }
     /// List of allowed extensions, only files with this extensions will be checking if are duplicates
+    /// After, extensions cannot contains any dot, commas etc.
    pub fn set_allowed_extensions(&mut self, mut allowed_extensions: String, text_messages: &mut Messages) {
        let start_time: SystemTime = SystemTime::now();
        if allowed_extensions.is_empty() {
@@ -84,8 +84,8 @@ impl Default for Info {
 pub struct DuplicateFinder {
     text_messages: Messages,
     information: Info,
-    files_with_identical_size: BTreeMap<u64, Vec<FileEntry>>,
-    files_with_identical_hashes: BTreeMap<u64, Vec<Vec<FileEntry>>>,
+    files_with_identical_size: BTreeMap<u64, Vec<FileEntry>>, // File Size, File Entry
+    files_with_identical_hashes: BTreeMap<u64, Vec<Vec<FileEntry>>>, // File Size, File Entry
     directories: Directories,
     allowed_extensions: Extensions,
     excluded_items: ExcludedItems,
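The comments added to the two map fields above spell out their layout: files_with_identical_size groups FileEntry values under their file size, and files_with_identical_hashes keeps, for each size, the groups of entries that also share a hash. A simplified sketch of those two stages (type and field names shortened, not the project's real definitions):

    use std::collections::BTreeMap;

    #[derive(Clone, Debug)]
    struct FileEntry {
        path: String,
        size: u64,
    }

    // Stage 1: file size -> every file found with that size.
    type BySize = BTreeMap<u64, Vec<FileEntry>>;
    // Stage 2: file size -> groups of files that also share a hash.
    type ByHash = BTreeMap<u64, Vec<Vec<FileEntry>>>;

    fn main() {
        let mut by_size: BySize = BTreeMap::new();
        for entry in [
            FileEntry { path: "a.txt".into(), size: 10 },
            FileEntry { path: "b.txt".into(), size: 10 },
        ] {
            by_size.entry(entry.size).or_insert_with(Vec::new).push(entry);
        }

        // Pretend both files hashed to the same value, so they form a single group.
        let mut by_hash: ByHash = BTreeMap::new();
        by_hash.entry(10).or_insert_with(Vec::new).push(by_size[&10].clone());

        println!("sizes: {:?}", by_size);
        println!("duplicate groups: {:?}", by_hash);
    }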
@@ -188,6 +188,7 @@ impl DuplicateFinder {
         while !folders_to_check.is_empty() {
             current_folder = folders_to_check.pop().unwrap();
 
+            // Read current dir, if permission are denied just go to next
             let read_dir = match fs::read_dir(&current_folder) {
                 Ok(t) => t,
                 Err(_) => {
@@ -195,6 +196,8 @@ impl DuplicateFinder {
                     continue;
                 } // Permissions denied
             };
+
+            // Check every sub folder/file/link etc.
             for entry in read_dir {
                 let entry_data = match entry {
                     Ok(t) => t,
@@ -250,8 +253,8 @@ impl DuplicateFinder {
                 // Checking allowed extensions
                 if !self.allowed_extensions.file_extensions.is_empty() {
                     have_valid_extension = false;
-                    for i in &self.allowed_extensions.file_extensions {
-                        if file_name_lowercase.ends_with((".".to_string() + i.to_lowercase().as_str()).as_str()) {
+                    for extension in &self.allowed_extensions.file_extensions {
+                        if file_name_lowercase.ends_with((".".to_string() + extension.to_lowercase().as_str()).as_str()) {
                             have_valid_extension = true;
                             break;
                         }
@@ -284,18 +287,19 @@ impl DuplicateFinder {
                         Ok(t) => t,
                         Err(_) => {
                             self.text_messages.warnings.push("Unable to get creation date from file ".to_string() + current_file_name.as_str());
-                            SystemTime::now()
+                            continue;
                         } // Permissions Denied
                     },
                     modified_date: match metadata.modified() {
                         Ok(t) => t,
                         Err(_) => {
                             self.text_messages.warnings.push("Unable to get modification date from file ".to_string() + current_file_name.as_str());
-                            SystemTime::now()
+                            continue;
                         } // Permissions Denied
                     },
                 };
 
+                // Adding files to BTreeMap
                 self.files_with_identical_size.entry(metadata.len()).or_insert_with(Vec::new);
                 self.files_with_identical_size.get_mut(&metadata.len()).unwrap().push(fe);
 
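The last two non-blank lines of this hunk use the map entry API in two steps: first ensure an empty Vec exists for the size, then fetch it with get_mut and push. Since or_insert_with returns a mutable reference, the same thing fits in one expression; a small standalone sketch (not the project's code):

    use std::collections::BTreeMap;

    fn main() {
        let mut files_by_size: BTreeMap<u64, Vec<String>> = BTreeMap::new();

        // Two-step form, as in the hunk above: create the entry, then look it up again.
        files_by_size.entry(1024).or_insert_with(Vec::new);
        files_by_size.get_mut(&1024).unwrap().push("a.bin".to_string());

        // Equivalent single expression: or_insert_with returns &mut Vec<String> directly.
        files_by_size.entry(1024).or_insert_with(Vec::new).push("b.bin".to_string());

        assert_eq!(files_by_size[&1024], vec!["a.bin".to_string(), "b.bin".to_string()]);
        println!("{:?}", files_by_size);
    }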
@@ -310,7 +314,7 @@ impl DuplicateFinder {
             }
         }
 
-        // Remove files with unique size
+        // Create new BTreeMap without single size entries(files have not duplicates)
         let mut new_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();
 
         self.information.number_of_duplicated_files_by_size = 0;
@@ -334,14 +338,14 @@ impl DuplicateFinder {
         let mut file_handler: File;
         let mut hashmap_with_hash: HashMap<String, Vec<FileEntry>>;
 
-        for entry in &self.files_with_identical_size {
+        for (size, vector) in &self.files_with_identical_size {
             hashmap_with_hash = Default::default();
 
-            for file_entry in entry.1.iter().enumerate() {
-                file_handler = match File::open(&file_entry.1.path) {
+            for file_entry in vector {
+                file_handler = match File::open(&file_entry.path) {
                     Ok(t) => t,
                     Err(_) => {
-                        self.text_messages.warnings.push("Unable to check hash of file ".to_string() + file_entry.1.path.as_str());
+                        self.text_messages.warnings.push("Unable to check hash of file ".to_string() + file_entry.path.as_str());
                         continue;
                     }
                 };
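The refactor above replaces positional tuple access (entry.0, entry.1, file_entry.1) with destructuring in the for loop header, and drops an .iter().enumerate() whose index was never used. A brief standalone comparison of the two styles:

    use std::collections::BTreeMap;

    fn main() {
        let mut map: BTreeMap<u64, Vec<&str>> = BTreeMap::new();
        map.insert(10, vec!["a", "b"]);
        map.insert(20, vec!["c"]);

        // Old style: tuple fields, plus an unnecessary enumerate() index.
        for entry in &map {
            for file in entry.1.iter().enumerate() {
                println!("size {} -> {}", entry.0, file.1);
            }
        }

        // New style: destructure the key/value pair and iterate the Vec directly.
        for (size, files) in &map {
            for file in files {
                println!("size {} -> {}", size, file);
            }
        }
    }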
@@ -354,7 +358,7 @@ impl DuplicateFinder {
                     let n = match file_handler.read(&mut buffer) {
                         Ok(t) => t,
                         Err(_) => {
-                            self.text_messages.warnings.push("Error happened when checking hash of file ".to_string() + file_entry.1.path.as_str());
+                            self.text_messages.warnings.push("Error happened when checking hash of file ".to_string() + file_entry.path.as_str());
                             error_reading_file = true;
                             break;
                         }
@@ -368,22 +372,22 @@ impl DuplicateFinder {
                 if !error_reading_file {
                     let hash_string: String = hasher.finalize().to_hex().to_string();
                     hashmap_with_hash.entry(hash_string.to_string()).or_insert_with(Vec::new);
-                    hashmap_with_hash.get_mut(hash_string.as_str()).unwrap().push(file_entry.1.to_owned());
+                    hashmap_with_hash.get_mut(hash_string.as_str()).unwrap().push(file_entry.to_owned());
                 }
             }
-            for hash_entry in hashmap_with_hash {
-                if hash_entry.1.len() > 1 {
-                    self.files_with_identical_hashes.entry(*entry.0).or_insert_with(Vec::new);
-                    self.files_with_identical_hashes.get_mut(entry.0).unwrap().push(hash_entry.1);
+            for (_string, vector) in hashmap_with_hash {
+                if vector.len() > 1 {
+                    self.files_with_identical_hashes.entry(*size).or_insert_with(Vec::new);
+                    self.files_with_identical_hashes.get_mut(size).unwrap().push(vector);
                 }
             }
         }
 
-        for (size, vector) in &self.files_with_identical_hashes {
-            for vec_file_entry in vector {
-                self.information.number_of_duplicated_files_by_hash += vec_file_entry.len() - 1;
+        for (size, vector_vectors) in &self.files_with_identical_hashes {
+            for vector in vector_vectors {
+                self.information.number_of_duplicated_files_by_hash += vector.len() - 1;
                 self.information.number_of_groups_by_hash += 1;
-                self.information.lost_space_by_hash += (vec_file_entry.len() as u64 - 1) * size;
+                self.information.lost_space_by_hash += (vector.len() as u64 - 1) * size;
             }
         }
 
@@ -397,9 +401,9 @@ impl DuplicateFinder {
 
         match self.check_method {
             CheckingMethod::Hash => {
-                for entry in &self.files_with_identical_hashes {
-                    for vector in entry.1 {
-                        let tuple: (u64, usize, usize) = delete_files(&vector, &self.delete_method, &mut self.text_messages.warnings);
+                for (_size, vector_vectors) in &self.files_with_identical_hashes {
+                    for vector in vector_vectors.iter() {
+                        let tuple: (u64, usize, usize) = delete_files(vector, &self.delete_method, &mut self.text_messages.warnings);
                         self.information.gained_space += tuple.0;
                         self.information.number_of_removed_files += tuple.1;
                         self.information.number_of_failed_to_remove_files += tuple.2;
@@ -407,8 +411,8 @@ impl DuplicateFinder {
                 }
             }
             CheckingMethod::Size => {
-                for entry in &self.files_with_identical_size {
-                    let tuple: (u64, usize, usize) = delete_files(&entry.1, &self.delete_method, &mut self.text_messages.warnings);
+                for (_size, vector) in &self.files_with_identical_size {
+                    let tuple: (u64, usize, usize) = delete_files(vector, &self.delete_method, &mut self.text_messages.warnings);
                     self.information.gained_space += tuple.0;
                     self.information.number_of_removed_files += tuple.1;
                     self.information.number_of_failed_to_remove_files += tuple.2;
@@ -497,7 +501,7 @@ impl SaveResults for DuplicateFinder {
         let mut file = match File::create(&file_name) {
             Ok(t) => t,
             Err(_) => {
-                self.text_messages.errors.push("Failed to create file ".to_string() + file_name.as_str());
+                self.text_messages.errors.push(format!("Failed to create file {}", file_name));
                 return false;
             }
         };
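From this hunk onward, user-facing messages are built with format! instead of chained String + &str concatenation; the following hunks apply the same change to the saved report. The two spellings produce the same string:

    fn main() {
        let file_name = "results.txt";

        // Old style: repeated String + &str concatenation.
        let old = "Failed to create file ".to_string() + file_name;

        // New style: a single format! call, easier to read and extend.
        let new = format!("Failed to create file {}", file_name);

        assert_eq!(old, new);
        println!("{}", new);
    }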
@@ -505,7 +509,7 @@ impl SaveResults for DuplicateFinder {
         match file.write_all(format!("Results of searching in {:?}\n", self.directories.included_directories).as_bytes()) {
             Ok(_) => (),
             Err(_) => {
-                self.text_messages.errors.push("Failed to save results to file ".to_string() + file_name.as_str());
+                self.text_messages.errors.push(format!("Failed to save results to file {}", file_name));
                 return false;
             }
         }
@@ -513,49 +517,39 @@ impl SaveResults for DuplicateFinder {
         if !self.files_with_identical_size.is_empty() {
             file.write_all(b"-------------------------------------------------Files with same size-------------------------------------------------\n").unwrap();
             file.write_all(
-                ("Found ".to_string()
-                    + self.information.number_of_duplicated_files_by_size.to_string().as_str()
-                    + " duplicated files which in "
-                    + self.information.number_of_groups_by_size.to_string().as_str()
-                    + " groups which takes "
-                    + self.information.lost_space_by_size.file_size(options::BINARY).unwrap().as_str()
-                    + ".\n")
-                    .as_bytes(),
+                format!(
+                    "Found {} duplicated files which in {} groups which takes {}.\n",
+                    self.information.number_of_duplicated_files_by_size,
+                    self.information.number_of_groups_by_size,
+                    self.information.lost_space_by_size.file_size(options::BINARY).unwrap()
+                )
+                .as_bytes(),
             )
             .unwrap();
-            for (size, files) in self.files_with_identical_size.iter().rev() {
-                file.write_all(b"\n---- Size ").unwrap();
-                file.write_all(size.file_size(options::BINARY).unwrap().as_bytes()).unwrap();
-                file.write_all((" (".to_string() + size.to_string().as_str() + ")").as_bytes()).unwrap();
-                file.write_all((" - ".to_string() + files.len().to_string().as_str() + " files").as_bytes()).unwrap();
-                file.write_all(b"\n").unwrap();
-                for file_entry in files {
-                    file.write_all((file_entry.path.clone() + "\n").as_bytes()).unwrap();
+            for (size, vector) in self.files_with_identical_size.iter().rev() {
+                file.write_all(format!("\n---- Size {} ({}) - {} files \n", size.file_size(options::BINARY).unwrap(), size, vector.len()).as_bytes()).unwrap();
+                for file_entry in vector {
+                    file.write_all(format!("{} \n", file_entry.path).as_bytes()).unwrap();
                 }
             }
 
         if !self.files_with_identical_hashes.is_empty() {
             file.write_all(b"-------------------------------------------------Files with same hashes-------------------------------------------------\n").unwrap();
             file.write_all(
-                ("Found ".to_string()
-                    + self.information.number_of_duplicated_files_by_hash.to_string().as_str()
-                    + " duplicated files which in "
-                    + self.information.number_of_groups_by_hash.to_string().as_str()
-                    + " groups which takes "
-                    + self.information.lost_space_by_hash.file_size(options::BINARY).unwrap().as_str()
-                    + ".\n")
-                    .as_bytes(),
+                format!(
+                    "Found {} duplicated files which in {} groups which takes {}.\n",
+                    self.information.number_of_duplicated_files_by_hash,
+                    self.information.number_of_groups_by_hash,
+                    self.information.lost_space_by_hash.file_size(options::BINARY).unwrap()
+                )
+                .as_bytes(),
             )
             .unwrap();
-            for (size, files) in self.files_with_identical_hashes.iter().rev() {
-                for vector in files {
-                    file.write_all(b"\n---- Size ").unwrap();
-                    file.write_all(size.file_size(options::BINARY).unwrap().as_bytes()).unwrap();
-                    file.write_all((" (".to_string() + size.to_string().as_str() + ")").as_bytes()).unwrap();
-                    file.write_all((" - ".to_string() + vector.len().to_string().as_str() + " files").as_bytes()).unwrap();
-                    file.write_all(b"\n").unwrap();
+            for (size, vectors_vector) in self.files_with_identical_hashes.iter().rev() {
+                for vector in vectors_vector {
+                    file.write_all(format!("\n---- Size {} ({}) - {} files \n", size.file_size(options::BINARY).unwrap(), size, vector.len()).as_bytes()).unwrap();
                     for file_entry in vector {
-                        file.write_all((file_entry.path.clone() + "\n").as_bytes()).unwrap();
+                        file.write_all(format!("{} \n", file_entry.path).as_bytes()).unwrap();
                     }
                 }
             }
         }
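The rewritten report lines above print sizes in human-readable form with size.file_size(options::BINARY).unwrap(), which here appears to come from the humansize crate's FileSize trait, imported as file_size_opts as options. A hedged sketch of the call, assuming humansize 1.x:

    // Assumes Cargo.toml contains: humansize = "1"
    use humansize::{file_size_opts as options, FileSize};

    fn main() {
        let lost_space: u64 = 5 * 1024 * 1024;
        // file_size() returns Result<String, String>; it only errors for negative input,
        // so unwrap() is safe for a u64.
        let human = lost_space.file_size(options::BINARY).unwrap();
        println!("Found 2 duplicated files which in 1 groups which takes {}.", human);
    }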
@@ -9,7 +9,7 @@ use std::io::Write;
 use std::time::SystemTime;
 
 /// Enum with values which show if folder is empty.
-/// In function "optimize_folders" automatically "Maybe" is changed to "Yes", so it is not necessery to put it here
+/// In function "optimize_folders" automatically "Maybe" is changed to "Yes", so it is not necessary to put it here
 #[derive(Eq, PartialEq, Copy, Clone)]
 enum FolderEmptiness {
     No,
@@ -80,7 +80,7 @@ impl EmptyFolder {
     pub fn find_empty_folders(&mut self) {
         self.directories.optimize_directories(true, &mut self.text_messages);
         self.check_for_empty_folders(true);
-        self.check_for_empty_folders(false); // Not needed for CLI, but it is better to check this again, because maybe empty folder stops to be empty
+        //self.check_for_empty_folders(false); // Second check, should be done before deleting to be sure that empty folder is still empty
         self.optimize_folders();
         if self.delete_folders {
             self.delete_empty_folders();
|
@ -97,15 +97,15 @@ impl EmptyFolder {
|
||||||
fn optimize_folders(&mut self) {
|
fn optimize_folders(&mut self) {
|
||||||
let mut new_directory_folders: BTreeMap<String, FolderEntry> = Default::default();
|
let mut new_directory_folders: BTreeMap<String, FolderEntry> = Default::default();
|
||||||
|
|
||||||
for entry in &self.empty_folder_list {
|
for (name,folder_entry) in &self.empty_folder_list {
|
||||||
match &entry.1.parent_path {
|
match &folder_entry.parent_path {
|
||||||
Some(t) => {
|
Some(t) => {
|
||||||
if !self.empty_folder_list.contains_key(t) {
|
if !self.empty_folder_list.contains_key(t) {
|
||||||
new_directory_folders.insert(entry.0.clone(), entry.1.clone());
|
new_directory_folders.insert(name.clone(), folder_entry.clone());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
new_directory_folders.insert(entry.0.clone(), entry.1.clone());
|
new_directory_folders.insert(name.clone(), folder_entry.clone());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -135,13 +135,13 @@ impl EmptyFolder {
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Add folders searched before
|
// Add folders searched before
|
||||||
for (name, entry) in &self.empty_folder_list {
|
for (name, folder_entry) in &self.empty_folder_list {
|
||||||
folders_checked.insert(
|
folders_checked.insert(
|
||||||
name.clone(),
|
name.clone(),
|
||||||
FolderEntry {
|
FolderEntry {
|
||||||
parent_path: None,
|
parent_path: None,
|
||||||
is_empty: FolderEmptiness::Maybe,
|
is_empty: FolderEmptiness::Maybe,
|
||||||
modified_date: entry.modified_date,
|
modified_date: folder_entry.modified_date,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
folders_to_check.push(name.clone());
|
folders_to_check.push(name.clone());
|
||||||
|
@ -153,10 +153,10 @@ impl EmptyFolder {
|
||||||
while !folders_to_check.is_empty() {
|
while !folders_to_check.is_empty() {
|
||||||
self.information.number_of_checked_folders += 1;
|
self.information.number_of_checked_folders += 1;
|
||||||
current_folder = folders_to_check.pop().unwrap();
|
current_folder = folders_to_check.pop().unwrap();
|
||||||
// Checked folder may be deleted so we assume that cannot removed folder be empty
|
// Checked folder may be deleted or we may not have permissions to open it so we assume that this folder is not be empty
|
||||||
let read_dir = match fs::read_dir(¤t_folder) {
|
let read_dir = match fs::read_dir(¤t_folder) {
|
||||||
Ok(t) => t,
|
Ok(t) => t,
|
||||||
_ => {
|
Err(_) => {
|
||||||
folders_checked.get_mut(¤t_folder).unwrap().is_empty = FolderEmptiness::No;
|
folders_checked.get_mut(¤t_folder).unwrap().is_empty = FolderEmptiness::No;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
@ -195,6 +195,7 @@ impl EmptyFolder {
|
||||||
folders_checked.get_mut(¤t_folder).unwrap().is_empty = FolderEmptiness::No;
|
folders_checked.get_mut(¤t_folder).unwrap().is_empty = FolderEmptiness::No;
|
||||||
let mut d = folders_checked.get_mut(¤t_folder).unwrap();
|
let mut d = folders_checked.get_mut(¤t_folder).unwrap();
|
||||||
let mut cf: String;
|
let mut cf: String;
|
||||||
|
// Loop to recursively set as non empty this and all his parent folders
|
||||||
loop {
|
loop {
|
||||||
d.is_empty = FolderEmptiness::No;
|
d.is_empty = FolderEmptiness::No;
|
||||||
if d.parent_path != None {
|
if d.parent_path != None {
|
||||||
|
@ -207,19 +208,21 @@ impl EmptyFolder {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Now we check if checked folders are really empty, and if are, then
|
||||||
if initial_checking {
|
if initial_checking {
|
||||||
// We need to set empty folder list
|
// We need to set empty folder list
|
||||||
for entry in folders_checked {
|
for (name,folder_entry) in folders_checked {
|
||||||
if entry.1.is_empty != FolderEmptiness::No {
|
if folder_entry.is_empty != FolderEmptiness::No {
|
||||||
self.empty_folder_list.insert(entry.0, entry.1);
|
self.empty_folder_list.insert(name, folder_entry);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// We need to check if parent of folder isn't also empty, because we wan't to delete only parent with two empty folders except this folders and at the end parent folder
|
// We need to check if parent of folder isn't also empty, because we wan't to delete only parent with two empty folders except this folders and at the end parent folder
|
||||||
let mut new_folders_list: BTreeMap<String, FolderEntry> = Default::default();
|
let mut new_folders_list: BTreeMap<String, FolderEntry> = Default::default();
|
||||||
for entry in folders_checked {
|
for (name,folder_entry) in folders_checked {
|
||||||
if entry.1.is_empty != FolderEmptiness::No && self.empty_folder_list.contains_key(&entry.0) {
|
if folder_entry.is_empty != FolderEmptiness::No && self.empty_folder_list.contains_key(&name) {
|
||||||
new_folders_list.insert(entry.0, entry.1);
|
new_folders_list.insert(name, folder_entry);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
self.empty_folder_list = new_folders_list;
|
self.empty_folder_list = new_folders_list;
|
||||||
|
@@ -229,24 +232,16 @@ impl EmptyFolder {
     }
 
     /// Deletes earlier found empty folders
-    fn delete_empty_folders(&self) {
+    fn delete_empty_folders(&mut self) {
         let start_time: SystemTime = SystemTime::now();
-        let mut errors: Vec<String> = Vec::new();
         // Folders may be deleted or require too big privileges
-        for entry in &self.empty_folder_list {
-            match fs::remove_dir_all(entry.0) {
+        for (name,_folder_entry) in &self.empty_folder_list {
+            match fs::remove_dir_all(name) {
                 Ok(_) => (),
-                Err(_) => errors.push(entry.0.clone()),
+                Err(_) => self.text_messages.warnings.push(format!("Failed to remove folder {}",name)),
             };
         }
 
-        if !errors.is_empty() {
-            println!("Failed to delete some files, because they have got deleted earlier or you have too low privileges - try run it as root.");
-            println!("List of files which wasn't deleted:");
-        }
-        for i in errors {
-            println!("{}", i);
-        }
         Common::print_time(start_time, SystemTime::now(), "delete_files".to_string());
     }
 
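In this last hunk delete_empty_folders switches from collecting failures into a local errors vector and printing them, to taking &mut self and pushing a warning into the shared text_messages, so failures surface through the same channel the GUI reads. A compact sketch of that shape (Messages reduced to a plain warnings list; not the project's real types):

    use std::collections::BTreeSet;
    use std::fs;

    #[derive(Default, Debug)]
    struct Messages {
        warnings: Vec<String>,
    }

    #[derive(Default)]
    struct EmptyFolders {
        empty_folder_list: BTreeSet<String>,
        text_messages: Messages,
    }

    impl EmptyFolders {
        // Failures are recorded as warnings instead of being printed to stdout.
        fn delete_empty_folders(&mut self) {
            for name in &self.empty_folder_list {
                if fs::remove_dir_all(name).is_err() {
                    self.text_messages.warnings.push(format!("Failed to remove folder {}", name));
                }
            }
        }
    }

    fn main() {
        let mut ef = EmptyFolders::default();
        ef.empty_folder_list.insert("/this/folder/does/not/exist".to_string());
        ef.delete_empty_folders();
        println!("{:?}", ef.text_messages);
    }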