Remove display
This commit is contained in:
parent 683fcf55c6
commit 056b7133ca
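The pattern applied throughout this commit: wherever a path had to become a `String` or be written into a text report, the `Path::display()` adapter is dropped in favour of `Path::to_string_lossy()` (for building strings) or the `{:?}` Debug format (for report output). The sketch below is only an illustration of that difference; it assumes nothing beyond the Rust standard library and the example path is invented:

    use std::path::Path;

    fn main() {
        // Invented path, purely for illustration.
        let path = Path::new("/tmp/example.txt");

        // Old style in this repo: the Display adapter.
        let via_display: String = path.display().to_string();

        // New style for building strings: lossy conversion,
        // invalid UTF-8 bytes are replaced with U+FFFD.
        let via_lossy: String = path.to_string_lossy().to_string();

        // New style in written reports: Debug formatting,
        // which quotes and escapes the path.
        let via_debug: String = format!("{path:?}");

        println!("{via_display}\n{via_lossy}\n{via_debug}");
    }

For valid UTF-8 paths the first two produce identical text; the Debug form additionally wraps the path in quotes in the generated reports.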
@@ -319,12 +319,12 @@ fn collect_all_files_and_dirs(dir: &str) -> std::io::Result<CollectedFiles> {
         let path = entry.path();

         if path.is_dir() {
-            folders.insert(path.display().to_string());
-            folders_to_check.push(path.display().to_string());
+            folders.insert(path.to_string_lossy().to_string());
+            folders_to_check.push(path.to_string_lossy().to_string());
         } else if path.is_symlink() {
-            symlinks.insert(path.display().to_string());
+            symlinks.insert(path.to_string_lossy().to_string());
         } else if path.is_file() {
-            files.insert(path.display().to_string());
+            files.insert(path.to_string_lossy().to_string());
         } else {
             panic!("Unknown type of file {:?}", path);
         }
@@ -426,7 +426,7 @@ impl PrintResults for BadExtensions
         writeln!(writer, "Found {} files with invalid extension.\n", self.information.number_of_files_with_bad_extension)?;

         for file_entry in &self.bad_extensions_files {
-            writeln!(writer, "{} ----- {}", file_entry.path.display(), file_entry.proper_extensions)?;
+            writeln!(writer, "{:?} ----- {}", file_entry.path, file_entry.proper_extensions)?;
         }

         Ok(())
@@ -210,7 +210,7 @@ impl BigFile {
             DeleteMethod::Delete => {
                 for file_entry in &self.big_files {
                     if fs::remove_file(&file_entry.path).is_err() {
-                        self.common_data.text_messages.warnings.push(file_entry.path.display().to_string());
+                        self.common_data.text_messages.warnings.push(file_entry.path.to_string_lossy().to_string());
                     }
                 }
             }
@@ -259,7 +259,7 @@ impl PrintResults for BigFile {
             writeln!(writer, "{} the smallest files.\n\n", self.information.number_of_real_files)?;
         }
         for file_entry in &self.big_files {
-            writeln!(writer, "{} ({}) - {}", format_size(file_entry.size, BINARY), file_entry.size, file_entry.path.display())?;
+            writeln!(writer, "{} ({}) - {:?}", format_size(file_entry.size, BINARY), file_entry.size, file_entry.path)?;
         }
     } else {
         write!(writer, "Not found any files.").unwrap();
@@ -410,7 +410,7 @@ impl BrokenFiles {
             DeleteMethod::Delete => {
                 for file_entry in &self.broken_files {
                     if fs::remove_file(&file_entry.path).is_err() {
-                        self.common_data.text_messages.warnings.push(file_entry.path.display().to_string());
+                        self.common_data.text_messages.warnings.push(file_entry.path.to_string_lossy().to_string());
                     }
                 }
             }
@@ -465,7 +465,7 @@ impl PrintResults for BrokenFiles {
         if !self.broken_files.is_empty() {
             writeln!(writer, "Found {} broken files.", self.information.number_of_broken_files)?;
             for file_entry in &self.broken_files {
-                writeln!(writer, "{} - {}", file_entry.path.display(), file_entry.error_string)?;
+                writeln!(writer, "{:?} - {}", file_entry.path, file_entry.error_string)?;
             }
         } else {
             write!(writer, "Not found any broken files.")?;
@@ -203,18 +203,18 @@ pub fn open_cache_folder(cache_file_name: &str, save_to_cache: bool, use_json: b
     if save_to_cache {
         if cache_dir.exists() {
             if !cache_dir.is_dir() {
-                warnings.push(format!("Config dir {} is a file!", cache_dir.display()));
+                warnings.push(format!("Config dir {cache_dir:?} is a file!"));
                 return None;
             }
         } else if let Err(e) = fs::create_dir_all(&cache_dir) {
-            warnings.push(format!("Cannot create config dir {}, reason {}", cache_dir.display(), e));
+            warnings.push(format!("Cannot create config dir {cache_dir:?}, reason {e}"));
             return None;
         }

         file_handler_default = Some(match OpenOptions::new().truncate(true).write(true).create(true).open(&cache_file) {
             Ok(t) => t,
             Err(e) => {
-                warnings.push(format!("Cannot create or open cache file {}, reason {}", cache_file.display(), e));
+                warnings.push(format!("Cannot create or open cache file {cache_file:?}, reason {e}"));
                 return None;
             }
         });
@@ -222,7 +222,7 @@ pub fn open_cache_folder(cache_file_name: &str, save_to_cache: bool, use_json: b
         file_handler_json = Some(match OpenOptions::new().truncate(true).write(true).create(true).open(&cache_file_json) {
             Ok(t) => t,
             Err(e) => {
-                warnings.push(format!("Cannot create or open cache file {}, reason {}", cache_file_json.display(), e));
+                warnings.push(format!("Cannot create or open cache file {cache_file_json:?}, reason {e}"));
                 return None;
             }
         });
@@ -234,7 +234,7 @@ pub fn open_cache_folder(cache_file_name: &str, save_to_cache: bool, use_json: b
         if use_json {
             file_handler_json = Some(OpenOptions::new().read(true).open(&cache_file_json).ok()?);
         } else {
-            // messages.push(format!("Cannot find or open cache file {}", cache_file.display())); // No error or warning
+            // messages.push(format!("Cannot find or open cache file {cache_file:?}")); // No error or warning
            return None;
        }
    }
@@ -322,8 +322,8 @@ pub fn get_dynamic_image_from_raw_image(path: impl AsRef<Path> + std::fmt::Debug

 pub fn split_path(path: &Path) -> (String, String) {
     match (path.parent(), path.file_name()) {
-        (Some(dir), Some(file)) => (dir.display().to_string(), file.to_string_lossy().into_owned()),
-        (Some(dir), None) => (dir.display().to_string(), String::new()),
+        (Some(dir), Some(file)) => (dir.to_string_lossy().to_string(), file.to_string_lossy().into_owned()),
+        (Some(dir), None) => (dir.to_string_lossy().to_string(), String::new()),
         (None, _) => (String::new(), String::new()),
     }
 }
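The reworked `split_path` above keeps its signature; only the directory half of the result now goes through `to_string_lossy()`. A small usage sketch, assuming `split_path` as shown in the hunk above is in scope (the example paths are invented):

    use std::path::Path;

    fn main() {
        // Assumes the split_path defined in the hunk above is in scope.
        let (dir, file) = split_path(Path::new("/home/user/notes.txt"));
        assert_eq!(dir, "/home/user");
        assert_eq!(file, "notes.txt");

        // A bare file name has an empty parent component.
        let (dir, file) = split_path(Path::new("notes.txt"));
        assert_eq!(dir, "");
        assert_eq!(file, "notes.txt");
    }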
@@ -55,25 +55,21 @@ where
     {
         let writer = BufWriter::new(file_handler.unwrap()); // Unwrap because cannot fail here
         if let Err(e) = bincode::serialize_into(writer, &hashmap_to_save) {
-            text_messages
-                .warnings
-                .push(format!("Cannot write data to cache file {}, reason {}", cache_file.display(), e));
-            debug!("Failed to save cache to file {:?}", cache_file);
+            text_messages.warnings.push(format!("Cannot write data to cache file {cache_file:?}, reason {e}"));
+            debug!("Failed to save cache to file {cache_file:?}");
             return text_messages;
         }
-        debug!("Saved binary to file {:?}", cache_file);
+        debug!("Saved binary to file {cache_file:?}");
     }
     if save_also_as_json {
         if let Some(file_handler_json) = file_handler_json {
             let writer = BufWriter::new(file_handler_json);
             if let Err(e) = serde_json::to_writer(writer, &hashmap_to_save) {
-                text_messages
-                    .warnings
-                    .push(format!("Cannot write data to cache file {}, reason {}", cache_file_json.display(), e));
-                debug!("Failed to save cache to file {:?}", cache_file_json);
+                text_messages.warnings.push(format!("Cannot write data to cache file {cache_file_json:?}, reason {e}"));
+                debug!("Failed to save cache to file {cache_file_json:?}");
                 return text_messages;
             }
-            debug!("Saved json to file {:?}", cache_file_json);
+            debug!("Saved json to file {cache_file_json:?}");
         }
     }

@@ -182,10 +178,8 @@ where
         vec_loaded_entries = match bincode::deserialize_from(reader) {
             Ok(t) => t,
             Err(e) => {
-                text_messages
-                    .warnings
-                    .push(format!("Failed to load data from cache file {}, reason {}", cache_file.display(), e));
-                debug!("Failed to load cache from file {:?}", cache_file);
+                text_messages.warnings.push(format!("Failed to load data from cache file {cache_file:?}, reason {e}"));
+                debug!("Failed to load cache from file {cache_file:?}");
                 return (text_messages, None);
             }
         };
@@ -194,10 +188,8 @@ where
         vec_loaded_entries = match serde_json::from_reader(reader) {
             Ok(t) => t,
             Err(e) => {
-                text_messages
-                    .warnings
-                    .push(format!("Failed to load data from cache file {}, reason {}", cache_file_json.display(), e));
-                debug!("Failed to load cache from file {:?}", cache_file);
+                text_messages.warnings.push(format!("Failed to load data from cache file {cache_file_json:?}, reason {e}"));
+                debug!("Failed to load cache from file {cache_file:?}");
                 return (text_messages, None);
             }
         };
@@ -729,7 +729,7 @@ pub fn common_read_dir(current_folder: &Path, warnings: &mut Vec<String>) -> Opt
         Err(e) => {
             warnings.push(flc!(
                 "core_cannot_open_dir",
-                generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
+                generate_translation_hashmap(vec![("dir", current_folder.to_string_lossy().to_string()), ("reason", e.to_string())])
             ));
             None
         }
@@ -741,7 +741,7 @@ pub fn common_get_entry_data<'a>(entry: &'a Result<DirEntry, std::io::Error>, wa
         Err(e) => {
             warnings.push(flc!(
                 "core_cannot_read_entry_dir",
-                generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
+                generate_translation_hashmap(vec![("dir", current_folder.to_string_lossy().to_string()), ("reason", e.to_string())])
             ));
             return None;
         }
@@ -754,7 +754,7 @@ pub fn common_get_metadata_dir(entry_data: &DirEntry, warnings: &mut Vec<String>
         Err(e) => {
             warnings.push(flc!(
                 "core_cannot_read_metadata_dir",
-                generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
+                generate_translation_hashmap(vec![("dir", current_folder.to_string_lossy().to_string()), ("reason", e.to_string())])
             ));
             return None;
         }
@@ -768,7 +768,7 @@ pub fn common_get_entry_data_metadata<'a>(entry: &'a Result<DirEntry, std::io::E
         Err(e) => {
             warnings.push(flc!(
                 "core_cannot_read_entry_dir",
-                generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
+                generate_translation_hashmap(vec![("dir", current_folder.to_string_lossy().to_string()), ("reason", e.to_string())])
             ));
             return None;
         }
@@ -778,7 +778,7 @@ pub fn common_get_entry_data_metadata<'a>(entry: &'a Result<DirEntry, std::io::E
         Err(e) => {
             warnings.push(flc!(
                 "core_cannot_read_metadata_dir",
-                generate_translation_hashmap(vec![("dir", current_folder.display().to_string()), ("reason", e.to_string())])
+                generate_translation_hashmap(vec![("dir", current_folder.to_string_lossy().to_string()), ("reason", e.to_string())])
             ));
             return None;
         }
@@ -791,7 +791,7 @@ pub fn get_modified_time(metadata: &Metadata, warnings: &mut Vec<String>, curren
         Ok(t) => match t.duration_since(UNIX_EPOCH) {
             Ok(d) => d.as_secs(),
             Err(_inspected) => {
-                let translation_hashmap = generate_translation_hashmap(vec![("name", current_file_name.display().to_string())]);
+                let translation_hashmap = generate_translation_hashmap(vec![("name", current_file_name.to_string_lossy().to_string())]);
                 if is_folder {
                     warnings.push(flc!("core_folder_modified_before_epoch", translation_hashmap));
                 } else {
@@ -801,7 +801,7 @@ pub fn get_modified_time(metadata: &Metadata, warnings: &mut Vec<String>, curren
             }
         },
         Err(e) => {
-            let translation_hashmap = generate_translation_hashmap(vec![("name", current_file_name.display().to_string()), ("reason", e.to_string())]);
+            let translation_hashmap = generate_translation_hashmap(vec![("name", current_file_name.to_string_lossy().to_string()), ("reason", e.to_string())]);
             if is_folder {
                 warnings.push(flc!("core_folder_no_modification_date", translation_hashmap));
             } else {
@@ -818,7 +818,7 @@ pub fn get_lowercase_name(entry_data: &DirEntry, warnings: &mut Vec<String>) ->
         Err(_inspected) => {
             warnings.push(flc!(
                 "core_file_not_utf8_name",
-                generate_translation_hashmap(vec![("name", entry_data.path().display().to_string())])
+                generate_translation_hashmap(vec![("name", entry_data.path().to_string_lossy().to_string())])
             ));
             return None;
         }
@@ -107,7 +107,7 @@ impl Directories {
                 if !is_excluded {
                     messages.warnings.push(flc!(
                         "core_directory_must_exists",
-                        generate_translation_hashmap(vec![("path", directory.display().to_string())])
+                        generate_translation_hashmap(vec![("path", directory.to_string_lossy().to_string())])
                     ));
                 }
                 return (None, messages);
@@ -116,7 +116,7 @@ impl Directories {
             if !directory.is_dir() {
                 messages.warnings.push(flc!(
                     "core_directory_must_be_directory",
-                    generate_translation_hashmap(vec![("path", directory.display().to_string())])
+                    generate_translation_hashmap(vec![("path", directory.to_string_lossy().to_string())])
                 ));
                 return (None, messages);
             }
@@ -293,7 +293,7 @@ impl Directories {
                 Ok(m) => self.included_dev_ids.push(m.dev()),
                 Err(_) => messages.errors.push(flc!(
                     "core_directory_unable_to_get_device_id",
-                    generate_translation_hashmap(vec![("path", d.display().to_string())])
+                    generate_translation_hashmap(vec![("path", d.to_string_lossy().to_string())])
                 )),
             }
         }
@@ -326,7 +326,7 @@ impl Directories {
             Ok(m) => Ok(!self.included_dev_ids.iter().any(|&id| id == m.dev())),
             Err(_) => Err(flc!(
                 "core_directory_unable_to_get_device_id",
-                generate_translation_hashmap(vec![("path", path.display().to_string())])
+                generate_translation_hashmap(vec![("path", path.to_string_lossy().to_string())])
             )),
         }
     }
@@ -1002,7 +1002,7 @@ impl PrintResults for DuplicateFinder {
             for (name, vector) in self.files_with_identical_names.iter().rev() {
                 writeln!(writer, "Name - {} - {} files ", name, vector.len())?;
                 for j in vector {
-                    writeln!(writer, "{}", j.path.display())?;
+                    writeln!(writer, "{:?}", j.path)?;
                 }
                 writeln!(writer)?;
             }
@@ -1018,9 +1018,9 @@ impl PrintResults for DuplicateFinder {
             )?;
             for (name, (file_entry, vector)) in self.files_with_identical_names_referenced.iter().rev() {
                 writeln!(writer, "Name - {} - {} files ", name, vector.len())?;
-                writeln!(writer, "Reference file - {}", file_entry.path.display())?;
+                writeln!(writer, "Reference file - {:?}", file_entry.path)?;
                 for j in vector {
-                    writeln!(writer, "{}", j.path.display())?;
+                    writeln!(writer, "{:?}", j.path)?;
                 }
                 writeln!(writer)?;
             }
@@ -1042,7 +1042,7 @@ impl PrintResults for DuplicateFinder {
             for ((size, name), vector) in self.files_with_identical_size_names.iter().rev() {
                 writeln!(writer, "Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len())?;
                 for j in vector {
-                    writeln!(writer, "{}", j.path.display())?;
+                    writeln!(writer, "{:?}", j.path)?;
                 }
                 writeln!(writer)?;
             }
@@ -1058,9 +1058,9 @@ impl PrintResults for DuplicateFinder {
             )?;
             for ((size, name), (file_entry, vector)) in self.files_with_identical_size_names_referenced.iter().rev() {
                 writeln!(writer, "Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len())?;
-                writeln!(writer, "Reference file - {}", file_entry.path.display())?;
+                writeln!(writer, "Reference file - {:?}", file_entry.path)?;
                 for j in vector {
-                    writeln!(writer, "{}", j.path.display())?;
+                    writeln!(writer, "{:?}", j.path)?;
                 }
                 writeln!(writer)?;
             }
@@ -1084,7 +1084,7 @@ impl PrintResults for DuplicateFinder {
             for (size, vector) in self.files_with_identical_size.iter().rev() {
                 write!(writer, "\n---- Size {} ({}) - {} files \n", format_size(*size, BINARY), size, vector.len())?;
                 for file_entry in vector {
-                    writeln!(writer, "{}", file_entry.path.display())?;
+                    writeln!(writer, "{:?}", file_entry.path)?;
                 }
             }
         } else if !self.files_with_identical_size_referenced.is_empty() {
@@ -1101,9 +1101,9 @@ impl PrintResults for DuplicateFinder {
             )?;
             for (size, (file_entry, vector)) in self.files_with_identical_size_referenced.iter().rev() {
                 writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
-                writeln!(writer, "Reference file - {}", file_entry.path.display())?;
+                writeln!(writer, "Reference file - {:?}", file_entry.path)?;
                 for file_entry in vector {
-                    writeln!(writer, "{}", file_entry.path.display())?;
+                    writeln!(writer, "{:?}", file_entry.path)?;
                 }
             }
         } else {
@@ -1127,7 +1127,7 @@ impl PrintResults for DuplicateFinder {
                 for vector in vectors_vector {
                     writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
                     for file_entry in vector {
-                        writeln!(writer, "{}", file_entry.path.display())?;
+                        writeln!(writer, "{:?}", file_entry.path)?;
                     }
                 }
             }
@@ -1146,9 +1146,9 @@ impl PrintResults for DuplicateFinder {
             for (size, vectors_vector) in self.files_with_identical_hashes_referenced.iter().rev() {
                 for (file_entry, vector) in vectors_vector {
                     writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
-                    writeln!(writer, "Reference file - {}", file_entry.path.display())?;
+                    writeln!(writer, "Reference file - {:?}", file_entry.path)?;
                     for file_entry in vector {
-                        writeln!(writer, "{}", file_entry.path.display())?;
+                        writeln!(writer, "{:?}", file_entry.path)?;
                     }
                 }
             }
@@ -1226,7 +1226,7 @@ pub trait MyHasher {
 fn hash_calculation(buffer: &mut [u8], file_entry: &FileEntry, hash_type: &HashType, limit: u64) -> Result<String, String> {
     let mut file_handler = match File::open(&file_entry.path) {
         Ok(t) => t,
-        Err(e) => return Err(format!("Unable to check hash of file {}, reason {}", file_entry.path.display(), e)),
+        Err(e) => return Err(format!("Unable to check hash of file {:?}, reason {e}", file_entry.path)),
     };
     let hasher = &mut *hash_type.hasher();
     let mut current_file_read_bytes: u64 = 0;
@@ -1234,7 +1234,7 @@ fn hash_calculation(buffer: &mut [u8], file_entry: &FileEntry, hash_type: &HashT
         let n = match file_handler.read(buffer) {
             Ok(0) => break,
             Ok(t) => t,
-            Err(e) => return Err(format!("Error happened when checking hash of file {}, reason {}", file_entry.path.display(), e)),
+            Err(e) => return Err(format!("Error happened when checking hash of file {:?}, reason {}", file_entry.path, e)),
         };

         current_file_read_bytes += n as u64;
@@ -90,7 +90,7 @@ impl EmptyFiles {
             DeleteMethod::Delete => {
                 for file_entry in &self.empty_files {
                     if fs::remove_file(file_entry.path.clone()).is_err() {
-                        self.common_data.text_messages.warnings.push(file_entry.path.display().to_string());
+                        self.common_data.text_messages.warnings.push(file_entry.path.to_string_lossy().to_string());
                     }
                 }
             }
@@ -135,7 +135,7 @@ impl PrintResults for EmptyFiles {
         if !self.empty_files.is_empty() {
             writeln!(writer, "Found {} empty files.", self.information.number_of_empty_files)?;
             for file_entry in &self.empty_files {
-                writeln!(writer, "{}", file_entry.path.display())?;
+                writeln!(writer, "{:?}", file_entry.path)?;
             }
         } else {
             write!(writer, "Not found any empty files.")?;
@@ -75,7 +75,7 @@ impl InvalidSymlinks {
             DeleteMethod::Delete => {
                 for file_entry in &self.invalid_symlinks {
                     if fs::remove_file(file_entry.path.clone()).is_err() {
-                        self.common_data.text_messages.warnings.push(file_entry.path.display().to_string());
+                        self.common_data.text_messages.warnings.push(file_entry.path.to_string_lossy().to_string());
                     }
                 }
             }
@@ -112,9 +112,9 @@ impl PrintResults for InvalidSymlinks {
             for file_entry in &self.invalid_symlinks {
                 writeln!(
                     writer,
-                    "{}\t\t{}\t\t{}",
-                    file_entry.path.display(),
-                    file_entry.symlink_info.clone().expect("invalid traversal result").destination_path.display(),
+                    "{:?}\t\t{:?}\t\t{}",
+                    file_entry.path,
+                    file_entry.symlink_info.clone().expect("invalid traversal result").destination_path,
                     match file_entry.symlink_info.clone().expect("invalid traversal result").type_of_error {
                         ErrorType::InfiniteRecursion => "Infinite Recursion",
                         ErrorType::NonExistentFile => "Non Existent File",
@@ -955,14 +955,8 @@ impl PrintResults for SameMusic {
             for file_entry in vec_file_entry {
                 writeln!(
                     writer,
-                    "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
-                    file_entry.track_title,
-                    file_entry.track_artist,
-                    file_entry.year,
-                    file_entry.length,
-                    file_entry.genre,
-                    file_entry.bitrate,
-                    file_entry.path.display()
+                    "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {:?}",
+                    file_entry.track_title, file_entry.track_artist, file_entry.year, file_entry.length, file_entry.genre, file_entry.bitrate, file_entry.path
                 )?;
             }
             writeln!(writer)?;
@@ -974,26 +968,14 @@ impl PrintResults for SameMusic {
             writeln!(writer)?;
             writeln!(
                 writer,
-                "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
-                file_entry.track_title,
-                file_entry.track_artist,
-                file_entry.year,
-                file_entry.length,
-                file_entry.genre,
-                file_entry.bitrate,
-                file_entry.path.display()
+                "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {:?}",
+                file_entry.track_title, file_entry.track_artist, file_entry.year, file_entry.length, file_entry.genre, file_entry.bitrate, file_entry.path
             )?;
             for file_entry in vec_file_entry {
                 writeln!(
                     writer,
-                    "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
-                    file_entry.track_title,
-                    file_entry.track_artist,
-                    file_entry.year,
-                    file_entry.length,
-                    file_entry.genre,
-                    file_entry.bitrate,
-                    file_entry.path.display()
+                    "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {:?}",
+                    file_entry.track_title, file_entry.track_artist, file_entry.year, file_entry.length, file_entry.genre, file_entry.bitrate, file_entry.path
                 )?;
             }
             writeln!(writer)?;
@@ -856,8 +856,8 @@ impl PrintResults for SimilarImages {
                 for file_entry in struct_similar {
                     writeln!(
                         writer,
-                        "{} - {} - {} - {}",
-                        file_entry.path.display(),
+                        "{:?} - {} - {} - {}",
+                        file_entry.path,
                         file_entry.dimensions,
                         format_size(file_entry.size, BINARY),
                         get_string_from_similarity(&file_entry.similarity, self.hash_size)
@@ -873,8 +873,8 @@ impl PrintResults for SimilarImages {
                 writeln!(writer)?;
                 writeln!(
                     writer,
-                    "{} - {} - {} - {}",
-                    file_entry.path.display(),
+                    "{:?} - {} - {} - {}",
+                    file_entry.path,
                     file_entry.dimensions,
                     format_size(file_entry.size, BINARY),
                     get_string_from_similarity(&file_entry.similarity, self.hash_size)
@@ -882,8 +882,8 @@ impl PrintResults for SimilarImages {
                 for file_entry in vec_file_entry {
                     writeln!(
                         writer,
-                        "{} - {} - {} - {}",
-                        file_entry.path.display(),
+                        "{:?} - {} - {} - {}",
+                        file_entry.path,
                         file_entry.dimensions,
                         format_size(file_entry.size, BINARY),
                         get_string_from_similarity(&file_entry.similarity, self.hash_size)
@@ -440,7 +440,7 @@ impl PrintResults for SimilarVideos {
             for struct_similar in &self.similar_vectors {
                 writeln!(writer, "Found {} videos which have similar friends", struct_similar.len())?;
                 for file_entry in struct_similar {
-                    writeln!(writer, "{} - {}", file_entry.path.display(), format_size(file_entry.size, BINARY))?;
+                    writeln!(writer, "{:?} - {}", file_entry.path, format_size(file_entry.size, BINARY))?;
                 }
                 writeln!(writer)?;
             }
@@ -450,9 +450,9 @@ impl PrintResults for SimilarVideos {
             for (fe, struct_similar) in &self.similar_referenced_vectors {
                 writeln!(writer, "Found {} videos which have similar friends", struct_similar.len())?;
                 writeln!(writer)?;
-                writeln!(writer, "{} - {}", fe.path.display(), format_size(fe.size, BINARY))?;
+                writeln!(writer, "{:?} - {}", fe.path, format_size(fe.size, BINARY))?;
                 for file_entry in struct_similar {
-                    writeln!(writer, "{} - {}", file_entry.path.display(), format_size(file_entry.size, BINARY))?;
+                    writeln!(writer, "{:?} - {}", file_entry.path, format_size(file_entry.size, BINARY))?;
                 }
                 writeln!(writer)?;
             }
@@ -171,7 +171,7 @@ impl Temporary {
         let mut warnings = Vec::new();
         for file_entry in &self.temporary_files {
             if fs::remove_file(file_entry.path.clone()).is_err() {
-                warnings.push(file_entry.path.display().to_string());
+                warnings.push(file_entry.path.to_string_lossy().to_string());
             }
         }
         self.common_data.text_messages.warnings.extend(warnings);
@@ -196,7 +196,7 @@ impl PrintResults for Temporary {
         writeln!(writer, "Found {} temporary files.\n", self.information.number_of_temporary_files)?;

         for file_entry in &self.temporary_files {
-            writeln!(writer, "{}", file_entry.path.display())?;
+            writeln!(writer, "{:?}", file_entry.path)?;
         }

         Ok(())
@@ -226,7 +226,7 @@ impl LoadSaveStruct {
                 text_view_errors,
                 &flg!(
                     "saving_loading_folder_config_instead_file",
-                    generate_translation_hashmap(vec![("path", config_dir.display().to_string())])
+                    generate_translation_hashmap(vec![("path", config_dir.to_string_lossy().to_string())])
                 ),
             );
             return None;
@@ -236,7 +236,7 @@ impl LoadSaveStruct {
                 text_view_errors,
                 &flg!(
                     "saving_loading_failed_to_create_configuration_folder",
-                    generate_translation_hashmap(vec![("path", config_dir.display().to_string()), ("reason", e.to_string())])
+                    generate_translation_hashmap(vec![("path", config_dir.to_string_lossy().to_string()), ("reason", e.to_string())])
                 ),
             );
             return None;
@@ -249,7 +249,7 @@ impl LoadSaveStruct {
                 text_view_errors,
                 &flg!(
                     "saving_loading_failed_to_create_config_file",
-                    generate_translation_hashmap(vec![("path", config_file.display().to_string()), ("reason", e.to_string())])
+                    generate_translation_hashmap(vec![("path", config_file.to_string_lossy().to_string()), ("reason", e.to_string())])
                 ),
             );
             return None;
@@ -264,7 +264,7 @@ impl LoadSaveStruct {
                 text_view_errors,
                 &flg!(
                     "saving_loading_failed_to_read_config_file",
-                    generate_translation_hashmap(vec![("path", config_file.display().to_string())])
+                    generate_translation_hashmap(vec![("path", config_file.to_string_lossy().to_string())])
                 ),
             );
         }
@@ -278,7 +278,7 @@ impl LoadSaveStruct {
                 text_view_errors,
                 &flg!(
                     "saving_loading_failed_to_create_config_file",
-                    generate_translation_hashmap(vec![("path", config_file.display().to_string()), ("reason", e.to_string())])
+                    generate_translation_hashmap(vec![("path", config_file.to_string_lossy().to_string()), ("reason", e.to_string())])
                 ),
             );
             return None;
@@ -299,7 +299,7 @@ impl LoadSaveStruct {
                 text_view_errors,
                 &flg!(
                     "saving_loading_failed_to_read_data_from_file",
-                    generate_translation_hashmap(vec![("path", config_file.display().to_string()), ("reason", e.to_string())])
+                    generate_translation_hashmap(vec![("path", config_file.to_string_lossy().to_string()), ("reason", e.to_string())])
                 ),
             );
             return;
@@ -370,7 +370,7 @@ impl LoadSaveStruct {
                 text_view_errors,
                 flg!(
                     "saving_loading_saving_success",
-                    generate_translation_hashmap(vec![("name", config_file.display().to_string())])
+                    generate_translation_hashmap(vec![("name", config_file.to_string_lossy().to_string())])
                 )
                 .as_str(),
             );
@@ -379,7 +379,7 @@ impl LoadSaveStruct {
                 text_view_errors,
                 flg!(
                     "saving_loading_saving_failure",
-                    generate_translation_hashmap(vec![("name", config_file.display().to_string())])
+                    generate_translation_hashmap(vec![("name", config_file.to_string_lossy().to_string())])
                 )
                 .as_str(),
            );