Sort cache and allow other code to be optimized later (#322)

Rafał Mikrut 2021-05-08 18:29:52 +02:00 committed by GitHub
parent 018d5bebb0
commit 4fc8655272
4 changed files with 32 additions and 32 deletions
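The whole change is a HashMap → BTreeMap swap. A BTreeMap iterates its entries in ascending key order, so a cache keyed by file path gets written back to disk in a stable, sorted order instead of the arbitrary, run-to-run-varying order a HashMap yields. A minimal sketch (not part of the commit) of the difference:

```rust
use std::collections::{BTreeMap, HashMap};

fn main() {
    let paths = ["/home/b.txt", "/home/a.txt", "/home/c.txt"];

    // HashMap: iteration order is unspecified and can change between runs.
    let unordered: HashMap<&str, u64> = paths.iter().map(|p| (*p, 0)).collect();

    // BTreeMap: iteration always follows ascending key order.
    let ordered: BTreeMap<&str, u64> = paths.iter().map(|p| (*p, 0)).collect();

    println!("HashMap keys:  {:?}", unordered.keys().collect::<Vec<_>>());
    println!("BTreeMap keys: {:?}", ordered.keys().collect::<Vec<_>>());
    // BTreeMap keys: ["/home/a.txt", "/home/b.txt", "/home/c.txt"]
}
```

The trade-off is O(log n) lookups instead of amortized O(1), which is usually negligible next to the disk I/O and hashing these scanners do.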


@@ -13,7 +13,7 @@ use crate::common_traits::*;
use crossbeam_channel::Receiver;
use directories_next::ProjectDirs;
use rayon::prelude::*;
-use std::collections::HashMap;
+use std::collections::BTreeMap;
use std::io::{BufReader, BufWriter};
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
@@ -70,7 +70,7 @@ impl Info {
pub struct BrokenFiles {
text_messages: Messages,
information: Info,
-files_to_check: HashMap<String, FileEntry>,
+files_to_check: BTreeMap<String, FileEntry>,
broken_files: Vec<FileEntry>,
directories: Directories,
allowed_extensions: Extensions,
@@ -305,8 +305,8 @@ impl BrokenFiles {
let loaded_hash_map;
-let mut records_already_cached: HashMap<String, FileEntry> = Default::default();
-let mut non_cached_files_to_check: HashMap<String, FileEntry> = Default::default();
+let mut records_already_cached: BTreeMap<String, FileEntry> = Default::default();
+let mut non_cached_files_to_check: BTreeMap<String, FileEntry> = Default::default();
if self.use_cache {
loaded_hash_map = match load_cache_from_file(&mut self.text_messages) {
@@ -444,7 +444,7 @@ impl BrokenFiles {
if self.use_cache {
// Must save all results to file, old loaded from file with all currently counted results
-let mut all_results: HashMap<String, FileEntry> = self.files_to_check.clone();
+let mut all_results: BTreeMap<String, FileEntry> = self.files_to_check.clone();
for file_entry in vec_file_entry {
all_results.insert(file_entry.path.to_string_lossy().to_string(), file_entry);
@@ -573,7 +573,7 @@ impl PrintResults for BrokenFiles {
}
}
-fn save_cache_to_file(hashmap_file_entry: &HashMap<String, FileEntry>, text_messages: &mut Messages) {
+fn save_cache_to_file(hashmap_file_entry: &BTreeMap<String, FileEntry>, text_messages: &mut Messages) {
if let Some(proj_dirs) = ProjectDirs::from("pl", "Qarmin", "Czkawka") {
// Lin: /home/username/.cache/czkawka
// Win: C:\Users\Username\AppData\Local\Qarmin\Czkawka\cache
@@ -613,7 +613,7 @@ fn save_cache_to_file(hashmap_file_entry: &HashMap<String, FileEntry>, text_mess
}
}
-fn load_cache_from_file(text_messages: &mut Messages) -> Option<HashMap<String, FileEntry>> {
+fn load_cache_from_file(text_messages: &mut Messages) -> Option<BTreeMap<String, FileEntry>> {
if let Some(proj_dirs) = ProjectDirs::from("pl", "Qarmin", "Czkawka") {
let cache_dir = PathBuf::from(proj_dirs.cache_dir());
let cache_file = cache_dir.join(CACHE_FILE_NAME);
@@ -627,7 +627,7 @@ fn load_cache_from_file(text_messages: &mut Messages) -> Option<HashMap<String,
let reader = BufReader::new(file_handler);
-let mut hashmap_loaded_entries: HashMap<String, FileEntry> = Default::default();
+let mut hashmap_loaded_entries: BTreeMap<String, FileEntry> = Default::default();
// Read the file line by line using the lines() iterator from std::io::BufRead.
for (index, line) in reader.lines().enumerate() {
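In the BrokenFiles hunks above, both the cache writer (save_cache_to_file) and the cache reader (load_cache_from_file) now take a BTreeMap<String, FileEntry> keyed by path. A hedged sketch of the save side, with a simplified stand-in for FileEntry and a made-up line format, showing why the cache file now comes out sorted by path:

```rust
use std::collections::BTreeMap;
use std::io::{BufWriter, Write};

// Simplified stand-in; the real FileEntry in this module carries more fields.
struct FileEntry {
    path: String,
    modified_date: u64,
}

// Iterating a BTreeMap keyed by path yields entries in ascending path order,
// so two runs over the same data produce byte-identical cache files.
fn save_cache(entries: &BTreeMap<String, FileEntry>, out: &mut impl Write) -> std::io::Result<()> {
    let mut writer = BufWriter::new(out);
    for file_entry in entries.values() {
        writeln!(writer, "{}//{}", file_entry.path, file_entry.modified_date)?;
    }
    writer.flush()
}
```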


@@ -1,8 +1,8 @@
use crossbeam_channel::Receiver;
use humansize::{file_size_opts as options, FileSize};
+use std::collections::BTreeMap;
#[cfg(target_family = "unix")]
use std::collections::HashSet;
-use std::collections::{BTreeMap, HashMap};
use std::fs::{File, Metadata, OpenOptions};
use std::io::prelude::*;
use std::io::{self, Error, ErrorKind};
@@ -684,11 +684,11 @@ impl DuplicateFinder {
//// PROGRESS THREAD END
#[allow(clippy::type_complexity)]
-let pre_hash_results: Vec<(u64, HashMap<String, Vec<FileEntry>>, Vec<String>, u64)> = self
+let pre_hash_results: Vec<(u64, BTreeMap<String, Vec<FileEntry>>, Vec<String>, u64)> = self
.files_with_identical_size
.par_iter()
.map(|(size, vec_file_entry)| {
-let mut hashmap_with_hash: HashMap<String, Vec<FileEntry>> = Default::default();
+let mut hashmap_with_hash: BTreeMap<String, Vec<FileEntry>> = Default::default();
let mut errors: Vec<String> = Vec::new();
let mut bytes_read: u64 = 0;
let mut buffer = [0u8; 1024 * 2];
@@ -774,14 +774,14 @@ impl DuplicateFinder {
//// PROGRESS THREAD END
#[allow(clippy::type_complexity)]
-let mut full_hash_results: Vec<(u64, HashMap<String, Vec<FileEntry>>, Vec<String>, u64)>;
+let mut full_hash_results: Vec<(u64, BTreeMap<String, Vec<FileEntry>>, Vec<String>, u64)>;
match self.check_method {
CheckingMethod::HashMb => {
full_hash_results = pre_checked_map
.par_iter()
.map(|(size, vec_file_entry)| {
-let mut hashmap_with_hash: HashMap<String, Vec<FileEntry>> = Default::default();
+let mut hashmap_with_hash: BTreeMap<String, Vec<FileEntry>> = Default::default();
let mut errors: Vec<String> = Vec::new();
let mut bytes_read: u64 = 0;
let mut buffer = [0u8; 1024 * 128];
@@ -852,7 +852,7 @@ impl DuplicateFinder {
full_hash_results = non_cached_files_to_check
.par_iter()
.map(|(size, vec_file_entry)| {
-let mut hashmap_with_hash: HashMap<String, Vec<FileEntry>> = Default::default();
+let mut hashmap_with_hash: BTreeMap<String, Vec<FileEntry>> = Default::default();
let mut errors: Vec<String> = Vec::new();
let mut bytes_read: u64 = 0;
let mut buffer = [0u8; 1024 * 128];
@@ -893,7 +893,7 @@ impl DuplicateFinder {
}
}
// Size doesn't exists add results to files
-let mut temp_hashmap: HashMap<String, Vec<FileEntry>> = Default::default();
+let mut temp_hashmap: BTreeMap<String, Vec<FileEntry>> = Default::default();
for file_entry in vec_file_entry {
temp_hashmap.entry(file_entry.hash.clone()).or_insert_with(Vec::new);
temp_hashmap.get_mut(&file_entry.hash).unwrap().push(file_entry);
@@ -902,7 +902,7 @@ impl DuplicateFinder {
}
// Must save all results to file, old loaded from file with all currently counted results
-let mut all_results: HashMap<String, FileEntry> = Default::default();
+let mut all_results: BTreeMap<String, FileEntry> = Default::default();
for (_size, vec_file_entry) in loaded_hash_map {
for file_entry in vec_file_entry {
all_results.insert(file_entry.path.to_string_lossy().to_string(), file_entry);
@@ -1334,7 +1334,7 @@ pub fn make_hard_link(src: &Path, dst: &Path) -> io::Result<()> {
result
}
-fn save_hashes_to_file(hashmap: &HashMap<String, FileEntry>, text_messages: &mut Messages, type_of_hash: &HashType, minimal_cache_file_size: u64) {
+fn save_hashes_to_file(hashmap: &BTreeMap<String, FileEntry>, text_messages: &mut Messages, type_of_hash: &HashType, minimal_cache_file_size: u64) {
if let Some(proj_dirs) = ProjectDirs::from("pl", "Qarmin", "Czkawka") {
let cache_dir = PathBuf::from(proj_dirs.cache_dir());
if cache_dir.exists() {
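The DuplicateFinder hunks change only the container type; the grouping logic (bucket files under their hash via entry().or_insert_with(Vec::new), then push through get_mut()) is untouched. A simplified sketch of that grouping step, with hypothetical types and the equivalent single-call or_default() form:

```rust
use std::collections::BTreeMap;

struct FileEntry {
    path: String,
    hash: String,
}

// Bucket entries by hash; with a BTreeMap the buckets come back in sorted hash
// order, so downstream iteration and any serialized output are deterministic.
fn group_by_hash(entries: Vec<FileEntry>) -> BTreeMap<String, Vec<FileEntry>> {
    let mut groups: BTreeMap<String, Vec<FileEntry>> = BTreeMap::new();
    for file_entry in entries {
        groups.entry(file_entry.hash.clone()).or_default().push(file_entry);
    }
    groups
}
```

entry(...).or_default().push(...) does in one call what the diff's or_insert_with(Vec::new) plus get_mut().unwrap() pair does in two, and drops the unwrap().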


@@ -12,7 +12,7 @@ use crate::common_traits::*;
use audiotags::Tag;
use crossbeam_channel::Receiver;
use rayon::prelude::*;
-use std::collections::HashMap;
+use std::collections::BTreeMap;
use std::io::BufWriter;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
@@ -457,7 +457,7 @@ impl SameMusic {
progress_thread_handle.join().unwrap();
return false;
}
-let mut hash_map: HashMap<String, Vec<FileEntry>> = Default::default();
+let mut hash_map: BTreeMap<String, Vec<FileEntry>> = Default::default();
for file_entry in vec_file_entry {
let title = file_entry.title.to_lowercase().trim().to_string();
if !title.is_empty() {
@@ -484,7 +484,7 @@ impl SameMusic {
progress_thread_handle.join().unwrap();
return false;
}
-let mut hash_map: HashMap<String, Vec<FileEntry>> = Default::default();
+let mut hash_map: BTreeMap<String, Vec<FileEntry>> = Default::default();
for file_entry in vec_file_entry {
let artist = file_entry.artist.to_lowercase().trim().to_string();
if !artist.is_empty() {
@@ -511,7 +511,7 @@ impl SameMusic {
progress_thread_handle.join().unwrap();
return false;
}
-let mut hash_map: HashMap<String, Vec<FileEntry>> = Default::default();
+let mut hash_map: BTreeMap<String, Vec<FileEntry>> = Default::default();
for file_entry in vec_file_entry {
let album_title = file_entry.album_title.to_lowercase().trim().to_string();
if !album_title.is_empty() {
@@ -538,7 +538,7 @@ impl SameMusic {
progress_thread_handle.join().unwrap();
return false;
}
-let mut hash_map: HashMap<String, Vec<FileEntry>> = Default::default();
+let mut hash_map: BTreeMap<String, Vec<FileEntry>> = Default::default();
for file_entry in vec_file_entry {
let album_artist = file_entry.album_artist.to_lowercase().trim().to_string();
if !album_artist.is_empty() {
@@ -565,7 +565,7 @@ impl SameMusic {
progress_thread_handle.join().unwrap();
return false;
}
-let mut hash_map: HashMap<i32, Vec<FileEntry>> = Default::default();
+let mut hash_map: BTreeMap<i32, Vec<FileEntry>> = Default::default();
for file_entry in vec_file_entry {
let year = file_entry.year;
if year != 0 {
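The SameMusic hunks apply the same swap to the tag-grouping maps, including the year map keyed by i32. Worth noting: a BTreeMap only requires Ord on its keys (HashMap needed Hash + Eq), and the year groups are now visited in ascending order. A small sketch with hypothetical types:

```rust
use std::collections::BTreeMap;

struct FileEntry {
    year: i32,
    path: String,
}

// Group files by release year, skipping entries without a year tag, mirroring
// the `if year != 0` check in the hunk above. Iteration visits years ascending.
fn group_by_year(files: Vec<FileEntry>) -> BTreeMap<i32, Vec<FileEntry>> {
    let mut by_year: BTreeMap<i32, Vec<FileEntry>> = BTreeMap::new();
    for file_entry in files.into_iter().filter(|f| f.year != 0) {
        by_year.entry(file_entry.year).or_default().push(file_entry);
    }
    by_year
}
```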


@@ -10,7 +10,7 @@ use humansize::{file_size_opts as options, FileSize};
use image::GenericImageView;
use img_hash::HasherConfig;
use rayon::prelude::*;
-use std::collections::HashMap;
+use std::collections::BTreeMap;
use std::fs::OpenOptions;
use std::fs::{File, Metadata};
use std::io::Write;
@@ -75,10 +75,10 @@ pub struct SimilarImages {
similar_vectors: Vec<Vec<FileEntry>>,
recursive_search: bool,
minimal_file_size: u64,
-image_hashes: HashMap<Node, Vec<FileEntry>>, // Hashmap with image hashes and Vector with names of files
+image_hashes: BTreeMap<Node, Vec<FileEntry>>, // Hashmap with image hashes and Vector with names of files
stopped_search: bool,
similarity: Similarity,
-images_to_check: HashMap<String, FileEntry>,
+images_to_check: BTreeMap<String, FileEntry>,
use_cache: bool,
}
@@ -329,8 +329,8 @@ impl SimilarImages {
let loaded_hash_map;
-let mut records_already_cached: HashMap<String, FileEntry> = Default::default();
-let mut non_cached_files_to_check: HashMap<String, FileEntry> = Default::default();
+let mut records_already_cached: BTreeMap<String, FileEntry> = Default::default();
+let mut non_cached_files_to_check: BTreeMap<String, FileEntry> = Default::default();
if self.use_cache {
loaded_hash_map = match load_hashes_from_file(&mut self.text_messages) {
@@ -440,7 +440,7 @@ impl SimilarImages {
if self.use_cache {
// Must save all results to file, old loaded from file with all currently counted results
-let mut all_results: HashMap<String, FileEntry> = loaded_hash_map;
+let mut all_results: BTreeMap<String, FileEntry> = loaded_hash_map;
for (file_entry, _hash) in vec_file_entry {
all_results.insert(file_entry.path.to_string_lossy().to_string(), file_entry);
}
@@ -671,7 +671,7 @@ fn get_string_from_similarity(similarity: &Similarity) -> &str {
}
}
-fn save_hashes_to_file(hashmap: &HashMap<String, FileEntry>, text_messages: &mut Messages) {
+fn save_hashes_to_file(hashmap: &BTreeMap<String, FileEntry>, text_messages: &mut Messages) {
if let Some(proj_dirs) = ProjectDirs::from("pl", "Qarmin", "Czkawka") {
// Lin: /home/username/.cache/czkawka
// Win: C:\Users\Username\AppData\Local\Qarmin\Czkawka\cache
@@ -714,7 +714,7 @@ fn save_hashes_to_file(hashmap: &HashMap<String, FileEntry>, text_messages: &mut
}
}
}
-fn load_hashes_from_file(text_messages: &mut Messages) -> Option<HashMap<String, FileEntry>> {
+fn load_hashes_from_file(text_messages: &mut Messages) -> Option<BTreeMap<String, FileEntry>> {
if let Some(proj_dirs) = ProjectDirs::from("pl", "Qarmin", "Czkawka") {
let cache_dir = PathBuf::from(proj_dirs.cache_dir());
let cache_file = cache_dir.join(CACHE_FILE_NAME);
@@ -728,7 +728,7 @@ fn load_hashes_from_file(text_messages: &mut Messages) -> Option<HashMap<String,
let reader = BufReader::new(file_handler);
-let mut hashmap_loaded_entries: HashMap<String, FileEntry> = Default::default();
+let mut hashmap_loaded_entries: BTreeMap<String, FileEntry> = Default::default();
// Read the file line by line using the lines() iterator from std::io::BufRead.
for (index, line) in reader.lines().enumerate() {
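The SimilarImages hunks mirror BrokenFiles: the image-hash cache handled by save_hashes_to_file / load_hashes_from_file becomes a BTreeMap keyed by path, and the all_results step merges the cache loaded from disk with the entries hashed in the current run before saving. A hedged sketch of that merge, again with a simplified FileEntry:

```rust
use std::collections::BTreeMap;

struct FileEntry {
    path: String,
    modified_date: u64,
}

// Merge cached records with freshly computed ones. Keying by path lets a file
// scanned in this run overwrite its stale cached record, and the BTreeMap keeps
// the merged map sorted by path for the next save.
fn merge_results(
    loaded_from_cache: BTreeMap<String, FileEntry>,
    computed_now: Vec<FileEntry>,
) -> BTreeMap<String, FileEntry> {
    let mut all_results = loaded_from_cache;
    for file_entry in computed_now {
        all_results.insert(file_entry.path.clone(), file_entry);
    }
    all_results
}
```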