Commit: Img

qarmin committed Feb 25, 2025
1 parent 759794a commit cb2381d
Showing 8 changed files with 32 additions and 15 deletions.
6 changes: 5 additions & 1 deletion Changelog.md
@@ -3,9 +3,11 @@
### Breaking changes
- Video cache is now incompatible with previous versions and needs to be regenerated
- Image cache, due to now using EXIF orientation, is incompatible with previous versions and needs to be regenerated
- Duplicate cache, due to using a smaller prehash size, is incompatible with previous versions and needs to be regenerated


### Known regressions
- Crashes when using similar videos (when hashing some broken video files)
- Rarely crashes when using similar videos tool with some broken videos

### CI

@@ -20,9 +22,11 @@
- Added size progress in duplicate mode - [#1458](https://github.com/qarmin/czkawka/pull/1458)
- Ability to stop calculating the hash of bigger files in the middle of the process - [#1458](https://github.com/qarmin/czkawka/pull/1458)
- Using multithreading to filter out hard links - [#1458](https://github.com/qarmin/czkawka/pull/1458)
- Decreased prehash read file size to a maximum of 4k bytes - [#1458](https://github.com/qarmin/czkawka/pull/1458)

### Krokiet
- Changed default tab to duplicate files - [#1368](https://github.com/qarmin/czkawka/pull/1368)
- Progress bar in duplicate mode now shows progress of processed bytes, not files - [#1458](https://github.com/qarmin/czkawka/pull/1458)

### GTK GUI
- Added window icon in wayland - [#1400](https://github.com/qarmin/czkawka/pull/1400)
3 changes: 2 additions & 1 deletion czkawka_core/src/common_cache.rs
@@ -15,6 +15,7 @@ use crate::duplicate::HashType;
use crate::similar_images::{convert_algorithm_to_string, convert_filters_to_string};

const CACHE_VERSION: &str = "70";
const CACHE_DUPLICATE_VERSION: &str = "90";
#[cfg(feature = "fast_image_resize")]
const CACHE_IMAGE_VERSION: &str = "90_fast_resize";
#[cfg(not(feature = "fast_image_resize"))]
@@ -46,7 +47,7 @@ pub fn get_similar_music_cache_file(checking_tags: bool) -> String {

pub fn get_duplicate_cache_file(type_of_hash: &HashType, is_prehash: bool) -> String {
let prehash_str = if is_prehash { "_prehash" } else { "" };
format!("cache_duplicates_{type_of_hash:?}{prehash_str}_{CACHE_VERSION}.bin")
format!("cache_duplicates_{type_of_hash:?}{prehash_str}_{CACHE_DUPLICATE_VERSION}.bin")
}

#[fun_time(message = "save_cache_to_file_generalized", level = "debug")]
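To illustrate the renamed constant in use, here is a self-contained sketch of the file names the updated helper now produces; the `HashType` stub and the `main` function are illustrative additions, while the function body and the `CACHE_DUPLICATE_VERSION` value come from the diff above.

```rust
// Illustrative reconstruction: only CACHE_DUPLICATE_VERSION and the format string are
// taken from the commit; the HashType stub below is a placeholder for the real enum.
const CACHE_DUPLICATE_VERSION: &str = "90";

#[derive(Debug)]
enum HashType {
    Blake3,
}

fn get_duplicate_cache_file(type_of_hash: &HashType, is_prehash: bool) -> String {
    let prehash_str = if is_prehash { "_prehash" } else { "" };
    format!("cache_duplicates_{type_of_hash:?}{prehash_str}_{CACHE_DUPLICATE_VERSION}.bin")
}

fn main() {
    // Prints "cache_duplicates_Blake3_prehash_90.bin"
    println!("{}", get_duplicate_cache_file(&HashType::Blake3, true));
    // Prints "cache_duplicates_Blake3_90.bin"
    println!("{}", get_duplicate_cache_file(&HashType::Blake3, false));
}
```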
2 changes: 1 addition & 1 deletion czkawka_core/src/duplicate.rs
@@ -30,7 +30,7 @@ use crate::progress_data::{CurrentStage, ProgressData};

const TEMP_HARDLINK_FILE: &str = "rzeczek.rxrxrxl";

pub const PREHASHING_BUFFER_SIZE: u64 = 1024 * 32;
pub const PREHASHING_BUFFER_SIZE: u64 = 1024 * 8;
pub const THREAD_BUFFER_SIZE: usize = 2 * 1024 * 1024;

thread_local! {
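The diff only changes the constant, not the code that consumes it. As a rough, hedged sketch of what a prehash read capped at `PREHASHING_BUFFER_SIZE` bytes might look like (the function name, the single `read` call, and the use of the `blake3` crate are assumptions, not repository code):

```rust
use std::fs::File;
use std::io::Read;

// Value changed by this commit (previously 1024 * 32).
const PREHASHING_BUFFER_SIZE: u64 = 1024 * 8;

// Hypothetical prehash helper: hash only the first PREHASHING_BUFFER_SIZE bytes of a
// file so that candidate duplicates can be filtered cheaply before full-content hashing.
fn prehash_file(path: &str) -> std::io::Result<String> {
    let mut file = File::open(path)?;
    let mut buffer = vec![0u8; PREHASHING_BUFFER_SIZE as usize];
    let read = file.read(&mut buffer)?; // a single read; may return fewer bytes than requested
    Ok(blake3::hash(&buffer[..read]).to_hex().to_string())
}

fn main() -> std::io::Result<()> {
    // Requires the `blake3` crate as a dependency.
    println!("{}", prehash_file("Cargo.toml")?);
    Ok(())
}
```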
20 changes: 15 additions & 5 deletions krokiet/src/connect_progress_receiver.rs
@@ -43,10 +43,11 @@ fn progress_save_load_cache(item: &ProgressData) -> ProgressToSend {
CurrentStage::DuplicateCacheSaving => "Saving hash cache",
_ => unreachable!(),
};
let (all_progress, current_progress) = common_get_data(item);
let (all_progress, current_progress, current_progress_size) = common_get_data(item);
ProgressToSend {
all_progress,
current_progress,
current_progress_size,
step_name: step_name.into(),
}
}
@@ -74,6 +75,7 @@ fn progress_collect_items(item: &ProgressData, files: bool) -> ProgressToSend {
ProgressToSend {
all_progress,
current_progress,
current_progress_size: -1,
step_name: step_name.into(),
}
}
@@ -127,10 +129,11 @@ fn progress_default(item: &ProgressData) -> ProgressToSend {
}
_ => unreachable!(),
};
let (all_progress, current_progress) = common_get_data(item);
let (all_progress, current_progress, current_progress_size) = common_get_data(item);
ProgressToSend {
all_progress,
current_progress,
current_progress_size,
step_name: step_name.into(),
}
}
@@ -144,17 +147,24 @@ fn no_current_stage_get_data(item: &ProgressData) -> (i32, i32) {
}

// Calculates overall and current-stage progress from checked/total entry counts, plus byte-based progress when byte totals are known
fn common_get_data(item: &ProgressData) -> (i32, i32) {
fn common_get_data(item: &ProgressData) -> (i32, i32, i32) {
if item.entries_to_check != 0 {
let all_stages = (item.current_stage_idx as f64 + item.entries_checked as f64 / item.entries_to_check as f64) / (item.max_stage_idx + 1) as f64;
let all_stages = all_stages.min(0.99);

let current_stage = item.entries_checked as f64 / item.entries_to_check as f64;
let current_stage = current_stage.min(0.99);
((all_stages * 100.0) as i32, (current_stage * 100.0) as i32)

let current_stage_size = if item.bytes_to_check != 0 {
((item.bytes_checked as f64 / item.bytes_to_check as f64).min(0.99) * 100.0) as i32
} else {
-1
};

((all_stages * 100.0) as i32, (current_stage * 100.0) as i32, current_stage_size)
} else {
let all_stages = (item.current_stage_idx as f64) / (item.max_stage_idx + 1) as f64;
let all_stages = all_stages.min(0.99);
((all_stages * 100.0) as i32, 0)
((all_stages * 100.0) as i32, 0, -1)
}
}
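A worked example may help when reading the new three-element return value. Below is a hedged, self-contained re-statement of the arithmetic above with a simplified stand-in for `ProgressData` (field types and example values are guesses; only the field names and the math come from the diff).

```rust
// Simplified stand-in for czkawka's ProgressData, reduced to the fields used above.
struct ProgressData {
    current_stage_idx: usize,
    max_stage_idx: usize,
    entries_checked: usize,
    entries_to_check: usize,
    bytes_checked: u64,
    bytes_to_check: u64,
}

// Same arithmetic as common_get_data above: overall percentage, per-stage percentage
// by entry count, and per-stage percentage by bytes (-1 when byte totals are unknown).
fn common_get_data(item: &ProgressData) -> (i32, i32, i32) {
    if item.entries_to_check != 0 {
        let all_stages = ((item.current_stage_idx as f64
            + item.entries_checked as f64 / item.entries_to_check as f64)
            / (item.max_stage_idx + 1) as f64)
            .min(0.99);
        let current_stage = (item.entries_checked as f64 / item.entries_to_check as f64).min(0.99);
        let current_stage_size = if item.bytes_to_check != 0 {
            ((item.bytes_checked as f64 / item.bytes_to_check as f64).min(0.99) * 100.0) as i32
        } else {
            -1
        };
        ((all_stages * 100.0) as i32, (current_stage * 100.0) as i32, current_stage_size)
    } else {
        let all_stages = ((item.current_stage_idx as f64) / (item.max_stage_idx + 1) as f64).min(0.99);
        ((all_stages * 100.0) as i32, 0, -1)
    }
}

fn main() {
    // Stage 1 of 4, half of the entries done, a quarter of the bytes done.
    let item = ProgressData {
        current_stage_idx: 1,
        max_stage_idx: 3,
        entries_checked: 200,
        entries_to_check: 400,
        bytes_checked: 1_000_000,
        bytes_to_check: 4_000_000,
    };
    // (1 + 0.5) / 4 = 0.375 -> 37, 0.5 -> 50, 0.25 -> 25
    assert_eq!(common_get_data(&item), (37, 50, 25));
}
```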
1 change: 1 addition & 0 deletions krokiet/src/connect_scan.rs
@@ -55,6 +55,7 @@ pub fn connect_scan_button(app: &MainWindow, progress_sender: Sender<ProgressDat
app.set_progress_datas(ProgressToSend {
all_progress: 0,
current_progress: -1,
current_progress_size: -1,
step_name: "".into(),
});

1 change: 1 addition & 0 deletions krokiet/ui/common.slint
@@ -21,6 +21,7 @@ export enum TypeOfOpenedItem {

export struct ProgressToSend {
current_progress: int,
current_progress_size: int,
all_progress: int,
step_name: string,
}
2 changes: 1 addition & 1 deletion krokiet/ui/progress.slint
@@ -37,7 +37,7 @@ export component Progress {
ProgressIndicator {
visible: progress_datas.current_progress >= -0.001;
height: 8px;
progress: progress_datas.current_progress / 100.0;
progress: progress_datas.current_progress_size == -1 ? progress_datas.current_progress / 100.0 : progress_datas.current_progress_size / 100.0;
}
}

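Functionally, the updated binding prefers the byte-based percentage when the backend reported one and falls back to the entry-count percentage otherwise; a minimal Rust sketch of that rule (a hypothetical helper, not code from the repository):

```rust
// Illustrative helper mirroring the Slint ternary above; -1 marks "no byte progress
// available", exactly as set in connect_progress_receiver.rs.
fn progress_fraction(current_progress: i32, current_progress_size: i32) -> f32 {
    if current_progress_size == -1 {
        current_progress as f32 / 100.0
    } else {
        current_progress_size as f32 / 100.0
    }
}

fn main() {
    // Falls back to the entry-count percentage when no byte progress was reported.
    assert!((progress_fraction(42, -1) - 0.42).abs() < 1e-6);
    // Prefers the byte-based percentage when it is available.
    assert!((progress_fraction(42, 80) - 0.80).abs() < 1e-6);
}
```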
12 changes: 6 additions & 6 deletions misc/test_read_perf/src/main.rs
@@ -97,38 +97,38 @@ fn main() {
fn array16(files: &Vec<DuplicateEntry>) {
files.into_par_iter().for_each(|f| {
let mut buffer = [0u8; 16 * 1024];
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, u64::MAX);
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, Arc::default(), None);
});
}
fn array256(files: &Vec<DuplicateEntry>) {
files.into_par_iter().for_each(|f| {
let mut buffer = [0u8; 256 * 1024];
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, u64::MAX);
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, Arc::default(), None);
});
}
fn vec16(files: &Vec<DuplicateEntry>) {
files.into_par_iter().for_each(|f| {
let mut buffer = vec![0u8; 16 * 1024];
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, u64::MAX);
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, Arc::default(), None);
});
}
fn vec1024(files: &Vec<DuplicateEntry>) {
files.into_par_iter().for_each(|f| {
let mut buffer = vec![0u8; 1024 * 1024];
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, u64::MAX);
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, Arc::default(), None);
});
}
fn vec1024_locking(files: &Vec<DuplicateEntry>) {
files.into_par_iter().for_each(|f| {
let _lock = GLOBAL_HDD_LOCK.lock().unwrap();
let mut buffer = vec![0u8; 1024 * 1024];
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, u64::MAX);
let _ = hash_calculation(&mut buffer, &f, HashType::Blake3, Arc::default(), None);
});
}
fn vec1024_thread(files: &Vec<DuplicateEntry>) {
files.into_par_iter().for_each(|f| {
BUFFER.with(|buffer| {
let _ = hash_calculation(&mut buffer.borrow_mut(), &f, HashType::Blake3, u64::MAX, Arc::default(), None);
let _ = hash_calculation(&mut buffer.borrow_mut(), &f, HashType::Blake3, Arc::default(), None);
});
});
}
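For context, `vec1024_thread` reuses a per-thread buffer through a `BUFFER` thread-local that this diff does not show; a hedged sketch of that pattern follows (the name mirrors the call site, but the buffer size and the wrapper function are assumptions).

```rust
use std::cell::RefCell;

thread_local! {
    // One reusable buffer per worker thread, so the benchmark does not reallocate it
    // for every file; the 1 MiB size here is an assumption, not taken from the repository.
    static BUFFER: RefCell<Vec<u8>> = RefCell::new(vec![0u8; 1024 * 1024]);
}

// Hypothetical wrapper around the `BUFFER.with(...)` call seen in vec1024_thread.
fn with_thread_buffer<R>(f: impl FnOnce(&mut [u8]) -> R) -> R {
    BUFFER.with(|buffer| f(&mut buffer.borrow_mut()))
}

fn main() {
    let len = with_thread_buffer(|buf| buf.len());
    println!("thread-local buffer size: {len} bytes");
}
```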
