@@ -1,5 +1,5 @@
 use std::collections::BTreeMap;
-use std::fs::{File, Metadata, OpenOptions};
+use std::fs::{File, Metadata};
 use std::io::prelude::*;
 use std::io::{BufReader, BufWriter};
 use std::path::{Path, PathBuf};
@@ -10,10 +10,10 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
 use std::{fs, mem, panic, thread};
 
 use crossbeam_channel::Receiver;
-use directories_next::ProjectDirs;
 use rayon::prelude::*;
+use serde::{Deserialize, Serialize};
 
-use crate::common::Common;
+use crate::common::{open_cache_folder, Common};
 use crate::common_directory::Directories;
 use crate::common_extensions::Extensions;
 use crate::common_items::ExcludedItems;
@@ -23,8 +23,6 @@ use crate::fl;
 use crate::localizer::generate_translation_hashmap;
 use crate::similar_images::{AUDIO_FILES_EXTENSIONS, IMAGE_RS_BROKEN_FILES_EXTENSIONS, ZIP_FILES_EXTENSIONS};
 
-const CACHE_FILE_NAME: &str = "cache_broken_files.txt";
-
 #[derive(Debug)]
 pub struct ProgressData {
     pub current_stage: u8,
@@ -39,7 +37,7 @@ pub enum DeleteMethod {
     Delete,
 }
 
-#[derive(Clone)]
+#[derive(Clone, Serialize, Deserialize)]
 pub struct FileEntry {
     pub path: PathBuf,
     pub modified_date: u64,
@@ -48,7 +46,7 @@ pub struct FileEntry {
     pub error_string: String,
 }
 
-#[derive(Copy, Clone, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub enum TypeOfFile {
     Unknown = -1,
     Image = 0,
@@ -82,6 +80,8 @@ pub struct BrokenFiles {
     delete_method: DeleteMethod,
     stopped_search: bool,
     use_cache: bool,
+    delete_outdated_cache: bool, // TODO add this to GUI
+    save_also_as_json: bool,
 }
 
 impl BrokenFiles {
@@ -98,6 +98,8 @@ impl BrokenFiles {
             stopped_search: false,
             broken_files: Default::default(),
             use_cache: true,
+            delete_outdated_cache: true,
+            save_also_as_json: false,
         }
     }
 
@@ -135,6 +137,10 @@ impl BrokenFiles {
         self.delete_method = delete_method;
     }
 
+    pub fn set_save_also_as_json(&mut self, save_also_as_json: bool) {
+        self.save_also_as_json = save_also_as_json;
+    }
+
     pub fn set_use_cache(&mut self, use_cache: bool) {
         self.use_cache = use_cache;
     }
@@ -350,7 +356,7 @@ impl BrokenFiles {
         let mut non_cached_files_to_check: BTreeMap<String, FileEntry> = Default::default();
 
         if self.use_cache {
-            loaded_hash_map = match load_cache_from_file(&mut self.text_messages) {
+            loaded_hash_map = match load_cache_from_file(&mut self.text_messages, self.delete_outdated_cache) {
                 Some(t) => t,
                 None => Default::default(),
             };
@@ -501,7 +507,7 @@ impl BrokenFiles {
             for (_name, file_entry) in loaded_hash_map {
                 all_results.insert(file_entry.path.to_string_lossy().to_string(), file_entry);
             }
-            save_cache_to_file(&all_results, &mut self.text_messages);
+            save_cache_to_file(&all_results, &mut self.text_messages, self.save_also_as_json);
         }
 
         self.information.number_of_broken_files = self.broken_files.len();
@@ -620,137 +626,84 @@ impl PrintResults for BrokenFiles {
     }
 }
 
-fn save_cache_to_file(hashmap_file_entry: &BTreeMap<String, FileEntry>, text_messages: &mut Messages) {
-    if let Some(proj_dirs) = ProjectDirs::from("pl", "Qarmin", "Czkawka") {
-        // Lin: /home/username/.cache/czkawka
-        // Win: C:\Users\Username\AppData\Local\Qarmin\Czkawka\cache
-        // Mac: /Users/Username/Library/Caches/pl.Qarmin.Czkawka
-
-        let cache_dir = PathBuf::from(proj_dirs.cache_dir());
-        if cache_dir.exists() {
-            if !cache_dir.is_dir() {
-                text_messages.messages.push(format!("Config dir {} is a file!", cache_dir.display()));
-                return;
-            }
-        } else if let Err(e) = fs::create_dir_all(&cache_dir) {
-            text_messages.messages.push(format!("Cannot create config dir {}, reason {}", cache_dir.display(), e));
-            return;
+fn save_cache_to_file(old_hashmap: &BTreeMap<String, FileEntry>, text_messages: &mut Messages, save_also_as_json: bool) {
+    let mut hashmap: BTreeMap<String, FileEntry> = Default::default();
+    for (path, fe) in old_hashmap {
+        if fe.size > 1024 {
+            hashmap.insert(path.clone(), fe.clone());
         }
-        let cache_file = cache_dir.join(CACHE_FILE_NAME);
-        let file_handler = match OpenOptions::new().truncate(true).write(true).create(true).open(&cache_file) {
-            Ok(t) => t,
-            Err(e) => {
+    }
+    let hashmap = &hashmap;
+
+    if let Some(((file_handler, cache_file), (file_handler_json, cache_file_json))) = open_cache_folder(&get_cache_file(), true, save_also_as_json, &mut text_messages.warnings) {
+        {
+            let writer = BufWriter::new(file_handler.unwrap()); // Unwrap because cannot fail here
+            if let Err(e) = bincode::serialize_into(writer, hashmap) {
                 text_messages
-                    .messages
-                    .push(format!("Cannot create or open cache file {}, reason {}", cache_file.display(), e));
+                    .warnings
+                    .push(format!("Cannot write data to cache file {}, reason {}", cache_file.display(), e));
                 return;
             }
-        };
-        let mut writer = BufWriter::new(file_handler);
-
-        for file_entry in hashmap_file_entry.values() {
-            // Only save to cache files which have more than 1KB
-            if file_entry.size > 1024 {
-                let string: String = format!(
-                    "{}//{}//{}//{}",
-                    file_entry.path.display(),
-                    file_entry.size,
-                    file_entry.modified_date,
-                    file_entry.error_string
-                );
-
-                if let Err(e) = writeln!(writer, "{}", string) {
+        }
+        if save_also_as_json {
+            if let Some(file_handler_json) = file_handler_json {
+                let writer = BufWriter::new(file_handler_json);
+                if let Err(e) = serde_json::to_writer(writer, hashmap) {
                     text_messages
-                        .messages
-                        .push(format!("Failed to save some data to cache file {}, reason {}", cache_file.display(), e));
+                        .warnings
+                        .push(format!("Cannot write data to cache file {}, reason {}", cache_file_json.display(), e));
                     return;
-                };
+                }
             }
         }
+
+        text_messages.messages.push(format!("Properly saved to file {} cache entries.", hashmap.len()));
     }
 }
 
-fn load_cache_from_file(text_messages: &mut Messages) -> Option<BTreeMap<String, FileEntry>> {
-    if let Some(proj_dirs) = ProjectDirs::from("pl", "Qarmin", "Czkawka") {
-        let cache_dir = PathBuf::from(proj_dirs.cache_dir());
-        let cache_file = cache_dir.join(CACHE_FILE_NAME);
-        // TODO add before checking if cache exists(if not just return) but if exists then enable error
-        let file_handler = match OpenOptions::new().read(true).open(&cache_file) {
-            Ok(t) => t,
-            Err(_inspected) => {
-                // text_messages.messages.push(format!("Cannot find or open cache file {}", cache_file.display())); // This shouldn't be write to output
-                return None;
-            }
-        };
-
-        let reader = BufReader::new(file_handler);
-
-        let mut hashmap_loaded_entries: BTreeMap<String, FileEntry> = Default::default();
-
-        // Read the file line by line using the lines() iterator from std::io::BufRead.
-        for (index, line) in reader.lines().enumerate() {
-            let line = match line {
+fn load_cache_from_file(text_messages: &mut Messages, delete_outdated_cache: bool) -> Option<BTreeMap<String, FileEntry>> {
+    if let Some(((file_handler, cache_file), (file_handler_json, cache_file_json))) = open_cache_folder(&get_cache_file(), false, true, &mut text_messages.warnings) {
+        let mut hashmap_loaded_entries: BTreeMap<String, FileEntry>;
+        if let Some(file_handler) = file_handler {
+            let reader = BufReader::new(file_handler);
+            hashmap_loaded_entries = match bincode::deserialize_from(reader) {
                 Ok(t) => t,
                 Err(e) => {
                     text_messages
                         .warnings
-                        .push(format!("Failed to load line number {} from cache file {}, reason {}", index + 1, cache_file.display(), e));
+                        .push(format!("Failed to load data from cache file {}, reason {}", cache_file.display(), e));
+                    return None;
+                }
+            };
+        } else {
+            let reader = BufReader::new(file_handler_json.unwrap()); // Unwrap cannot fail, because at least one file must be valid
+            hashmap_loaded_entries = match serde_json::from_reader(reader) {
+                Ok(t) => t,
+                Err(e) => {
+                    text_messages
+                        .warnings
+                        .push(format!("Failed to load data from cache file {}, reason {}", cache_file_json.display(), e));
                     return None;
                 }
             };
-            let uuu = line.split("//").collect::<Vec<&str>>();
-            if uuu.len() != 4 {
-                text_messages
-                    .warnings
-                    .push(format!("Found invalid data in line {} - ({}) in cache file {}", index + 1, line, cache_file.display()));
-                continue;
-            }
-            // Don't load cache data if destination file not exists
-            if Path::new(uuu[0]).exists() {
-                hashmap_loaded_entries.insert(
-                    uuu[0].to_string(),
-                    FileEntry {
-                        path: PathBuf::from(uuu[0]),
-                        size: match uuu[1].parse::<u64>() {
-                            Ok(t) => t,
-                            Err(e) => {
-                                text_messages.warnings.push(format!(
-                                    "Found invalid size value in line {} - ({}) in cache file {}, reason {}",
-                                    index + 1,
-                                    line,
-                                    cache_file.display(),
-                                    e
-                                ));
-                                continue;
-                            }
-                        },
-                        modified_date: match uuu[2].parse::<u64>() {
-                            Ok(t) => t,
-                            Err(e) => {
-                                text_messages.warnings.push(format!(
-                                    "Found invalid modified date value in line {} - ({}) in cache file {}, reason {}",
-                                    index + 1,
-                                    line,
-                                    cache_file.display(),
-                                    e
-                                ));
-                                continue;
-                            }
-                        },
-                        type_of_file: check_extension_avaibility(&uuu[0].to_lowercase()),
-                        error_string: uuu[3].to_string(),
-                    },
-                );
-            }
         }
 
+        // Don't load cache data if destination file not exists
+        if delete_outdated_cache {
+            hashmap_loaded_entries.retain(|src_path, _file_entry| Path::new(src_path).exists());
+        }
+
+        text_messages.messages.push(format!("Properly loaded {} cache entries.", hashmap_loaded_entries.len()));
+
         return Some(hashmap_loaded_entries);
     }
-
-    text_messages.messages.push("Cannot find or open system config dir to save cache file".to_string());
     None
 }
 
+fn get_cache_file() -> String {
+    "cache_broken_files.bin".to_string()
+}
+
 fn check_extension_avaibility(file_name_lowercase: &str) -> TypeOfFile {
     if IMAGE_RS_BROKEN_FILES_EXTENSIONS.iter().any(|e| file_name_lowercase.ends_with(e)) {
         TypeOfFile::Image
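
For context, this change swaps the hand-rolled `//`-separated text cache for serde-based serialization: the entry map is written with `bincode` into `cache_broken_files.bin` and, when `save_also_as_json` is enabled, mirrored as JSON. The following is a minimal standalone sketch of that pattern, not code from the commit; the struct, variable names, and file paths are illustrative only.

```rust
use std::collections::BTreeMap;
use std::fs::File;
use std::io::{BufReader, BufWriter};

use serde::{Deserialize, Serialize};

// Simplified stand-in for FileEntry; real entries also carry path and type_of_file.
#[derive(Serialize, Deserialize)]
struct CacheEntry {
    size: u64,
    modified_date: u64,
    error_string: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut cache: BTreeMap<String, CacheEntry> = BTreeMap::new();
    cache.insert(
        "/tmp/example.jpg".to_string(),
        CacheEntry { size: 2048, modified_date: 0, error_string: String::new() },
    );

    // Binary cache, analogous to cache_broken_files.bin.
    bincode::serialize_into(BufWriter::new(File::create("cache.bin")?), &cache)?;
    // Optional human-readable copy, analogous to the save_also_as_json flag.
    serde_json::to_writer(BufWriter::new(File::create("cache.json")?), &cache)?;

    // Reading back from the binary file; load_cache_from_file falls back to the
    // JSON file only when the binary handle is unavailable.
    let loaded: BTreeMap<String, CacheEntry> =
        bincode::deserialize_from(BufReader::new(File::open("cache.bin")?))?;
    assert_eq!(loaded.len(), cache.len());
    Ok(())
}
```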