mirror of
				https://codeberg.org/h3xx/simplify_static_dir
				synced 2024-08-14 23:57:24 +00:00 
			
		
		
		
	Pre-filter based on file size
Only generate hashes for files that have a size duplicate.
This commit is contained in:
		
							parent
							
								
									58740f34c5
								
							
						
					
					
						commit
						967be362dc
					
				
					 1 changed file with 14 additions and 1 deletion
				
			
		|  | @ -168,7 +168,20 @@ MAIN: { | ||||||
|         push @files, Directory::Simplify::File->new($File::Find::name); |         push @files, Directory::Simplify::File->new($File::Find::name); | ||||||
|     }, @dirs_to_process); |     }, @dirs_to_process); | ||||||
| 
 | 
 | ||||||
|     printf STDERR "%s files found.\n", |     printf STDERR "%d files found", | ||||||
|  |         scalar @files | ||||||
|  |         if $opts{v}; | ||||||
|  | 
 | ||||||
|  |     # Shortcut: Only generate hashes and inspect files that do not have a | ||||||
|  |     # unique size. The reasoning being that if file sizes do not match, there's no | ||||||
|  |     # possible way those two files can have the same contents. | ||||||
|  |     my %file_sizes; | ||||||
|  |     ++$file_sizes{$_->{size}} foreach @files; | ||||||
|  |     @files = grep { | ||||||
|  |         $file_sizes{$_->{size}} > 1 | ||||||
|  |     } @files; | ||||||
|  | 
 | ||||||
|  |     printf STDERR " (%d candidates).\n", | ||||||
|         scalar @files |         scalar @files | ||||||
|         if $opts{v}; |         if $opts{v}; | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
		Loading…
	
	Add table
		Add a link
		
	
		Reference in a new issue