@@ -15,7 +15,6 @@ use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
 use memmap2::Mmap;
 use rayon::prelude::*;
 use serde::Serialize;
-use std::time::Duration;

 mod patterns;
 mod scan;
@@ -60,13 +59,13 @@ struct Cli {
     max_file_mb: Option<u64>,
 }

-#[derive(Serialize)]
+#[derive(Serialize, Clone)]
 struct Evidence {
     line: usize,
     column: usize,
 }

-#[derive(Serialize)]
+#[derive(Serialize, Clone)]
 struct Finding {
     #[serde(rename = "assetType")]
     asset_type: String,
@@ -137,6 +136,8 @@ fn main() -> Result<()> {
         pb
     });

+    // Use an unbounded channel to avoid deadlocks.
+    // We'll rely on the file processing being the bottleneck, not the writer.
     let (tx, rx) = channel::unbounded::<Finding>();
     let output_path = cli.output.clone();
     let found_count_writer = found_count.clone();
@@ -157,6 +158,8 @@ fn main() -> Result<()> {
                 pb.set_message(format!("Found {} cryptographic items", count));
             }
         }
+        // Flush any remaining buffered output.
+        writer.flush()?;
         Ok(())
     });

@@ -289,20 +292,20 @@ fn main() -> Result<()> {
         pb.set_message("Scanning files...");
     }

-    let patterns_for_scan = patterns.clone();
-    let scanned_count_scan = scanned_count.clone();
-    let scan_bar_scan = scan_bar.clone();
-
+    // Process files in parallel with rayon.
     files_to_scan.into_par_iter().for_each(|path| {
-        if let Err(err) = process_file(&path, &patterns_for_scan, &tx) {
-            eprintln!("error processing {}: {err:#}", path.display());
+        // Process the file.
+        if let Err(err) = process_file(&path, &patterns, &tx) {
+            eprintln!("Error processing {}: {err:#}", path.display());
         }
-        scanned_count_scan.fetch_add(1, Ordering::Relaxed);
-        if let Some(pb) = &scan_bar_scan {
+
+        scanned_count.fetch_add(1, Ordering::Relaxed);
+        if let Some(pb) = &scan_bar {
             pb.inc(1);
         }
     });

+    // All files have been processed; drop the sender so the writer thread can finish.
     drop(tx);

     if let Some(pb) = &scan_bar {
@@ -384,11 +387,12 @@ fn process_file(
             metadata: HashMap::new(),
         };
         let key = format!("lib|{}", finding.identifier);
-        if seen.insert(key) && tx.send_timeout(finding, Duration::from_secs(5)).is_err() {
-            eprintln!(
-                "error: writer thread appears to be blocked (filesystem I/O issue?). Aborting send."
-            );
-            return Ok(());
+        if seen.insert(key) {
+            // A blocking send on the unbounded channel only fails if the writer thread has exited.
+            if let Err(e) = tx.send(finding) {
+                eprintln!("error: writer thread has stopped: {}", e);
+                return Ok(());
+            }
         }

         // 2) algorithms for this library
@@ -413,11 +417,12 @@ fn process_file(
413417 "alg|{}|{}:{}" ,
414418 finding. identifier, finding. evidence. line, finding. evidence. column
415419 ) ;
416- if seen. insert ( key) && tx. send_timeout ( finding, Duration :: from_secs ( 5 ) ) . is_err ( ) {
417- eprintln ! (
418- "error: writer thread appears to be blocked (filesystem I/O issue?). Aborting send."
419- ) ;
420- return Ok ( ( ) ) ;
420+ if seen. insert ( key) {
421+ // Use blocking send but log if it takes too long
422+ if let Err ( e) = tx. send ( finding) {
423+ eprintln ! ( "error: writer thread has stopped: {}" , e) ;
424+ return Ok ( ( ) ) ;
425+ }
421426 }
422427 }
423428 }
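For reference, the producer/consumer shape this patch settles on (rayon workers doing plain `send`s into an unbounded channel while a single writer thread drains it, with the sender dropped at the end so the writer can exit) can be illustrated with a minimal standalone sketch. It assumes the `crossbeam-channel` and `rayon` crates; the names and the `String` payload are illustrative, not the crate's actual code:

```rust
use crossbeam_channel::unbounded;
use rayon::prelude::*;
use std::thread;

fn main() {
    // Findings flow from many scanner workers to one writer thread.
    let (tx, rx) = unbounded::<String>();

    // Writer thread: drains the channel until every sender has been dropped.
    let writer = thread::spawn(move || {
        for finding in rx {
            println!("{finding}");
        }
    });

    // Parallel producers: a send on an unbounded channel never blocks, so
    // workers cannot deadlock against a slow writer; `send` only returns an
    // error once the receiver has been dropped.
    (0..8u32).into_par_iter().for_each(|i| {
        if let Err(e) = tx.send(format!("finding {i}")) {
            eprintln!("writer thread has stopped: {e}");
        }
    });

    // Drop the last sender so the writer's loop terminates.
    drop(tx);
    writer.join().unwrap();
}
```

With an unbounded channel, `send` never blocks, so the earlier `send_timeout`/`Duration` handling has nothing left to guard against; the only remaining failure mode is the receiver having gone away, which the new error message reports directly.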