@@ -500,6 +500,7 @@ def process_test_files(
                 function_to_test_map[qualified_name].add(function_called)
             progress.advance(task_id)
     else:
+        all_cache_entries = []
         with ProcessPoolExecutor(max_workers=max_workers) as executor:
             future_to_file = {
                 executor.submit(
@@ -515,8 +516,12 @@ def process_test_files(
 
                     test_file = future_to_file[future]
                     file_hash = TestsCache.compute_file_hash(str(test_file))
+
+                    # Collect cache entries for batch insertion
                     for cache_entry in cache_entries:
-                        tests_cache.insert_test(file_path=str(test_file), file_hash=file_hash, **cache_entry)
+                        cache_entry['file_path'] = str(test_file)
+                        cache_entry['file_hash'] = file_hash
+                        all_cache_entries.append(cache_entry)
 
                     for qualified_name, function_called in results:
                         function_to_test_map[qualified_name].add(function_called)
@@ -525,6 +530,10 @@ def process_test_files(
                     test_file = future_to_file[future]
                     logger.error(f"Error processing test file {test_file}: {e}")
                 progress.advance(task_id)
+
+        # Batch insert all cache entries after all workers complete
+        for cache_entry in all_cache_entries:
+            tests_cache.insert_test(**cache_entry)
 
     tests_cache.close()
     function_to_tests_dict = {function: list(tests) for function, tests in function_to_test_map.items()}
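
In short, the change stops calling tests_cache.insert_test from inside the future-completion loop: each cache entry is stamped with its file_path and file_hash, accumulated in all_cache_entries, and the inserts happen in one pass once the ProcessPoolExecutor has finished. The sketch below illustrates that collect-then-batch-insert pattern in a self-contained form; the worker function, sqlite schema, and entry fields are assumptions for illustration and stand in for the project's actual TestsCache API.

# Minimal sketch of the collect-then-batch-insert pattern this diff introduces.
# analyze_test_file, the table schema, and the entry fields are illustrative
# placeholders, not the project's real TestsCache API.
import hashlib
import sqlite3
from concurrent.futures import ProcessPoolExecutor, as_completed


def analyze_test_file(path):
    # Hypothetical worker: return one cache entry per test found in the file.
    return [{"test_name": f"test_stub_for_{path}"}]


def compute_file_hash(path):
    with open(path, "rb") as f:
        return hashlib.sha256(f.read()).hexdigest()


def process_test_files(test_files, db_path, max_workers=4):
    all_cache_entries = []  # filled while futures complete, written afterwards

    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        future_to_file = {executor.submit(analyze_test_file, f): f for f in test_files}
        for future in as_completed(future_to_file):
            test_file = future_to_file[future]
            file_hash = compute_file_hash(test_file)
            for cache_entry in future.result():
                cache_entry["file_path"] = test_file
                cache_entry["file_hash"] = file_hash
                all_cache_entries.append(cache_entry)

    # Batch insert only after the pool has shut down, on a single connection.
    conn = sqlite3.connect(db_path)
    with conn:
        conn.execute(
            "CREATE TABLE IF NOT EXISTS tests (file_path TEXT, file_hash TEXT, test_name TEXT)"
        )
        conn.executemany(
            "INSERT INTO tests (file_path, file_hash, test_name) VALUES (?, ?, ?)",
            [(e["file_path"], e["file_hash"], e["test_name"]) for e in all_cache_entries],
        )
    conn.close()

Deferring the writes until after the pool completes keeps all database access on the parent process and turns many small inserts into a single batched pass, which appears to be the motivation behind the commit.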