@@ -75,14 +75,11 @@ def search_all(
7575
7777 search_one = functools.partial(self.__class__._search_one, top=top)
7777
78- # Initialize the start time
79- start = time.perf_counter()
80-
8178 if parallel == 1:
8279 # Single-threaded execution
83- precisions, latencies = list(
84- zip(*[search_one(query) for query in tqdm.tqdm(queries)])
85- )
80+ start = time.perf_counter()
81+ results = [search_one(query) for query in tqdm.tqdm(queries)]
82+ total_time = time.perf_counter() - start
8683 else:
8784 # Dynamically calculate chunk size
8885 chunk_size = max(1, len(queries) // parallel)
@@ -110,6 +107,9 @@ def worker_function(chunk, result_queue):
110107 processes.append(process)
111108 process.start()
112109
110+ # Start measuring time for the critical work
111+ start = time.perf_counter()
112+
113113 # Collect results from all worker processes
114114 results = []
115115 for _ in processes:
@@ -119,10 +119,11 @@ def worker_function(chunk, result_queue):
119119 for process in processes:
120120 process.join()
121121
122- # Extract precisions and latencies
123- precisions, latencies = zip(*results)
122+ # Stop measuring time for the critical work
123+ total_time = time.perf_counter() - start
124124
125- total_time = time.perf_counter() - start
125+ # Extract precisions and latencies (outside the timed section)
126+ precisions, latencies = zip(*results)
126127
127128 self.__class__.delete_client()
128129
0 commit comments