@@ -231,21 +231,25 @@ def optimize_function(self) -> Result[BestOptimization, str]:
             return Failure("The threshold for test coverage was not met.")

         best_optimization = None
+        lprof_generated_results = []
         logger.info(f"Adding more candidates based on lineprof info, calling ai service")
         with progress_bar(
             f"Generating new optimizations for function {self.function_to_optimize.function_name} with line profiler information",
             transient=True,
         ):
             pass
-        lprof_generated_results = self.aiservice_client.optimize_python_code_line_profiler(
-
-            source_code=code_context.read_writable_code,
-            dependency_code=code_context.read_only_context_code,
-            trace_id=self.function_trace_id,
-            line_profiler_results=original_code_baseline.lprof_results,
-            num_candidates=10,
-            experiment_metadata=None)
-
+        with concurrent.futures.ThreadPoolExecutor(max_workers=N_TESTS_TO_GENERATE + 2) as executor:
+            future_optimization_candidates_lp = executor.submit(self.aiservice_client.optimize_python_code_line_profiler,
+                                                                source_code=code_context.read_writable_code,
+                                                                dependency_code=code_context.read_only_context_code,
+                                                                trace_id=self.function_trace_id,
+                                                                line_profiler_results=original_code_baseline.lprof_results,
+                                                                num_candidates=10,
+                                                                experiment_metadata=None)
+            future = [future_optimization_candidates_lp]
+            pass
+            concurrent.futures.wait(future)
+            lprof_generated_results = future[0].result()
         if len(lprof_generated_results) == 0:
             logger.info(f"Generated tests with line profiler failed.")
         else:
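For reference, a minimal sketch of the concurrent.futures pattern this hunk adopts: the blocking line-profiler optimization call is handed to a ThreadPoolExecutor via executor.submit, the caller waits on the returned future with concurrent.futures.wait, and the return value is read back with result(). The optimize_with_line_profiler function and the worker count below are hypothetical stand-ins for aiservice_client.optimize_python_code_line_profiler and N_TESTS_TO_GENERATE + 2, not the actual codeflash code.

import concurrent.futures
import time


def optimize_with_line_profiler(source_code: str, num_candidates: int) -> list[str]:
    # Hypothetical stand-in for the slow, blocking AI-service call whose
    # result we want without tying up the calling thread.
    time.sleep(0.1)
    return [f"candidate-{i}" for i in range(num_candidates)]


with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
    # Submit the blocking call; it starts running on a worker thread immediately.
    future = executor.submit(optimize_with_line_profiler, "def foo(): ...", 10)

    # Other work (e.g. generating tests) could be submitted or performed here.

    # Block until the submitted call finishes, then collect its return value.
    concurrent.futures.wait([future])
    candidates = future.result()

print(candidates)

Keeping the submission and the wait separate (rather than calling result() right away) is what lets additional futures be added to the same wait list later, which matches the list-of-futures shape used in the diff.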