 if TYPE_CHECKING:
     import subprocess
 
-    from codeflash.discovery.functions_to_optimize import FunctionToOptimize
     from codeflash.models.models import CodeOptimizationContext, CoverageData, TestFiles
     from codeflash.verification.verification_utils import TestConfig
 
@@ -79,6 +78,31 @@ def calculate_async_throughput_from_stdout(stdout: str, async_function_names: se
     return throughput_counts
 
 
+start_pattern = re.compile(r"!\$######([^:]*):([^:]*):([^:]*):([^:]*):([^:]+)######\$!")
+end_pattern = re.compile(r"!######([^:]*):([^:]*):([^:]*):([^:]*):([^:]+)######!")
+
+
+def calculate_function_throughput_from_stdout(stdout: str, function_name: str) -> int:
+    """A completed execution is defined as having both a start tag and matching end tag:
+    Start: !$######test_module:test_function:function_name:loop_index:iteration_id######$!
+    End: !######test_module:test_function:function_name:loop_index:iteration_id######!
+    """
+    start_matches = start_pattern.findall(stdout)
+    end_matches = end_pattern.findall(stdout)
+    end_matches_set = set(end_matches)
+
+    # Count completed executions for the specific function only
+    function_throughput = 0
+
+    for start_match in start_matches:
+        # Check if this execution is for the function we're interested in and has a matching end tag
+        # function_name is at index 2 in the match tuple
+        if start_match in end_matches_set and len(start_match) > 2 and start_match[2] == function_name:
+            function_throughput += 1
+
+    return function_throughput
+
+
 def parse_test_return_values_bin(file_location: Path, test_files: TestFiles, test_config: TestConfig) -> TestResults:
     test_results = TestResults()
     if not file_location.exists():
@@ -534,10 +558,7 @@ def parse_test_results(
     code_context: CodeOptimizationContext | None = None,
     run_result: subprocess.CompletedProcess | None = None,
     unittest_loop_index: int | None = None,
-    function_to_optimize: FunctionToOptimize | None = None,
-    *,
-    calculate_throughput: bool = False,
-) -> tuple[TestResults, CoverageData | None, dict[str, int]]:
+) -> tuple[TestResults, CoverageData | None]:
     test_results_xml = parse_test_xml(
         test_xml_path,
         test_files=test_files,
@@ -574,18 +595,6 @@ def parse_test_results(
     get_run_tmp_file(Path(f"test_return_values_{optimization_iteration}.sqlite")).unlink(missing_ok=True)
     results = merge_test_results(test_results_xml, test_results_bin_file, test_config.test_framework)
 
-    # Calculate throughput for async functions only when requested (during performance testing)
-    throughput_counts = {}
-    if calculate_throughput and function_to_optimize and function_to_optimize.is_async:
-        logger.info(f"Calculating throughput for async function: {function_to_optimize.function_name}")
-        all_stdout = ""
-        for result in results.test_results:
-            if result.stdout:
-                all_stdout += result.stdout
-
-        async_function_names = {function_to_optimize.function_name}
-        throughput_counts = calculate_async_throughput_from_stdout(all_stdout, async_function_names)
-
     all_args = False
     if coverage_database_file and source_file and code_context and function_name:
         all_args = True
@@ -597,5 +606,4 @@ def parse_test_results(
             function_name=function_name,
         )
         coverage.log_coverage()
-    # return results, coverage if all_args else None, throughput_counts
     return results, coverage if all_args else None
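
Below is a minimal, self-contained sketch of how the new `calculate_function_throughput_from_stdout` helper behaves, assuming the tag format described in its docstring. The patterns and helper are copied (slightly condensed) from the diff above so the snippet runs standalone; the sample stdout, test module, and function names are hypothetical.

```python
import re

# Mirrors the module-level patterns added above.
start_pattern = re.compile(r"!\$######([^:]*):([^:]*):([^:]*):([^:]*):([^:]+)######\$!")
end_pattern = re.compile(r"!######([^:]*):([^:]*):([^:]*):([^:]*):([^:]+)######!")


def calculate_function_throughput_from_stdout(stdout: str, function_name: str) -> int:
    start_matches = start_pattern.findall(stdout)
    end_matches_set = set(end_pattern.findall(stdout))
    # A start tag only counts when the identical (module, test, function,
    # loop_index, iteration_id) tuple also appears as an end tag.
    return sum(
        1
        for match in start_matches
        if match in end_matches_set and match[2] == function_name
    )


# Hypothetical captured stdout: "fetch_data" starts twice, but only the first
# execution emits its matching end tag, so its throughput is 1.
sample_stdout = (
    "!$######tests.test_api:test_fetch:fetch_data:0:1######$!\n"
    "!######tests.test_api:test_fetch:fetch_data:0:1######!\n"
    "!$######tests.test_api:test_fetch:fetch_data:0:2######$!\n"
)
print(calculate_function_throughput_from_stdout(sample_stdout, "fetch_data"))  # 1
print(calculate_function_throughput_from_stdout(sample_stdout, "other_func"))  # 0
```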