# Configure root logging once at import: INFO level, message-only output
# (report lines are printed verbatim, so no timestamp/level prefix).
logging.basicConfig(level=logging.INFO, format="%(message)s")
def parse_benchmark_line(
    line: str,
) -> Tuple[Union[str, None], Union[float, None], Union[str, None]]:
    """
    Parse a single line of benchmark output.

    Example lines:
        MUL_MAT(...): 744 runs - 1660.11 us/run - 134.48 MFLOP/run - 81.01 GFLOPS
        ADD(...): 98280 runs - 10.87 us/run - 48 kB/run - 4.21 GB/s

    Returns:
        A ``(key, normalized_value, unit_type)`` tuple, or ``(None, None, None)``
        if the line is not a benchmark result.

        Performance units:
        - MFLOPS/GFLOPS/TFLOPS are normalized to GFLOPS (unit_type "GFLOPS")
        - MB/s, GB/s, TB/s are normalized to GB/s (unit_type "GB/s")
    """
    # Map each recognized trailing unit to (canonical unit, scale factor).
    # Unifying both unit families in one table removes the duplicated
    # regex-search + if/elif normalization ladders and avoids the
    # `assert False` fallthrough (asserts are stripped under `python -O`).
    unit_scales = {
        "MFLOPS": ("GFLOPS", 1e-3),
        "GFLOPS": ("GFLOPS", 1.0),
        "TFLOPS": ("GFLOPS", 1e3),
        "MB/s": ("GB/s", 1e-3),
        "GB/s": ("GB/s", 1.0),
        "TB/s": ("GB/s", 1e3),
    }

    line = line.strip()
    if ":" not in line:
        return None, None, None

    key, data_part = line.split(":", 1)
    key = key.strip()

    # Remove ANSI color codes so the unit regex can anchor at end-of-line.
    data_part = re.sub(r"\x1b\[[0-9;]*m", "", data_part).strip()

    # The benchmark figure is always the last "<number> <unit>" pair.
    match = re.search(
        r"([\d\.]+)\s+(MFLOPS|GFLOPS|TFLOPS|MB/s|GB/s|TB/s)\s*$", data_part
    )
    if not match:
        return None, None, None

    value_str, unit = match.groups()
    canonical_unit, scale = unit_scales[unit]
    return key, float(value_str) * scale, canonical_unit
5380
5481
5582def extract_commit_id (filepath : Path ) -> str :
@@ -68,9 +95,9 @@ def load_results(filepath: Path) -> dict:
6895 try :
6996 with open (filepath , "r" , encoding = "utf-8" ) as f :
7097 for line in f :
71- key , gflops = parse_benchmark_line (line )
72- if key :
73- results [key ] = gflops
98+ key , value , unit_type = parse_benchmark_line (line )
99+ if key and value is not None and unit_type :
100+ results [key ] = { "value" : value , "unit" : unit_type }
74101 except FileNotFoundError :
75102 logger .error (f"Error: File not found at { filepath } " )
76103 sys .exit (1 )
@@ -125,35 +152,54 @@ def main():
125152
126153 all_keys = sorted (list (set (baseline_results .keys ()) | set (compare_results .keys ())))
127154
155+ # Determine the unit type from the first available result
156+ # Assume all data will be of the same unit type (either all GFLOPS or all GB/s)
157+ unit_type = "GFLOPS" # default
158+ for key in all_keys :
159+ baseline_data = baseline_results .get (key )
160+ compare_data = compare_results .get (key )
161+ if baseline_data :
162+ unit_type = baseline_data ["unit" ]
163+ break
164+ elif compare_data :
165+ unit_type = compare_data ["unit" ]
166+ break
167+
128168 comparisons = []
129169
130170 for key in all_keys :
131- baseline_val = baseline_results .get (key )
132- compare_val = compare_results .get (key )
171+ baseline_data = baseline_results .get (key )
172+ compare_data = compare_results .get (key )
173+
174+ # Extract values
175+ baseline_val = baseline_data ["value" ] if baseline_data else None
176+ compare_val = compare_data ["value" ] if compare_data else None
177+
178+ # Calculate change if both values exist
179+ change = 0
180+ if baseline_val is not None and compare_val is not None :
181+ change = ((compare_val - baseline_val ) / baseline_val ) * 100
133182
134183 entry = {
135184 "key" : key ,
136185 "baseline" : baseline_val ,
137186 "compare" : compare_val ,
138- "change" : 0 ,
187+ "change" : change ,
139188 }
140189
141- if baseline_val is not None and compare_val is not None :
142- entry ["change" ] = ((compare_val - baseline_val ) / baseline_val ) * 100
143-
144190 comparisons .append (entry )
145191
146192 # --- Generate Report ---
147193 with open (args .output , "w" , encoding = "utf-8" ) as f :
148194
149- # Create header with commit IDs extracted from filenames
150- baseline_header = "Baseline GFLOPS "
151- compare_header = "Compare GFLOPS "
195+ # Create header with the determined unit type
196+ baseline_header = f "Baseline { unit_type } "
197+ compare_header = f "Compare { unit_type } "
152198
153199 if baseline_commit :
154- baseline_header = f"Baseline ({ baseline_commit } ) GFLOPS "
200+ baseline_header = f"Baseline ({ baseline_commit } ) { unit_type } "
155201 if compare_commit :
156- compare_header = f"Compare ({ compare_commit } ) GFLOPS "
202+ compare_header = f"Compare ({ compare_commit } ) { unit_type } "
157203
158204 key_width = max (len (k ) for k in all_keys ) + 2
159205 header = f"{ 'Test Configuration' :<{key_width }} { baseline_header :>25} { compare_header :>25} { 'Change (%)' :>15} "
0 commit comments