
Commit 64d6798

Add tool to combine exec-time results
1 parent 4aa6d7f commit 64d6798


1 file changed: +94 -0 lines changed


util/CPU2006/combine-exec.py

Lines changed: 94 additions & 0 deletions
@@ -0,0 +1,94 @@
#!/usr/bin/env python3
"""Merge the exec-time results tables from several SPEC CSV result files into one
multi-sheet Excel workbook (one sheet per results table)."""

import argparse
import csv
import re
from contextlib import ExitStack
from io import StringIO
from typing import Iterable, List, Tuple

from openpyxl import Workbook
from openpyxl.utils import get_column_letter


class DuplicateDataError(Exception):
    """Raised when two input files hold conflicting data for the same benchmark."""

    def __init__(self, old, new, message):
        self.old = old
        self.new = new
        self.message = message

        super().__init__(f'{message} old: {old} -> new: {new}')


def is_blank_row(row: List[str]) -> bool:
    """A row is blank if it is empty or every data cell is empty or 'NR' (not run)."""
    return not row or all(cell in ('', 'NR') for cell in row[1:])


def merge_tables(str_tables: Iterable[str]) -> str:
    """Merge CSV tables keyed on their first column and return the result as CSV text.

    Non-blank rows from later tables fill in rows that are blank elsewhere;
    conflicting non-blank rows raise DuplicateDataError.
    """
    data = dict()
    tables = [list(csv.reader(table.splitlines())) for table in str_tables]

    # Seed the merge with the first table so its row order is preserved.
    for row in tables[0]:
        if row:
            data[row[0]] = row

    for table in tables:
        for row in table:
            if not is_blank_row(row):
                if row[0] in data:
                    if not is_blank_row(data[row[0]]) and data[row[0]] != row:
                        raise DuplicateDataError(data[row[0]], row, f'Duplicate data for {row[0]}.')
                data[row[0]] = row

    # Emit the merged rows in the first table's order.
    out = StringIO()
    writer = csv.writer(out)
    for row in tables[0]:
        if not row:
            continue
        best_row = data[row[0]]
        writer.writerow(best_row)

    return out.getvalue()


# Matches table headers such as '"... Results Table"' in the raw CSV output.
_RE_FOO_RESULTS_TABLE = re.compile(r'"(?P<tbl_name>\S+ Results) Table"')


def extract_tables(contents: str) -> Iterable[Tuple[str, str]]:
    """Yield (table name, table text) pairs for every results table in a file.

    Each table is assumed to start after the blank line following its header and
    to end at the next blank line.
    """
    for m in _RE_FOO_RESULTS_TABLE.finditer(contents):
        tbl_start = contents.find('\n\n', m.end()) + 1
        tbl_end = contents.find('\n\n', tbl_start)
        yield (m['tbl_name'], contents[tbl_start:tbl_end])


def main(files, out: str):
    wb = Workbook()
    files = [f.read() for f in files]
    tbls = map(extract_tables, files)
    for tbl_group in zip(*tbls):
        # Every input file must list the same tables in the same order.
        assert len(set(name for name, _ in tbl_group)) == 1
        ws = wb.create_sheet(tbl_group[0][0])

        str_tables = (tbl for _, tbl in tbl_group)
        merged = merge_tables(str_tables)
        for row in csv.reader(merged.splitlines()):
            ws.append(row)
        # Let each column size itself to its contents.
        for i, _ in enumerate(row):
            ws.column_dimensions[get_column_letter(i + 1)].bestFit = True

    # Drop the empty default sheet created by Workbook().
    wb.remove(wb.active)
    wb.save(out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Merges multiple CPU2017 exec-time CSV results together')
    parser.add_argument('-o', '--output', required=True, help='Where to write the output file')
    parser.add_argument('csvs', nargs='+', help='The files to merge')

    args = parser.parse_args()

    with ExitStack() as stack:
        files = [stack.enter_context(open(f, 'r')) for f in args.csvs]

        main(files, args.output)
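
For reference, a minimal sketch of how merge_tables fills in blank or 'NR' cells, assuming combine-exec.py sits in the working directory (the hyphenated filename has to be loaded via importlib) and using two made-up single-column benchmark tables:

    # Minimal sketch, assuming combine-exec.py is in the current directory; the
    # benchmark names and times below are made up for illustration.
    import importlib.util

    spec = importlib.util.spec_from_file_location('combine_exec', 'combine-exec.py')
    combine_exec = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(combine_exec)

    # Two runs of the same table: each run has a result the other is missing ('NR').
    run_a = '400.perlbench,42.1\n401.bzip2,NR\n'
    run_b = '400.perlbench,NR\n401.bzip2,55.3\n'

    print(combine_exec.merge_tables([run_a, run_b]))
    # 400.perlbench,42.1
    # 401.bzip2,55.3

The tool itself would be invoked along the lines of `./combine-exec.py -o merged.xlsx first.csv second.csv`, where the input filenames here are placeholders for the per-run exec-time CSV result files.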
