6 | 6 | import multiprocessing |
7 | 7 | import omegaconf |
8 | 8 | import subprocess |
9 | | - |
| 9 | +import json |
10 | 10 |
11 | 11 | log = logging.getLogger("run_benchmark") |
12 | 12 |
@@ -166,28 +166,55 @@ def execute_command(command): |
166 | 166 |
167 | 167 |
168 | 168 | def write_results(times, cfg): |
169 | | - row = { |
170 | | - "benchmark": cfg["benchmark"]["name"], |
171 | | - "target": cfg["target"]["name"], |
172 | | - "total_iterations": cfg["iterations"], |
173 | | - "threads": cfg["threads"], |
174 | | - "iteration": None, |
175 | | - "time_ms": None, |
176 | | - } |
177 | | - # also add all parameters and their values |
178 | | - row.update(cfg["benchmark"]["params"]) |
179 | | - if "params" in cfg["target"]: |
180 | | - row.update(cfg["target"]["params"]) |
181 | | - |
182 | | - with open("results.csv", "w", newline="") as csvfile: |
183 | | - writer = csv.DictWriter(csvfile, fieldnames=row.keys()) |
184 | | - writer.writeheader() |
185 | | - i = 0 |
| 169 | + if not cfg["json"]: |
| 170 | + row = { |
| 171 | + "benchmark": cfg["benchmark"]["name"], |
| 172 | + "target": cfg["target"]["name"], |
| 173 | + "total_iterations": cfg["iterations"], |
| 174 | + "threads": cfg["threads"], |
| 175 | + "iteration": None, |
| 176 | + "time_ms": None, |
| 177 | + } |
| 178 | + # also add all parameters and their values |
| 179 | + row.update(cfg["benchmark"]["params"]) |
| 180 | + if "params" in cfg["target"]: |
| 181 | + row.update(cfg["target"]["params"]) |
| 182 | + |
| 183 | + with open("results.csv", "w", newline="") as csvfile: |
| 184 | + writer = csv.DictWriter(csvfile, fieldnames=row.keys()) |
| 185 | + writer.writeheader() |
| 186 | + i = 0 |
| 187 | + for t in times: |
| 188 | + row["iteration"] = i |
| 189 | + row["time_ms"] = t |
| 190 | + writer.writerow(row) |
| 191 | + i += 1 |
| 192 | + else: |
| 193 | + total_time = 0 |
186 | 194 | for t in times: |
187 | | - row["iteration"] = i |
188 | | - row["time_ms"] = t |
189 | | - writer.writerow(row) |
190 | | - i += 1 |
| 195 | + total_time += t |
| 196 | + total_time /= cfg["iterations"] |
| 197 | + data = { |
| 198 | + "name": cfg["benchmark"]["name"], |
| 199 | + "unit": "ms", |
| 200 | + "value": total_time, |
| 201 | + "extra": f"Target: {cfg['target']['name']}\nTotal Iterations: {cfg['iterations']}\nThreads: {cfg['threads']}" |
| 202 | + } |
| 203 | + |
| 204 | + try: |
| 205 | + with open("../../../benchmark_result.json", "r+") as outfile: |
| 206 | + # benchmark_result.json file should be in the multirun directory |
| 207 | + # update existing file |
| 208 | + contents = json.load(outfile) |
| 209 | + contents.append(data) |
| 210 | + outfile.seek(0) |
| 211 | + json.dump(contents, outfile, indent=4) |
| 212 | + |
| 213 | + except FileNotFoundError: |
| 214 | + with open("../../../benchmark_result.json", "w+") as outfile: |
| 215 | + # create new file |
| 216 | + json.dump([data], outfile, indent=4) |
| 217 | + |
191 | 218 |
192 | 219 |
193 | 220 | if __name__ == "__main__": |
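
For reference, the JSON branch added above uses a read-modify-write pattern on ../../../benchmark_result.json: load the existing list, append the new entry, seek back to the start of the file, and dump. Below is a minimal standalone sketch of that pattern (append_result and the example values are illustrative only, not part of the patch; the truncate() call is an extra safeguard that matters only if the rewritten JSON could ever be shorter than what was already on disk, which cannot happen while entries are only appended):

import json

def append_result(path, entry):
    # Append one benchmark entry to a JSON list on disk, creating the file if needed.
    try:
        with open(path, "r+") as outfile:
            contents = json.load(outfile)  # existing list of results
            contents.append(entry)
            outfile.seek(0)
            json.dump(contents, outfile, indent=4)
            outfile.truncate()  # drop any stale bytes past the new end of the file
    except FileNotFoundError:
        with open(path, "w") as outfile:
            json.dump([entry], outfile, indent=4)

# Example entry mirroring the fields written by write_results(); values are placeholders.
append_result("benchmark_result.json", {
    "name": "example_benchmark",
    "unit": "ms",
    "value": 12.3,
    "extra": "Target: cpu\nTotal Iterations: 10\nThreads: 4",
})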