
Commit 32f9e84

Report improvement - support output of IDs to a json file (#2059)
* Updated the final report generation script to output the list of IDs being used
* Update generate_final_report.py
1 parent f000467

File tree

2 files changed (+35, -7 lines)


main.py

Lines changed: 2 additions & 1 deletion
@@ -502,7 +502,8 @@ def get_common_info(spaces, implementation, model):
     info += f"\n{pre_space}!!! tip\n\n"
     info += f"{pre_space} - Number of threads could be adjusted using `--threads=#`, where `#` is the desired number of threads. This option works only if the implementation in use supports threading.\n\n"
     info += f"{pre_space} - Batch size could be adjusted using `--batch_size=#`, where `#` is the desired batch size. This option works only if the implementation in use is supporting the given batch size.\n\n"
-    info += f"{pre_space} - `_r4.1-dev` could also be given instead of `_r5.0-dev` if you want to run the benchmark with the MLPerf version being 4.1.\n\n"
+    if implementation.lower() == "reference":
+        info += f"{pre_space} - `_r4.1-dev` could also be given instead of `_r5.0-dev` if you want to run the benchmark with the MLPerf version being 4.1.\n\n"
     if model == "rgat":
         info += f"{pre_space} - Add `--env.CM_DATASET_IGBH_PATH=<Path to IGBH dataset>` if you have already downloaded the dataset. The path will be automatically mounted when using docker run.\n\n"
         info += f"{pre_space} - Add `--env.CM_ML_MODEL_RGAT_CHECKPOINT_PATH=<Path to R-GAT model checkpoint>` if you have already downloaded the model. The path will be automatically mounted when using docker run.\n\n"

tools/submission/generate_final_report.py

Lines changed: 33 additions & 6 deletions
@@ -202,7 +202,8 @@ def main():
         "datacenter": {
             "resnet": ["Server", "Offline"],
             "retinanet": ["Server", "Offline"],
-            "rnnt": ["Server", "Offline"],
+            "bert-99": [],
+            "bert-99.9": [],
             "dlrm-v2-99": ["Server", "Offline"],
             "dlrm-v2-99.9": ["Server", "Offline"],
             "3d-unet-99": ["Offline"],
@@ -216,18 +217,26 @@ def main():
             "llama2-70b-interactive-99.9": ["Server", "Offline"],
             "mixtral-8x7b": ["Server", "Offline"],
             "rgat": ["Offline"],
-            "llama3.1-405b": ["Offline", "Server"]
+            "llama3.1-405b": ["Offline", "Server"],
+            "pointpainting": []
         },
         "edge": {
             "resnet": ["SingleStream", "MultiStream", "Offline"],
             "retinanet": ["SingleStream", "MultiStream", "Offline"],
-            "rnnt": ["SingleStream", "Offline"],
             "bert-99": ["SingleStream", "Offline"],
             "bert-99.9": ["SingleStream", "Offline"],
+            "dlrm-v2-99": [],
+            "dlrm-v2-99.9": [],
             "3d-unet-99": ["SingleStream", "Offline"],
             "3d-unet-99.9": ["SingleStream", "Offline"],
+            "llama2-70b-99": [],
+            "llama2-70b-99.9": [],
+            "llama2-70b-interactive-99": [],
+            "llama2-70b-interactive-99.9": [],
+            "llama3.1-405b": [],
             "gptj-99": ["SingleStream", "Offline"],
             "gptj-99.9": ["SingleStream", "Offline"],
+            "rgat": [],
             "stable-diffusion-xl": ["SingleStream", "Offline"],
             "pointpainting": ["SingleStream"],
         },
@@ -285,16 +294,34 @@ def MakeUniqueID(x):
             "Unique ID (e.g. for Audit)"],
         inplace=True,
     )
-    id_dict = {
-        key: 1 + value
-        for (value, key) in enumerate(pd.unique(df["Unique ID (e.g. for Audit)"]))
+    if os.path.exists("ids.json"):
+        with open("ids.json", "r") as f:
+            id_dict = json.load(f)
+    else:
+        id_dict = {}
+    cur_keys = id_dict.keys()
+    cur_ids = id_dict.values()
+    new_keys = [
+        a for a in pd.unique(
+            df["Unique ID (e.g. for Audit)"]) if a not in cur_keys]
+    if cur_ids:
+        max_cur_id = max(cur_ids)
+    else:
+        max_cur_id = 0
+    id_dict_new = {
+        key: 1 + value + max_cur_id
+        for (value, key) in enumerate(new_keys)
     }
+    id_dict.update(id_dict_new)
+
     df["ID"] = df.apply(
         lambda x: "{}-{:04}".format(
             args.version, id_dict[x["Unique ID (e.g. for Audit)"]]
         ),
        axis=1,
     )
+    with open("ids.json", "w") as f:
+        f.write(json.dumps(id_dict, indent=4))

     for category in ["closed", "open", "network"]:
         for suite in ["datacenter", "edge"]:
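To illustrate the new ID bookkeeping, here is a minimal, self-contained sketch; the `assign_ids` helper and its arguments are assumptions for illustration, not part of generate_final_report.py. Entries already present in ids.json keep their numbers across runs, and only previously unseen unique IDs receive new numbers, continuing after the current maximum.

import json
import os


def assign_ids(unique_keys, ids_path="ids.json"):
    # Reload the mapping written by a previous run so IDs stay stable.
    if os.path.exists(ids_path):
        with open(ids_path, "r") as f:
            id_dict = json.load(f)
    else:
        id_dict = {}

    # Number only the keys that are not in the mapping yet, continuing
    # after the largest ID assigned so far.
    max_cur_id = max(id_dict.values(), default=0)
    for offset, key in enumerate(k for k in unique_keys if k not in id_dict):
        id_dict[key] = max_cur_id + 1 + offset

    # Persist the merged mapping for the next report run.
    with open(ids_path, "w") as f:
        f.write(json.dumps(id_dict, indent=4))
    return id_dict


# First run assigns 1 and 2; a second run keeps them and appends 3.
print(assign_ids(["submitter-a", "submitter-b"]))  # {"submitter-a": 1, "submitter-b": 2}
print(assign_ids(["submitter-b", "submitter-c"]))  # {"submitter-a": 1, "submitter-b": 2, "submitter-c": 3}

The script then formats each value as "{version}-{id:04}" for the ID column, so keeping the mapping stable means audit IDs do not change when the final report is regenerated.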
