@@ -439,13 +439,18 @@ def createCommandLineArgs(self, benchmarks, bmSuiteArgs):
         polybench_args = ["--path=" + resolved_benchmark.absolute_path] + self.runArgs(bmSuiteArgs)
         return vm_args + [PolybenchBenchmarkSuite.POLYBENCH_MAIN] + polybench_args

+    def runAndReturnStdOut(self, benchmarks, bmSuiteArgs):
+        """Delegates to the super implementation then injects engine.config into every datapoint."""
+        ret_code, out, dims = super().runAndReturnStdOut(benchmarks, bmSuiteArgs)
+        dims["engine.config"] = self._get_mode(bmSuiteArgs)
+        return ret_code, out, dims
+
     def rules(self, output, benchmarks, bmSuiteArgs):
         metric_name = PolybenchBenchmarkSuite._get_metric_name(output)
         if metric_name is None:
             return []
         rules = []
         benchmark_name = benchmarks[0]
-        mode = self._get_mode(bmSuiteArgs)
         if metric_name == "time":
             # For metric "time", two metrics are reported:
             # - "warmup" (per-iteration data for "warmup" and "run" iterations)
@@ -462,7 +467,6 @@ def rules(self, output, benchmarks, bmSuiteArgs):
                         "metric.type": "numeric",
                         "metric.score-function": "id",
                         "metric.iteration": ("$iteration", int),
-                        "engine.config": mode,
                     },
                 ),
                 ExcludeWarmupRule(
@@ -476,7 +480,6 @@ def rules(self, output, benchmarks, bmSuiteArgs):
                         "metric.type": "numeric",
                         "metric.score-function": "id",
                         "metric.iteration": ("<iteration>", int),
-                        "engine.config": mode,
                     },
                     startPattern=r"::: Running :::",
                 ),
@@ -494,7 +497,6 @@ def rules(self, output, benchmarks, bmSuiteArgs):
                         "metric.type": "numeric",
                         "metric.score-function": "id",
                         "metric.iteration": ("<iteration>", int),
-                        "engine.config": mode,
                     },
                     startPattern=r"::: Running :::",
                 )
@@ -512,7 +514,6 @@ def rules(self, output, benchmarks, bmSuiteArgs):
                         "metric.type": "numeric",
                         "metric.score-function": "id",
                         "metric.iteration": 0,
-                        "engine.config": mode,
                     },
                 )
             ]
@@ -531,7 +532,6 @@ def rules(self, output, benchmarks, bmSuiteArgs):
                         "metric.score-function": "id",
                         "metric.better": "lower",
                         "metric.iteration": 0,
-                        "engine.config": mode,
                     },
                 ),
                 mx_benchmark.StdOutRule(
@@ -545,7 +545,6 @@ def rules(self, output, benchmarks, bmSuiteArgs):
                         "metric.score-function": "id",
                         "metric.better": "lower",
                         "metric.iteration": 0,
-                        "engine.config": mode,
                     },
                 ),
             ]
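The refactoring above moves the "engine.config" dimension out of the individual parsing rules and into a single runAndReturnStdOut override, whose returned dims the mx benchmark harness merges into every datapoint. Below is a minimal, self-contained sketch of that pattern; the names (parse_rules, run_and_return_stdout, collect_datapoints) and the sample data are made up for illustration and are not part of the mx_benchmark API.

# Illustrative sketch only: shows the idea of adding a shared dimension once,
# in a post-processing hook, instead of repeating it in every parsing rule.
# All names below are hypothetical and do not mirror the real mx_benchmark API.

def parse_rules(output, benchmark_name):
    # Stand-in for the StdOutRule/ExcludeWarmupRule parsing in the diff:
    # the rules no longer need to know about the engine configuration.
    return [
        {"benchmark": benchmark_name, "metric.name": "warmup", "metric.value": 12.3},
        {"benchmark": benchmark_name, "metric.name": "time", "metric.value": 10.1},
    ]

def run_and_return_stdout(mode):
    # Analogue of the overridden runAndReturnStdOut: the shared dimension is
    # computed once and handed back alongside the benchmark output.
    ret_code, out = 0, "benchmark output"
    dims = {"engine.config": mode}
    return ret_code, out, dims

def collect_datapoints(benchmark_name, mode):
    ret_code, out, dims = run_and_return_stdout(mode)
    datapoints = parse_rules(out, benchmark_name)
    # Merging dims into each datapoint reproduces what the removed
    # per-rule "engine.config": mode entries used to do.
    return [{**dp, **dims} for dp in datapoints]

if __name__ == "__main__":
    for dp in collect_datapoints("polybench/example", "example-mode"):
        print(dp)

The practical upside mirrors the diff: a newly added rule picks up the engine.config dimension automatically, with no per-rule bookkeeping.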