Skip to content

Commit 5fd2662

Browse files
committed
Added benchmarking to performance test.
1 parent e0db2d0 commit 5fd2662

File tree

2 files changed

+53
-10
lines changed

2 files changed

+53
-10
lines changed

.github/workflows/perf.yml

Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -149,18 +149,30 @@ jobs:
149149
path: artifacts
150150

151151
- name: Produce performance report
152-
run: python e2etest/get-performance-model-table.py
152+
run: python e2etest/get-performance-model-table.py ${{ needs.get-testing-version.outputs.testing_version }}
153153

154154
- name: Copy performance report
155-
run: cp doc/performance-report.md gh-pages/benchmark/report.md
155+
run: rsync -avv performance-report.md gh-pages/benchmark/report.md
156156

157-
- name: Copy artifacts
158-
run: rsync -avv artifacts/ gh-pages/benchmark/data
157+
- name: Copy performance data
158+
run: rsync -avv performance-data.json gh-pages/benchmark/data
159+
160+
- name: Store benchmark result
161+
uses: benchmark-action/github-action-benchmark@v1
162+
with:
163+
tool: "customSmallerIsBetter"
164+
output-file-path: performance-data.json
165+
external-data-json-path: gh-pages/benchmark/data/previous-benchmark-data.json
166+
benchmark-data-dir-path: gh-pages/benchmark/trend
167+
skip-fetch-gh-pages: true
168+
github-token: ${{ secrets.GITHUB_TOKEN }}
169+
auto-push: false
159170

160171
- name: Commit to gh-pages branch
161172
uses: stefanzweifel/git-auto-commit-action@v4
162173
with:
163174
commit_message: "Update benchmarking"
175+
commit_options: "--amend"
164176
branch: gh-pages
165177
repository: gh-pages
166-
file_pattern: gh-pages/benchmark/report.md gh-pages/benchmark/data/*
178+
file_pattern: gh-pages/benchmark/report.md gh-pages/benchmark/data/* gh-pages/benchmark/trend/*

e2etest/get-performance-model-table.py

Lines changed: 36 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ def add_performance_model(model):
3636
performance_models[data_mode][data_rate].append(model)
3737

3838

39-
def flatten_performance_models(models):
39+
def flatten_performance_models():
4040
"""
4141
Flattens performance model into list of grouped models where each group
4242
corresponds to a table in the report.
@@ -58,9 +58,36 @@ def flatten_performance_models(models):
5858
x["data_mode"], x["data_rate"]))
5959
return models_list
6060

61+
def get_benchmark_data():
62+
"""
63+
Splits models by test
64+
"""
65+
benchmark_data = []
66+
67+
for data_mode, data_rates in performance_models.items():
68+
for data_rate, models in data_rates.items():
69+
for model in models:
70+
benchmark_cpu = {}
71+
benchmark_cpu["name"] = f"{model['testcase']} ({data_mode}/{data_rate}) - Average CPU Usage"
72+
benchmark_cpu["value"] = model["avgCpu"]
73+
benchmark_cpu["unit"] = "Percent"
74+
75+
benchmark_mem = {}
76+
benchmark_mem["name"] = f"{model['testcase']} ({data_mode}/{data_rate}) - Average Memory Usage"
77+
benchmark_mem["value"] = model["avgMem"]
78+
benchmark_mem["unit"] = "Megabytes"
79+
80+
benchmark_data.append(benchmark_cpu)
81+
benchmark_data.append(benchmark_mem)
82+
83+
return benchmark_data
6184

6285
if __name__ == "__main__":
63-
aoc_version = Path('VERSION').read_text()
86+
try:
87+
aoc_version = sys.argv[1].rstrip()
88+
except IndexError:
89+
print(f"Usage: {sys.argv[0]} <version>")
90+
sys.exit(1)
6491

6592
from jinja2 import Environment, PackageLoader, select_autoescape
6693
templateLoader = jinja2.FileSystemLoader(searchpath="e2etest/templates/")
@@ -79,7 +106,7 @@ def flatten_performance_models(models):
79106
testing_ami = model["testingAmi"]
80107
add_performance_model(model)
81108

82-
models_list = flatten_performance_models(performance_models)
109+
models_list = flatten_performance_models()
83110

84111
# render performance models into markdown
85112
template = env.get_template('performance_model.tpl')
@@ -92,6 +119,10 @@ def flatten_performance_models(models):
92119
})
93120
print(rendered_result)
94121

95-
# write rendered result to docs/performance_model.md
96-
with open("docs/performance_model.md", "w+") as f:
122+
# write rendered result to report.md
123+
with open("performance-report.md", "w+") as f:
97124
f.write(rendered_result)
125+
126+
# write benchmark-data.json
127+
with open("performance-data.json", "w+") as f:
128+
f.write(json.dumps(get_benchmark_data()))

0 commit comments

Comments (0)