Commit 738e407

fix: Micro benchmark flamegraph (#1598)
Fix micro benchmarks after circuit flamegraph changes
1 parent: 89ae40e · commit: 738e407

5 files changed: +30 -24 lines changed

.github/workflows/benchmark-call.yml

Lines changed: 19 additions & 9 deletions
```diff
@@ -106,6 +106,7 @@ on:

 env:
   S3_METRICS_PATH: s3://openvm-public-data-sandbox-us-east-1/benchmark/github/metrics
+  S3_FLAMEGRAPHS_PATH: s3://openvm-public-data-sandbox-us-east-1/benchmark/github/flamegraphs
   FEATURE_FLAGS: "bench-metrics,parallel,nightly-features"
   INPUT_ARGS: ""
   CARGO_NET_GIT_FETCH_WITH_CLI: "true"
@@ -228,16 +229,25 @@ jobs:

           s5cmd cp $METRIC_PATH ${{ env.S3_METRICS_PATH }}/${METRIC_NAME}-${current_sha}.json

-      # - name: Install inferno-flamegraph
-      #   run: cargo install inferno
+      - name: Install inferno-flamegraph
+        if: ${{ contains(env.FEATURE_FLAGS, 'profiling') }}
+        run: cargo install inferno

-      # - name: Generate flamegraphs
-      #   run: |
-      #     if [[ -f $METRIC_PATH ]]; then
-      #       python3 ci/scripts/metric_unify/flamegraph.py $METRIC_PATH
-      #       s5cmd cp '.bench_metrics/flamegraphs/*.svg' "${{ env.PUBLIC_S3_PATH }}/${current_sha}/"
-      #       echo "UPLOAD_FLAMEGRAPHS=1" >> $GITHUB_ENV
-      #     fi
+      - name: Generate flamegraphs
+        if: ${{ contains(env.FEATURE_FLAGS, 'profiling') }}
+        run: |
+          if [[ -f $METRIC_PATH ]]; then
+            GUEST_SYMBOLS_PATH="${METRIC_PATH%.json}.syms"
+            if [[ -f $GUEST_SYMBOLS_PATH ]]; then
+              echo "Generating flamegraphs with guest symbols"
+              python3 ci/scripts/metric_unify/flamegraph.py $METRIC_PATH --guest-symbols $GUEST_SYMBOLS_PATH
+            else
+              echo "No guest symbols found, generating flamegraphs without symbols"
+              python3 ci/scripts/metric_unify/flamegraph.py $METRIC_PATH
+            fi
+            s5cmd cp '.bench_metrics/flamegraphs/*.svg' "${{ env.S3_FLAMEGRAPHS_PATH }}/${METRIC_NAME}-${current_sha}/"
+            echo "UPLOAD_FLAMEGRAPHS=1" >> $GITHUB_ENV
+          fi

       ##########################################################################
       # Update s3 for latest main metrics upon a push event                    #
```
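The rewritten "Generate flamegraphs" step decides at runtime whether to pass `--guest-symbols`, based on whether a `.syms` file sits next to the metrics JSON. For local experimentation, a rough Python rendering of that decision logic (a sketch only: the function name is mine, the bash `${METRIC_PATH%.json}.syms` substitution is approximated with `os.path.splitext`, and the S3 upload part of the step is omitted):

```python
import os
import subprocess

def generate_flamegraphs(metric_path: str) -> None:
    """Sketch of the CI step's branching, run from the repo root."""
    if not os.path.isfile(metric_path):
        return
    # CI derives the symbols file by swapping the .json suffix for .syms.
    guest_symbols_path = os.path.splitext(metric_path)[0] + ".syms"
    cmd = ["python3", "ci/scripts/metric_unify/flamegraph.py", metric_path]
    if os.path.isfile(guest_symbols_path):
        print("Generating flamegraphs with guest symbols")
        cmd += ["--guest-symbols", guest_symbols_path]
    else:
        print("No guest symbols found, generating flamegraphs without symbols")
    subprocess.run(cmd, check=True)
```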

ci/scripts/bench.py

Lines changed: 2 additions & 0 deletions
```diff
@@ -50,6 +50,8 @@ def run_cargo_command(
     # Prepare the environment variables
     env = os.environ.copy() # Copy current environment variables
     env["OUTPUT_PATH"] = output_path
+    if "profiling" in feature_flags:
+        env["GUEST_SYMBOLS_PATH"] = os.path.splitext(output_path)[0] + ".syms"
     env["RUSTFLAGS"] = "-Ctarget-cpu=native"

     # Run the subprocess with the updated environment
```
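The two added lines derive the symbols path from `OUTPUT_PATH` by swapping the file extension, so the `.syms` file always lands next to the metrics JSON. A minimal illustration of that derivation (the `output_path` value here is just an example):

```python
import os

output_path = "metrics.json"  # example value; bench.py gets this from its caller
# Same expression as the added line: drop the extension, append ".syms".
guest_symbols_path = os.path.splitext(output_path)[0] + ".syms"
print(guest_symbols_path)  # -> metrics.syms
```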

ci/scripts/utils.sh

Lines changed: 4 additions & 11 deletions
```diff
@@ -18,12 +18,6 @@ add_metadata_and_flamegraphs() {
     }')
     echo "inputs: $inputs"

-    if [[ "$FLAMEGRAPHS" == 'true' ]]; then
-        repo_root=$(git rev-parse --show-toplevel)
-        python3 ${repo_root}/ci/scripts/metric_unify/flamegraph.py $metric_path
-        s5cmd cp "${repo_root}/.bench_metrics/flamegraphs/*.svg" "${S3_FLAMEGRAPHS_PATH}/${CURRENT_SHA}/"
-    fi
-
     if [ ! -z "$inputs" ]; then
         max_segment_length=$(echo "$inputs" | jq -r '.max_segment_length')
         instance_type=$(echo "$inputs" | jq -r '.instance_type')
@@ -54,13 +48,12 @@ add_metadata() {
     echo "<details>" >> $result_path
     echo "<summary>Flamegraphs</summary>" >> $result_path
     echo "" >> $result_path
-    repo_root=$(git rev-parse --show-toplevel)
-    for file in $repo_root/.bench_metrics/flamegraphs/*.svg; do
-        filename=$(basename "$file")
-        flamegraph_url=https://openvm-public-data-sandbox-us-east-1.s3.us-east-1.amazonaws.com/benchmark/github/flamegraphs/${CURRENT_SHA}/${filename}
+    benchmark_name=$(basename "$result_path" | cut -d'-' -f1)
+    flamegraph_files=$(s5cmd ls ${S3_FLAMEGRAPHS_PATH}/${benchmark_name}-${CURRENT_SHA}/*.svg | awk '{print $4}' | xargs -n1 basename)
+    for file in $flamegraph_files; do
+        flamegraph_url=https://openvm-public-data-sandbox-us-east-1.s3.us-east-1.amazonaws.com/benchmark/github/flamegraphs/${benchmark_name}-${CURRENT_SHA}/${file}
         echo "[![]($flamegraph_url)]($flamegraph_url)" >> $result_path
     done
-    rm -f ${repo_root}/.bench_metrics/flamegraphs/*.svg
     echo "" >> $result_path
     echo "</details>" >> $result_path
     echo "" >> $result_path
```

crates/toolchain/transpiler/src/elf.rs

Lines changed: 2 additions & 1 deletion
```diff
@@ -132,7 +132,8 @@ impl Elf {
            }
        }

-        let guest_symbols_path = std::env::var("GUEST_SYMBOLS_PATH")?;
+        let guest_symbols_path = std::env::var("GUEST_SYMBOLS_PATH")
+            .map_err(|e| eyre::eyre!("{e}: GUEST_SYMBOLS_PATH"))?;
         let mut guest_symbols_file =
             std::fs::File::create(&guest_symbols_path).map_err(|e| {
                 eyre::eyre!(
```
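The Rust change only adds context to the error when `GUEST_SYMBOLS_PATH` is unset, so the failure names the missing variable instead of surfacing a bare `VarError`. The same pattern, sketched in Python here to keep the snippets in one language (the variable name comes from the diff; the wrapper function is illustrative):

```python
import os

def guest_symbols_path() -> str:
    # Equivalent intent: fail with a message that names the missing variable.
    try:
        return os.environ["GUEST_SYMBOLS_PATH"]
    except KeyError as e:
        raise RuntimeError("environment variable not found: GUEST_SYMBOLS_PATH") from e
```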

docs/crates/benchmarks.md

Lines changed: 3 additions & 3 deletions
````diff
@@ -120,10 +120,10 @@ for more detailed profiling we generate special flamegraphs that visualize VM-sp
 The benchmark must be run with special configuration so that additional metrics are collected for profiling. Note that the additional metric collection will slow down the benchmark. To run a benchmark with the additional profiling, run the following command:

 ```bash
-OUTPUT_PATH="metrics.json" cargo run --release --bin <benchmark_name> --features profiling -- --profiling
+OUTPUT_PATH="metrics.json" GUEST_SYMBOLS_PATH="guest.syms" cargo run --release --bin <benchmark_name> --features profiling -- --profiling
 ```

-Add `--features aggregation,profiling` to run with leaf aggregation. The `profiling` feature tells the VM to run with additional metric collection. The `--profiling` CLI argument tells the script to build the guest program with `profile=profiling` so that the guest program is compiled without stripping debug symbols.
+Add `--features aggregation,profiling` to run with leaf aggregation. The `profiling` feature tells the VM to run with additional metric collection. The `--profiling` CLI argument tells the script to build the guest program with `profile=profiling` so that the guest program is compiled without stripping debug symbols. When the `profiling` feature is enabled, the `GUEST_SYMBOLS_PATH` environment variable must be set to the file path where function symbols of the guest program will be exported. Those symbols are then used to annotate the flamegraph with function names.

 After the collected metrics are written to `$OUTPUT_PATH`, these flamegraphs can be generated if you have [inferno-flamegraph](https://crates.io/crates/inferno) installed. Install via

@@ -134,7 +134,7 @@ cargo install inferno
 Then run

 ```bash
-python <repo_root>/ci/scripts/metric_unify/flamegraph.py $OUTPUT_PATH
+python <repo_root>/ci/scripts/metric_unify/flamegraph.py $OUTPUT_PATH --guest-symbols $GUEST_SYMBOLS_PATH
 ```

 The flamegraphs will be written to `*.svg` files in `.bench_metrics/flamegraphs` with respect to the repo root.
````
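After running the documented commands locally, a quick way to confirm the SVGs ended up where the docs say (a sketch; it assumes you run it from the repo root):

```python
from pathlib import Path

# Per the docs, flamegraphs are written to .bench_metrics/flamegraphs
# relative to the repo root.
flamegraph_dir = Path(".bench_metrics/flamegraphs")
svgs = sorted(flamegraph_dir.glob("*.svg"))
if not svgs:
    print(f"no flamegraphs found under {flamegraph_dir}")
for svg in svgs:
    print(svg)
```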
