Skip to content

Commit 42eb5e9

Browse files
committed
simplify workflow to abtest
1 parent ba17b97 commit 42eb5e9

File tree

4 files changed

+141
-197
lines changed

4 files changed

+141
-197
lines changed

.github/workflows/_linux-benchmark-abtest-h100.yml

Lines changed: 0 additions & 125 deletions
This file was deleted.

.github/workflows/_linux-benchmark-h100.yml

Lines changed: 61 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -7,26 +7,38 @@ on:
77
description: |
88
Tritonbench Scribe Graph Access Token
99
inputs:
10-
benchmark_name:
10+
test_type:
1111
required: True
1212
type: string
13-
description: |
14-
Benchmark name
15-
conda_env:
13+
description: |
14+
Type of the test (single or abtest)
15+
benchmark_name:
1616
required: True
1717
type: string
1818
description: |
19-
Conda environment to activate when testing Triton
19+
Benchmark name
2020
side_a_triton:
21-
required: False
2221
type: string
22+
required: False
23+
default: "triton-lang/triton"
2324
description: |
24-
Triton repo name
25+
Triton repository to test on side A, e.g., "triton-lang/triton"
2526
side_a_commit:
27+
type: string
28+
required: False
29+
description: |
30+
Triton commit or tag to test on side A, e.g., "main"
31+
side_b_triton:
32+
type: string
2633
required: False
34+
default: "triton-lang/triton"
35+
description: |
36+
Triton repository to test on side B, e.g., "triton-lang/triton"
37+
side_b_commit:
2738
type: string
39+
required: False
2840
description: |
29-
Triton repo commit
41+
Triton commit or tag to test on side B, e.g., "main"
3042
3143
jobs:
3244
linux-benchmark-h100:
@@ -39,9 +51,9 @@ jobs:
3951
contents: read
4052
env:
4153
SETUP_SCRIPT: "/workspace/setup_instance.sh"
42-
CONDA_ENV: ${{ inputs.conda_env }}
4354
RUNNER_TYPE: "gcp-h100-runner"
44-
JOB_NAME: tritonbench-h100-${{ inputs.conda_env }}-${{ inputs.benchmark_name }}
55+
JOB_NAME: tritonbench-h100-benchmark-${{ inputs.test_type }}-${{ inputs.benchmark_name }}
56+
TRITONBENCH_SIDE_A_ENV: "triton-main"
4557
TRITONBENCH_SCRIBE_GRAPHQL_ACCESS_TOKEN: ${{ secrets.TRITONBENCH_SCRIBE_GRAPHQL_ACCESS_TOKEN }}
4658
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
4759
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -62,18 +74,29 @@ jobs:
6274
# The max duration enforced by the server side
6375
role-duration-seconds: 18000
6476
aws-region: us-east-1
65-
- name: Compile Triton (On Demand)
77+
- name: Compile Triton on Demand (Side A)
6678
if: ${{ inputs.side_a_triton && inputs.side_a_commit }}
6779
run: |
68-
bash ./.ci/triton/compile.sh --repo ${{ inputs.side_a_triton }} --commit ${{ inputs.side_a_commit }} --side a
69-
- name: Benchmarking
80+
bash ./.ci/triton/install.sh --repo ${{ inputs.side_a_triton }} --commit ${{ inputs.side_a_commit }} --side a
81+
echo "TRITONBENCH_SIDE_A_ENV=triton-side-a" >> $GITHUB_ENV
82+
- name: Benchmark Triton (Side A)
7083
run: |
71-
if [ -n "${{ inputs.side_a_triton }}" ] && [ -n "${{ inputs.side_a_commit }}" ]; then
72-
bash .ci/tritonbench/run-benchmark.sh ${{ inputs.benchmark_name }} --conda-env triton-side-a
73-
else
74-
bash .ci/tritonbench/run-benchmark.sh ${{ inputs.benchmark_name }}
75-
fi
76-
cp -r ".benchmarks/${{ inputs.benchmark_name }}" benchmark-output
84+
bash ./.ci/tritonbench/run-benchmark.sh ${{ inputs.benchmark_name }} --conda-env ${TRITONBENCH_SIDE_A_ENV}
85+
mkdir -p benchmark-output
86+
cp -r .benchmarks/${{ inputs.benchmark_name }} benchmark-output/${TRITONBENCH_SIDE_A_ENV}
87+
rm -rf .benchmarks || true
88+
- name: Compile Triton on Demand (Side B)
89+
if: ${{ inputs.test_type == 'abtest' && inputs.side_b_triton && inputs.side_b_commit }}
90+
run: |
91+
bash ./.ci/triton/install.sh --repo ${{ inputs.side_b_triton }} --commit ${{ inputs.side_b_commit }} --side b
92+
echo "TRITONBENCH_SIDE_B_ENV=triton-side-b" >> $GITHUB_ENV
93+
- name: Benchmark Triton (Side B)
94+
if: ${{ inputs.test_type == 'abtest' && inputs.side_b_triton && inputs.side_b_commit }}
95+
run: |
96+
bash ./.ci/tritonbench/run-benchmark.sh ${{ inputs.benchmark_name }} --conda-env ${TRITONBENCH_SIDE_B_ENV}
97+
mkdir -p benchmark-output
98+
cp -r ".benchmarks/${{ inputs.benchmark_name }}" benchmark-output/${TRITONBENCH_SIDE_B_ENV}
99+
rm -rf .benchmarks || true
77100
- name: Upload result to GH Actions Artifact
78101
uses: actions/upload-artifact@v4
79102
with:
@@ -82,21 +105,34 @@ jobs:
82105
- name: Upload result to Scribe
83106
run: |
84107
. "${SETUP_SCRIPT}"
85-
latest_result_json=$(find ./benchmark-output -name "result.json" | sort -r | head -n 1)
86-
python ./.ci/upload/scribe.py --json ${latest_result_json}
108+
if [ -n "${TRITONBENCH_SIDE_A_ENV}" ]; then
109+
triton_side_a_json=$(find ./benchmark-output/${TRITONBENCH_SIDE_A_ENV} -name "result.json" | sort -r | head -n 1)
110+
python ./.ci/upload/scribe.py --json ${triton_side_a_json}
111+
fi
112+
if [ -n "${TRITONBENCH_SIDE_B_ENV}" ]; then
113+
triton_side_b_json=$(find ./benchmark-output/${TRITONBENCH_SIDE_B_ENV} -name "result.json" | sort -r | head -n 1)
114+
python ./.ci/upload/scribe.py --json ${triton_side_b_json}
115+
fi
87116
- name: Rewrite Tritonbench json to ClickHouse style
88117
run: |
89118
. "${SETUP_SCRIPT}"
90-
latest_result_json=$(find ./benchmark-output -name "result.json" | sort -r | head -n 1)
91-
python ./.ci/test_infra/oss_ci_benchmark_v3.py --json ${latest_result_json} \
92-
--output benchmark-output/results/result.json
119+
if [ -n "${TRITONBENCH_SIDE_A_ENV}" ]; then
120+
triton_side_a_json=$(find ./benchmark-output/${TRITONBENCH_SIDE_A_ENV} -name "result.json" | sort -r | head -n 1)
121+
python ./.ci/test_infra/oss_ci_benchmark_v3.py --json "${triton_side_a_json}" \
122+
--output "benchmark-output/clickouse-results/result-${TRITONBENCH_SIDE_A_ENV}.json"
123+
fi
124+
if [ -n "${TRITONBENCH_SIDE_B_ENV}" ]; then
125+
triton_side_b_json=$(find ./benchmark-output/${TRITONBENCH_SIDE_B_ENV} -name "result.json" | sort -r | head -n 1)
126+
python ./.ci/test_infra/oss_ci_benchmark_v3.py --json "${triton_side_b_json}" \
127+
--output "benchmark-output/clickouse-results/result-${TRITONBENCH_SIDE_B_ENV}.json"
128+
fi
93129
- name: Setup uploader dependencies
94130
run: |
95131
sudo apt-get install -y python3-pip
96132
- name: Upload result to ClickHouse
97133
uses: pytorch/test-infra/.github/actions/upload-benchmark-results@main
98134
with:
99-
benchmark-results-dir: benchmark-output/results
135+
benchmark-results-dir: benchmark-output/clickouse-results
100136
dry-run: false
101137
schema-version: v3
102138
github-token: ${{ secrets.GITHUB_TOKEN }}

0 commit comments

Comments
 (0)