|
 name: Benchmarks Reusable

-on:
-  workflow_call:
-    inputs:
-      str_name:
-        required: true
-        type: string
-      pr_no:
-        required: true
-        # even though this is a number, this is a workaround for issues with
-        # reusable workflow calls that result in "Unexpected value '0'" error.
-        type: string
-      bench_script_params:
-        required: false
-        type: string
-        default: ''
-      sycl_config_params:
-        required: false
-        type: string
-        default: ''
-      upload_report:
-        required: false
-        type: boolean
-        default: false
-      compute_runtime_commit:
-        required: false
-        type: string
-        default: ''
+# This workflow is a WIP: This workflow file acts as a placeholder.

-permissions:
-  contents: read
-  pull-requests: write
+on: [ workflow_call ]
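
The hunk above removes the workflow's public interface. Because a `number`-typed `workflow_call` input triggered the "Unexpected value '0'" error mentioned in the comment, `pr_no` was declared as a string and callers had to quote it. A minimal sketch of a caller job under that old interface (the workflow path and all values are hypothetical):

jobs:
  run-benchmarks:
    # Hypothetical caller of the old reusable workflow; path and values are illustrative only.
    uses: ./.github/workflows/benchmarks-reusable.yml
    with:
      str_name: 'level_zero'   # adapter name (example value)
      pr_no: '12345'           # quoted: the input is typed as string, not number
      upload_report: true
    permissions:
      contents: read
      pull-requests: write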

 jobs:
-  bench-run:
-    name: Build SYCL, Run Benchmarks
-    strategy:
-      matrix:
-        adapter: [
-          {str_name: "${{ inputs.str_name }}",
-           sycl_config: "${{ inputs.sycl_config_params }}"
-          }
-        ]
-        build_type: [Release]
-        compiler: [{c: clang, cxx: clang++}]
-
-    runs-on: "PVC_PERF"
-
+  do-nothing:
+    runs-on: ubuntu-latest
     steps:
-    - name: Add comment to PR
-      uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-      if: ${{ always() && inputs.pr_no != 0 }}
-      with:
-        script: |
-          const pr_no = '${{ inputs.pr_no }}';
-          const adapter = '${{ matrix.adapter.str_name }}';
-          const url = '${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}';
-          const params = '${{ inputs.bench_script_params }}';
-          const body = `Compute Benchmarks ${adapter} run (with params: ${params}):\n${url}`;
-
-          github.rest.issues.createComment({
-            issue_number: pr_no,
-            owner: context.repo.owner,
-            repo: context.repo.repo,
-            body: body
-          })
-
-    - name: Checkout SYCL
-      uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      with:
-        path: sycl-repo
-
-    # We need to fetch special ref for proper PR's merge commit. Note, this ref may be absent if the PR is already merged.
-    - name: Fetch PR's merge commit
-      if: ${{ inputs.pr_no != 0 }}
-      working-directory: ${{github.workspace}}/sycl-repo
-      run: |
-        git fetch -- https://github.com/${{github.repository}} +refs/pull/${{ inputs.pr_no }}/*:refs/remotes/origin/pr/${{ inputs.pr_no }}/*
-        git checkout origin/pr/${{ inputs.pr_no }}/merge
-        git rev-parse origin/pr/${{ inputs.pr_no }}/merge
-
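
The `refs/pull/<n>/merge` ref fetched in the step above is synthesized by GitHub and can disappear once the PR is merged or stops being mergeable, which is what the comment warns about. A sketch of how one might probe for it first (a hypothetical step, not taken from the file):

    - name: Check for the PR merge ref (sketch)
      run: |
        # Prints the ref if GitHub still provides a test-merge commit for this PR;
        # prints nothing once the PR is merged or unmergeable.
        git ls-remote https://github.com/${{ github.repository }} \
          "refs/pull/${{ inputs.pr_no }}/merge"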
|
-    # TODO: As long as we didn't merge this workflow into main, we should allow both scripts location
-    - name: Establish bench scripts location
-      run: |
-        if [ -d "${{github.workspace}}/sycl-repo/devops/scripts/benchmarks" ]; then
-          echo "Bench scripts are in devops/scripts"
-          echo "BENCH_SCRIPTS_DIR=${{github.workspace}}/sycl-repo/devops/scripts/benchmarks" >> $GITHUB_ENV
-        elif [ -d "${{github.workspace}}/sycl-repo/unified-runtime/scripts/benchmarks" ]; then
-          echo "Bench scripts are in unified-runtime/scripts"
-          echo "BENCH_SCRIPTS_DIR=${{github.workspace}}/sycl-repo/unified-runtime/scripts/benchmarks" >> $GITHUB_ENV
-        else
-          echo "Bench scripts are absent...?"
-          exit 1
-        fi
-
-    - name: Create virtual environment
-      run: python -m venv .venv
-
-    - name: Activate virtual environment and install pip packages
-      run: |
-        source .venv/bin/activate
-        pip install -r ${BENCH_SCRIPTS_DIR}/requirements.txt
-
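
Each `run:` step starts a fresh shell, so activating the venv here does not persist; that is why the "Run benchmarks" step further down re-sources `.venv/bin/activate`. An alternative sketch that makes the venv the default for all later steps by extending the runner's PATH (assumes the same `.venv` location):

    - name: Create venv and expose it to later steps (sketch)
      run: |
        python -m venv .venv
        ./.venv/bin/pip install -r "${BENCH_SCRIPTS_DIR}/requirements.txt"
        # Prepend the venv's bin directory to PATH for every subsequent step.
        echo "$PWD/.venv/bin" >> "$GITHUB_PATH"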
|
-    - name: Configure SYCL
-      run: >
-        python3 sycl-repo/buildbot/configure.py
-        -t ${{matrix.build_type}}
-        -o ${{github.workspace}}/sycl_build
-        --cmake-gen "Ninja"
-        --cmake-opt="-DLLVM_INSTALL_UTILS=ON"
-        --cmake-opt="-DSYCL_PI_TESTS=OFF"
-        --cmake-opt=-DCMAKE_C_COMPILER_LAUNCHER=ccache
-        --cmake-opt=-DCMAKE_CXX_COMPILER_LAUNCHER=ccache
-        ${{matrix.adapter.sycl_config}}
-
-    - name: Build SYCL
-      run: cmake --build ${{github.workspace}}/sycl_build -j $(nproc)
-
-    # We need a complete installed UR for compute-benchmarks.
-    - name: Configure UR
-      run: >
-        cmake -DCMAKE_BUILD_TYPE=${{matrix.build_type}}
-        -S${{github.workspace}}/sycl-repo/unified-runtime
-        -B${{github.workspace}}/ur_build
-        -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/ur_install
-        -DUR_BUILD_TESTS=OFF
-        -DUR_BUILD_ADAPTER_L0=ON
-        -DUR_BUILD_ADAPTER_L0_V2=ON
-        -DUMF_DISABLE_HWLOC=ON
-
-    - name: Build UR
-      run: cmake --build ${{github.workspace}}/ur_build -j $(nproc)
-
-    - name: Install UR
-      run: cmake --install ${{github.workspace}}/ur_build
-
-    - name: Compute core range
-      run: |
-        # Compute the core range for the first NUMA node; second node is for UMF jobs.
-        # Skip the first 4 cores - the kernel is likely to schedule more work on these.
-        CORES="$(lscpu | awk '
-          /NUMA node0 CPU|On-line CPU/ {line=$0}
-          END {
-            split(line, a, " ")
-            split(a[4], b, ",")
-            sub(/^0/, "4", b[1])
-            print b[1]
-          }')"
-        echo "Selected core: $CORES"
-        echo "CORES=$CORES" >> $GITHUB_ENV
-
-        ZE_AFFINITY_MASK=0
-        echo "ZE_AFFINITY_MASK=$ZE_AFFINITY_MASK" >> $GITHUB_ENV
-
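
The awk in "Compute core range" is terse: it keeps the last `lscpu` line matching "NUMA node0 CPU" or "On-line CPU", takes its fourth whitespace-separated field, keeps only the first comma-separated range, and rewrites a leading 0 to 4 so the first four cores stay free for the kernel. An equivalent computation in plain shell for an assumed `lscpu` line (real PVC_PERF runners will report different ranges):

    - name: Core range arithmetic (illustrative sketch)
      run: |
        line="NUMA node0 CPU(s):   0-55,112-167"   # assumed example output
        field="${line##* }"    # -> "0-55,112-167" (last whitespace-separated field)
        first="${field%%,*}"   # -> "0-55"         (first range on NUMA node 0)
        echo "CORES=4-${first#0-}"   # -> "CORES=4-55", skipping cores 0-3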
|
-    - name: Run benchmarks
-      working-directory: ${{ github.workspace }}
-      id: benchmarks
-      run: >
-        source .venv/bin/activate &&
-        taskset -c "${{ env.CORES }}" ${BENCH_SCRIPTS_DIR}/main.py
-        ~/llvm_bench_workdir
-        --sycl ${{ github.workspace }}/sycl_build
-        --ur ${{ github.workspace }}/ur_install
-        --adapter ${{ matrix.adapter.str_name }}
-        --compare baseline
-        --compute-runtime ${{ inputs.compute_runtime_commit }}
-        --build-igc
-        ${{ inputs.upload_report && '--output-html' || '' }}
-        ${{ inputs.pr_no != 0 && '--output-markdown' || '' }}
-        ${{ inputs.bench_script_params }}
-
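
The trailing `${{ ... && '--flag' || '' }}` expressions append flags conditionally: `--output-html` only when `upload_report` is true, `--output-markdown` only when a PR number is set. With hypothetical values (upload_report: true, pr_no: '12345', CORES=4-55, adapter level_zero, empty extra params), the step boils down to roughly:

    source .venv/bin/activate &&
    taskset -c "4-55" ${BENCH_SCRIPTS_DIR}/main.py ~/llvm_bench_workdir \
      --sycl "$GITHUB_WORKSPACE/sycl_build" \
      --ur "$GITHUB_WORKSPACE/ur_install" \
      --adapter level_zero \
      --compare baseline \
      --compute-runtime <some-commit-sha> --build-igc \
      --output-html --output-markdown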
|
-    - name: Print benchmark results
-      run: |
-        cat ${{ github.workspace }}/benchmark_results.md || true
-
-    - name: Add comment to PR
-      uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-      if: ${{ always() && inputs.pr_no != 0 }}
-      with:
-        script: |
-          let markdown = ""
-          try {
-            const fs = require('fs');
-            markdown = fs.readFileSync('benchmark_results.md', 'utf8');
-          } catch(err) {
-          }
-
-          const pr_no = '${{ inputs.pr_no }}';
-          const adapter = '${{ matrix.adapter.str_name }}';
-          const url = '${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}';
-          const test_status = '${{ steps.benchmarks.outcome }}';
-          const job_status = '${{ job.status }}';
-          const params = '${{ inputs.bench_script_params }}';
-          const body = `Benchmarks ${adapter} run (${params}):\n${url}\nJob status: ${job_status}. Test status: ${test_status}.\n ${markdown}`;
-
-          github.rest.issues.createComment({
-            issue_number: pr_no,
-            owner: context.repo.owner,
-            repo: context.repo.repo,
-            body: body
-          })
-
-    - name: Rename benchmark results file
-      if: ${{ always() && inputs.upload_report }}
-      run: mv benchmark_results.html benchmark_results_${{ inputs.pr_no }}.html
-
-    - name: Upload HTML report
-      if: ${{ always() && inputs.upload_report }}
-      uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
-      with:
-        path: benchmark_results_${{ inputs.pr_no }}.html
-        key: benchmark-results-${{ inputs.pr_no }}-${{ matrix.adapter.str_name }}-${{ github.run_id }}
-
-    # TODO: As long as we didn't merge this workflow into main, we should allow both scripts location
-    - name: Get information about platform
-      if: ${{ always() }}
-      run: |
-        ${{github.workspace}}/sycl-repo/devops/scripts/get_system_info.sh || true
-        ${{github.workspace}}/sycl-repo/unified-runtime/.github/scripts/get_system_info.sh || true
+    - run: echo 'This workflow is a WIP.'
+
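
One more note on the removed upload path: the "Upload HTML report" step stashed the report in the Actions cache (actions/cache/save) rather than as an artifact, keyed by PR number, adapter, and run id. Whatever consumes it has to restore against that key; a hypothetical consumer step, with made-up values and an unpinned action version:

    - name: Restore benchmark HTML report (hypothetical consumer)
      uses: actions/cache/restore@v4
      with:
        path: benchmark_results_12345.html
        key: benchmark-results-12345-level_zero-0   # exact run id is usually unknown here
        restore-keys: |
          benchmark-results-12345-level_zero-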