Skip to content

[CI] Add ability to rename benchmarking CI run results #19734

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 13 commits into
base: sycl
Choose a base branch
from
89 changes: 75 additions & 14 deletions .github/workflows/sycl-ur-perf-benchmarking.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,18 @@ on:
deployment branch will be used.
required: false
default: ''
save_name:
type: string
description: |
          Specify a custom name to use for the benchmark result. If uploading
          results, this will be the name used to refer to results from the
          current run.
required: false
default: ''
upload_results:
type: string # true/false: workflow_dispatch does not support booleans
description: |
Upload results to https://intel.github.io/llvm/benchmarks/.
required: true
runner:
type: string
Expand Down Expand Up @@ -67,8 +77,14 @@ on:
Leave both pr_no and commit_hash empty to use latest commit.
required: false
default: ''
save_name:
type: string
description: |
          Name to use for the benchmark result.
required: false
default: ''
upload_results:
description: 'Save and upload results'
description: 'Save and upload results (to https://intel.github.io/llvm/benchmarks)'
type: choice
options:
- false
Expand All @@ -90,16 +106,63 @@ on:
permissions: read-all

jobs:
sanitize_inputs:
name: Sanitize inputs
runs-on: ubuntu-latest
env:
COMMIT_HASH: ${{ inputs.commit_hash }}
PR_NO: ${{ inputs.pr_no }}
SAVE_NAME: ${{ inputs.save_name }}
outputs:
benchmark_save_name: ${{ steps.sanitize.outputs.benchmark_save_name }}
build_ref: ${{ steps.sanitize.outputs.build_ref }}
steps:
- id: sanitize
run: |
# Validate user inputs:
check_nonempty() {
# usage: check_if_nonempty <var> <regex to check var against> <err message>
[ -z "$1" ] && return
if [ -z "$(echo "$1" | grep -P "$2")" ]; then
echo "$3"
exit 1
fi
}
check_nonempty "$COMMIT_HASH" '^[0-9a-f]{7,}$' "Bad commit hash (or hash short)."
check_nonempty "$PR_NO" '^[0-9]+$' "Bad PR number."
check_nonempty "$SAVE_NAME" '^[A-Za-z][A-Za-z0-9_-]+$' "Bad save name."

BENCHMARK_SAVE_NAME=""
BUILD_REF="${{ github.ref }}"
if [ -n "$SAVE_NAME" ]; then
BENCHMARK_SAVE_NAME="$(echo "$SAVE_NAME" | tr -cd 'A-Za-z0-9_-')"
fi;
if [ -n "$COMMIT_HASH" ]; then
commit_cleaned="$(echo "$COMMIT_HASH" | tr -cd 'a-z0-9')"
echo "Using commit hash $commit_cleaned for build..."
BUILD_REF="$commit_cleaned"
shortened_commit="$(echo "$commit_cleaned" | cut -c 1-7)"
[ -z "$BENCHMARK_SAVE_NAME" ] && BENCHMARK_SAVE_NAME="Commit_$shortened_commit"
elif [ -n "$PR_NO" ]; then
pr_no_cleaned="$(echo "$PR_NO" | tr -cd '0-9')"
echo "Using PR no. $pr_no_cleaned for build..."
BUILD_REF="refs/pull/$pr_no_cleaned/head"
[ -z "$BENCHMARK_SAVE_NAME" ] && BENCHMARK_SAVE_NAME="PR_$pr_no_cleaned"
fi
[ -z "$BENCHMARK_SAVE_NAME" ] && BENCHMARK_SAVE_NAME="Baseline"

echo "benchmark_save_name=$BENCHMARK_SAVE_NAME" >> $GITHUB_OUTPUT
echo "build_ref=$BUILD_REF" >> $GITHUB_OUTPUT

echo "Final sanitized values:"
cat $GITHUB_OUTPUT

build_sycl:
name: Build SYCL
needs: [ sanitize_inputs ]
uses: ./.github/workflows/sycl-linux-build.yml
with:
build_ref: |
${{
inputs.commit_hash != '' && inputs.commit_hash ||
inputs.pr_no != '' && format('refs/pull/{0}/head', inputs.pr_no) ||
github.ref
}}
build_ref: ${{ needs.sanitize_inputs.outputs.build_ref }}
build_cache_root: "/__w/"
build_cache_suffix: "prod_noassert"
build_configure_extra_args: "--no-assertions"
Expand All @@ -112,14 +175,12 @@ jobs:

run_benchmarks_build:
name: Run Benchmarks on Build
needs: [ build_sycl ]
needs: [ build_sycl, sanitize_inputs ]
strategy:
matrix:
include:
- ref: ${{ inputs.commit_hash != '' && inputs.commit_hash || inputs.pr_no != '' && format('refs/pull/{0}/head', inputs.pr_no) || github.ref }}
save_name: ${{ inputs.commit_hash != '' && format('Commit{0}', inputs.commit_hash) || inputs.pr_no != '' && format('PR{0}', inputs.pr_no) || 'Baseline' }}
# Set default values if not specified:
runner: ${{ inputs.runner || '["PVC_PERF"]' }}
# Set default values if not specified:
- runner: ${{ inputs.runner || '["PVC_PERF"]' }}
backend: ${{ inputs.backend || 'level_zero:gpu' }}
uses: ./.github/workflows/sycl-linux-run-tests.yml
secrets: inherit
Expand All @@ -131,9 +192,9 @@ jobs:
target_devices: ${{ matrix.backend }}
tests_selector: benchmarks
benchmark_upload_results: ${{ inputs.upload_results }}
benchmark_save_name: ${{ matrix.save_name }}
benchmark_save_name: ${{ needs.sanitize_inputs.outputs.benchmark_save_name }}
benchmark_preset: ${{ inputs.preset }}
repo_ref: ${{ matrix.ref }}
repo_ref: ${{ needs.sanitize_inputs.outputs.build_ref }}
toolchain_artifact: ${{ needs.build_sycl.outputs.toolchain_artifact }}
toolchain_artifact_filename: ${{ needs.build_sycl.outputs.toolchain_artifact_filename }}
toolchain_decompress_command: ${{ needs.build_sycl.outputs.toolchain_decompress_command }}