---
# GitHub Actions workflow: validation-bare-metal
# (removed non-YAML page chrome accidentally captured with the file)
name: validation-bare-metal

on:
  # Allow manual trigger only; all inputs below are supplied from the
  # workflow_dispatch UI/API.
  workflow_dispatch:
    inputs:
      branch-to-checkout:
        type: string
        default: 'main'
        required: false
        description: 'Branch name to use'
      validation-no-fail-tests:
        type: choice
        required: false
        # Explicit default matches GitHub's implicit first-option default.
        default: 'false'
        description: 'Run all tests, none will fail'
        options:
          - "false"
          - "true"
      validation-tests-1:
        type: string
        default: 'tests/single/video/pacing'
        required: true
        description: '1st validation tests to run'
      validation-tests-2:
        type: string
        default: ''
        required: false
        description: '2nd validation tests to run'
      validation-tests-3:
        type: string
        default: ''
        required: false
        description: '3rd validation tests to run'
      validation-tests-4:
        type: string
        default: ''
        required: false
        description: '4th validation tests to run'
      validation-tests-5:
        type: string
        default: ''
        required: false
        description: '5th validation tests to run'
      validation-pre-release-1:
        description: 'Select from pre-release group tests nr-1'
        required: false
        type: choice
        # Explicit default matches GitHub's implicit first-option default.
        default: 'NONE'
        options:
          - NONE
          - ancillary
          - kernel-socket
          - rss-mode
          - st20p
          - st30p
          - st41
          - udp
          - video
          - xdp
      validation-pre-release-2:
        description: 'Select from pre-release group tests nr-2'
        required: false
        type: choice
        default: 'NONE'
        options:
          - NONE
          - ffmpeg-plugin
          - fuzzy-tests
          - performance
          - ptp
          - rx-timing
          - vero
          - virtio-enable
          - wrong-parameter
      validation-pre-release-3:
        description: 'Select from pre-release group tests nr-3'
        required: false
        type: choice
        default: 'NONE'
        options:
          - NONE
          - gpu-direct
          - gpu-enabling
env:
  # Customize the build configuration here.
  BUILD_TYPE: 'Release'
  DPDK_VERSION: '23.11'
  # Below example set of ENV variables is defined on the self-hosted runner side:
  # TEST_PF_PORT_P=0000:49:00.0
  # TEST_PF_PORT_R=0000:49:00.1
  # TEST_PORT_P=0000:49:01.2
  # TEST_PORT_R=0000:49:01.3
  # TEST_DMA_PORT_P=0000:80:04.0
  # TEST_DMA_PORT_R=0000:80:04.1
  # TEST_VF_PORT_P_0=0000:49:01.0
  # TEST_VF_PORT_P_1=0000:49:01.1
  # TEST_VF_PORT_P_2=0000:49:01.2
  # TEST_VF_PORT_P_3=0000:49:01.3
  # TEST_VF_PORT_R_0=0000:49:11.0
  # TEST_VF_PORT_R_1=0000:49:11.1
  # TEST_VF_PORT_R_2=0000:49:11.2
  # TEST_VF_PORT_R_3=0000:49:11.3

permissions:
  contents: read
jobs:
  validation-build-mtl:
    runs-on: [Linux, self-hosted, DPDK]
    timeout-minutes: 60
    outputs:
      # Path to the pipenv virtualenv "activate" script, exported by the
      # pipenv-install step below.
      pipenv-activate: ${{ steps.pipenv-install.outputs.VIRTUAL_ENV }}
    steps:
      - name: 'preparation: Harden Runner'
        uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
        with:
          egress-policy: audit

      - name: 'preparation: Restore valid owner to repository and directories'
        if: always()
        run: |
          sudo chown -R "${USER}" "$(pwd)"

      - name: 'preparation: Checkout MTL'
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          ref: '${{ inputs.branch-to-checkout }}'

      - name: 'preparation: Checkout DPDK'
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          repository: 'DPDK/dpdk'
          ref: 'v${{ env.DPDK_VERSION }}'
          path: 'dpdk'

      - name: 'configuration: Install the build dependency'
        run: |
          sudo apt update
          sudo apt-get remove -y pipenv || true
          sudo apt-get install -y \
            git gcc meson \
            pkg-config \
            python3 \
            python3-pyelftools \
            python3-virtualenv \
            python3-pip \
            libnuma-dev \
            libjson-c-dev \
            libpcap-dev \
            libgtest-dev \
            libsdl2-dev \
            libsdl2-ttf-dev \
            libssl-dev \
            systemtap-sdt-dev

      - name: 'configuration: Apply dpdk patches'
        run: |
          patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch)

      - name: 'installation: Build dpdk'
        working-directory: dpdk
        run: |
          meson build
          ninja -C build
          sudo ninja -C build install

      - name: 'installation: Build mtl'
        run: |
          ./build.sh
          sudo ldconfig

      - name: 'installation: Install pipenv environment'
        working-directory: tests/validation
        id: pipenv-install
        run: |
          python3 -m pip install pipenv
          python3 -m pipenv install -r requirements.txt
          # Step outputs must be written to GITHUB_OUTPUT; GITHUB_ENV only
          # exports env variables to later steps of the *same* job, so the
          # declared job output would otherwise stay empty.
          echo "VIRTUAL_ENV=$(python3 -m pipenv --venv)/bin/activate" >> "$GITHUB_OUTPUT"
validation-run-tests:
needs: [validation-build-mtl]
runs-on: [Linux, self-hosted, DPDK]
env:
PYTEST_ALIAS: 'sudo --preserve-env python3 -m pipenv run pytest'
PYTEST_PARAMS: '--nic="${TEST_VF_PORT_P_0},${TEST_VF_PORT_P_1}" --media=/mnt/media --build="../.."'
PYTEST_RETRIES: '3'
steps:
- name: 'preparation: Harden Runner'
uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
with:
egress-policy: audit
- name: 'preparation: Kill MtlManager and pytest routines'
run: |
sudo killall -SIGINT pipenv || true
sudo killall -SIGINT pytest || true
sudo killall -SIGINT MtlManager || true
- name: 'preparation: Binding network adapter'
run: |
sudo rmmod irdma || true
sudo ./script/nicctl.sh create_tvf "${TEST_PF_PORT_P}" || true
sudo ./script/nicctl.sh create_tvf "${TEST_PF_PORT_R}" || true
sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true
sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true
- name: 'preparation: Start MtlManager at background'
run: |
sudo MtlManager &
- name: 'execution: Run validation-tests-1 in pipenv environment'
if: inputs.validation-tests-1 != ''
working-directory: tests/validation
run: |
set +e
TESTS_TO_EXECUTE=( $(grep -v "collected in" <(${{ env.PYTEST_ALIAS }} "${{ inputs.validation-tests-1 }}" ${{ env.PYTEST_PARAMS }} --collect-only -q --no-summary 2>&1)) )
NUMBER_OF_TESTS="${#TESTS_TO_EXECUTE[@]}"
TESTS_FAIL=()
TESTS_SUCCESS=()
echo "## Starting tests (${NUMBER_OF_TESTS}) :rocket:" >> $GITHUB_STEP_SUMMARY
echo "| ? | Collected Test | Started | Ended | Took (s) | Result |" >> $GITHUB_STEP_SUMMARY
echo "| --- | ---| --- | --- | --- | --- |" >> $GITHUB_STEP_SUMMARY
for test in ${TESTS_TO_EXECUTE[@]}; do
echo "::group::${test}"
PYTEST_START_HI="$(date '+%s')"
${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --collect-only -q --no-summary
for retry in $(seq 1 "${{ env.PYTEST_RETRIES }}"); do
${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }}
PYTEST_RESULT="$?"
echo "RETRY: ${retry}"
[[ "${PYTEST_RESULT}" == "0" ]] && break
done
PYTEST_END_HI="$(date '+%s')"
if [[ "${PYTEST_RESULT}" == "0" ]]; then
PREFIX="✅"
SUFFIX="Success [OK]"
TESTS_SUCCESS+=( "${test}" )
else
PREFIX="❌"
SUFFIX="Failed [Err]"
TESTS_FAIL+=( "${test}" )
fi
echo -n "| ${PREFIX} | ${test} | $(date --date=@${PYTEST_START_HI} '+%d%m%y_%H%M%S') | $(date --date=@${PYTEST_END_HI} '+%d%m%y_%H%M%S') | $((PYTEST_END_HI-PYTEST_START_HI))s | ${SUFFIX} |" >> $GITHUB_STEP_SUMMARY
echo "::endgroup::"
done
echo "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:" >> $GITHUB_STEP_SUMMARY
echo "${TESTS_SUCCESS[@]}" >> $GITHUB_STEP_SUMMARY
echo "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:" >> $GITHUB_STEP_SUMMARY
echo "${TESTS_FAIL[@]}" >> $GITHUB_STEP_SUMMARY
if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${{ inputs.validation-no-fail-tests }}" == "true" ]]; then
exit 0
else
exit 1
fi
- name: 'execution: Run validation-tests-2 in pipenv environment'
if: inputs.validation-tests-2 != ''
working-directory: tests/validation
run: |
${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-2 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
- name: 'execution: Run validation-tests-3 in pipenv environment'
if: inputs.validation-tests-3 != ''
working-directory: tests/validation
run: |
${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-3 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
- name: 'execution: Run validation-tests-4 in pipenv environment'
if: inputs.validation-tests-4 != ''
working-directory: tests/validation
run: |
${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-4 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
- name: 'execution: Run validation-tests-5 in pipenv environment'
if: inputs.validation-tests-5 != ''
working-directory: tests/validation
run: |
${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-5 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
- name: 'execution: Run validation-pre-release-1 in pipenv environment'
if: inputs.validation-pre-release-1 != 'NONE'
working-directory: tests/validation
run: |
echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-1 }} ==" || ${{ inputs.validation-no-fail-tests }}
- name: 'execution: Run validation-pre-release-2 in pipenv environment'
if: inputs.validation-pre-release-2 != 'NONE'
working-directory: tests/validation
run: |
echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-2 }} ==" || ${{ inputs.validation-no-fail-tests }}
- name: 'execution: Run validation-pre-release-3 in pipenv environment'
if: inputs.validation-pre-release-3 != 'NONE'
working-directory: tests/validation
run: |
echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-3 }} ==" || ${{ inputs.validation-no-fail-tests }}
- name: 'cleanup: Kill MtlManager and pytest routines'
if: always()
run: |
sudo killall -SIGINT pipenv || true
sudo killall -SIGINT pytest || true
sudo killall -SIGINT MtlManager || true
- name: 'cleanup: Restore valid owner to repository and directories'
if: always()
run: |
sudo chown -R "${USER}" "$(pwd)"
- name: 'cleanup: Generate runner summary'
if: always()
run: |
echo "## Runner ${{ runner.name }}" >> $GITHUB_STEP_SUMMARY
echo "Bellow ENV variables are defined on the ${{ runner.name }} self-hosted runner side" >> $GITHUB_STEP_SUMMARY
echo "| Variable | Value |" >> $GITHUB_STEP_SUMMARY
echo "| --- | --- |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_PF_PORT_P | TEST_PF_PORT_P |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_PF_PORT_R | ${TEST_PF_PORT_R} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_PORT_P | ${TEST_PORT_P} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_PORT_R | ${TEST_PORT_R} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_DMA_PORT_P | ${TEST_DMA_PORT_P} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_DMA_PORT_R | ${TEST_DMA_PORT_R} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_VF_PORT_P_0 | ${TEST_VF_PORT_P_0} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_VF_PORT_P_1 | ${TEST_VF_PORT_P_1} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_VF_PORT_P_2 | ${TEST_VF_PORT_P_2} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_VF_PORT_P_3 | ${TEST_VF_PORT_P_3} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_VF_PORT_R_0 | ${TEST_VF_PORT_R_0} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_VF_PORT_R_1 | ${TEST_VF_PORT_R_1} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_VF_PORT_R_2 | ${TEST_VF_PORT_R_2} |" >> $GITHUB_STEP_SUMMARY
echo "| TEST_VF_PORT_R_3 | ${TEST_VF_PORT_R_3} |" >> $GITHUB_STEP_SUMMARY