Commit 4e3673d

Fix: Update validation-tests.yml pipenv install and run issues
Add: Multiple test case run inputs
Add: Dropdowns for pre-release use cases
Add: Fixtures.py time.sleep between runs

Signed-off-by: Milosz Linkiewicz <[email protected]>
1 parent 31c21a1 commit 4e3673d
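
With this change the workflow takes several independent test-path inputs plus pre-release group dropdowns, so a run can be dispatched without editing the file. A minimal dispatch sketch using the GitHub CLI, assuming gh is authenticated against this repository, workflow_dispatch is enabled on the target branch, and the file keeps the validation-tests.yml name (the input values are only the defaults and option names visible in the diff below):

# Hedged example, not part of the commit: manual dispatch with the new inputs.
gh workflow run validation-tests.yml \
  --ref main \
  -f branch-to-checkout=main \
  -f validation-no-fail-tests=true \
  -f validation-tests-1='tests/single/video/pacing' \
  -f validation-pre-release-1=st20p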

File tree

3 files changed: +316 -36 lines changed

.github/workflows/validation-tests.yml

Lines changed: 249 additions & 31 deletions
@@ -9,23 +9,95 @@ on:
         default: 'main'
         required: false
         description: 'Branch name to use'
-      validation-tests:
+      validation-no-fail-tests:
+        type: choice
+        required: false
+        description: 'Run all tests, none will fail'
+        options:
+          - "false"
+          - "true"
+      validation-tests-1:
         type: string
         default: 'tests/single/video/pacing'
         required: true
-        description: 'Validation tests to run'
+        description: '1st validation tests to run'
+      validation-tests-2:
+        type: string
+        default: ''
+        required: false
+        description: '2nd validation tests to run'
+      validation-tests-3:
+        type: string
+        default: ''
+        required: false
+        description: '3rd validation tests to run'
+      validation-tests-4:
+        type: string
+        default: ''
+        required: false
+        description: '4th validation tests to run'
+      validation-tests-5:
+        type: string
+        default: ''
+        required: false
+        description: '5th validation tests to run'
+      validation-pre-release-1:
+        description: 'Select from pre-release group tests nr-1'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - ancillary
+          - kernel-socket
+          - rss-mode
+          - st20p
+          - st30p
+          - st41
+          - udp
+          - video
+          - xdp
+      validation-pre-release-2:
+        description: 'Select from pre-release group tests nr-2'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - ffmpeg-plugin
+          - fuzzy-tests
+          - performance
+          - ptp
+          - rx-timing
+          - vero
+          - virtio-enable
+          - wrong-parameter
+      validation-pre-release-3:
+        description: 'Select from pre-release group tests nr-3'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - gpu-direct
+          - gpu-enabling

 env:
   # Customize the env if
   BUILD_TYPE: 'Release'
   DPDK_VERSION: '23.11'
-  # Bellow ENV variables are required to be defined on runner side:
-  # TEST_PF_PORT_P: '0000:49:00.0'
-  # TEST_PF_PORT_R: '0000:49:00.1'
-  # TEST_PORT_P: '0000:49:01.2'
-  # TEST_PORT_R: '0000:49:01.3'
-  # TEST_DMA_PORT_P: '0000:6a:01.0'
-  # TEST_DMA_PORT_R: '0000:6f:01.0'
+  # Below ENV variables example set is defined on the self-hosted runner side:
+  # TEST_PF_PORT_P=0000:49:00.0
+  # TEST_PF_PORT_R=0000:49:00.1
+  # TEST_PORT_P=0000:49:01.2
+  # TEST_PORT_R=0000:49:01.3
+  # TEST_DMA_PORT_P=0000:80:04.0
+  # TEST_DMA_PORT_R=0000:80:04.1
+  # TEST_VF_PORT_P_0=0000:49:01.0
+  # TEST_VF_PORT_P_1=0000:49:01.1
+  # TEST_VF_PORT_P_2=0000:49:01.2
+  # TEST_VF_PORT_P_3=0000:49:01.3
+  # TEST_VF_PORT_R_0=0000:49:11.0
+  # TEST_VF_PORT_R_1=0000:49:11.1
+  # TEST_VF_PORT_R_2=0000:49:11.2
+  # TEST_VF_PORT_R_3=0000:49:11.3

 permissions:
   contents: read
@@ -34,78 +106,224 @@ jobs:
   validation-build-mtl:
     runs-on: [Linux, self-hosted, DPDK]
     timeout-minutes: 60
-
+    outputs:
+      pipenv-activate: ${{ steps.pipenv-install.outputs.VIRTUAL_ENV }}
     steps:
-      - name: Harden Runner
+      - name: 'preparation: Harden Runner'
         uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
         with:
           egress-policy: audit

-      - name: Checkout MTL
+      - name: 'preparation: Restore valid owner to repository and directories'
+        if: always()
+        run: |
+          sudo chown -R "${USER}" "$(pwd)"
+
+      - name: 'preparation: Checkout MTL'
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           ref: '${{ inputs.branch-to-checkout }}'

-      - name: Checkout DPDK
+      - name: 'preparation: Checkout DPDK'
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           repository: 'DPDK/dpdk'
           ref: 'v${{ env.DPDK_VERSION }}'
           path: 'dpdk'

-      - name: Install the build dependency
+      - name: 'configuration: Install the build dependency'
         run: |
-          sudo apt-get install -y git gcc meson python3 python3-pyelftools pkg-config libnuma-dev libjson-c-dev libpcap-dev libgtest-dev libsdl2-dev libsdl2-ttf-dev libssl-dev
-          sudo apt-get install -y systemtap-sdt-dev pipenv
+          sudo apt update
+          sudo apt-get remove -y pipenv || true
+          sudo apt-get install -y \
+            git gcc meson \
+            pkg-config \
+            python3 \
+            python3-pyelftools \
+            python3-virtualenv \
+            python3-pip \
+            libnuma-dev \
+            libjson-c-dev \
+            libpcap-dev \
+            libgtest-dev \
+            libsdl2-dev \
+            libsdl2-ttf-dev \
+            libssl-dev \
+            systemtap-sdt-dev

-      - name: Apply dpdk patches
+      - name: 'configuration: Apply dpdk patches'
         run: |
           patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch)

-      - name: Build dpdk
+      - name: 'installation: Build dpdk'
+        working-directory: dpdk
         run: |
-          cd dpdk
           meson build
           ninja -C build
-          cd build
-          sudo ninja install
+          sudo ninja -C build install

-      - name: Build
+      - name: 'installation: Build mtl'
         run: |
           ./build.sh
           sudo ldconfig

-      - name: Prepare pipenv environment
+      - name: 'installation: Install pipenv environment'
         working-directory: tests/validation
+        id: pipenv-install
         run: |
-          pipenv install -r requirements.txt
+          python3 -m pip install pipenv
+          python3 -m pipenv install -r requirements.txt
+          echo "VIRTUAL_ENV=$(python3 -m pipenv --venv)/bin/activate" >> "$GITHUB_OUTPUT"

   validation-run-tests:
     needs: [validation-build-mtl]
     runs-on: [Linux, self-hosted, DPDK]
+    env:
+      PYTEST_ALIAS: 'sudo --preserve-env python3 -m pipenv run pytest'
+      PYTEST_PARAMS: '--nic="${TEST_VF_PORT_P_0},${TEST_VF_PORT_P_1}" --media=/mnt/media --build="../.."'
+      PYTEST_RETRIES: '3'
     steps:
-      - name: Harden Runner
+      - name: 'preparation: Harden Runner'
         uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
         with:
           egress-policy: audit

-      - name: Kill previous pytest routine
+      - name: 'preparation: Kill MtlManager and pytest routines'
         run: |
           sudo killall -SIGINT pipenv || true
           sudo killall -SIGINT pytest || true
           sudo killall -SIGINT MtlManager || true

-      - name: Binding network adapter
+      - name: 'preparation: Binding network adapter'
         run: |
-          sudo ./script/nicctl.sh create_vf "${TEST_PF_PORT_P}" || true
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh create_tvf "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh create_tvf "${TEST_PF_PORT_R}" || true
           sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true
           sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true

-      - name: Start MtlManager at background
+      - name: 'preparation: Start MtlManager at background'
         run: |
           sudo MtlManager &

-      - name: Run tests in pipenv environment
+      - name: 'execution: Run validation-tests-1 in pipenv environment'
+        if: inputs.validation-tests-1 != ''
+        working-directory: tests/validation
+        run: |
+          set +e
+          TESTS_TO_EXECUTE=( $(grep -v "collected in" <(${{ env.PYTEST_ALIAS }} "${{ inputs.validation-tests-1 }}" ${{ env.PYTEST_PARAMS }} --collect-only -q --no-summary 2>&1)) )
+          NUMBER_OF_TESTS="${#TESTS_TO_EXECUTE[@]}"
+          TESTS_FAIL=()
+          TESTS_SUCCESS=()
+          echo "## Starting tests (${NUMBER_OF_TESTS}) :rocket:" >> $GITHUB_STEP_SUMMARY
+          echo "| ? | Collected Test | Started | Ended | Took (s) | Result |" >> $GITHUB_STEP_SUMMARY
+          echo "| --- | ---| --- | --- | --- | --- |" >> $GITHUB_STEP_SUMMARY
+
+          for test in ${TESTS_TO_EXECUTE[@]}; do
+            echo "::group::${test}"
+            PYTEST_START_HI="$(date '+%s')"
+            ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --collect-only -q --no-summary
+            for retry in $(seq 1 "${{ env.PYTEST_RETRIES }}"); do
+              ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }}
+              PYTEST_RESULT="$?"
+              echo "RETRY: ${retry}"
+              [[ "${PYTEST_RESULT}" == "0" ]] && break
+            done
+            PYTEST_END_HI="$(date '+%s')"
+            if [[ "${PYTEST_RESULT}" == "0" ]]; then
+              PREFIX="✅"
+              SUFFIX="Success [OK]"
+              TESTS_SUCCESS+=( "${test}" )
+            else
+              PREFIX="❌"
+              SUFFIX="Failed [Err]"
+              TESTS_FAIL+=( "${test}" )
+            fi
+            echo "| ${PREFIX} | ${test} | $(date --date=@${PYTEST_START_HI} '+%d%m%y_%H%M%S') | $(date --date=@${PYTEST_END_HI} '+%d%m%y_%H%M%S') | $((PYTEST_END_HI-PYTEST_START_HI))s | ${SUFFIX} |" >> $GITHUB_STEP_SUMMARY
+            echo "::endgroup::"
+          done
+          echo "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:" >> $GITHUB_STEP_SUMMARY
+          echo "${TESTS_SUCCESS[@]}" >> $GITHUB_STEP_SUMMARY
+          echo "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:" >> $GITHUB_STEP_SUMMARY
+          echo "${TESTS_FAIL[@]}" >> $GITHUB_STEP_SUMMARY
+          if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${{ inputs.validation-no-fail-tests }}" == "true" ]]; then
+            exit 0
+          else
+            exit 1
+          fi
+
+      - name: 'execution: Run validation-tests-2 in pipenv environment'
+        if: inputs.validation-tests-2 != ''
+        working-directory: tests/validation
+        run: |
+          ${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-2 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-tests-3 in pipenv environment'
+        if: inputs.validation-tests-3 != ''
+        working-directory: tests/validation
+        run: |
+          ${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-3 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-tests-4 in pipenv environment'
+        if: inputs.validation-tests-4 != ''
+        working-directory: tests/validation
+        run: |
+          ${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-4 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-tests-5 in pipenv environment'
+        if: inputs.validation-tests-5 != ''
+        working-directory: tests/validation
+        run: |
+          ${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-5 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-1 in pipenv environment'
+        if: inputs.validation-pre-release-1 != 'NONE'
+        working-directory: tests/validation
+        run: |
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-1 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-2 in pipenv environment'
+        if: inputs.validation-pre-release-2 != 'NONE'
         working-directory: tests/validation
         run: |
-          sudo pipenv run pytest '${{ inputs.validation-tests }}' --nic="${TEST_PORT_P},${TEST_PORT_R}" --media=/mnt/media
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-2 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-3 in pipenv environment'
+        if: inputs.validation-pre-release-3 != 'NONE'
+        working-directory: tests/validation
+        run: |
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-3 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'cleanup: Kill MtlManager and pytest routines'
+        if: always()
+        run: |
+          sudo killall -SIGINT pipenv || true
+          sudo killall -SIGINT pytest || true
+          sudo killall -SIGINT MtlManager || true
+
+      - name: 'cleanup: Restore valid owner to repository and directories'
+        if: always()
+        run: |
+          sudo chown -R "${USER}" "$(pwd)"
+
+      - name: 'cleanup: Generate runner summary'
+        if: always()
+        run: |
+          echo "## Runner ${{ runner.name }}" >> $GITHUB_STEP_SUMMARY
+          echo "Below ENV variables are defined on the ${{ runner.name }} self-hosted runner side" >> $GITHUB_STEP_SUMMARY
+          echo "| Variable | Value |" >> $GITHUB_STEP_SUMMARY
+          echo "| --- | --- |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_PF_PORT_P | ${TEST_PF_PORT_P} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_PF_PORT_R | ${TEST_PF_PORT_R} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_PORT_P | ${TEST_PORT_P} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_PORT_R | ${TEST_PORT_R} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_DMA_PORT_P | ${TEST_DMA_PORT_P} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_DMA_PORT_R | ${TEST_DMA_PORT_R} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_P_0 | ${TEST_VF_PORT_P_0} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_P_1 | ${TEST_VF_PORT_P_1} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_P_2 | ${TEST_VF_PORT_P_2} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_P_3 | ${TEST_VF_PORT_P_3} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_R_0 | ${TEST_VF_PORT_R_0} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_R_1 | ${TEST_VF_PORT_R_1} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_R_2 | ${TEST_VF_PORT_R_2} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_R_3 | ${TEST_VF_PORT_R_3} |" >> $GITHUB_STEP_SUMMARY
