
Commit f4e0afc

Fix: Update validation-tests.yml pipenv install issues

Fix: Update validation-tests.yml pipenv install issues and run issues
Add: Multiple test cases run inputs
Add: Dropdown for pre-release use cases
Add: Fixtures.py time.sleep between runs

Signed-off-by: Milosz Linkiewicz <[email protected]>

1 parent 31c21a1 commit f4e0afc

File tree

3 files changed: +356 -39 lines changed

3 files changed

+356
-39
lines changed

.github/workflows/validation-tests.yml

Lines changed: 289 additions & 34 deletions
@@ -9,23 +9,113 @@ on:
         default: 'main'
         required: false
         description: 'Branch name to use'
-      validation-tests:
+      validation-iface-binding:
+        type: choice
+        required: false
+        description: 'Type of iface binding to use'
+        options:
+          - "vf_create_vf"
+          - "vf_create_kvf"
+          - "vf_create_tvf"
+          - "pf_bind_pmd"
+          - "pf_bind_kernel"
+      validation-test-port-p:
+        type: choice
+        required: false
+        description: 'Which to use as Test-Port-P'
+        options:
+          - TEST_VF_PORT_P_0
+          - TEST_VF_PORT_P_1
+          - TEST_VF_PORT_P_2
+          - TEST_VF_PORT_P_3
+          - TEST_PF_PORT_P
+          - TEST_VF_PORT_R_0
+          - TEST_VF_PORT_R_1
+          - TEST_VF_PORT_R_2
+          - TEST_VF_PORT_R_3
+          - TEST_PF_PORT_R
+          - TEST_DMA_PORT_P
+          - TEST_DMA_PORT_R
+      validation-test-port-r:
+        type: choice
+        required: false
+        description: 'Which to use as Test-Port-R'
+        options:
+          - TEST_VF_PORT_P_1
+          - TEST_VF_PORT_P_0
+          - TEST_VF_PORT_P_2
+          - TEST_VF_PORT_P_3
+          - TEST_PF_PORT_P
+          - TEST_VF_PORT_R_0
+          - TEST_VF_PORT_R_1
+          - TEST_VF_PORT_R_2
+          - TEST_VF_PORT_R_3
+          - TEST_PF_PORT_R
+          - TEST_DMA_PORT_P
+          - TEST_DMA_PORT_R
+      validation-no-fail-tests:
+        type: choice
+        required: false
+        description: 'Run all tests, none will fail'
+        options:
+          - "false"
+          - "true"
+      validation-tests-1:
         type: string
         default: 'tests/single/video/pacing'
         required: true
-        description: 'Validation tests to run'
+        description: '1st validation tests to run'
+      validation-tests-2:
+        type: string
+        default: ''
+        required: false
+        description: '2nd validation tests to run'
+      validation-tests-3:
+        type: string
+        default: ''
+        required: false
+        description: '3rd validation tests to run'
+      validation-pre-release-1:
+        description: 'Select from pre-release group tests nr-1'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - ancillary
+          - kernel-socket
+          - rss-mode
+          - st20p
+          - st30p
+          - st41
+          - udp
+          - video
+          - xdp
+      validation-pre-release-2:
+        description: 'Select from pre-release group tests nr-2'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - ffmpeg-plugin
+          - fuzzy-tests
+          - performance
+          - ptp
+          - rx-timing
+          - vero
+          - virtio-enable
+          - wrong-parameter
+      validation-pre-release-3:
+        description: 'Select from pre-release group tests nr-3'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - gpu-direct
+          - gpu-enabling

 env:
-  # Customize the env if
   BUILD_TYPE: 'Release'
   DPDK_VERSION: '23.11'
-  # Bellow ENV variables are required to be defined on runner side:
-  # TEST_PF_PORT_P: '0000:49:00.0'
-  # TEST_PF_PORT_R: '0000:49:00.1'
-  # TEST_PORT_P: '0000:49:01.2'
-  # TEST_PORT_R: '0000:49:01.3'
-  # TEST_DMA_PORT_P: '0000:6a:01.0'
-  # TEST_DMA_PORT_R: '0000:6f:01.0'

 permissions:
   contents: read
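With the new workflow_dispatch inputs in place, a run could be started from the CLI roughly like this. This is an illustrative sketch only, not part of the commit; the file name comes from this diff and the values are examples taken from the dropdown options above:

  gh workflow run validation-tests.yml \
    --ref main \
    -f branch-to-checkout=main \
    -f validation-iface-binding=vf_create_vf \
    -f validation-test-port-p=TEST_VF_PORT_P_0 \
    -f validation-test-port-r=TEST_VF_PORT_P_1 \
    -f validation-no-fail-tests=false \
    -f validation-tests-1=tests/single/video/pacing \
    -f validation-pre-release-1=NONE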
@@ -34,78 +124,243 @@ jobs:
   validation-build-mtl:
     runs-on: [Linux, self-hosted, DPDK]
     timeout-minutes: 60
-
+    outputs:
+      pipenv-activate: ${{ steps.pipenv-install.outputs.VIRTUAL_ENV }}
     steps:
-      - name: Harden Runner
+      - name: 'preparation: Harden Runner'
         uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
         with:
           egress-policy: audit

-      - name: Checkout MTL
+      - name: 'preparation: Restore valid owner to repository and directories'
+        if: always()
+        run: |
+          sudo chown -R "${USER}" "$(pwd)"
+
+      - name: 'preparation: Checkout MTL'
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           ref: '${{ inputs.branch-to-checkout }}'

-      - name: Checkout DPDK
+      - name: 'preparation: Checkout DPDK'
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           repository: 'DPDK/dpdk'
           ref: 'v${{ env.DPDK_VERSION }}'
           path: 'dpdk'

-      - name: Install the build dependency
+      - name: 'configuration: Install the build dependency'
         run: |
-          sudo apt-get install -y git gcc meson python3 python3-pyelftools pkg-config libnuma-dev libjson-c-dev libpcap-dev libgtest-dev libsdl2-dev libsdl2-ttf-dev libssl-dev
-          sudo apt-get install -y systemtap-sdt-dev pipenv
+          sudo apt update
+          sudo apt-get remove -y pipenv || true
+          sudo apt-get install -y \
+            git gcc meson \
+            pkg-config \
+            python3 \
+            python3-pyelftools \
+            python3-virtualenv \
+            python3-pip \
+            libnuma-dev \
+            libjson-c-dev \
+            libpcap-dev \
+            libgtest-dev \
+            libsdl2-dev \
+            libsdl2-ttf-dev \
+            libssl-dev \
+            systemtap-sdt-dev

-      - name: Apply dpdk patches
+      - name: 'configuration: Apply dpdk patches'
         run: |
           patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch)

-      - name: Build dpdk
+      - name: 'installation: Build dpdk'
+        working-directory: dpdk
         run: |
-          cd dpdk
           meson build
           ninja -C build
-          cd build
-          sudo ninja install
+          sudo ninja -C build install

-      - name: Build
+      - name: 'installation: Build mtl'
         run: |
           ./build.sh
           sudo ldconfig

-      - name: Prepare pipenv environment
+      - name: 'installation: Install pipenv environment'
         working-directory: tests/validation
+        id: pipenv-install
         run: |
-          pipenv install -r requirements.txt
+          python3 -m pip install pipenv
+          python3 -m pipenv install -r requirements.txt
+          echo "VIRTUAL_ENV=$(python3 -m pipenv --venv)/bin/activate" >> $GITHUB_ENV

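The build job exposes `pipenv-activate` via `steps.pipenv-install.outputs.VIRTUAL_ENV`. In GitHub Actions, `steps.<id>.outputs.<name>` is populated from values a step writes to `$GITHUB_OUTPUT`, while `$GITHUB_ENV` (used above) only feeds the environment of later steps in the same job. A minimal sketch of the output-file variant, shown here for comparison only and not part of this commit:

  # hypothetical: publish the venv activate path as a step output named VIRTUAL_ENV
  echo "VIRTUAL_ENV=$(python3 -m pipenv --venv)/bin/activate" >> "$GITHUB_OUTPUT"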
   validation-run-tests:
     needs: [validation-build-mtl]
     runs-on: [Linux, self-hosted, DPDK]
+    env:
+      PYTEST_ALIAS: 'sudo --preserve-env python3 -m pipenv run pytest'
+      PYTEST_PARAMS: '--media=/mnt/media --build="../.."'
+      PYTEST_RETRIES: '3'
     steps:
-      - name: Harden Runner
+      - name: 'preparation: Harden Runner'
         uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
         with:
           egress-policy: audit

-      - name: Kill previous pytest routine
+      - name: 'preparation: Evaluate chosen validation-test-port-p and validation-test-port-r'
+        run: |
+          eval "export TEST_PORT_P=\$${{ inputs.validation-test-port-p }}"
+          eval "export TEST_PORT_R=\$${{ inputs.validation-test-port-r }}"
+          echo "TEST_PORT_P=${TEST_PORT_P}" >> $GITHUB_ENV
+          echo "TEST_PORT_R=${TEST_PORT_R}" >> $GITHUB_ENV
+
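The evaluation step turns the selected input name (for example TEST_VF_PORT_P_0) into the PCI address held by the runner-side variable of that name, using eval for indirect expansion. The same pattern in isolation, with a hypothetical value borrowed from the comments this commit removes, and bash's native ${!name} indirection alongside it:

  export TEST_VF_PORT_P_0='0000:49:01.2'    # hypothetical runner-side variable
  CHOICE='TEST_VF_PORT_P_0'                 # what the workflow input would carry
  eval "export TEST_PORT_P=\$${CHOICE}"     # pattern used in the workflow step
  echo "${TEST_PORT_P}"                     # -> 0000:49:01.2
  export TEST_PORT_P="${!CHOICE}"           # equivalent bash indirect expansion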
+      - name: 'preparation: Kill MtlManager and pytest routines'
         run: |
           sudo killall -SIGINT pipenv || true
           sudo killall -SIGINT pytest || true
           sudo killall -SIGINT MtlManager || true

-      - name: Binding network adapter
+      - name: 'preparation: Binding network adapter pf to kernel driver'
+        if: inputs.validation-iface-binding != 'pf_bind_pmd'
+        run: |
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh pf_bind_kernel "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh pf_bind_kernel "${TEST_PF_PORT_R}" || true
+
+      - name: 'preparation: Binding network adapter ${{ inputs.validation-iface-binding }}'
         run: |
-          sudo ./script/nicctl.sh create_vf "${TEST_PF_PORT_P}" || true
-          sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true
-          sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_R}" || true
+          sudo modprobe irdma || true

-      - name: Start MtlManager at background
+      - name: 'preparation: Start MtlManager at background'
         run: |
           sudo MtlManager &

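Because every nicctl.sh call above is wrapped in `|| true`, a failed rebind is easy to miss; a hedged sketch of how the result could be inspected on the runner after this step, using the DPDK helper already present in the checkout:

  # hedged sketch: inspect how the ports ended up bound after nicctl.sh ran
  ./dpdk/usertools/dpdk-devbind.py --status
  lsmod | grep irdma || echo "irdma not loaded"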
-      - name: Run tests in pipenv environment
+      - name: 'execution: Run validation-tests-1 in pipenv environment'
+        if: inputs.validation-tests-1 != ''
         working-directory: tests/validation
         run: |
-          sudo pipenv run pytest '${{ inputs.validation-tests }}' --nic="${TEST_PORT_P},${TEST_PORT_R}" --media=/mnt/media
+          set +e
+          echo "TEST_PORT_P=${{ env.TEST_PORT_P }} and TEST_PORT_R=${{ env.TEST_PORT_R }}"
+          eval "export TEST_PORT_P=\$${{ inputs.validation-test-port-p }}"
+          eval "export TEST_PORT_R=\$${{ inputs.validation-test-port-r }}"
+          TESTS_TO_EXECUTE=( $(grep -v "collected in" <(${{ env.PYTEST_ALIAS }} "${{ inputs.validation-tests-1 }}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1)) )
+          NUMBER_OF_TESTS="${#TESTS_TO_EXECUTE[@]}"
+          TESTS_FAIL=()
+          TESTS_SUCCESS=()
+          echo "## Starting ${{ inputs.validation-tests-1 }} (total ${NUMBER_OF_TESTS}) :rocket:" >> $GITHUB_STEP_SUMMARY
+          echo "| ? | Collected Test | Started | Ended | Took (s) | Result |" >> $GITHUB_STEP_SUMMARY
+          echo "| --- | --- | --- | --- | --- | --- |" >> $GITHUB_STEP_SUMMARY
+
+          for test in ${TESTS_TO_EXECUTE[@]}; do
+            echo "::group::${test}"
+            PYTEST_START_HI="$(date '+%s')"
+            ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary
+            for retry in $(seq 1 "${{ env.PYTEST_RETRIES }}"); do
+              ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}"
+              PYTEST_RESULT="$?"
+              echo "RETRY: ${retry}"
+              [[ "${PYTEST_RESULT}" == "0" ]] && break
+            done
+            PYTEST_END_HI="$(date '+%s')"
+            if [[ "${PYTEST_RESULT}" == "0" ]]; then
+              PREFIX="✅"
+              SUFFIX="[OK]"
+              TESTS_SUCCESS+=( "${test}" )
+            else
+              PREFIX="❌"
+              SUFFIX="[Err]"
+              TESTS_FAIL+=( "${test}" )
+            fi
+            echo "| ${PREFIX} | ${test} | $(date --date=@${PYTEST_START_HI} '+%d%m%y_%H%M%S') | $(date --date=@${PYTEST_END_HI} '+%d%m%y_%H%M%S') | $((PYTEST_END_HI-PYTEST_START_HI))s | ${SUFFIX} |" >> $GITHUB_STEP_SUMMARY
+            echo "::endgroup::"
+          done
+          echo "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:" >> $GITHUB_STEP_SUMMARY
+          echo "${TESTS_SUCCESS[@]}" >> $GITHUB_STEP_SUMMARY
+          echo "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:" >> $GITHUB_STEP_SUMMARY
+          echo "${TESTS_FAIL[@]}" >> $GITHUB_STEP_SUMMARY
+          if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${{ inputs.validation-no-fail-tests }}" == "true" ]]; then
+            exit 0
+          else
+            exit 1
+          fi
+
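The step above first collects pytest node IDs with --collect-only and then runs each one with up to PYTEST_RETRIES attempts. The same collect-then-retry pattern in isolation, stripped of the workflow-expression wrappers and using a hypothetical suite path:

  PYTEST="python3 -m pipenv run pytest"
  # hypothetical suite path; --collect-only -q prints one test node ID per line
  mapfile -t TESTS < <(${PYTEST} tests/single/video/pacing --collect-only -q --no-summary 2>&1 | grep -v "collected in")
  for t in "${TESTS[@]}"; do
    for retry in 1 2 3; do
      ${PYTEST} "${t}" && break   # stop retrying on the first passing attempt
    done
  done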
+      - name: 'execution: Run validation-tests-2 in pipenv environment'
+        if: inputs.validation-tests-2 != ''
+        working-directory: tests/validation
+        run: |
+          ${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-2 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-tests-3 in pipenv environment'
+        if: inputs.validation-tests-3 != ''
+        working-directory: tests/validation
+        run: |
+          ${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-3 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-1 in pipenv environment'
+        if: inputs.validation-pre-release-1 != 'NONE'
+        working-directory: tests/validation
+        run: |
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-1 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-2 in pipenv environment'
+        if: inputs.validation-pre-release-2 != 'NONE'
+        working-directory: tests/validation
+        run: |
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-2 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-3 in pipenv environment'
+        if: inputs.validation-pre-release-3 != 'NONE'
+        working-directory: tests/validation
+        run: |
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-3 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
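These shorter steps rely on the validation-no-fail-tests choice being substituted literally into the shell: the expression expands to `|| true` or `|| false`, so a failing command is either swallowed or propagated. Reduced to plain bash with a stand-in variable:

  NO_FAIL=true            # stand-in for what ${{ inputs.validation-no-fail-tests }} would inject
  false || ${NO_FAIL}     # overall status 0: the failure is swallowed
  NO_FAIL=false
  false || ${NO_FAIL}     # overall status 1: under bash -e this aborts the step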
+      - name: 'cleanup: Kill MtlManager and pytest routines'
+        if: always()
+        run: |
+          sudo killall -SIGINT pipenv || true
+          sudo killall -SIGINT pytest || true
+          sudo killall -SIGINT MtlManager || true
+
+      - name: 'cleanup: Restore valid owner to repository and directories'
+        if: always()
+        run: |
+          sudo chown -R "${USER}" "$(pwd)"
+
+      - name: 'cleanup: Bind pf to kernel driver'
+        if: always()
+        run: |
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh pf_bind_kernel "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh pf_bind_kernel "${TEST_PF_PORT_R}" || true
+          sudo modprobe irdma || true
+
+      - name: 'cleanup: Validation execution logs'
+        if: always()
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        with:
+          name: validation-execution-logs
+          path: '${{ github.workspace }}/tests/validation/logs'
+
+      - name: 'cleanup: Generate runner summary'
+        if: always()
+        run: |
+          echo "## Runner ${{ runner.name }}" >> $GITHUB_STEP_SUMMARY
+          echo "Below ENV variables are defined on the ${{ runner.name }} self-hosted runner side" >> $GITHUB_STEP_SUMMARY
+          echo "| Variable | Value |" >> $GITHUB_STEP_SUMMARY
+          echo "| --- | --- |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_PF_PORT_P | ${TEST_PF_PORT_P} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_PF_PORT_R | ${TEST_PF_PORT_R} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_PORT_P | ${TEST_PORT_P} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_PORT_R | ${TEST_PORT_R} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_DMA_PORT_P | ${TEST_DMA_PORT_P} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_DMA_PORT_R | ${TEST_DMA_PORT_R} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_P_0 | ${TEST_VF_PORT_P_0} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_P_1 | ${TEST_VF_PORT_P_1} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_P_2 | ${TEST_VF_PORT_P_2} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_P_3 | ${TEST_VF_PORT_P_3} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_R_0 | ${TEST_VF_PORT_R_0} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_R_1 | ${TEST_VF_PORT_R_1} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_R_2 | ${TEST_VF_PORT_R_2} |" >> $GITHUB_STEP_SUMMARY
+          echo "| TEST_VF_PORT_R_3 | ${TEST_VF_PORT_R_3} |" >> $GITHUB_STEP_SUMMARY
