         default: 'main'
         required: false
         description: 'Branch name to use'
-      validation-tests:
+      validation-iface-binding:
+        type: choice
+        required: true
+        description: 'Type of iface binding to use'
+        options:
+          - "create_vf"
+          - "create_kvf"
+          - "create_tvf"
+          - "bind_pmd"
+          - "bind_kernel"
+      validation-test-port-p:
+        type: choice
+        required: true
+        description: 'Which port to use as Test-Port-P'
+        options:
+          - TEST_VF_PORT_P_0
+          - TEST_VF_PORT_P_1
+          - TEST_VF_PORT_P_2
+          - TEST_VF_PORT_P_3
+          - TEST_PF_PORT_P
+          - TEST_VF_PORT_R_0
+          - TEST_VF_PORT_R_1
+          - TEST_VF_PORT_R_2
+          - TEST_VF_PORT_R_3
+          - TEST_PF_PORT_R
+          - TEST_DMA_PORT_P
+          - TEST_DMA_PORT_R
+      validation-test-port-r:
+        type: choice
+        required: true
+        description: 'Which port to use as Test-Port-R'
+        options:
+          - TEST_VF_PORT_P_1
+          - TEST_VF_PORT_P_0
+          - TEST_VF_PORT_P_2
+          - TEST_VF_PORT_P_3
+          - TEST_PF_PORT_P
+          - TEST_VF_PORT_R_0
+          - TEST_VF_PORT_R_1
+          - TEST_VF_PORT_R_2
+          - TEST_VF_PORT_R_3
+          - TEST_PF_PORT_R
+          - TEST_DMA_PORT_P
+          - TEST_DMA_PORT_R
+      validation-no-fail-tests:
+        type: choice
+        required: false
+        description: 'Run all tests; failures will not fail the job'
+        options:
+          - "false"
+          - "true"
+      validation-tests-1:
         type: string
         default: 'tests/single/video/pacing'
         required: true
-        description: 'Validation tests to run'
+        description: '1st validation tests to run'
+      validation-tests-2:
+        type: string
+        default: ''
+        required: false
+        description: '2nd validation tests to run'
73+ validation-pre-release-1 :
74+ description : ' Select from pre-release group tests nr-1'
75+ required : false
76+ type : choice
77+ options :
78+ - NONE
79+ - ancillary
80+ - kernel-socket
81+ - rss-mode
82+ - st20p
83+ - st30p
84+ - st41
85+ - udp
86+ - video
87+ - xdp
88+ validation-pre-release-2 :
89+ description : ' Select from pre-release group tests nr-2'
90+ required : false
91+ type : choice
92+ options :
93+ - NONE
94+ - ffmpeg-plugin
95+ - fuzzy-tests
96+ - performance
97+ - ptp
98+ - rx-timing
99+ - vero
100+ - virtio-enable
101+ - wrong-parameter
102+ validation-pre-release-3 :
103+ description : ' Select from pre-release group tests nr-3'
104+ required : false
105+ type : choice
106+ options :
107+ - NONE
108+ - gpu-direct
109+ - gpu-enabling
 
 env:
-  # Customize the env if
   BUILD_TYPE: 'Release'
   DPDK_VERSION: '23.11'
-  # Bellow ENV variables are required to be defined on runner side:
-  # TEST_PF_PORT_P: '0000:49:00.0'
-  # TEST_PF_PORT_R: '0000:49:00.1'
-  # TEST_PORT_P: '0000:49:01.2'
-  # TEST_PORT_R: '0000:49:01.3'
-  # TEST_DMA_PORT_P: '0000:6a:01.0'
-  # TEST_DMA_PORT_R: '0000:6f:01.0'
 
 permissions:
   contents: read
@@ -34,78 +119,239 @@ jobs:
   validation-build-mtl:
     runs-on: [Linux, self-hosted, DPDK]
     timeout-minutes: 60
-
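+    # Path to the pipenv venv activate script, exposed for potential use by downstream jobs
+    # (set by the pipenv-install step via $GITHUB_OUTPUT).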
+    outputs:
+      pipenv-activate: ${{ steps.pipenv-install.outputs.VIRTUAL_ENV }}
     steps:
-      - name: Harden Runner
+      - name: 'preparation: Harden Runner'
         uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
         with:
           egress-policy: audit
 
-      - name: Checkout MTL
+      - name: 'preparation: Restore valid repository owner and print env'
+        if: always()
+        run: |
+          sudo chown -R "${USER}" "$(pwd)"
+          env
+
+      - name: 'preparation: Checkout MTL'
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           ref: '${{ inputs.branch-to-checkout }}'
 
-      - name: Checkout DPDK
+      - name: 'preparation: Checkout DPDK'
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           repository: 'DPDK/dpdk'
           ref: 'v${{ env.DPDK_VERSION }}'
           path: 'dpdk'
 
-      - name: Install the build dependency
+      - name: 'configuration: Install the build dependency'
         run: |
-          sudo apt-get install -y git gcc meson python3 python3-pyelftools pkg-config libnuma-dev libjson-c-dev libpcap-dev libgtest-dev libsdl2-dev libsdl2-ttf-dev libssl-dev
-          sudo apt-get install -y systemtap-sdt-dev pipenv
+          sudo apt update
+          sudo apt-get remove -y pipenv || true
+          sudo apt-get install -y \
+            git gcc meson \
+            pkg-config \
+            python3 \
+            python3-pyelftools \
+            python3-virtualenv \
+            python3-pip \
+            libnuma-dev \
+            libjson-c-dev \
+            libpcap-dev \
+            libgtest-dev \
+            libsdl2-dev \
+            libsdl2-ttf-dev \
+            libssl-dev \
+            systemtap-sdt-dev
 
-      - name: Apply dpdk patches
+      - name: 'configuration: Apply dpdk patches'
         run: |
           patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch)
 
-      - name: Build dpdk
+      - name: 'installation: Build dpdk'
+        working-directory: dpdk
         run: |
-          cd dpdk
           meson build
           ninja -C build
-          cd build
-          sudo ninja install
+          sudo ninja -C build install
 
-      - name: Build
+      - name: 'installation: Build MTL'
         run: |
           ./build.sh
           sudo ldconfig
 
-      - name: Prepare pipenv environment
+      - name: 'installation: Install pipenv environment'
         working-directory: tests/validation
+        id: pipenv-install
         run: |
-          pipenv install -r requirements.txt
+          python3 -m pip install pipenv
+          python3 -m pipenv install -r requirements.txt
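+          # Publish the venv activate path as a step output so the job-level pipenv-activate output resolves.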
+          echo "VIRTUAL_ENV=$(python3 -m pipenv --venv)/bin/activate" >> "$GITHUB_OUTPUT"
 
   validation-run-tests:
     needs: [validation-build-mtl]
     runs-on: [Linux, self-hosted, DPDK]
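+    # Shared pieces of the pytest invocation reused by the execution steps below.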
+    env:
+      PYTEST_ALIAS: 'sudo --preserve-env python3 -m pipenv run pytest'
+      PYTEST_PARAMS: '--media=/mnt/media --build="../.."'
+      PYTEST_RETRIES: '3'
     steps:
-      - name: Harden Runner
+      - name: 'preparation: Harden Runner'
         uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
         with:
           egress-policy: audit
 
-      - name: Kill previous pytest routine
+      - name: 'preparation: Evaluate chosen validation-test-port-p and validation-test-port-r'
+        run: |
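+          # Indirectly expand the selected input (e.g. TEST_VF_PORT_P_0) into the runner-side
+          # variable of the same name, then persist the result for the following steps.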
+          eval "export TEST_PORT_P=\$${{ inputs.validation-test-port-p }}"
+          eval "export TEST_PORT_R=\$${{ inputs.validation-test-port-r }}"
+          echo "TEST_PORT_P=${TEST_PORT_P}" >> "$GITHUB_ENV"
+          echo "TEST_PORT_R=${TEST_PORT_R}" >> "$GITHUB_ENV"
+          echo "TEST_PORT_P=${TEST_PORT_P}"
+          echo "TEST_PORT_R=${TEST_PORT_R}"
+
+      - name: 'preparation: Kill MtlManager and pytest routines'
         run: |
           sudo killall -SIGINT pipenv || true
           sudo killall -SIGINT pytest || true
           sudo killall -SIGINT MtlManager || true
 
-      - name: Binding network adapter
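+      # Unless PMD binding was requested, first return both PFs to the kernel driver.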
+      - name: 'preparation: Binding network adapter PF to kernel driver'
+        if: inputs.validation-iface-binding != 'bind_pmd'
+        run: |
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_R}" || true
+
+      - name: 'preparation: Binding network adapter ${{ inputs.validation-iface-binding }}'
         run: |
-          sudo ./script/nicctl.sh create_vf "${TEST_PF_PORT_P}" || true
-          sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true
-          sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_R}" || true
+          sudo modprobe irdma || true
 
-      - name: Start MtlManager at background
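+      # MtlManager stays up in the background for the test steps and is stopped again in the cleanup phase.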
+      - name: 'preparation: Start MtlManager in the background'
         run: |
           sudo MtlManager &
 
-      - name: Run tests in pipenv environment
+      - name: 'execution: Run validation-tests-1 in pipenv environment'
+        if: inputs.validation-tests-1 != ''
+        working-directory: tests/validation
+        run: |
+          set +e
+          export TEST_PORT_P="${{ env.TEST_PORT_P }}"
+          export TEST_PORT_R="${{ env.TEST_PORT_R }}"
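+          # Collect the test IDs up front so each test can be run (and retried) individually below.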
+          TESTS_TO_EXECUTE=( $(grep -v "collected in" <(${{ env.PYTEST_ALIAS }} "${{ inputs.validation-tests-1 }}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1)) )
+          NUMBER_OF_TESTS="${#TESTS_TO_EXECUTE[@]}"
+          TESTS_FAIL=()
+          TESTS_SUCCESS=()
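+          # Per-test results are appended as a Markdown table to the workflow step summary.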
+          echo "## Starting ${{ inputs.validation-tests-1 }} (total ${NUMBER_OF_TESTS}) :rocket:" >> "$GITHUB_STEP_SUMMARY"
+          echo "| Status | Collected Test | Started | Ended | Took (s) | Result |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| --- | --- | --- | --- | --- | --- |" >> "$GITHUB_STEP_SUMMARY"
+
+          for test in "${TESTS_TO_EXECUTE[@]}"; do
+            echo "::group::${test}"
+            PYTEST_START_HI="$(date '+%s')"
+            ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary
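+            # Retry a failing test up to PYTEST_RETRIES times; stop early on the first pass.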
+            for retry in $(seq 1 "${{ env.PYTEST_RETRIES }}"); do
+              ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}"
+              PYTEST_RESULT="$?"
+              echo "RETRY: ${retry}"
+              [[ "${PYTEST_RESULT}" == "0" ]] && break
+            done
+            PYTEST_END_HI="$(date '+%s')"
+            if [[ "${PYTEST_RESULT}" == "0" ]]; then
+              PREFIX="✅"
+              SUFFIX="[OK]"
+              TESTS_SUCCESS+=( "${test}" )
+            else
+              PREFIX="❌"
+              SUFFIX="[Err]"
+              TESTS_FAIL+=( "${test}" )
+            fi
+            echo "| ${PREFIX} | ${test} | $(date --date=@${PYTEST_START_HI} '+%d%m%y_%H%M%S') | $(date --date=@${PYTEST_END_HI} '+%d%m%y_%H%M%S') | $((PYTEST_END_HI-PYTEST_START_HI))s | ${SUFFIX} |" >> "$GITHUB_STEP_SUMMARY"
+            echo "::endgroup::"
+          done
+          echo "### Total passed ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:" >> "$GITHUB_STEP_SUMMARY"
+          echo "${TESTS_SUCCESS[@]}" >> "$GITHUB_STEP_SUMMARY"
+          echo "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:" >> "$GITHUB_STEP_SUMMARY"
+          echo "${TESTS_FAIL[@]}" >> "$GITHUB_STEP_SUMMARY"
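+          # Fail the step on any test failure unless validation-no-fail-tests is set to "true".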
+          if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${{ inputs.validation-no-fail-tests }}" == "true" ]]; then
+            exit 0
+          else
+            exit 1
+          fi
+
+      - name: 'execution: Run validation-tests-2 in pipenv environment'
+        if: inputs.validation-tests-2 != ''
+        working-directory: tests/validation
+        run: |
+          ${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-2 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-1 in pipenv environment'
+        if: inputs.validation-pre-release-1 != 'NONE'
+        working-directory: tests/validation
+        run: |
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-1 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-2 in pipenv environment'
+        if: inputs.validation-pre-release-2 != 'NONE'
         working-directory: tests/validation
         run: |
-          sudo pipenv run pytest '${{ inputs.validation-tests }}' --nic="${TEST_PORT_P},${TEST_PORT_R}" --media=/mnt/media
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-2 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'execution: Run validation-pre-release-3 in pipenv environment'
+        if: inputs.validation-pre-release-3 != 'NONE'
+        working-directory: tests/validation
+        run: |
+          echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-3 }} ==" || ${{ inputs.validation-no-fail-tests }}
+
+      - name: 'cleanup: Kill MtlManager and pytest routines'
+        if: always()
+        run: |
+          sudo killall -SIGINT pipenv || true
+          sudo killall -SIGINT pytest || true
+          sudo killall -SIGINT MtlManager || true
+
+      - name: 'cleanup: Restore valid owner to repository and directories'
+        if: always()
+        run: |
+          sudo chown -R "${USER}" "$(pwd)"
+
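+      # Leave the runner in a known state: both PFs back on the kernel driver and the irdma module reloaded.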
+      - name: 'cleanup: Bind PF to kernel driver'
+        if: always()
+        run: |
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_R}" || true
+          sudo modprobe irdma || true
+
+      - name: 'cleanup: Upload validation execution logs'
+        if: always()
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        with:
+          name: validation-execution-logs
+          path: '${{ github.workspace }}/tests/validation/logs'
+
+      - name: 'cleanup: Generate runner summary'
+        if: always()
+        run: |
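+          # Record the runner-side port variables in the step summary for reference.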
+          echo "## Runner ${{ runner.name }}" >> "$GITHUB_STEP_SUMMARY"
+          echo "The ENV variables below are defined on the ${{ runner.name }} self-hosted runner:" >> "$GITHUB_STEP_SUMMARY"
+          echo "| Variable | Value |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| --- | --- |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_PF_PORT_P | ${TEST_PF_PORT_P} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_PF_PORT_R | ${TEST_PF_PORT_R} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_PORT_P | ${TEST_PORT_P} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_PORT_R | ${TEST_PORT_R} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_DMA_PORT_P | ${TEST_DMA_PORT_P} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_DMA_PORT_R | ${TEST_DMA_PORT_R} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_VF_PORT_P_0 | ${TEST_VF_PORT_P_0} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_VF_PORT_P_1 | ${TEST_VF_PORT_P_1} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_VF_PORT_P_2 | ${TEST_VF_PORT_P_2} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_VF_PORT_P_3 | ${TEST_VF_PORT_P_3} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_VF_PORT_R_0 | ${TEST_VF_PORT_R_0} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_VF_PORT_R_1 | ${TEST_VF_PORT_R_1} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_VF_PORT_R_2 | ${TEST_VF_PORT_R_2} |" >> "$GITHUB_STEP_SUMMARY"
+          echo "| TEST_VF_PORT_R_3 | ${TEST_VF_PORT_R_3} |" >> "$GITHUB_STEP_SUMMARY"