99 default : ' main'
1010 required : false
1111 description : ' Branch name to use'
12- validation-tests :
12+ validation-iface-binding :
13+ type : choice
14+ required : true
15+ description : ' Type of iface binding to use'
16+ options :
17+ - " create_vf"
18+ - " create_kvf"
19+ - " create_tvf"
20+ - " bind_pmd"
21+ - " bind_kernel"
22+ validation-test-port-p :
23+ type : choice
24+ required : true
25+ description : ' Which port to use as Test-Port-P'
26+ options :
27+ - TEST_VF_PORT_P_0
28+ - TEST_VF_PORT_P_1
29+ - TEST_VF_PORT_P_2
30+ - TEST_VF_PORT_P_3
31+ - TEST_PF_PORT_P
32+ - TEST_VF_PORT_R_0
33+ - TEST_VF_PORT_R_1
34+ - TEST_VF_PORT_R_2
35+ - TEST_VF_PORT_R_3
36+ - TEST_PF_PORT_R
37+ - TEST_DMA_PORT_P
38+ - TEST_DMA_PORT_R
39+ validation-test-port-r :
40+ type : choice
41+ required : true
42+ description : ' Which port to use as Test-Port-R'
43+ options :
44+ - TEST_VF_PORT_P_1
45+ - TEST_VF_PORT_P_0
46+ - TEST_VF_PORT_P_2
47+ - TEST_VF_PORT_P_3
48+ - TEST_PF_PORT_P
49+ - TEST_VF_PORT_R_0
50+ - TEST_VF_PORT_R_1
51+ - TEST_VF_PORT_R_2
52+ - TEST_VF_PORT_R_3
53+ - TEST_PF_PORT_R
54+ - TEST_DMA_PORT_P
55+ - TEST_DMA_PORT_R
56+ validation-no-fail-tests :
57+ type : choice
58+ required : false
59+ description : ' Run all tests, none will fail'
60+ options :
61+ - " false"
62+ - " true"
63+ validation-tests-1 :
1364 type : string
1465 default : ' tests/single/video/pacing'
1566 required : true
16- description : ' Validation tests to run'
67+ description : ' 1st validation tests to run'
68+ validation-tests-2 :
69+ type : string
70+ default : ' '
71+ required : false
72+ description : ' 2nd validation tests to run'
73+ validation-pre-release-1 :
74+ description : ' Select from pre-release group tests nr-1'
75+ required : false
76+ type : choice
77+ options :
78+ - NONE
79+ - ancillary
80+ - kernel-socket
81+ - rss-mode
82+ - st20p
83+ - st30p
84+ - st41
85+ - udp
86+ - video
87+ - xdp
88+ validation-pre-release-2 :
89+ description : ' Select from pre-release group tests nr-2'
90+ required : false
91+ type : choice
92+ options :
93+ - NONE
94+ - ffmpeg-plugin
95+ - fuzzy-tests
96+ - performance
97+ - ptp
98+ - rx-timing
99+ - vero
100+ - virtio-enable
101+ - wrong-parameter
102+ validation-pre-release-3 :
103+ description : ' Select from pre-release group tests nr-3'
104+ required : false
105+ type : choice
106+ options :
107+ - NONE
108+ - gpu-direct
109+ - gpu-enabling
17110
18111env :
19- # Customize the env if
20112 BUILD_TYPE : ' Release'
21113 DPDK_VERSION : ' 23.11'
22- # Bellow ENV variables are required to be defined on runner side:
23- # TEST_PF_PORT_P: '0000:49:00.0'
24- # TEST_PF_PORT_R: '0000:49:00.1'
25- # TEST_PORT_P: '0000:49:01.2'
26- # TEST_PORT_R: '0000:49:01.3'
27- # TEST_DMA_PORT_P: '0000:6a:01.0'
28- # TEST_DMA_PORT_R: '0000:6f:01.0'
29114
30115permissions :
31116 contents : read
@@ -34,78 +119,237 @@ jobs:
34119 validation-build-mtl :
35120 runs-on : [Linux, self-hosted, DPDK]
36121 timeout-minutes : 60
37-
122+ outputs :
123+ pipenv-activate : ${{ steps.pipenv-install.outputs.VIRTUAL_ENV }}
38124 steps :
39- - name : Harden Runner
125+ - name : ' preparation: Harden Runner'
40126 uses : step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
41127 with :
42128 egress-policy : audit
43129
44- - name : Checkout MTL
130+ - name : ' preparation: Restore valid owner to repository and directories'
131+ if : always()
132+ run : |
133+ sudo chown -R "${USER}" "$(pwd)"
134+
135+ - name : ' preparation: Checkout MTL'
45136 uses : actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
46137 with :
47138 ref : ' ${{ inputs.branch-to-checkout }}'
48139
49- - name : Checkout DPDK
140+ - name : ' preparation: Checkout DPDK'
50141 uses : actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
51142 with :
52143 repository : ' DPDK/dpdk'
53144 ref : ' v${{ env.DPDK_VERSION }}'
54145 path : ' dpdk'
55146
56- - name : Install the build dependency
147+ - name : ' configuration: Install the build dependency'
57148 run : |
58- sudo apt-get install -y git gcc meson python3 python3-pyelftools pkg-config libnuma-dev libjson-c-dev libpcap-dev libgtest-dev libsdl2-dev libsdl2-ttf-dev libssl-dev
59- sudo apt-get install -y systemtap-sdt-dev pipenv
149+ sudo apt update
150+ sudo apt-get remove -y pipenv || true
151+ sudo apt-get install -y \
152+ git gcc meson \
153+ pkg-config \
154+ python3 \
155+ python3-pyelftools \
156+ python3-virtualenv \
157+ python3-pip \
158+ libnuma-dev \
159+ libjson-c-dev \
160+ libpcap-dev \
161+ libgtest-dev \
162+ libsdl2-dev \
163+ libsdl2-ttf-dev \
164+ libssl-dev \
165+ systemtap-sdt-dev
60166
61- - name : Apply dpdk patches
167+ - name : ' configuration: Apply dpdk patches'
62168 run : |
63169 patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch)
64170
65- - name : Build dpdk
171+ - name : ' installation: Build dpdk'
172+ working-directory : dpdk
66173 run : |
67- cd dpdk
68174 meson build
69175 ninja -C build
70- cd build
71- sudo ninja install
176+ sudo ninja -C build install
72177
73- - name : Build
178+ - name : ' installation: Build mtl '
74179 run : |
75180 ./build.sh
76181 sudo ldconfig
77182
78- - name : Prepare pipenv environment
183+ - name : ' installation: Install pipenv environment'
79184 working-directory : tests/validation
185+ id : pipenv-install
80186 run : |
81- pipenv install -r requirements.txt
187+ python3 -m pip install pipenv
188+ python3 -m pipenv install -r requirements.txt
189+ echo "VIRTUAL_ENV=$(python3 -m pipenv --venv)/bin/activate" >> "$GITHUB_ENV"
82190
83191 validation-run-tests :
84192 needs : [validation-build-mtl]
85193 runs-on : [Linux, self-hosted, DPDK]
194+ env :
195+ PYTEST_ALIAS : ' sudo --preserve-env python3 -m pipenv run pytest'
196+ PYTEST_PARAMS : ' --media=/mnt/media --build="../.."'
197+ PYTEST_RETRIES : ' 3'
86198 steps :
87- - name : Harden Runner
199+ - name : ' preparation: Harden Runner'
88200 uses : step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
89201 with :
90202 egress-policy : audit
91203
92- - name : Kill previous pytest routine
204+ - name : ' preparation: Evaluate chosen validation-test-port-p and validation-test-port-r'
205+ run : |
206+ eval "export TEST_PORT_P=\$${{ inputs.validation-test-port-p }}"
207+ eval "export TEST_PORT_R=\$${{ inputs.validation-test-port-r }}"
208+ echo "TEST_PORT_P=${TEST_PORT_P}" >> "$GITHUB_ENV"
209+ echo "TEST_PORT_R=${TEST_PORT_R}" >> "$GITHUB_ENV"
210+
211+ - name : ' preparation: Kill MtlManager and pytest routines'
93212 run : |
94213 sudo killall -SIGINT pipenv || true
95214 sudo killall -SIGINT pytest || true
96215 sudo killall -SIGINT MtlManager || true
97216
98- - name : Binding network adapter
217+ - name : ' preparation: Binding network adapter pf to kernel driver'
218+ if : inputs.validation-iface-binding != 'bind_pmd'
219+ run : |
220+ sudo rmmod irdma || true
221+ sudo ./script/nicctl.sh pf_bind_kernel "${TEST_PF_PORT_P}" || true
222+ sudo ./script/nicctl.sh pf_bind_kernel "${TEST_PF_PORT_R}" || true
223+
224+ - name : ' preparation: Binding network adapter ${{ inputs.validation-iface-binding }}'
99225 run : |
100- sudo ./script/nicctl.sh create_vf "${TEST_PF_PORT_P}" || true
101- sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true
102- sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true
226+ sudo rmmod irdma || true
227+ sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_P}" || true
228+ sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_R}" || true
229+ sudo modprobe irdma || true
103230
104- - name : Start MtlManager at background
231+ - name : ' preparation: Start MtlManager at background'
105232 run : |
106233 sudo MtlManager &
107234
108- - name : Run tests in pipenv environment
235+ - name : ' execution: Run validation-tests-1 in pipenv environment'
236+ if : inputs.validation-tests-1 != ''
237+ working-directory : tests/validation
238+ run : |
239+ set +e
240+ echo "TEST_PORT_P=${{ env.TEST_PORT_P }} and TEST_PORT_R=${{ env.TEST_PORT_R }}"
241+ eval "export TEST_PORT_P=\$${{ inputs.validation-test-port-p }}"
242+ eval "export TEST_PORT_R=\$${{ inputs.validation-test-port-r }}"
243+ TESTS_TO_EXECUTE=( $(grep -v "collected in" <(${{ env.PYTEST_ALIAS }} "${{ inputs.validation-tests-1 }}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1)) )
244+ NUMBER_OF_TESTS="${#TESTS_TO_EXECUTE[@]}"
245+ TESTS_FAIL=()
246+ TESTS_SUCCESS=()
247+ echo "## Starting ${{ inputs.validation-tests-1 }} (total ${NUMBER_OF_TESTS}) :rocket:" >> "$GITHUB_STEP_SUMMARY"
248+ echo "| ? | Collected Test | Started | Ended | Took (s) | Result |" >> "$GITHUB_STEP_SUMMARY"
249+ echo "| --- | ---| --- | --- | --- | --- |" >> "$GITHUB_STEP_SUMMARY"
250+
251+ for test in ${TESTS_TO_EXECUTE[@]}; do
252+ echo "::group::${test}"
253+ PYTEST_START_HI="$(date '+%s')"
254+ ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary
255+ for retry in $(seq 1 "${{ env.PYTEST_RETRIES }}"); do
256+ ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}"
257+ PYTEST_RESULT="$?"
258+ echo "RETRY: ${retry}"
259+ [[ "${PYTEST_RESULT}" == "0" ]] && break
260+ done
261+ PYTEST_END_HI="$(date '+%s')"
262+ if [[ "${PYTEST_RESULT}" == "0" ]]; then
263+ PREFIX="✅"
264+ SUFFIX="[OK]"
265+ TESTS_SUCCESS+=( "${test}" )
266+ else
267+ PREFIX="❌"
268+ SUFFIX="[Err]"
269+ TESTS_FAIL+=( "${test}" )
270+ fi
271+ echo "| ${PREFIX} | ${test} | $(date --date=@${PYTEST_START_HI} '+%d%m%y_%H%M%S') | $(date --date=@${PYTEST_END_HI} '+%d%m%y_%H%M%S') | $((PYTEST_END_HI-PYTEST_START_HI))s | ${SUFFIX} |" >> "$GITHUB_STEP_SUMMARY"
272+ echo "::endgroup::"
273+ done
274+ echo "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:" >> "$GITHUB_STEP_SUMMARY"
275+ echo "${TESTS_SUCCESS[@]}" >> "$GITHUB_STEP_SUMMARY"
276+ echo "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:" >> "$GITHUB_STEP_SUMMARY"
277+ echo "${TESTS_FAIL[@]}" >> "$GITHUB_STEP_SUMMARY"
278+ if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${{ inputs.validation-no-fail-tests }}" == "true" ]]; then
279+ exit 0
280+ else
281+ exit 1
282+ fi
283+
284+ - name : ' execution: Run validation-tests-2 in pipenv environment'
285+ if : inputs.validation-tests-2 != ''
286+ working-directory : tests/validation
287+ run : |
288+ ${{ env.PYTEST_ALIAS }} '${{ inputs.validation-tests-2 }}' ${{ env.PYTEST_PARAMS }} || ${{ inputs.validation-no-fail-tests }}
289+
290+ - name : ' execution: Run validation-pre-release-1 in pipenv environment'
291+ if : inputs.validation-pre-release-1 != 'NONE'
292+ working-directory : tests/validation
293+ run : |
294+ echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-1 }} ==" || ${{ inputs.validation-no-fail-tests }}
295+
296+ - name : ' execution: Run validation-pre-release-2 in pipenv environment'
297+ if : inputs.validation-pre-release-2 != 'NONE'
109298 working-directory : tests/validation
110299 run : |
111- sudo pipenv run pytest '${{ inputs.validation-tests }}' --nic="${TEST_PORT_P},${TEST_PORT_R}" --media=/mnt/media
300+ echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-2 }} ==" || ${{ inputs.validation-no-fail-tests }}
301+
302+ - name : ' execution: Run validation-pre-release-3 in pipenv environment'
303+ if : inputs.validation-pre-release-3 != 'NONE'
304+ working-directory : tests/validation
305+ run : |
306+ echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-3 }} ==" || ${{ inputs.validation-no-fail-tests }}
307+
308+ - name : ' cleanup: Kill MtlManager and pytest routines'
309+ if : always()
310+ run : |
311+ sudo killall -SIGINT pipenv || true
312+ sudo killall -SIGINT pytest || true
313+ sudo killall -SIGINT MtlManager || true
314+
315+ - name : ' cleanup: Restore valid owner to repository and directories'
316+ if : always()
317+ run : |
318+ sudo chown -R "${USER}" "$(pwd)"
319+
320+ - name : ' cleanup: Bind pf to kernel driver'
321+ if : always()
322+ run : |
323+ sudo rmmod irdma || true
324+ sudo ./script/nicctl.sh pf_bind_kernel "${TEST_PF_PORT_P}" || true
325+ sudo ./script/nicctl.sh pf_bind_kernel "${TEST_PF_PORT_R}" || true
326+ sudo modprobe irdma || true
327+
328+ - name : ' cleanup: Validation execution logs'
329+ if : always()
330+ uses : actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
331+ with :
332+ name : validation-execution-logs
333+ path : ' ${{ github.workspace }}/tests/validation/logs'
334+
335+ - name : ' cleanup: Generate runner summary'
336+ if : always()
337+ run : |
338+ echo "## Runner ${{ runner.name }}" >> "$GITHUB_STEP_SUMMARY"
339+ echo "Below ENV variables are defined on the ${{ runner.name }} self-hosted runner side" >> "$GITHUB_STEP_SUMMARY"
340+ echo "| Variable | Value |" >> "$GITHUB_STEP_SUMMARY"
341+ echo "| --- | --- |" >> "$GITHUB_STEP_SUMMARY"
342+ echo "| TEST_PF_PORT_P | ${TEST_PF_PORT_P} |" >> "$GITHUB_STEP_SUMMARY"
343+ echo "| TEST_PF_PORT_R | ${TEST_PF_PORT_R} |" >> "$GITHUB_STEP_SUMMARY"
344+ echo "| TEST_PORT_P | ${TEST_PORT_P} |" >> "$GITHUB_STEP_SUMMARY"
345+ echo "| TEST_PORT_R | ${TEST_PORT_R} |" >> "$GITHUB_STEP_SUMMARY"
346+ echo "| TEST_DMA_PORT_P | ${TEST_DMA_PORT_P} |" >> "$GITHUB_STEP_SUMMARY"
347+ echo "| TEST_DMA_PORT_R | ${TEST_DMA_PORT_R} |" >> "$GITHUB_STEP_SUMMARY"
348+ echo "| TEST_VF_PORT_P_0 | ${TEST_VF_PORT_P_0} |" >> "$GITHUB_STEP_SUMMARY"
349+ echo "| TEST_VF_PORT_P_1 | ${TEST_VF_PORT_P_1} |" >> "$GITHUB_STEP_SUMMARY"
350+ echo "| TEST_VF_PORT_P_2 | ${TEST_VF_PORT_P_2} |" >> "$GITHUB_STEP_SUMMARY"
351+ echo "| TEST_VF_PORT_P_3 | ${TEST_VF_PORT_P_3} |" >> "$GITHUB_STEP_SUMMARY"
352+ echo "| TEST_VF_PORT_R_0 | ${TEST_VF_PORT_R_0} |" >> "$GITHUB_STEP_SUMMARY"
353+ echo "| TEST_VF_PORT_R_1 | ${TEST_VF_PORT_R_1} |" >> "$GITHUB_STEP_SUMMARY"
354+ echo "| TEST_VF_PORT_R_2 | ${TEST_VF_PORT_R_2} |" >> "$GITHUB_STEP_SUMMARY"
355+ echo "| TEST_VF_PORT_R_3 | ${TEST_VF_PORT_R_3} |" >> "$GITHUB_STEP_SUMMARY"
0 commit comments