
set +e

-VALIDATION_TESTS_1="${1:-$VALIDATION_TESTS_1}"
-VALIDATION_TESTS_2="${2:-$VALIDATION_TESTS_2}"
-PYTEST_ALIAS="${3:-$PYTEST_ALIAS}"
-PYTEST_PARAMS="${4:-$PYTEST_PARAMS}"
-export TEST_PORT_P="${5:-$TEST_PORT_P}"
-export TEST_PORT_R="${6:-$TEST_PORT_R}"
-PYTEST_RETRIES="${PYTEST_RETRIES:-3}"
-
# Function to log messages to GitHub Actions
-function LOG_GITHUB_SUMMARY() {
-    echo "$@" >> "$GITHUB_STEP_SUMMARY"
-}
-
-function LOG_GITHUB_CONSOLE() {
-    echo "$@"
+log_to_github() {
+    echo "$1" >> "$GITHUB_STEP_SUMMARY"
}

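The consolidated helper appends one line of Markdown to the file named by GITHUB_STEP_SUMMARY, while plain echo now covers the live console log. A minimal sketch of the two output paths (messages are illustrative):

    log_to_github "### Validation run"   # rendered in the job summary once the step ends
    echo "::group::setup"                # workflow command, visible in the streaming log
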
|
# Function to run a test and handle retries
run_test() {
    local test=$1
    local retries=$2
-    local pytest_alias=$3
-    local pytest_params=$4
-    local test_port_p=$5
-    local test_port_r=$6
-    local PYTEST_START_TIME=""
-    local PYTEST_END_TIME=""
-    local PYTEST_DURATION=""
-    local PYTEST_TASK_STATUS="❌"
-    local PYTEST_SUFFIX="[Err]"
-
-    LOG_GITHUB_CONSOLE "::group::${test}"
-    PYTEST_START_TIME=$(date '+%s')
-    # shellcheck disable=SC2086
-    ${pytest_alias} "${test}" ${pytest_params} --nic="${test_port_p},${test_port_r}" --collect-only -q --no-summary
+    local test_port_p=$3
+    local test_port_r=$4
+
+    echo "::group::${test}"
+    local start_time=$(date '+%s')
+    sudo --preserve-env python3 -m pipenv run pytest "${test}" --media=/mnt/media --build="../.." --nic="${test_port_p},${test_port_r}" --collect-only -q --no-summary

    for retry in $(seq 1 "$retries"); do
-        # shellcheck disable=SC2086
-        ${pytest_alias} "${test}" ${pytest_params} --nic="${test_port_p},${test_port_r}"
+        echo "sudo --preserve-env python3 -m pipenv run pytest \"${test}\" --media=/mnt/media --build=\"../..\" --nic=\"${test_port_p},${test_port_r}\""
+        sudo --preserve-env python3 -m pipenv run pytest "${test}" --media=/mnt/media --build="../.." --nic="${test_port_p},${test_port_r}"
        local result=$?
-        LOG_GITHUB_CONSOLE "RETRY: ${retry}"
+        echo "RETRY: ${retry}"
        [[ "$result" == "0" ]] && break
    done

-    PYTEST_END_TIME="$(date '+%s')"
-    PYTEST_DURATION="$((PYTEST_END_TIME - PYTEST_START_TIME))"
+    local end_time=$(date '+%s')
+    local duration=$((end_time - start_time))
+    local status="❌"
+    local suffix="[Err]"

    if [[ "$result" == "0" ]]; then
-        PYTEST_TASK_STATUS="✅"
-        PYTEST_SUFFIX="[OK]"
+        status="✅"
+        suffix="[OK]"
        TESTS_SUCCESS+=("${test}")
    else
        TESTS_FAIL+=("${test}")
    fi

-    LOG_GITHUB_SUMMARY "| ${PYTEST_TASK_STATUS} | ${test} | $(date --date=@${PYTEST_START_TIME} '+%d%m%y_%H%M%S') | $(date --date="@${PYTEST_END_TIME}" '+%d%m%y_%H%M%S') | ${PYTEST_DURATION}s | ${PYTEST_SUFFIX} |"
-    LOG_GITHUB_CONSOLE "::endgroup::"
+    log_to_github "| ${status} | ${test} | $(date --date=@${start_time} '+%d%m%y_%H%M%S') | $(date --date=@${end_time} '+%d%m%y_%H%M%S') | ${duration}s | ${suffix} |"
+    echo "::endgroup::"
}

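With the narrower signature, a call site passes just the pytest node id, the retry budget, and the two NIC identifiers. A sketch of an invocation; the node id and PCI addresses are hypothetical:

    run_test "tests/single/video/test_video.py::test_1080p" 3 "0000:4b:00.0" "0000:4b:00.1"
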
# Main script execution
-LOG_GITHUB_CONSOLE "::group::pre-execution-summary"
+echo "::group::pre-execution-summary"
+
+# Export environment variables
+export TEST_PORT_P="${TEST_PORT_P}"
+export TEST_PORT_R="${TEST_PORT_R}"

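The re-exports matter because every pytest invocation runs under sudo --preserve-env, which forwards the caller's exported environment into the root session. An equivalent, slightly terser form would be:

    export TEST_PORT_P TEST_PORT_R   # same effect: mark the inherited values for export
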
# Collect tests to be executed
TESTS_INCLUDED_IN_EXECUTION=(
-    $(grep -v "collected in" <(${PYTEST_ALIAS} "tests/${VALIDATION_TESTS_1}" --collect-only -q --no-summary 2>&1))
+    $(grep -v "collected in" <(sudo --preserve-env python3 -m pipenv run pytest "tests/${VALIDATION_TESTS_1}" --media=/mnt/media --build="../.." --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1))
)
SUMMARY_MAIN_HEADER="Starting tests/${VALIDATION_TESTS_1}"

if [[ -n "${VALIDATION_TESTS_2}" ]]; then
    TESTS_INCLUDED_IN_EXECUTION+=(
-        $(grep -v "collected in" <(${PYTEST_ALIAS} "tests/${VALIDATION_TESTS_2}" --collect-only -q --no-summary 2>&1))
+        $(grep -v "collected in" <(sudo --preserve-env python3 -m pipenv run pytest "tests/${VALIDATION_TESTS_2}" --media=/mnt/media --build="../.." --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1))
    )
    SUMMARY_MAIN_HEADER="${SUMMARY_MAIN_HEADER}, tests/${VALIDATION_TESTS_2}"
fi

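Each process substitution feeds pytest's quiet collection listing through grep, which drops the trailing count line so the array holds only node ids. With hypothetical test names, the raw listing looks roughly like:

    tests/single/video/test_video.py::test_1080p
    tests/single/video/test_video.py::test_2160p
    2 tests collected in 0.41s    <- filtered out by grep -v "collected in"
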
+NUMBER_OF_TESTS="${#TESTS_INCLUDED_IN_EXECUTION[@]}"
TESTS_FAIL=()
TESTS_SUCCESS=()

-LOG_GITHUB_CONSOLE "${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:"
-LOG_GITHUB_CONSOLE "----------------------------------"
-LOG_GITHUB_CONSOLE "Tests to be executed:"
-LOG_GITHUB_CONSOLE "${TESTS_INCLUDED_IN_EXECUTION[@]}"
-
-LOG_GITHUB_SUMMARY "## ${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:"
-LOG_GITHUB_SUMMARY "| ❌/✅ | Collected Test | Started | Ended | Took (s) | Result |"
-LOG_GITHUB_SUMMARY "| --- | --- | --- | --- | --- | --- |"
+echo "${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:"
+echo "----------------------------------"
+echo "Tests to be executed:"
+echo "${TESTS_INCLUDED_IN_EXECUTION[@]}"

-LOG_GITHUB_CONSOLE "::endgroup::"
+log_to_github "## ${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:"
+log_to_github "| ❌/✅ | Collected Test | Started | Ended | Took (s) | Result |"
+log_to_github "| --- | --- | --- | --- | --- | --- |"
+echo "::endgroup::"

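Once the step finishes, the header lines render as a Markdown table in the job summary and each run_test call appends one row. An illustrative render, with a hypothetical test and timestamps in the script's %d%m%y_%H%M%S format:

    | ❌/✅ | Collected Test | Started | Ended | Took (s) | Result |
    | --- | --- | --- | --- | --- | --- |
    | ✅ | tests/single/video/test_video.py::test_1080p | 010125_120001 | 010125_120147 | 106s | [OK] |
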
# Execute each test
for test in "${TESTS_INCLUDED_IN_EXECUTION[@]}"; do
-    run_test "$test" "${PYTEST_RETRIES}" "${PYTEST_ALIAS}" "${PYTEST_PARAMS}" "${TEST_PORT_P}" "${TEST_PORT_R}"
+    run_test "$test" "${PYTEST_RETRIES:-3}" "$TEST_PORT_P" "$TEST_PORT_R"
done

# Summary of test results
-LOG_GITHUB_SUMMARY "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:"
-LOG_GITHUB_SUMMARY "${TESTS_SUCCESS[@]}"
-LOG_GITHUB_SUMMARY "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:"
-LOG_GITHUB_SUMMARY "${TESTS_FAIL[@]}"
+log_to_github "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:"
+log_to_github "${TESTS_SUCCESS[*]}"
+log_to_github "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:"
+log_to_github "${TESTS_FAIL[*]}"

# Determine exit status
if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${VALIDATION_NO_FAIL_TESTS}" == "true" ]]; then
    exit 0
+else
+    exit 1
fi
-
-exit 1
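Setting VALIDATION_NO_FAIL_TESTS=true turns the run into a non-blocking, informational job: failures are still tabulated in the summary, but the script exits 0. For example, in the workflow environment (the script name here is hypothetical):

    VALIDATION_NO_FAIL_TESTS=true bash run_validation_tests.sh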