# E2E matrix bootstrap workflow: runs the reusable E2E pipeline once per
# storage backend and posts a combined report to a channel.
name: E2E Matrix Tests (bootstrap)

on:
  workflow_dispatch:
  schedule:
    # Nightly run at 06:45 UTC.
    - cron: "45 6 * * *"

# Least-privilege token for all jobs: checkout and same-run artifact
# downloads only need read access.
# NOTE(review): this change dropped the previous explicit `permissions:`
# block, which silently falls back to the repository default (often
# write-all). Restored read-only here — widen per-job only if the reusable
# pipeline actually needs more; confirm against e2e-reusable-pipeline.yml.
permissions:
  contents: read

# At most one run per workflow+ref (PR number when present); a newer run
# cancels any in-flight one.
concurrency:
  group: "${{ github.workflow }}-${{ github.event.number || github.ref }}"
  cancel-in-progress: true

defaults:
  run:
    shell: bash
|
jobs:
  # Full E2E pipeline run against the Ceph-backed storage class.
  e2e-ceph:
    name: E2E Pipeline (Ceph)
    uses: ./.github/workflows/e2e-reusable-pipeline.yml
    with:
      storage_type: ceph
      nested_storageclass_name: nested-ceph-pool-r2-csi-rbd
      branch: main
      virtualization_tag: main
      deckhouse_channel: alpha
      default_user: cloud
      go_version: "1.24.6"
      e2e_timeout: "3h"
    secrets:
      DEV_REGISTRY_DOCKER_CFG: ${{ secrets.DEV_REGISTRY_DOCKER_CFG }}
      VIRT_E2E_NIGHTLY_SA_TOKEN: ${{ secrets.VIRT_E2E_NIGHTLY_SA_TOKEN }}
      PROD_IO_REGISTRY_DOCKER_CFG: ${{ secrets.PROD_IO_REGISTRY_DOCKER_CFG }}
      BOOTSTRAP_DEV_PROXY: ${{ secrets.BOOTSTRAP_DEV_PROXY }}

  # Same pipeline against the replicated storage class. Kept as a separate
  # job (rather than a matrix) so downstream `needs:` can reference each
  # storage backend by a stable job id.
  e2e-replicated:
    name: E2E Pipeline (Replicated)
    uses: ./.github/workflows/e2e-reusable-pipeline.yml
    with:
      storage_type: replicated
      nested_storageclass_name: nested-thin-r1
      branch: main
      virtualization_tag: main
      deckhouse_channel: alpha
      default_user: cloud
      go_version: "1.24.6"
      e2e_timeout: "3h"
    secrets:
      DEV_REGISTRY_DOCKER_CFG: ${{ secrets.DEV_REGISTRY_DOCKER_CFG }}
      VIRT_E2E_NIGHTLY_SA_TOKEN: ${{ secrets.VIRT_E2E_NIGHTLY_SA_TOKEN }}
      PROD_IO_REGISTRY_DOCKER_CFG: ${{ secrets.PROD_IO_REGISTRY_DOCKER_CFG }}
      BOOTSTRAP_DEV_PROXY: ${{ secrets.BOOTSTRAP_DEV_PROXY }}
| 66 | + report-to-channel: |
31 | 67 | runs-on: ubuntu-latest |
| 68 | + name: End-to-End tests report |
| 69 | + needs: |
| 70 | + - e2e-ceph |
| 71 | + - e2e-replicated |
| 72 | + if: ${{ always()}} |
| 73 | + env: |
| 74 | + STORAGE_TYPES: '["ceph", "replicated"]' |
32 | 75 | steps: |
33 | | - - name: Say hello |
34 | | - run: echo "Bootstrap workflow OK" |
| 76 | + - uses: actions/checkout@v4 |
| 77 | + |
| 78 | + - name: Download E2E report artifacts |
| 79 | + uses: actions/download-artifact@v5 |
| 80 | + continue-on-error: true |
| 81 | + id: download-artifacts-pattern |
| 82 | + with: |
| 83 | + pattern: "e2e-report-*" |
| 84 | + path: downloaded-artifacts/ |
| 85 | + merge-multiple: false |
| 86 | + |
| 87 | + - name: Send results to channel |
| 88 | + run: | |
| 89 | + # Map storage types to CSI names |
| 90 | + get_csi_name() { |
| 91 | + local storage_type=$1 |
| 92 | + case "$storage_type" in |
| 93 | + "ceph") |
| 94 | + echo "rbd.csi.ceph.com" |
| 95 | + ;; |
| 96 | + "replicated") |
| 97 | + echo "replicated.csi.storage.deckhouse.io" |
| 98 | + ;; |
| 99 | + *) |
| 100 | + echo "$storage_type" |
| 101 | + ;; |
| 102 | + esac |
| 103 | + } |
| 104 | +
|
| 105 | + # Function to load and parse report from artifact |
| 106 | + # Outputs: file content to stdout, debug messages to stderr |
| 107 | + # Works with pattern-based artifact download (e2e-report-*) |
| 108 | + # Artifacts are organized as: downloaded-artifacts/e2e-report-<storage_type>-<run_id>/e2e_report_<storage_type>.json |
| 109 | + load_report_from_artifact() { |
| 110 | + local storage_type=$1 |
| 111 | + local base_path="downloaded-artifacts/" |
| 112 | + |
| 113 | + echo "[INFO] Searching for report for storage type: $storage_type" >&2 |
| 114 | + echo "[DEBUG] Base path: $base_path" >&2 |
| 115 | + |
| 116 | + if [ ! -d "$base_path" ]; then |
| 117 | + echo "[WARN] Base path does not exist: $base_path" >&2 |
| 118 | + return 1 |
| 119 | + fi |
| 120 | + |
| 121 | + local report_file="" |
| 122 | + |
| 123 | + # First, search in artifact directories matching pattern: e2e-report-<storage_type>-* |
| 124 | + # Pattern downloads create subdirectories named after the artifact |
| 125 | + # e.g., downloaded-artifacts/e2e-report-ceph-<run_id>/e2e_report_ceph.json |
| 126 | + echo "[DEBUG] Searching in artifact directories matching pattern: e2e-report-${storage_type}-*" >&2 |
| 127 | + local artifact_dir=$(find "$base_path" -type d -name "e2e-report-${storage_type}-*" 2>/dev/null | head -1) |
| 128 | + if [ -n "$artifact_dir" ]; then |
| 129 | + echo "[DEBUG] Found artifact dir: $artifact_dir" >&2 |
| 130 | + report_file=$(find "$artifact_dir" -name "e2e_report_*.json" -type f 2>/dev/null | head -1) |
| 131 | + if [ -n "$report_file" ] && [ -f "$report_file" ]; then |
| 132 | + echo "[INFO] Found report file in artifact dir: $report_file" >&2 |
| 133 | + cat "$report_file" |
| 134 | + return 0 |
| 135 | + fi |
| 136 | + fi |
| 137 | + |
| 138 | + # Fallback: search for file by name pattern anywhere in base_path |
| 139 | + echo "[DEBUG] Searching for file: e2e_report_${storage_type}.json" >&2 |
| 140 | + report_file=$(find "$base_path" -type f -name "e2e_report_${storage_type}.json" 2>/dev/null | head -1) |
| 141 | + if [ -n "$report_file" ] && [ -f "$report_file" ]; then |
| 142 | + echo "[INFO] Found report file by name: $report_file" >&2 |
| 143 | + cat "$report_file" |
| 144 | + return 0 |
| 145 | + fi |
| 146 | + |
| 147 | + echo "[WARN] Could not load report artifact for $storage_type" >&2 |
| 148 | + return 1 |
| 149 | + } |
| 150 | +
|
| 151 | + # Function to create failure summary JSON (fallback) |
| 152 | + create_failure_summary() { |
| 153 | + local storage_type=$1 |
| 154 | + local stage=$2 |
| 155 | + local run_id=$3 |
| 156 | + local csi=$(get_csi_name "$storage_type") |
| 157 | + local date=$(date +"%Y-%m-%d") |
| 158 | + local time=$(date +"%H:%M:%S") |
| 159 | + local branch="${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" |
| 160 | + local link="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${run_id:-${GITHUB_RUN_ID}}" |
| 161 | + |
| 162 | + # Map stage to status message |
| 163 | + local status_msg |
| 164 | + case "$stage" in |
| 165 | + "bootstrap") |
| 166 | + status_msg=":x: BOOTSTRAP CLUSTER FAILED" |
| 167 | + ;; |
| 168 | + "storage-setup") |
| 169 | + status_msg=":x: STORAGE SETUP FAILED" |
| 170 | + ;; |
| 171 | + "virtualization-setup") |
| 172 | + status_msg=":x: VIRTUALIZATION SETUP FAILED" |
| 173 | + ;; |
| 174 | + "e2e-test") |
| 175 | + status_msg=":x: E2E TEST FAILED" |
| 176 | + ;; |
| 177 | + *) |
| 178 | + status_msg=":question: UNKNOWN" |
| 179 | + ;; |
| 180 | + esac |
| 181 | + |
| 182 | + jq -n \ |
| 183 | + --arg csi "$csi" \ |
| 184 | + --arg date "$date" \ |
| 185 | + --arg time "$time" \ |
| 186 | + --arg branch "$branch" \ |
| 187 | + --arg status "$status_msg" \ |
| 188 | + --arg link "$link" \ |
| 189 | + '{CSI: $csi, Date: $date, StartTime: $time, Branch: $branch, Status: $status, Passed: 0, Failed: 0, Pending: 0, Skipped: 0, Link: $link}' |
| 190 | + } |
| 191 | +
|
| 192 | +
|
| 193 | + # Parse summary JSON and add to table |
| 194 | + parse_summary() { |
| 195 | + local summary_json=$1 |
| 196 | + local storage_type=$2 |
| 197 | + |
| 198 | + if [ -z "$summary_json" ] || [ "$summary_json" == "null" ] || [ "$summary_json" == "" ]; then |
| 199 | + echo "Warning: Empty summary for $storage_type" |
| 200 | + return |
| 201 | + fi |
| 202 | +
|
| 203 | + # Try to parse as JSON (handle both JSON string and already parsed JSON) |
| 204 | + if ! echo "$summary_json" | jq empty 2>/dev/null; then |
| 205 | + echo "Warning: Invalid JSON for $storage_type: $summary_json" |
| 206 | + echo "[DEBUG] json: $summary_json" |
| 207 | + return |
| 208 | + fi |
| 209 | +
|
| 210 | + # Parse JSON fields |
| 211 | + csi_raw=$(echo "$summary_json" | jq -r '.CSI // empty' 2>/dev/null) |
| 212 | + if [ -z "$csi_raw" ] || [ "$csi_raw" == "null" ] || [ "$csi_raw" == "" ]; then |
| 213 | + csi=$(get_csi_name "$storage_type") |
| 214 | + else |
| 215 | + csi="$csi_raw" |
| 216 | + fi |
| 217 | + |
| 218 | + date=$(echo "$summary_json" | jq -r '.Date // ""' 2>/dev/null) |
| 219 | + time=$(echo "$summary_json" | jq -r '.StartTime // ""' 2>/dev/null) |
| 220 | + branch=$(echo "$summary_json" | jq -r '.Branch // ""' 2>/dev/null) |
| 221 | + status=$(echo "$summary_json" | jq -r '.Status // ":question: UNKNOWN"' 2>/dev/null) |
| 222 | + passed=$(echo "$summary_json" | jq -r '.Passed // 0' 2>/dev/null) |
| 223 | + failed=$(echo "$summary_json" | jq -r '.Failed // 0' 2>/dev/null) |
| 224 | + pending=$(echo "$summary_json" | jq -r '.Pending // 0' 2>/dev/null) |
| 225 | + skipped=$(echo "$summary_json" | jq -r '.Skipped // 0' 2>/dev/null) |
| 226 | + link=$(echo "$summary_json" | jq -r '.Link // ""' 2>/dev/null) |
| 227 | +
|
| 228 | + # Set defaults if empty |
| 229 | + [ -z "$passed" ] && passed=0 |
| 230 | + [ -z "$failed" ] && failed=0 |
| 231 | + [ -z "$pending" ] && pending=0 |
| 232 | + [ -z "$skipped" ] && skipped=0 |
| 233 | + [ -z "$status" ] && status=":question: UNKNOWN" |
| 234 | +
|
| 235 | + # Format link - use CSI name as fallback if link is empty |
| 236 | + if [ -z "$link" ] || [ "$link" == "" ]; then |
| 237 | + link_text="$csi" |
| 238 | + else |
| 239 | + link_text="[:link: $csi]($link)" |
| 240 | + fi |
| 241 | +
|
| 242 | + # Add row to table |
| 243 | + markdown_table+="| $link_text | $status | $passed | $failed | $pending | $skipped | $date | $time | $branch |\n" |
| 244 | + } |
| 245 | +
|
| 246 | + # Initialize markdown table |
| 247 | + echo "[INFO] Generate markdown table" |
| 248 | + markdown_table="" |
| 249 | + header="| CSI | Status | Passed | Failed | Pending | Skipped | Date | Time | Branch|\n" |
| 250 | + separator="|---|---|---|---|---|---|---|---|---|\n" |
| 251 | + markdown_table+="$header" |
| 252 | + markdown_table+="$separator" |
| 253 | +
|
| 254 | + # Get current date for header |
| 255 | + DATE=$(date +"%Y-%m-%d") |
| 256 | + COMBINED_SUMMARY="## :dvp: **DVP | End-to-End tests | $DATE**\n\n" |
| 257 | +
|
| 258 | + echo "[INFO] Get storage types" |
| 259 | + readarray -t storage_types < <(echo "$STORAGE_TYPES" | jq -r '.[]') |
| 260 | + echo "[INFO] Storage types: " "${storage_types[@]}" |
| 261 | +
|
| 262 | + echo "[INFO] Generate summary for each storage type" |
| 263 | + for storage in "${storage_types[@]}"; do |
| 264 | + echo "[INFO] Processing $storage" |
| 265 | + |
| 266 | + # Try to load report from artifact |
| 267 | + # Debug messages go to stderr (visible in logs), JSON content goes to stdout |
| 268 | + echo "[INFO] Attempting to load report for $storage" |
| 269 | + structured_report=$(load_report_from_artifact "$storage" || true) |
| 270 | + |
| 271 | + if [ -n "$structured_report" ]; then |
| 272 | + # Check if it's valid JSON |
| 273 | + if echo "$structured_report" | jq empty 2>/dev/null; then |
| 274 | + echo "[INFO] Report is valid JSON for $storage" |
| 275 | + else |
| 276 | + echo "[WARN] Report is not valid JSON for $storage" |
| 277 | + echo "[DEBUG] Raw report content (first 200 chars):" |
| 278 | + echo "$structured_report" | head -c 200 |
| 279 | + echo "" |
| 280 | + structured_report="" |
| 281 | + fi |
| 282 | + fi |
| 283 | + |
| 284 | + if [ -n "$structured_report" ] && echo "$structured_report" | jq empty 2>/dev/null; then |
| 285 | + # Extract report data from structured file |
| 286 | + report_json=$(echo "$structured_report" | jq -c '.report // empty') |
| 287 | + failed_stage=$(echo "$structured_report" | jq -r '.failed_stage // empty') |
| 288 | + workflow_run_id=$(echo "$structured_report" | jq -r '.workflow_run_id // empty') |
| 289 | + |
| 290 | + echo "[INFO] Loaded report for $storage (failed_stage: ${failed_stage}, run_id: ${workflow_run_id})" |
| 291 | + |
| 292 | + # Validate and parse report |
| 293 | + if [ -n "$report_json" ] && [ "$report_json" != "" ] && [ "$report_json" != "null" ]; then |
| 294 | + if echo "$report_json" | jq empty 2>/dev/null; then |
| 295 | + echo "[INFO] Found valid report for $storage" |
| 296 | + parse_summary "$report_json" "$storage" |
| 297 | + else |
| 298 | + echo "[WARN] Invalid report JSON for $storage, using failed stage info" |
| 299 | + # Fallback to failed stage |
| 300 | + if [ -n "$failed_stage" ] && [ "$failed_stage" != "" ] && [ "$failed_stage" != "success" ]; then |
| 301 | + failed_summary=$(create_failure_summary "$storage" "$failed_stage" "$workflow_run_id") |
| 302 | + parse_summary "$failed_summary" "$storage" |
| 303 | + else |
| 304 | + csi=$(get_csi_name "$storage") |
| 305 | + markdown_table+="| $csi | :warning: INVALID REPORT | 0 | 0 | 0 | 0 | — | — | — |\n" |
| 306 | + fi |
| 307 | + fi |
| 308 | + else |
| 309 | + # No report in structured file, use failed stage |
| 310 | + if [ -n "$failed_stage" ] && [ "$failed_stage" != "" ] && [ "$failed_stage" != "success" ]; then |
| 311 | + echo "[INFO] Stage '$failed_stage' failed for $storage" |
| 312 | + failed_summary=$(create_failure_summary "$storage" "$failed_stage" "$workflow_run_id") |
| 313 | + parse_summary "$failed_summary" "$storage" |
| 314 | + else |
| 315 | + csi=$(get_csi_name "$storage") |
| 316 | + markdown_table+="| $csi | :warning: NO REPORT | 0 | 0 | 0 | 0 | — | — | — |\n" |
| 317 | + fi |
| 318 | + fi |
| 319 | + else |
| 320 | + # Artifact not found or invalid, show warning |
| 321 | + echo "[WARN] Could not load report artifact for $storage" |
| 322 | + csi=$(get_csi_name "$storage") |
| 323 | + markdown_table+="| $csi | :warning: ARTIFACT NOT FOUND | 0 | 0 | 0 | 0 | — | — | — |\n" |
| 324 | + fi |
| 325 | + done |
| 326 | +
|
| 327 | + echo "[INFO] Combined summary" |
| 328 | + COMBINED_SUMMARY+="${markdown_table}\n" |
| 329 | +
|
| 330 | + echo -e "$COMBINED_SUMMARY" |
| 331 | +
|
| 332 | + # Send to channel if webhook is configured |
| 333 | + echo "[INFO] Send to webhook" |
| 334 | + if [ -n "$LOOP_WEBHOOK_URL" ]; then |
| 335 | + curl --request POST --header 'Content-Type: application/json' --data "{\"text\": \"${COMBINED_SUMMARY}\"}" "$LOOP_WEBHOOK_URL" |
| 336 | + fi |
| 337 | + env: |
| 338 | + LOOP_WEBHOOK_URL: ${{ secrets.LOOP_WEBHOOK_URL }} |
0 commit comments