# Composite action: parse Maven Surefire XML reports, aggregate the
# success/failure/error/skipped counts (overall and per Maven module),
# publish a Markdown table to the job summary (GITHUB_STEP_SUMMARY), and
# expose the totals as step outputs for downstream jobs.
name: "Test summary"
description: "Generate a Markdown summary of Surefire test results and write it to GITHUB_STEP_SUMMARY"

inputs:
  report_paths:
    description: "Glob pattern for Surefire XML reports"
    required: false
    default: "./**/surefire-reports/TEST-*.xml"
  title:
    description: "Title for the test summary section"
    required: false
    default: "Test summary"
  include_module_table:
    description: "Whether to include the per-module breakdown table"
    required: false
    default: "true"

outputs:
  total_success:
    description: "Total number of successful tests"
    value: ${{ steps.generate_summary.outputs.total_success }}
  total_failed:
    description: "Total number of failed tests"
    value: ${{ steps.generate_summary.outputs.total_failed }}
  total_errors:
    description: "Total number of tests with errors"
    value: ${{ steps.generate_summary.outputs.total_errors }}
  total_skipped:
    description: "Total number of skipped tests"
    value: ${{ steps.generate_summary.outputs.total_skipped }}
  total_tests:
    description: "Total number of tests"
    value: ${{ steps.generate_summary.outputs.total_tests }}

runs:
  using: "composite"
  steps:
    - name: Generate test summary
      id: generate_summary
      shell: bash
      env:
        REPORT_GLOB: ${{ inputs.report_paths }}
        SUMMARY_TITLE: ${{ inputs.title }}
        INCLUDE_MODULE_TABLE: ${{ inputs.include_module_table }}
      run: |
        # python3 is available on all GitHub-hosted runners; the quoted 'PY'
        # delimiter stops bash from expanding anything inside the script.
        python3 - << 'PY'
        """Aggregate Surefire XML reports into a Markdown job summary."""
        import glob
        import os
        import xml.etree.ElementTree as ET

        report_glob = os.environ.get("REPORT_GLOB", "./**/surefire-reports/TEST-*.xml")
        title = os.environ.get("SUMMARY_TITLE", "Test summary")
        include_module_table = os.environ.get("INCLUDE_MODULE_TABLE", "true").lower() == "true"
        github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY")
        github_output = os.environ.get("GITHUB_OUTPUT")

        report_files = glob.glob(report_glob, recursive=True)

        # Aggregated counters: overall, and keyed by Maven module name.
        COUNT_KEYS = ("tests", "failures", "errors", "skipped")
        total = {key: 0 for key in COUNT_KEYS}
        modules = {}

        def suite_count(suite, attr):
            """Read an integer count attribute from a <testsuite>, defaulting to 0."""
            try:
                return int(suite.attrib.get(attr, "0"))
            except ValueError:
                return 0

        def add_counts(target, counts):
            """Accumulate a per-suite count dict into a running-total dict."""
            for key, value in counts.items():
                target[key] += value

        for path in report_files:
            try:
                root = ET.parse(path).getroot()
            except Exception:
                # Best-effort: skip malformed/truncated XML rather than fail the job.
                continue

            # Surefire writes a <testsuite> root; tolerate a <testsuites> wrapper too.
            if root.tag == "testsuite":
                suites = [root]
            elif root.tag == "testsuites":
                suites = list(root.findall("testsuite"))
            else:
                suites = []
            if not suites:
                continue

            # Module name = directory immediately above 'target', e.g.
            # some-module/target/surefire-reports/TEST-*.xml -> 'some-module'.
            parts = path.split(os.sep)
            module_name = "root"
            if "target" in parts:
                idx = parts.index("target")
                if idx > 0:
                    module_name = parts[idx - 1]

            module = modules.setdefault(module_name, {key: 0 for key in COUNT_KEYS})

            for suite in suites:
                counts = {key: suite_count(suite, key) for key in COUNT_KEYS}
                add_counts(total, counts)
                add_counts(module, counts)

        def success_count(data):
            """Successes = tests minus failures/errors/skipped, clamped at 0."""
            return max(0, data["tests"] - data["failures"] - data["errors"] - data["skipped"])

        total_success = success_count(total)

        # Build the Markdown summary.
        lines = [f"## {title}", ""]
        if not report_files:
            lines.append("_No test reports were found with pattern:_")
            lines.append(f"`{report_glob}`")
        else:
            lines.append("### Overall")
            lines.append("")
            lines.append("| Success | Failed | Errors | Skipped | Total |")
            lines.append("| --- | --- | --- | --- | --- |")
            lines.append(
                f"| {total_success} | {total['failures']} | {total['errors']} | {total['skipped']} | {total['tests']} |"
            )
            lines.append("")

            if include_module_table and modules:
                lines.append("### By module")
                lines.append("")
                lines.append("| Module | Success | Failed | Errors | Skipped | Total |")
                lines.append("| --- | --- | --- | --- | --- | --- |")
                for module_name in sorted(modules):
                    data = modules[module_name]
                    lines.append(
                        f"| {module_name} | {success_count(data)} | {data['failures']} | {data['errors']} | {data['skipped']} | {data['tests']} |"
                    )
                lines.append("")

            # Optional detail: number of XML report files found
            lines.append(f"_Processed {len(report_files)} Surefire report file(s)._")

        markdown = "\n".join(lines)

        # Append to the job summary when running inside Actions.
        if github_step_summary:
            with open(github_step_summary, "a", encoding="utf-8") as f:
                f.write(markdown + "\n")

        # Expose totals as step outputs so callers can reuse them
        # (single open instead of one reopen per value).
        if github_output:
            with open(github_output, "a", encoding="utf-8") as f:
                f.write(f"total_success={total_success}\n")
                f.write(f"total_failed={total['failures']}\n")
                f.write(f"total_errors={total['errors']}\n")
                f.write(f"total_skipped={total['skipped']}\n")
                f.write(f"total_tests={total['tests']}\n")
        PY