Remove obsolete items #386

Workflow file for this run

name: Python Tests
on:
  workflow_dispatch:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
permissions:
  contents: read
  checks: write
  pull-requests: write
jobs:
  test:
    runs-on: ubuntu-latest
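    # Run the full suite once per supported Python version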
    strategy:
      matrix:
        python-version: [ '3.12', '3.13', '3.14' ]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v4
        with:
          enable-cache: true
          cache-dependency-glob: "pyproject.toml"
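      # Create the virtual environment and sync project dependencies; the uv
      # cache above is keyed on pyproject.toml, so unchanged dependencies are
      # restored rather than re-resolved on repeat runs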
      - name: Install dependencies
        run: |
          uv venv
          uv sync
          # Verify coverage HTML templates are present
          uv run python -c "import coverage.html; print(coverage.html.__file__)"
      # Lint the Python files & upload the result statistics
      - name: Run pylint analysis
        id: pylint
        run: |
          mkdir -p tests/python/pylint/reports
          # Use python -m pylint and tee to ensure output is captured and visible in logs
          PYTHONPATH=$(pwd) uv run python -m pylint --rcfile tests/python/.pylintrc infrastructure samples setup shared tests 2>&1 | tee tests/python/pylint/reports/latest.txt
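      # Note: because pylint is piped through tee, the step's exit status comes
      # from tee under the default shell (no pipefail), so lint warnings do not
      # fail the job; the captured report is uploaded below for inspection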
      - name: Upload pylint reports
        uses: actions/upload-artifact@v4
        with:
          name: pylint-reports-${{ matrix.python-version }}
          path: tests/python/pylint/reports/
      # Static code analysis through simple compilation to ensure code is syntactically sound
      - name: Verify bytecode compilation
        run: |
          uv run python -m compileall infrastructure samples setup shared tests
      # Run tests and generate coverage reports
      - name: Run pytest with coverage and generate JUnit XML
        id: pytest
        run: |
          PYTHONPATH=$(pwd) COVERAGE_FILE=tests/python/.coverage-${{ matrix.python-version }} uv run pytest --cov=shared/python --cov-config=tests/python/.coveragerc --cov-report=html:tests/python/htmlcov-${{ matrix.python-version }} --cov-report=term-missing --junitxml=tests/python/junit-${{ matrix.python-version }}.xml tests/python/
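      # Upload the per-version coverage HTML and JUnit XML artifacts; the Python
      # version in the artifact name keeps matrix jobs from colliding, since
      # upload-artifact@v4 requires unique artifact names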
      - name: Upload coverage HTML report
        uses: actions/upload-artifact@v4
        with:
          name: coverage-html-${{ matrix.python-version }}
          path: tests/python/htmlcov-${{ matrix.python-version }}/
      - name: Upload JUnit test results
        uses: actions/upload-artifact@v4
        with:
          name: junit-results-${{ matrix.python-version }}
          path: tests/python/junit-${{ matrix.python-version }}.xml
      # Extract all linting and coverage results in preparation for publish
      - name: Extract and Summarize Metrics
        id: metrics
        run: |
          # Pylint Score
          TEXT_REPORT="tests/python/pylint/reports/latest.txt"
          if [ -s "$TEXT_REPORT" ]; then
            PYLINT_SCORE=$(grep -Eo 'Your code has been rated at [0-9.]+/10' "$TEXT_REPORT" | grep -Eo '[0-9.]+/10' | head -n 1)
            if [ -n "$PYLINT_SCORE" ]; then
              echo "pylint_score=$PYLINT_SCORE" >> "$GITHUB_OUTPUT"
            else
              echo "pylint_score=N/A" >> "$GITHUB_OUTPUT"
            fi
          else
            echo "pylint_score=N/A" >> "$GITHUB_OUTPUT"
          fi

          # Coverage Percentage
          if [ -f "tests/python/.coverage-${{ matrix.python-version }}" ]; then
            TOTAL_COV=$(PYTHONPATH=$(pwd) COVERAGE_FILE=tests/python/.coverage-${{ matrix.python-version }} uv run python -m coverage report | grep TOTAL | awk '{print $NF}')
            echo "coverage=$TOTAL_COV" >> "$GITHUB_OUTPUT"
          else
            echo "coverage=N/A" >> "$GITHUB_OUTPUT"
          fi
      # Publish general statistics for linting, test success, and code coverage, as well as detailed test results
      - name: Publish Consolidated Results to PR
        if: github.event_name == 'pull_request'
        uses: marocchino/sticky-pull-request-comment@v2
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          header: python-results-${{ matrix.python-version }}
          message: |
            ## 🐍 Python ${{ matrix.python-version }} Results
            | Metric | Status | Value |
            | :--- | :---: | :--- |
            | **Pylint Score** | ${{ steps.pylint.outcome == 'success' && '✅' || '⚠️' }} | `${{ steps.metrics.outputs.pylint_score }}` |
            | **Unit Tests** | ${{ steps.pytest.outcome == 'success' && '✅' || '❌' }} | `${{ steps.pytest.outcome }}` |
            | **Code Coverage** | 📊 | `${{ steps.metrics.outputs.coverage }}` |

            [Full Workflow Logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
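      # Mirror the same metrics in the Actions run summary ($GITHUB_STEP_SUMMARY)
      # so they are visible for push and manual runs, not just pull requests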
      - name: Generate Job Summary
        run: |
          echo "## 🐍 Python ${{ matrix.python-version }} Execution Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Category | Status | Detail |" >> $GITHUB_STEP_SUMMARY
          echo "| :--- | :---: | :--- |" >> $GITHUB_STEP_SUMMARY
          # Escape the backticks so the shell does not treat them as command substitution
          echo "| **Pylint** | ${{ steps.pylint.outcome == 'success' && '✅' || '⚠️' }} | Score: \`${{ steps.metrics.outputs.pylint_score }}\` |" >> $GITHUB_STEP_SUMMARY
          echo "| **Pytest** | ${{ steps.pytest.outcome == 'success' && '✅' || '❌' }} | Outcome: \`${{ steps.pytest.outcome }}\` |" >> $GITHUB_STEP_SUMMARY
          echo "| **Coverage** | 📊 | Total: \`${{ steps.metrics.outputs.coverage }}\` |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "---" >> $GITHUB_STEP_SUMMARY
      - name: Publish Unit Test Results
        uses: EnricoMi/publish-unit-test-result-action@v2
        with:
          files: tests/python/junit-${{ matrix.python-version }}.xml
          comment_title: Python ${{ matrix.python-version }} Detailed Test Results