feat(infra): Add workflow for running backend tests with full codecov reporting #1

name: backend - with test coverage reporting
on: [workflow_dispatch, workflow_call, pull_request]
jobs:
  backend-test-with-cov-context:
    name: backend test
    runs-on: ubuntu-24.04
    timeout-minutes: 60
    strategy:
      fail-fast: false
      matrix:
        instance:
          [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
    env:
      MATRIX_INSTANCE_TOTAL: 22
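      # NOTE: keep MATRIX_INSTANCE_TOTAL in sync with the matrix.instance list above (22 shards, numbered 0-21).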
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Setup sentry env
        uses: ./.github/actions/setup-sentry
        id: setup
        with:
          mode: backend-ci
      - name: Run backend test with coverage (${{ steps.setup.outputs.matrix-instance-number }} of ${{ steps.setup.outputs.matrix-instance-total }})
        id: run_backend_tests
        run: |
          make test-python-ci-with-coverage
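      # NOTE: the make target above is expected to leave .coverage / .coverage.* sqlite files in the working directory; the next step collects them.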
      # Even if tests fail, gather the raw coverage sqlite DB(s)
      - name: Collect raw coverage database files
        if: ${{ always() }}
        shell: bash
        run: |
          set -euxo pipefail
          # List all coverage files before processing
          echo "Coverage files before combining:"
          ls -lah .coverage* 2>/dev/null || echo "No coverage files found"
          # If there are sharded coverage files, combine them first
          # This ensures each job uploads only ONE combined .coverage file
          if compgen -G ".coverage.*" > /dev/null; then
            echo "Found sharded coverage files, combining..."
            # Combine without --keep to remove sharded files after combining
            coverage combine
          fi
          # Ensure we have a .coverage file
          if [[ ! -f .coverage ]]; then
            echo "Error: No .coverage file found after tests"
            exit 1
          fi
          # Verify the database is not corrupted
          python -c "import sqlite3; con=sqlite3.connect('.coverage'); con.execute('SELECT COUNT(*) FROM file'); print('Coverage DB is valid')" || {
            echo "Error: Coverage database is corrupted"
            exit 1
          }
      - name: Upload raw coverage sqlite as artifact
        if: ${{ always() }}
        uses: actions/upload-artifact@v4
        with:
          name: pycoverage-sqlite-${{ github.run_id }}-${{ steps.setup.outputs.matrix-instance-number }}
          path: .coverage
          if-no-files-found: error
          retention-days: 7
          include-hidden-files: true
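  # Fan-in job: gated on `if: always()` so that coverage collected by passing
  # shards is still combined even when some test shards fail.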
  combine-coverage:
    name: Combine coverage from all test jobs
    runs-on: ubuntu-24.04
    needs: backend-test-with-cov-context
    if: ${{ always() }}
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - uses: astral-sh/setup-uv@884ad927a57e558e7a70b92f2bccf9198a4be546 # v6
        with:
          version: '0.8.2'
          enable-cache: false
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.13.1'
      - name: Install coverage and plugins
        run: |
          uv pip install --system 'coverage[toml]' covdefaults sentry-covdefaults-disable-branch-coverage
      - name: Download all coverage artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: pycoverage-sqlite-${{ github.run_id }}-*
          path: .artifacts/all-coverage
      - name: List downloaded artifacts (debug)
        run: |
          echo "=== Downloaded artifacts structure ==="
          ls -la .artifacts/all-coverage || true
          find .artifacts/all-coverage -type f || true
      - name: Rename coverage files for combining
        run: |
          set -euxo pipefail
          mkdir -p .artifacts/to-combine
          # The download-artifact action creates a subdirectory for each artifact
          # Find all .coverage files and rename them with unique identifiers
          find .artifacts/all-coverage -name ".coverage" -type f | while IFS= read -r file; do
            # Get the parent directory name (artifact name)
            parent_dir=$(basename "$(dirname "$file")")
            echo "Found coverage file: $file from $parent_dir"
            # Extract the matrix instance number from the artifact name
            instance=$(echo "$parent_dir" | grep -oP '(?<=-)\d+$' || echo "unknown")
            # Copy with a unique name for coverage combine
            cp -v "$file" ".artifacts/to-combine/.coverage.${instance}"
          done
          echo "=== Files ready for combining ==="
          ls -la .artifacts/to-combine/
      - name: Combine all coverage databases
        run: |
          set -euxo pipefail
          mkdir -p .artifacts/combined-coverage
          # Combine all coverage files into a single database
          # Don't use --keep so the sharded files are removed after combining
          coverage combine .artifacts/to-combine/.coverage.*
          # Move the combined file to the output directory
          if [[ -f .coverage ]]; then
            mv .coverage .artifacts/combined-coverage/.coverage.combined
          else
            echo "Error: Combined coverage file was not created"
            exit 1
          fi
      - name: Generate coverage report (debug)
        run: |
          # Sanity-check the combined DB: size, table list, and row counts for the file/context tables
          if [[ -f .artifacts/combined-coverage/.coverage.combined ]]; then
            python -c "import os,sqlite3; p='.artifacts/combined-coverage/.coverage.combined'; \
              print('Combined coverage db:', p, os.path.getsize(p), 'bytes'); \
              con=sqlite3.connect(p); cur=con.cursor(); \
              print('tables:', [r[0] for r in cur.execute(\"select name from sqlite_master where type='table' order by 1\").fetchall()]); \
              print('file rows:', cur.execute('select count(*) from file').fetchone()[0] if cur.execute(\"select count(*) from sqlite_master where type='table' and name='file'\").fetchone()[0] else 'n/a'); \
              print('context rows:', cur.execute('select count(*) from context').fetchone()[0] if cur.execute(\"select count(*) from sqlite_master where type='table' and name='context'\").fetchone()[0] else 'n/a')"
          fi
      - name: Upload combined coverage database
        uses: actions/upload-artifact@v4
        with:
          name: pycoverage-sqlite-combined-${{ github.run_id }}
          path: .artifacts/combined-coverage/.coverage.combined
          if-no-files-found: error
          retention-days: 30
          include-hidden-files: true
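
Not part of the workflow, but a minimal sketch of how the combined artifact can be inspected locally with coverage.py's Python API, assuming the pycoverage-sqlite-combined-* artifact has been downloaded to the repository root as .coverage.combined and that coverage[toml] (plus any config plugins referenced by the repo's coverage settings, such as covdefaults) is installed:

# local_coverage_report.py - hypothetical helper script, not included in this PR
from coverage import Coverage

cov = Coverage(data_file=".coverage.combined")  # point coverage.py at the downloaded artifact
cov.load()                                      # read the combined sqlite database
cov.report(show_missing=False)                  # text report over the same data `coverage report` would use

The CLI equivalent is COVERAGE_FILE=.coverage.combined coverage report, run from the repository root so the measured file paths resolve against the checked-out source.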