Skip to content

frontend unit+e2e tests #15

frontend unit+e2e tests

frontend unit+e2e tests #15

Workflow file for this run

name: CI

# NOTE: "on" is a YAML 1.1 truthy key; GitHub's loader handles it, but generic
# YAML tooling may read the key as `true` (suppress yamllint's `truthy` rule here).
on:
  push:
    branches: [main, dev]
  pull_request:
    branches: [main, dev]
  workflow_dispatch:

jobs:
  # ============================================================
  # Phase 1: Unit tests run immediately in parallel (no docker)
  # ============================================================
  frontend-unit:
    name: Frontend Unit Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22'
          cache: 'npm'
          cache-dependency-path: frontend/package-lock.json

      - name: Install dependencies
        working-directory: frontend
        run: npm ci

      - name: Run unit tests
        working-directory: frontend
        run: npm test

      - name: Run tests with coverage
        working-directory: frontend
        run: npm run test:coverage

      # `if: always()` so a failed coverage run still publishes partial results.
      - name: Upload coverage
        uses: actions/upload-artifact@v6
        if: always()
        with:
          name: frontend-coverage
          path: frontend/coverage/

  # NOTE: Backend has no pure unit tests (all tests require MongoDB/Redis)
  # All backend tests run in backend-integration job below

  # ============================================================
  # Phase 2: Integration/E2E tests run in parallel after unit tests
  # Each job sets up its own infrastructure (docker cache is shared)
  # ============================================================
  frontend-e2e:
    name: Frontend E2E Tests
    needs: frontend-unit
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22'
          cache: 'npm'
          cache-dependency-path: frontend/package-lock.json

      - name: Install dependencies
        working-directory: frontend
        run: npm ci

      - name: Install Playwright browsers
        working-directory: frontend
        run: npx playwright install chromium

      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3

      # k3s with traefik disabled; TLS SAN added so containers can reach the
      # API server via host.docker.internal.
      - name: Setup Kubernetes (k3s)
        run: |
          curl -sfL https://get.k3s.io | INSTALL_K3S_EXEC="--disable=traefik --tls-san host.docker.internal" sh -
          mkdir -p /home/runner/.kube
          sudo k3s kubectl config view --raw > /home/runner/.kube/config
          sudo chmod 600 /home/runner/.kube/config
          # An `export` would only last for this step; persist KUBECONFIG for
          # later steps (bare `kubectl` calls) via GITHUB_ENV instead.
          echo "KUBECONFIG=/home/runner/.kube/config" >> "$GITHUB_ENV"
          timeout 90 bash -c 'until sudo k3s kubectl cluster-info; do sleep 5; done'

      # Minimal token-based kubeconfig consumed by the backend container.
      - name: Create kubeconfig for CI
        run: |
          cat > backend/kubeconfig.yaml <<EOF
          apiVersion: v1
          kind: Config
          clusters:
            - name: ci-cluster
              cluster:
                server: https://host.docker.internal:6443
                insecure-skip-tls-verify: true
          users:
            - name: ci-user
              user:
                token: "ci-token"
          contexts:
            - name: ci
              context:
                cluster: ci-cluster
                user: ci-user
          current-context: ci
          EOF

      - name: Setup CI Compose
        uses: ./.github/actions/setup-ci-compose
        with:
          kubeconfig-path: /home/runner/.kube/config

      # Reuse BuildKit layer cache from this branch, falling back to main.
      - name: Build services
        uses: docker/bake-action@v6
        with:
          source: .
          files: docker-compose.ci.yaml
          load: true
          set: |
            *.cache-from=type=gha,scope=buildkit-${{ github.repository }}-${{ github.ref_name }}
            *.cache-from=type=gha,scope=buildkit-${{ github.repository }}-main
            *.cache-to=type=gha,mode=max,scope=buildkit-${{ github.repository }}-${{ github.ref_name }}
            *.pull=true
        env:
          BUILDKIT_PROGRESS: plain

      - name: Start services
        run: |
          docker compose -f docker-compose.ci.yaml up -d --remove-orphans
          docker compose -f docker-compose.ci.yaml ps

      - name: Wait for services
        run: |
          echo "Waiting for backend..."
          curl --retry 60 --retry-delay 5 --retry-all-errors -ksf https://127.0.0.1:443/api/v1/health/live
          echo "Waiting for frontend..."
          curl --retry 60 --retry-delay 5 --retry-all-errors -ksf https://127.0.0.1:5001/
          echo "Testing frontend->backend proxy..."
          docker exec frontend curl -ksf https://backend:443/api/v1/health/live || echo "WARNING: Frontend cannot reach backend!"
          echo "Testing auth endpoint via proxy..."
          curl -ksf https://127.0.0.1:5001/api/v1/auth/verify-token || echo "WARNING: Auth endpoint returned error (expected without cookies)"
          echo "Services ready!"

      - name: Seed test users
        run: |
          docker compose -f docker-compose.ci.yaml exec -T backend uv run python scripts/seed_users.py
          echo "Test users seeded"

      - name: Run E2E tests
        working-directory: frontend
        env:
          # Quoted so generic YAML tooling keeps the value a string, not a boolean.
          CI: "true"
        run: npx playwright test --reporter=html

      - name: Upload Playwright report
        uses: actions/upload-artifact@v6
        if: always()
        with:
          name: playwright-report
          path: frontend/playwright-report/

      - name: Collect logs
        if: failure()
        run: |
          mkdir -p logs
          docker compose -f docker-compose.ci.yaml logs > logs/docker-compose.log
          docker compose -f docker-compose.ci.yaml logs backend > logs/backend.log
          docker compose -f docker-compose.ci.yaml logs frontend > logs/frontend.log

      - name: Upload logs
        if: failure()
        uses: actions/upload-artifact@v6
        with:
          name: frontend-e2e-logs
          path: logs/

  backend-integration:
    name: Backend Integration Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Set up uv
        uses: astral-sh/setup-uv@v7
        with:
          enable-cache: true
          cache-dependency-glob: "backend/uv.lock"

      - name: Install Python dependencies
        run: |
          cd backend
          uv python install 3.12
          uv sync --frozen

      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3

      # k3s with traefik disabled; TLS SAN added so containers can reach the
      # API server via host.docker.internal.
      - name: Setup Kubernetes (k3s)
        run: |
          curl -sfL https://get.k3s.io | INSTALL_K3S_EXEC="--disable=traefik --tls-san host.docker.internal" sh -
          mkdir -p /home/runner/.kube
          sudo k3s kubectl config view --raw > /home/runner/.kube/config
          sudo chmod 600 /home/runner/.kube/config
          # An `export` would only last for this step; persist KUBECONFIG for
          # later steps (bare `kubectl` calls) via GITHUB_ENV instead.
          echo "KUBECONFIG=/home/runner/.kube/config" >> "$GITHUB_ENV"
          timeout 90 bash -c 'until sudo k3s kubectl cluster-info; do sleep 5; done'

      # Minimal token-based kubeconfig consumed by the backend container.
      - name: Create kubeconfig for CI
        run: |
          cat > backend/kubeconfig.yaml <<EOF
          apiVersion: v1
          kind: Config
          clusters:
            - name: ci-cluster
              cluster:
                server: https://host.docker.internal:6443
                insecure-skip-tls-verify: true
          users:
            - name: ci-user
              user:
                token: "ci-token"
          contexts:
            - name: ci
              context:
                cluster: ci-cluster
                user: ci-user
          current-context: ci
          EOF

      - name: Setup CI Compose
        uses: ./.github/actions/setup-ci-compose
        with:
          kubeconfig-path: /home/runner/.kube/config

      # Reuse BuildKit layer cache from this branch, falling back to main.
      - name: Build services
        uses: docker/bake-action@v6
        with:
          source: .
          files: docker-compose.ci.yaml
          load: true
          set: |
            *.cache-from=type=gha,scope=buildkit-${{ github.repository }}-${{ github.ref_name }}
            *.cache-from=type=gha,scope=buildkit-${{ github.repository }}-main
            *.cache-to=type=gha,mode=max,scope=buildkit-${{ github.repository }}-${{ github.ref_name }}
            *.pull=true
        env:
          BUILDKIT_PROGRESS: plain

      - name: Start services
        run: |
          docker compose -f docker-compose.ci.yaml up -d --remove-orphans
          docker compose -f docker-compose.ci.yaml ps

      - name: Wait for backend
        run: |
          curl --retry 60 --retry-delay 5 --retry-all-errors -ksf https://127.0.0.1:443/api/v1/health/live
          docker compose -f docker-compose.ci.yaml ps
          kubectl get pods -A -o wide

      - name: Run integration tests
        timeout-minutes: 5
        env:
          BACKEND_BASE_URL: https://127.0.0.1:443
          MONGO_ROOT_USER: root
          MONGO_ROOT_PASSWORD: rootpassword
          MONGODB_HOST: 127.0.0.1
          # Quoted: env values should be strings, not YAML integers.
          MONGODB_PORT: "27017"
          MONGODB_URL: mongodb://root:[email protected]:27017/?authSource=admin
          SCHEMA_SUBJECT_PREFIX: "ci.${{ github.run_id }}."
        run: |
          cd backend
          uv run pytest tests/integration -v --cov=app --cov-branch --cov-report=xml --cov-report=term

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
        if: always()
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: backend/coverage.xml
          flags: backend-integration
          name: backend-integration-coverage
          slug: HardMax71/Integr8sCode
          fail_ci_if_error: false

      - name: Collect logs
        if: failure()
        run: |
          mkdir -p logs
          docker compose -f docker-compose.ci.yaml logs > logs/docker-compose.log
          docker compose -f docker-compose.ci.yaml logs backend > logs/backend.log
          docker compose -f docker-compose.ci.yaml logs mongo > logs/mongo.log
          kubectl get events --sort-by='.metadata.creationTimestamp' > logs/k8s-events.log 2>&1 || true
          kubectl describe pods -A > logs/k8s-describe-pods.log 2>&1 || true

      - name: Upload logs
        if: failure()
        uses: actions/upload-artifact@v6
        with:
          name: backend-integration-logs
          path: logs/