Workflow file for this run — PR #154: VLM integration via Ollama added

# Playwright end-to-end test workflow: builds and starts the frontend
# container, waits for it to serve HTTP, runs the tests, and uploads the
# HTML report as an artifact (even on failure).
name: Playwright Tests

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
  test:
    timeout-minutes: 10
    runs-on: ubuntu-latest
    steps:
      # v4 matches upload-artifact@v4 below; the v3 actions run on the
      # deprecated Node 16 runtime.
      - uses: actions/checkout@v4

      - uses: actions/setup-node@v4
        with:
          # Quoted so the version is always read as a string, never a number.
          node-version: "20"

      - name: Install dependencies
        run: npm ci

      - name: Install Playwright browsers
        run: npx playwright install --with-deps chromium

      - name: Build and start frontend container
        run: |
          docker compose build ui
          docker compose up -d ui

      - name: Wait for frontend to be ready
        run: |
          timeout=60
          counter=0
          # Poll once per second until the app answers or the 60 s budget
          # is exhausted. -s silences per-attempt progress noise in the log;
          # -f makes curl fail on HTTP error statuses.
          until curl -sf http://localhost:8080 || [ "$counter" -eq "$timeout" ]; do
            sleep 1
            counter=$((counter+1))
          done
          if [ "$counter" -eq "$timeout" ]; then
            echo "Frontend did not become ready in time"
            docker compose logs ui
            exit 1
          fi

      - name: Run Playwright tests
        run: npx playwright test
        env:
          # Env values are strings; quote the boolean-looking value explicitly.
          CI: "true"
          BASE_URL: http://localhost:8080

      # Tear down the container even if the test step failed.
      - name: Stop frontend container
        if: always()
        run: docker compose down

      # Publish the report regardless of test outcome.
      - uses: actions/upload-artifact@v4
        if: always()
        with:
          name: playwright-report
          path: playwright-report/
          retention-days: 30