# feat: Add loadtest to testbench (#543)
#
# Workflow file for this run

name: TestBench Validation
on:
pull_request_target:
branches:
- main
types: [opened, synchronize, reopened, edited]
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.head_ref || github.ref_name }}
cancel-in-progress: true
env:
HEAD_REF: ${{ github.head_ref }}
REF_NAME: ${{ github.ref_name }}
BASE_REF: ${{ github.event.pull_request.base.ref }}
HEAD_SHA: ${{ github.event.pull_request.head.sha }}
PR_NUMBER: ${{ github.event.number }}
JAVA_VERSION: 21
jobs:
check-permissions:
uses: ./.github/workflows/check-permissions.yml
format-check:
name: Format Check
needs: check-permissions
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ env.HEAD_SHA }}
- name: Set up JDK ${{ env.JAVA_VERSION }}
uses: actions/setup-java@v4
with:
java-version: ${{ env.JAVA_VERSION }}
distribution: "temurin"
- name: Cache Maven dependencies
uses: actions/cache@v4
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Run spotless validation
run: mvn spotless:check --batch-mode --no-transfer-progress
build:
name: Build Project
needs: format-check
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ env.HEAD_SHA }}
- name: Set up JDK ${{ env.JAVA_VERSION }}
uses: actions/setup-java@v4
with:
java-version: ${{ env.JAVA_VERSION }}
distribution: "temurin"
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
- name: Cache Maven dependencies
uses: actions/cache@v4
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}-${{ github.run_id }}
restore-keys: |
${{ runner.os }}-maven-
- name: Clean Maven cache (if needed)
if: github.event_name == 'workflow_dispatch'
run: |
echo "---> Clean maven cache"
rm -rf ~/.m2/repository ~/.npm* ~/.pnpm*
- name: Build with Maven and Generate Javadoc
run: mvn clean install javadoc:javadoc -DskipTests -Dtestbench.javadocs -B
unit-tests:
name: Unit Tests
needs: build
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ env.HEAD_SHA }}
- name: Set up JDK ${{ env.JAVA_VERSION }}
uses: actions/setup-java@v4
with:
java-version: ${{ env.JAVA_VERSION }}
distribution: "temurin"
- name: Restore Maven cache
uses: actions/cache/restore@v4
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}-${{ github.run_id }}
- name: Set TB License
run: |
TB_LICENSE=${{secrets.TB_LICENSE}}
mkdir -p ~/.vaadin/
echo '{"username":"'`echo $TB_LICENSE | cut -d / -f1`'","proKey":"'`echo $TB_LICENSE | cut -d / -f2`'"}' > ~/.vaadin/proKey
- name: Run unit tests
run: |
mvn test -B
- name: Upload test results
if: always()
uses: actions/upload-artifact@v4
with:
name: unit-test-results
path: |
**/target/surefire-reports/TEST-*.xml
retention-days: 7
integration-tests:
name: Integration Tests (${{ matrix.name }})
needs: build
runs-on: ubuntu-latest
timeout-minutes: 45
concurrency:
group: saucelabs-testbench # Global queue for SauceLabs tests only
cancel-in-progress: false
strategy:
max-parallel: 1 # Only one JUnit version at a time to stay within SauceLabs limit
matrix:
include:
- name: JUnit 4
module: vaadin-testbench-integration-tests
- name: JUnit 5
module: vaadin-testbench-integration-tests-junit5
- name: JUnit 6
module: vaadin-testbench-integration-tests-junit6
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ env.HEAD_SHA }}
fetch-depth: 0
- name: Set up JDK ${{ env.JAVA_VERSION }}
uses: actions/setup-java@v4
with:
java-version: ${{ env.JAVA_VERSION }}
distribution: "temurin"
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
- name: Restore Maven cache
uses: actions/cache/restore@v4
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}-${{ github.run_id }}
- name: Set up Sauce Labs tunnel
uses: saucelabs/sauce-connect-action@v3.0.0
with:
username: ${{ secrets.SAUCE_USERNAME }}
accessKey: ${{ secrets.SAUCE_ACCESS_KEY }}
tunnelName: ${{ github.run_id }}-${{ github.run_number }}
region: us-west-1
retryTimeout: 300
proxyLocalhost: allow
- name: Set TB License
run: |
TB_LICENSE=${{secrets.TB_LICENSE}}
mkdir -p ~/.vaadin/
echo '{"username":"'`echo $TB_LICENSE | cut -d / -f1`'","proKey":"'`echo $TB_LICENSE | cut -d / -f2`'"}' > ~/.vaadin/proKey
- name: Run Integration Tests
env:
SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }}
SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }}
SAUCE_TUNNEL_ID: ${{ github.run_id }}-${{ github.run_number }}
run: |
mvn verify \
-pl ${{ matrix.module }} -am \
-P validation \
-DskipUnitTests \
-Dsystem.com.vaadin.testbench.Parameters.testsInParallel=5 \
-Dsystem.com.vaadin.testbench.Parameters.maxAttempts=2 \
-Dcom.vaadin.testbench.Parameters.hubHostname=localhost \
-Dsauce.tunnelId=${SAUCE_TUNNEL_ID} \
-Dfailsafe.forkCount=5 \
-Dsystem.sauce.user=${SAUCE_USERNAME} \
-Dsystem.sauce.sauceAccessKey=${SAUCE_ACCESS_KEY} \
-B
- name: Upload error screenshots
if: failure()
uses: actions/upload-artifact@v4
with:
name: error-screenshots-${{ matrix.name }}
path: |
**/error-screenshots/**
retention-days: 7
- name: Upload test results
if: always()
uses: actions/upload-artifact@v4
with:
name: integration-test-results-${{ matrix.name }}
path: |
**/target/failsafe-reports/TEST-*.xml
retention-days: 7
validation-status:
name: Validation Status
permissions:
actions: write
issues: read
checks: write
pull-requests: write
if: always()
needs: [format-check, build, unit-tests, integration-tests]
runs-on: ubuntu-latest
steps:
- name: Merge test result artifacts
uses: actions/upload-artifact/merge@v4
with:
name: test-results
pattern: '{unit-test-results,integration-test-results-*}'
delete-merged: true
- name: Download merged test results
uses: actions/download-artifact@v4
with:
name: test-results
- name: Publish test results
uses: EnricoMi/publish-unit-test-result-action@v2
with:
files: '**/TEST-*.xml'
check_name: Test Results
comment_mode: failures
- name: Check all jobs status
run: |
echo "format-check: ${{ needs.format-check.result }}"
echo "build: ${{ needs.build.result }}"
echo "unit-tests: ${{ needs.unit-tests.result }}"
echo "integration-tests: ${{ needs.integration-tests.result }}"
if [ "${{ needs.format-check.result }}" != "success" ] || \
[ "${{ needs.build.result }}" != "success" ] || \
[ "${{ needs.unit-tests.result }}" != "success" ] || \
[ "${{ needs.integration-tests.result }}" != "success" ]; then
echo "One or more validation jobs failed"
exit 1
fi
echo "All validation jobs completed successfully"