build: remove lingering references to log-rotator.sh #5467

Workflow file for this run

name: Main
on:
  push:
    branches: [main, v1]
  pull_request:
    branches: [main, v1]
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
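# Five independent jobs run in parallel on every push/PR to main or v1; only the
# e2e-report job below waits on another job (e2e-tests).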
jobs:
  lint:
    timeout-minutes: 8
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'
          cache-dependency-path: 'yarn.lock'
          cache: 'yarn'
      - name: Install root dependencies
        run: yarn install
      - name: Build dependencies
        run: make ci-build
      - name: Install core libs
        run: sudo apt-get install --yes curl bc
      - name: Run lint + type check
        run: make ci-lint
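  # Unit tests: same checkout/Node/yarn setup and `make ci-build` as lint, followed by `make ci-unit`.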
  unit:
    timeout-minutes: 8
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'
          cache-dependency-path: 'yarn.lock'
          cache: 'yarn'
      - name: Install root dependencies
        run: yarn install
      - name: Build dependencies
        run: make ci-build
      - name: Run unit tests
        run: make ci-unit
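  # Integration tests: "Expose GitHub Runtime" exports the Actions cache credentials that
  # buildx needs for the `type=gha` cache backend; the service images are then built and
  # loaded via `docker buildx bake` from docker-compose.ci.yml.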
  integration:
    timeout-minutes: 8
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'
          cache-dependency-path: 'yarn.lock'
          cache: 'yarn'
      - name: Install root dependencies
        run: yarn install
      - name: Expose GitHub Runtime
        uses: crazy-max/ghaction-github-runtime@v2
      - name: Spin up docker services
        run: |
          docker buildx create --use --driver=docker-container
          docker buildx bake -f ./docker-compose.ci.yml --set *.cache-to="type=gha" --set *.cache-from="type=gha" --load
      - name: Build dependencies
        run: make ci-build
      - name: Run integration tests
        run: make ci-int
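  # OTel collector smoke tests: skipped unless files under docker/otel-collector or
  # smoke-tests/otel-collector changed in this push/PR.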
  otel-smoke-test:
    timeout-minutes: 8
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Get changed OTEL collector files
        id: changed-files
        uses: tj-actions/changed-files@v46
        with:
          files: |
            docker/otel-collector/**
            smoke-tests/otel-collector/**
      - name: Install required tooling
        if: steps.changed-files.outputs.any_changed == 'true'
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          sudo apt-get install -y apt-transport-https ca-certificates curl gnupg
          curl -fsSL 'https://packages.clickhouse.com/rpm/lts/repodata/repomd.xml.key' | sudo gpg --dearmor -o /usr/share/keyrings/clickhouse-keyring.gpg
          ARCH=$(dpkg --print-architecture)
          echo "deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg arch=${ARCH}] https://packages.clickhouse.com/deb stable main" | sudo tee /etc/apt/sources.list.d/clickhouse.list
          sudo apt-get update
          sudo apt-get install --yes curl bats clickhouse-client
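      # The ClickHouse apt repo configured above provides clickhouse-client; `bats .`
      # runs every *.bats file in the smoke-test directory (presumably using
      # clickhouse-client to verify the collector's output in ClickHouse).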
      - name: Run Smoke Tests
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ./smoke-tests/otel-collector
        run: bats .
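  # Playwright E2E tests: run inside the Playwright container alongside a MongoDB service
  # container, split into 4 shards that execute as parallel matrix jobs.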
  e2e-tests:
    name: E2E Tests - Shard ${{ matrix.shard }}
    runs-on: ubuntu-24.04
    timeout-minutes: 15
    container:
      image: mcr.microsoft.com/playwright:v1.57.0-jammy
      options: --network-alias playwright
    services:
      mongodb:
        image: mongo:5.0.14-focal
        options: >-
          --health-cmd "mongosh --quiet --eval 'db.adminCommand({ping: 1});
          db.getSiblingDB(\"test\").test.insertOne({_id: \"hc\"});
          db.getSiblingDB(\"test\").test.deleteOne({_id: \"hc\"})'"
          --health-interval 10s --health-timeout 5s --health-retries 10
          --health-start-period 10s
    permissions:
      contents: read
      pull-requests: write
    strategy:
      fail-fast: false
      matrix:
        shard: [1, 2, 3, 4]
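    # Each shard runs a quarter of the suite via Playwright's --shard=N/4 flag (see the run step below).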
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'
          cache-dependency-path: 'yarn.lock'
          cache: 'yarn'
      - name: Install dependencies
        run: yarn install
      - name: Build dependencies
        run: npx nx run-many -t ci:build
      - name: Run Playwright tests (full-stack mode)
        # MongoDB service health check ensures it's ready before this step runs
        # Note: Tests use ClickHouse demo instance (otel_demo with empty password)
        # This is intentionally public - it's ClickHouse's read-only demo instance
        env:
          E2E_FULLSTACK: 'true'
          E2E_UNIQUE_USER: 'true'
          E2E_API_HEALTH_CHECK_MAX_RETRIES: '60'
          MONGO_URI: mongodb://mongodb:27017/hyperdx-e2e
        run: |
          cd packages/app
          yarn test:e2e --shard=${{ matrix.shard }}/4
      - name: Upload Playwright report
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: playwright-report-${{ matrix.shard }}
          path: packages/app/playwright-report/
          retention-days: 30
      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-${{ matrix.shard }}
          path: packages/app/test-results/
          retention-days: 30
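  # Aggregates the per-shard results.json artifacts, posts a summary comment on PRs,
  # and fails if any shard reported failing tests.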
  e2e-report:
    name: End-to-End Tests
    if: always()
    needs: e2e-tests
    runs-on: ubuntu-24.04
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Download all test results
        uses: actions/download-artifact@v4
        with:
          pattern: test-results-*
          path: all-test-results
      - name: Aggregate test results
        id: test-results
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          result-encoding: string
          script: |
            const fs = require('fs');
            const path = require('path');
            let totalPassed = 0;
            let totalFailed = 0;
            let totalFlaky = 0;
            let totalSkipped = 0;
            let totalDuration = 0;
            let foundResults = false;
            try {
              const resultsDir = 'all-test-results';
              const shards = fs.readdirSync(resultsDir);
              for (const shard of shards) {
                const resultsPath = path.join(resultsDir, shard, 'results.json');
                if (fs.existsSync(resultsPath)) {
                  foundResults = true;
                  const results = JSON.parse(fs.readFileSync(resultsPath, 'utf8'));
                  const { stats } = results;
                  totalPassed += stats.expected || 0;
                  totalFailed += stats.unexpected || 0;
                  totalFlaky += stats.flaky || 0;
                  totalSkipped += stats.skipped || 0;
                  totalDuration += stats.duration || 0;
                }
              }
              if (foundResults) {
                const duration = Math.round(totalDuration / 1000);
                const summary = totalFailed > 0
                  ? `❌ **${totalFailed} test${totalFailed > 1 ? 's' : ''} failed**`
                  : `✅ **All tests passed**`;
                return `## E2E Test Results
            ${summary} • ${totalPassed} passed • ${totalSkipped} skipped • ${duration}s
            | Status | Count |
            |--------|-------|
            | ✅ Passed | ${totalPassed} |
            | ❌ Failed | ${totalFailed} |
            | ⚠️ Flaky | ${totalFlaky} |
            | ⏭️ Skipped | ${totalSkipped} |
            Tests ran across ${shards.length} shards in parallel.
            [View full report →](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})`;
              } else {
                return `## E2E Test Results
            ❌ **Test results file not found**
            [View full report →](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})`;
              }
            } catch (error) {
              console.log('Could not parse test results:', error.message);
              return `## E2E Test Results
            ❌ **Error reading test results**: ${error.message}
            [View full report →](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})`;
            }
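      # Posts the markdown summary produced above; message-id lets add-pr-comment update
      # the existing comment on later runs instead of posting a new one each time.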
      - name: Comment PR with test results
        uses: mshick/add-pr-comment@v2
        if: always() && github.event_name == 'pull_request'
        with:
          message: ${{ steps.test-results.outputs.result }}
          message-id: e2e-test-results
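      # Re-reads the downloaded shard results and fails this job if any shard had
      # unexpected (failing) tests or the results cannot be read.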
      - name: Check test results
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const path = require('path');
            let totalFailed = 0;
            try {
              const resultsDir = 'all-test-results';
              const shards = fs.readdirSync(resultsDir);
              for (const shard of shards) {
                const resultsPath = path.join(resultsDir, shard, 'results.json');
                if (fs.existsSync(resultsPath)) {
                  const results = JSON.parse(fs.readFileSync(resultsPath, 'utf8'));
                  totalFailed += results.stats.unexpected || 0;
                }
              }
              if (totalFailed > 0) {
                core.setFailed(`${totalFailed} test(s) failed`);
              }
            } catch (error) {
              core.setFailed(`Failed to read test results: ${error.message}`);
            }