Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 32 additions & 0 deletions .github/workflows/visual-comparison.yml
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,7 @@ jobs:
id: download-references
env:
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
run: |
if ./static/scripts/screenshot-artifacts.sh ./screenshots; then
Expand All @@ -160,10 +161,32 @@ jobs:
echo "⚠️ No reference screenshots found. This will create new baselines instead of comparing."
fi

- name: Debug - List downloaded screenshots
run: |
echo "=== Contents of ./screenshots ==="
ls -la ./screenshots || echo "Directory doesn't exist"
echo ""
echo "=== First level subdirectories ==="
find ./screenshots -maxdepth 2 -type d || echo "No subdirectories"
echo ""
echo "=== Sample files (first 10) ==="
find ./screenshots -type f -name "*.png" | head -10 || echo "No PNG files found"
echo ""
echo "=== Total PNG count ==="
find ./screenshots -type f -name "*.png" | wc -l
echo ""
echo "=== Check specific files ==="
ls -la ./screenshots/screenshot.doc.spec.ts/Docs-screenshots-pathname-cli-cmd-options*.png 2>&1 || echo "cli-cmd-options file not found"
echo ""
echo "=== Check if paths contain -1.png suffix ==="
find ./screenshots -name "*-1.png" | head -5

- name: Take screenshots with Playwright
env:
PLAYWRIGHT_SNAPSHOT_DIR: ${{ github.workspace }}/screenshots
run: |
echo "PLAYWRIGHT_SNAPSHOT_DIR is set to: ${PLAYWRIGHT_SNAPSHOT_DIR}"
echo "Workspace directory: ${{ github.workspace }}"
if [ "${{ steps.download-references.outputs.has_references }}" = "false" ]; then
echo "Creating new baseline screenshots..."
npx playwright test --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }} --update-snapshots || exit 0
Expand All @@ -174,6 +197,7 @@ jobs:

- name: Upload blob report
uses: actions/upload-artifact@v4
if: ${{ !cancelled() }}
with:
name: blob-report-${{ matrix.shardIndex }}
path: blob-report
Expand Down Expand Up @@ -210,6 +234,14 @@ jobs:
pattern: blob-report-*
merge-multiple: true

- name: Debug - Check downloaded reports
run: |
echo "=== Contents of all-blob-reports ==="
ls -laR all-blob-reports/ || echo "Directory doesn't exist or is empty"
echo ""
echo "=== File count ==="
find all-blob-reports -type f | wc -l

- name: Merge into HTML report
run: npx playwright merge-reports --reporter html ./all-blob-reports

Expand Down
69 changes: 63 additions & 6 deletions static/scripts/screenshot-artifacts.sh
Original file line number Diff line number Diff line change
Expand Up @@ -36,16 +36,40 @@ fi
ARTIFACTS=$(api "actions/runs/${RUN_ID}/artifacts?per_page=100")
COUNT=$(echo "${ARTIFACTS}" | jq '.total_count')

# Filter out expired artifacts
VALID_ARTIFACTS=$(echo "${ARTIFACTS}" | jq '.artifacts | map(select(.expired == false))')
VALID_COUNT=$(echo "${VALID_ARTIFACTS}" | jq 'length')

if [[ "${COUNT}" -eq 0 ]]; then
echo "No artifacts available for run ${RUN_ID}." >&2
exit 1
fi

echo "Downloading ${COUNT} artifact(s) from run ${RUN_ID}..."
if [[ "${VALID_COUNT}" -eq 0 ]]; then
echo "All ${COUNT} artifact(s) from run ${RUN_ID} have expired." >&2
exit 1
fi

echo "Found ${VALID_COUNT} valid (non-expired) artifact(s) from run ${RUN_ID} (${COUNT} total)..."

INDEX=0
DOWNLOADED=0
while IFS= read -r artifact; do
NAME=$(echo "${artifact}" | jq -r '.name')

# Only download the merged screenshots artifact, skip individual shards and build
if [[ "${NAME}" =~ ^screenshots-[0-9]+$ ]]; then
echo "Skipping shard artifact ${NAME} (merged artifact preferred)"
continue
fi

if [[ "${NAME}" == "build" ]]; then
echo "Skipping build artifact ${NAME} (not needed for references)"
continue
fi

echo "Processing artifact ${NAME}..."

URL=$(echo "${artifact}" | jq -r '.archive_download_url')
if [[ -z "${URL}" || "${URL}" == "null" ]]; then
echo "Skipping artifact ${NAME} with no download URL" >&2
Expand All @@ -54,23 +78,56 @@ while IFS= read -r artifact; do

TMP_ZIP="$(mktemp)"
TMP_DIR="$(mktemp -d)"
curl -sSL -H "Authorization: Bearer ${TOKEN}" -H "Accept: application/vnd.github+json" "${URL}" -o "${TMP_ZIP}"

echo "Downloading ${NAME} from ${URL}..."
if ! curl -fsSL -H "Authorization: Bearer ${TOKEN}" -H "Accept: application/vnd.github+json" "${URL}" -o "${TMP_ZIP}"; then
echo "Failed to download artifact ${NAME}" >&2
rm -rf "${TMP_ZIP}" "${TMP_DIR}"
continue
fi

# Verify the download produced a valid zip file
if [[ ! -s "${TMP_ZIP}" ]]; then
echo "Downloaded file is empty for artifact ${NAME}" >&2
rm -rf "${TMP_ZIP}" "${TMP_DIR}"
continue
fi

if ! unzip -tq "${TMP_ZIP}" >/dev/null 2>&1; then
echo "Downloaded file is not a valid zip for artifact ${NAME}" >&2
rm -rf "${TMP_ZIP}" "${TMP_DIR}"
continue
fi

unzip -oq "${TMP_ZIP}" -d "${TMP_DIR}"

if [[ -d "${TMP_DIR}/screenshots" ]]; then
# Artifact contains a screenshots/ subdirectory - extract its contents
shopt -s dotglob
mkdir -p "${TARGET_DIR}"
cp -R "${TMP_DIR}/screenshots"/* "${TARGET_DIR}/" 2>/dev/null || true
shopt -u dotglob
elif [[ "${NAME}" == "screenshots" ]]; then
# Merged screenshots artifact extracts directly to target
shopt -s dotglob
mkdir -p "${TARGET_DIR}"
cp -R "${TMP_DIR}"/* "${TARGET_DIR}/" 2>/dev/null || true
shopt -u dotglob
else
# Other artifacts go to subdirectories
DEST_DIR="${TARGET_DIR}/${NAME}"
mkdir -p "${DEST_DIR}"
cp -R "${TMP_DIR}"/* "${DEST_DIR}/" 2>/dev/null || true
fi

rm -rf "${TMP_ZIP}" "${TMP_DIR}"
INDEX=$((INDEX + 1))
echo "Downloaded artifact ${NAME}"
done < <(echo "${ARTIFACTS}" | jq -c '.artifacts[]')
DOWNLOADED=$((DOWNLOADED + 1))
echo "Successfully downloaded artifact ${NAME}"
done < <(echo "${VALID_ARTIFACTS}" | jq -c '.[]')

if [[ "${DOWNLOADED}" -eq 0 ]]; then
echo "No artifacts were downloaded successfully" >&2
exit 1
fi

echo "Downloaded ${INDEX} artifact(s) to ${TARGET_DIR}".
echo "Successfully downloaded ${DOWNLOADED} artifact(s) to ${TARGET_DIR}"
2 changes: 1 addition & 1 deletion tests/playwright/cookbook-home.spec.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { expect, test } from '@playwright/test';
import { expect, test } from './fixtures';
import type { Locator } from '@playwright/test';

const collectTileData = async (locator: Locator) => {
Expand Down
24 changes: 24 additions & 0 deletions tests/playwright/fixtures.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import { test as base } from '@playwright/test';

/**
 * Playwright `test` extended with an auto-running fixture that neutralizes
 * analytics traffic. Matching requests are fulfilled with an empty 200
 * response instead of being aborted, because aborting would cause the
 * client to wait and time out; stubbing keeps visual comparisons stable
 * across runs.
 */
export const test = base.extend<{ blockAnalytics: void }>({
  blockAnalytics: [
    async ({ context }, use) => {
      // Amplitude tracking endpoints plus the generic comms beacon.
      // Registered in this exact order so routing precedence is unchanged.
      const analyticsPatterns = [
        '**/*amplitude*/**',
        '**/comms',
        '**/*.amplitude.com/**',
        '**/api.amplitude.com/**',
        '**/api2.amplitude.com/**',
      ];
      for (const pattern of analyticsPatterns) {
        await context.route(pattern, route => route.fulfill({ status: 200, body: '' }));
      }

      await use();
    },
    { auto: true }, // runs before every test without being requested explicitly
  ],
});

export { expect } from '@playwright/test';
9 changes: 1 addition & 8 deletions tests/playwright/screenshot.doc.spec.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import fs from 'fs';
import path from 'path';
import { expect, test } from '@playwright/test';
import { expect, test } from './fixtures';
import { extractSitemapPathnames, WaitForDocusaurusHydration } from './utils';

const siteUrl = process.env.PLAYWRIGHT_BASE_URL ?? 'http://localhost:3000';
Expand Down Expand Up @@ -28,11 +28,6 @@ function isVersionedDocsPathname(pathname: string, list: string[]): boolean {
return true;
}

function sanitizePathname(pathname: string): string {
const cleaned = pathname.replace(/^\/+/, '').replace(/[^a-zA-Z0-9]+/g, '_');
return cleaned === '' ? 'home' : cleaned;
}

test.beforeAll(async () => {
console.log('Excluded pages: ', excludeList);
console.log('Total pages: ', extractSitemapPathnames(sitemapPath).length);
Expand All @@ -53,10 +48,8 @@ function screenshotPathname(pathname: string) {

await page.waitForTimeout(1_000);

const snapshotName = `${sanitizePathname(pathname)}.png`;
await expect(page).toHaveScreenshot({
fullPage: true,
path: testInfo.snapshotPath(snapshotName),
timeout: 10_000,
});
});
Expand Down