diff --git a/.github/workflows/studio-e2e-test.yml b/.github/workflows/studio-e2e-test.yml
index 1403a3814943e..160f43494f5ac 100644
--- a/.github/workflows/studio-e2e-test.yml
+++ b/.github/workflows/studio-e2e-test.yml
@@ -1,4 +1,4 @@
-name: Studio E2E Tests
+name: Selfhosted Studio E2E Tests
on:
push:
branches: [master]
@@ -22,28 +22,33 @@ concurrency:
permissions:
contents: write
+ pull-requests: write
jobs:
test:
timeout-minutes: 60
runs-on: ubuntu-latest
- # Make the job non-blocking
- continue-on-error: true
# Require approval only for external contributors
environment: ${{ github.event.pull_request.author_association != 'MEMBER' && 'Studio E2E Tests' || '' }}
env:
EMAIL: ${{ secrets.CI_EMAIL }}
PASSWORD: ${{ secrets.CI_PASSWORD }}
- PROJECT_REF: ${{ secrets.CI_PROJECT_REF }}
- NEXT_PUBLIC_IS_PLATFORM: true
NEXT_PUBLIC_API_URL: https://api.supabase.green
VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
- VERCEL_PROJECT_ID: ${{ secrets.VERCEL_STUDIO_HOSTED_PROJECT_ID }}
+ # Studio Self-Hosted project ID
+ VERCEL_PROJECT_ID: prj_CnatEuo7L6bUZAgmujMrm5P1rxtv
NEXT_PUBLIC_HCAPTCHA_SITE_KEY: 10000000-ffff-ffff-ffff-000000000001
+ VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO: ${{ secrets.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO }}
steps:
- uses: actions/checkout@v4
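+ # Fail fast with a clear message if the bypass secret is missing, instead of failing later when Playwright hits the protected preview.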
+ - name: Verify Vercel bypass secret exists
+ run: |
+ if [ -z "${{ secrets.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO }}" ]; then
+ echo "Required secret VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO is not set" >&2
+ exit 1
+ fi
- uses: pnpm/action-setup@v4
name: Install pnpm
with:
@@ -57,13 +62,16 @@ jobs:
- name: Install dependencies
run: pnpm i
+ # Deploy a preview to Vercel (CLI mode) and capture the URL
- name: Install Vercel CLI
run: pnpm add --global vercel@latest
- name: Pull Vercel Environment Information (Preview)
run: vercel pull --yes --environment=preview --token=${{ secrets.VERCEL_TOKEN }}
- - name: Build Project Artifacts for Vercel
+ - name: Build Project Artifacts for Vercel (is_platform=false)
+ env:
+ NEXT_PUBLIC_IS_PLATFORM: false
run: vercel build --token=${{ secrets.VERCEL_TOKEN }}
- name: Deploy Project to Vercel and Get URL
@@ -74,13 +82,16 @@ jobs:
echo "DEPLOY_URL=$DEPLOY_URL" >> $GITHUB_OUTPUT
- name: Install Playwright Browsers
- run: pnpm -C e2e/studio exec playwright install --with-deps
+ run: pnpm -C e2e/studio exec playwright install chromium --with-deps --only-shell
- - name: Run Playwright tests
+ - name: 🚀 Run Playwright tests against Vercel Preview
id: playwright
+ continue-on-error: true
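+ # Failures are not surfaced here so the summary and PR-comment steps below still run; the final step fails the job if needed.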
env:
- AUTHENTICATION: true
- STUDIO_URL: ${{ steps.deploy_vercel.outputs.DEPLOY_URL }}/dashboard
+ AUTHENTICATION: false
+ STUDIO_URL: ${{ steps.deploy_vercel.outputs.DEPLOY_URL }}
+ API_URL: ${{ steps.deploy_vercel.outputs.DEPLOY_URL }}
+ VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO: ${{ secrets.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO }}
run: pnpm e2e
- uses: actions/upload-artifact@v4
@@ -91,3 +102,93 @@ jobs:
e2e/studio/playwright-report/
e2e/studio/test-results/
retention-days: 7
+
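+ # Summarize Playwright's JSON report if present; the script falls back to a note when the file is missing.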
+ - name: Prepare summary (outputs)
+ if: always()
+ id: summarize
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const fs = require('fs')
+ const p = 'e2e/studio/test-results/test-results.json'
+ // Initialize a summary object to hold test statistics.
+ let s={total:0,passed:0,failed:0,skipped:0,timedOut:0,interrupted:0,flaky:0,durationMs:0,note:''}
+ try {
+ const data = JSON.parse(fs.readFileSync(p,'utf8'))
+ // Recursively walk through the test suites to process each test.
+ const walk=suite=>{
+ if(!suite)return;
+ suite.specs?.forEach(spec=>{
+ spec.tests?.forEach(test=>{
+ s.total++;
+ // Get the last result of the test, as tests can be retried.
+ const lastResult = test.results[test.results.length - 1];
+ s.durationMs += lastResult.duration || 0;
+ // A test is considered flaky if it was retried and its final status is 'passed'.
+ const isFlaky = test.results.length > 1 && lastResult.status === 'passed'
+ // Count each test once, under its own outcome (not a previously seen test's).
+ const status = isFlaky ? 'flaky' : lastResult.status
+ s[status] = (s[status]||0)+1;
+ })
+ })
+ suite.suites?.forEach(walk)
+ }
+ data.suites?.forEach(walk)
+ } catch { s.note='No JSON report found or parse error.' }
+ // Generate the markdown for the summary comment.
+ const md = s.note ? `Note: ${s.note}` : `- Total: ${s.total}\n- Passed: ${s.passed||0}\n- Failed: ${s.failed||0}\n- Skipped: ${s.skipped||0}\n- Timed out: ${s.timedOut||0}\n- Interrupted: ${s.interrupted||0}\n- Flaky: ${s.flaky||0}\n- Duration: ${(s.durationMs/1000).toFixed(1)}s`
+ // Set the summary and flaky_count as outputs for subsequent steps.
+ core.setOutput('summary', md)
+ core.setOutput('flaky_count', s.flaky)
+
+ - name: Comment summary on PR
+ if: always() && github.event_name == 'pull_request'
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const owner = context.repo.owner
+ const repo = context.repo.repo
+ const issue_number = context.issue.number
+ const summary = `${{ steps.summarize.outputs.summary }}`.replace(/^"|"$/g,'')
+ const runUrl = `https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}`
+ const marker = '<!-- studio-e2e-results -->' // any unique hidden string works; used to find and update this workflow's own comment
+
+ const now = new Date()
+ const weekday = now.toLocaleString('en-US', { weekday: 'long', timeZone: 'UTC' })
+ const day = now.toLocaleString('en-US', { day: 'numeric', timeZone: 'UTC' })
+ const month = now.toLocaleString('en-US', { month: 'long', timeZone: 'UTC' })
+ const year = now.toLocaleString('en-US', { year: 'numeric', timeZone: 'UTC' })
+ const time = now.toLocaleTimeString('en-US', {
+ hour: '2-digit',
+ minute: '2-digit',
+ second: '2-digit',
+ hour12: false,
+ timeZone: 'UTC',
+ })
+ const date = `${weekday} ${day}, ${month}, ${year} ${time} (UTC)`
+
+ const body = [
+ marker,
+ `**Studio E2E Results**`,
+ '',
+ summary,
+ '',
+ `Artifacts: ${runUrl}`,
+ '',
+ `Last updated: ${date}`
+ ].join('\n')
+
+ const { data: comments } = await github.rest.issues.listComments({ owner, repo, issue_number, per_page: 100 })
+ const existing = comments.find(c => c.body && c.body.includes(marker))
+ if (existing) {
+ await github.rest.issues.updateComment({ owner, repo, comment_id: existing.id, body })
+ } else {
+ await github.rest.issues.createComment({ owner, repo, issue_number, body })
+ }
+
+ - name: Fail job if tests failed
+ if: steps.playwright.outcome != 'success' || steps.summarize.outputs.flaky_count > 0
+ run: |
+ echo "E2E tests failed" >&2
+ exit 1
diff --git a/.github/workflows/typecheck.yml b/.github/workflows/typecheck.yml
index 826b08a649cbc..f5215584d8651 100644
--- a/.github/workflows/typecheck.yml
+++ b/.github/workflows/typecheck.yml
@@ -4,6 +4,7 @@ on:
pull_request:
branches:
- 'master'
+ merge_group:
# Cancel old builds on new commit for same workflow + branch/PR
concurrency:
diff --git a/apps/design-system/package.json b/apps/design-system/package.json
index b0715f521da8b..e23cbf1e4affc 100644
--- a/apps/design-system/package.json
+++ b/apps/design-system/package.json
@@ -57,7 +57,7 @@
"postcss": "^8.5.3",
"rimraf": "^4.1.3",
"shiki": "^1.1.7",
- "tailwindcss": "^3.3.0",
+ "tailwindcss": "catalog:",
"tsconfig": "workspace:*",
"tsx": "^4.19.3",
"typescript": "~5.5.0",
diff --git a/apps/studio/components/interfaces/Reports/v2/ReportChartUpsell.tsx b/apps/studio/components/interfaces/Reports/v2/ReportChartUpsell.tsx
index 942445df9fa3b..779aa17ee33ef 100644
--- a/apps/studio/components/interfaces/Reports/v2/ReportChartUpsell.tsx
+++ b/apps/studio/components/interfaces/Reports/v2/ReportChartUpsell.tsx
@@ -57,7 +57,7 @@ export function ReportChartUpsell({
onMouseLeave={() => setIsHoveringUpgrade(false)}
className="mt-4"
>
-
+
Upgrade to{' '}
{!!report.availableIn?.length ? report.availableIn[0] : 'Pro'}
diff --git a/apps/studio/data/organizations/organization-customer-profile-query.ts b/apps/studio/data/organizations/organization-customer-profile-query.ts
index 7fe953a0a17e2..1b6373605fb13 100644
--- a/apps/studio/data/organizations/organization-customer-profile-query.ts
+++ b/apps/studio/data/organizations/organization-customer-profile-query.ts
@@ -5,6 +5,7 @@ import { get, handleError } from 'data/fetchers'
import { useAsyncCheckPermissions } from 'hooks/misc/useCheckPermissions'
import type { ResponseError } from 'types'
import { organizationKeys } from './keys'
+import { IS_PLATFORM } from 'common'
export type OrganizationCustomerProfileVariables = {
slug?: string
@@ -52,7 +53,7 @@ export const useOrganizationCustomerProfileQuery =
    getOrganizationCustomerProfile({ slug }, signal),
{
- enabled: enabled && canReadCustomerProfile && typeof slug !== 'undefined',
+ enabled: IS_PLATFORM && enabled && canReadCustomerProfile && typeof slug !== 'undefined',
...options,
}
)
diff --git a/apps/studio/next.config.js b/apps/studio/next.config.js
index 93a6d8b029764..37c55f2be285a 100644
--- a/apps/studio/next.config.js
+++ b/apps/studio/next.config.js
@@ -531,10 +531,8 @@ const nextConfig = {
pagesBufferLength: 100,
},
typescript: {
- // On previews, typechecking is run via GitHub Action only for efficiency
- // On production, we turn it on to prevent errors from conflicting PRs getting into
- // prod
- ignoreBuildErrors: process.env.NEXT_PUBLIC_VERCEL_ENV === 'production' ? false : true,
+ // Typechecking is run via GitHub Action only for efficiency.
+ ignoreBuildErrors: true,
},
eslint: {
// We are already running linting via GH action, this will skip linting during production build on Vercel
diff --git a/apps/studio/package.json b/apps/studio/package.json
index 7cb777345f687..532ee4aafcf49 100644
--- a/apps/studio/package.json
+++ b/apps/studio/package.json
@@ -197,7 +197,7 @@
"prettier": "3.2.4",
"raw-loader": "^4.0.2",
"require-in-the-middle": "^7.5.2",
- "tailwindcss": "^3.4.1",
+ "tailwindcss": "catalog:",
"tsx": "^4.19.3",
"typescript": "~5.5.0",
"vite": "catalog:",
diff --git a/apps/studio/state/tabs.tsx b/apps/studio/state/tabs.tsx
index 2d6fda0103554..348528335f31b 100644
--- a/apps/studio/state/tabs.tsx
+++ b/apps/studio/state/tabs.tsx
@@ -347,7 +347,7 @@ function createTabsState(projectRef: string) {
router.push(`/project/${router.query.ref}/editor`)
break
default:
- router.push(`/project/${router.query.ref}/${editor}`)
+ router.push(`/project/${router.query.ref}/${editor === 'table' ? 'editor' : 'sql'}`)
}
}
}
diff --git a/apps/ui-library/package.json b/apps/ui-library/package.json
index fa50cfe2c1819..fdaa04634ee9d 100644
--- a/apps/ui-library/package.json
+++ b/apps/ui-library/package.json
@@ -106,7 +106,7 @@
"rimraf": "^4.1.3",
"shadcn": "^2.10.0",
"shiki": "^1.1.7",
- "tailwindcss": "^3.3.0",
+ "tailwindcss": "catalog:",
"tsconfig": "workspace:*",
"tsx": "^4.19.3",
"typescript": "~5.5.0",
diff --git a/e2e/studio/env.config.ts b/e2e/studio/env.config.ts
index f1edf3d0a0050..e7d451460d61e 100644
--- a/e2e/studio/env.config.ts
+++ b/e2e/studio/env.config.ts
@@ -1,13 +1,21 @@
import path from 'path'
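+// Interpret env-var strings as booleans: only 'true' (case-insensitive, trimmed) maps to true.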
+const toBoolean = (value?: string) => {
+ if (value == null) return false
+ const normalized = value.trim().toLowerCase()
+ return normalized === 'true'
+}
+
export const env = {
STUDIO_URL: process.env.STUDIO_URL,
API_URL: process.env.API_URL || 'https://api.supabase.green',
- AUTHENTICATION: process.env.AUTHENTICATION,
+ AUTHENTICATION: toBoolean(process.env.AUTHENTICATION),
EMAIL: process.env.EMAIL,
PASSWORD: process.env.PASSWORD,
PROJECT_REF: process.env.PROJECT_REF || 'default',
IS_PLATFORM: process.env.IS_PLATFORM || 'false',
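+ // In CI this carries the Vercel protection bypass token; locally it defaults to 'false'.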
+ VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO:
+ process.env.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO || 'false',
}
export const STORAGE_STATE_PATH = path.join(__dirname, './playwright/.auth/user.json')
diff --git a/e2e/studio/features/_global.setup.ts b/e2e/studio/features/_global.setup.ts
index 187e72fb8fb29..acf4bfa6c0b35 100644
--- a/e2e/studio/features/_global.setup.ts
+++ b/e2e/studio/features/_global.setup.ts
@@ -25,6 +25,18 @@ setup('Global Setup', async ({ page }) => {
- Is Platform: ${IS_PLATFORM}
`)
+ /*
+ * In CI, ensure VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO is set: it is required
+ * to get past Vercel deployment protection on the preview URL.
+ */
+ const VERCEL_BYPASS = process.env.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO
+
+ if (process.env.CI === 'true') {
+ if (!VERCEL_BYPASS || VERCEL_BYPASS.length === 0) {
+ throw new Error('VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO is not set')
+ }
+ }
+
/**
* Studio Check
*/
diff --git a/e2e/studio/features/database.spec.ts b/e2e/studio/features/database.spec.ts
index 54e8d8ec9f160..b3d328fa8a662 100644
--- a/e2e/studio/features/database.spec.ts
+++ b/e2e/studio/features/database.spec.ts
@@ -2,7 +2,11 @@ import { expect, Page } from '@playwright/test'
import { env } from '../env.config'
import { test } from '../utils/test'
import { toUrl } from '../utils/to-url'
-import { waitForApiResponse, waitForDatabaseToLoad } from '../utils/wait-for-response'
+import {
+ waitForApiResponse,
+ waitForDatabaseToLoad,
+ createApiResponseWaiter,
+} from '../utils/wait-for-response'
const databaseTableName = 'pw_database_table'
const databaseTableNameNew = 'pw_database_table_new'
@@ -61,13 +65,14 @@ const deleteTable = async (page: Page, tableName: string) => {
).toBeVisible()
}
-test.describe('Database', () => {
+test.describe.serial('Database', () => {
let page: Page
test.beforeAll(async ({ browser, ref }) => {
page = await browser.newPage()
+ const wait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=entity-types-public-0')
await page.goto(toUrl(`/project/${ref}/editor`))
- await waitForApiResponse(page, 'pg-meta', ref, 'query?key=entity-types-public-0')
+ await wait
if ((await page.getByRole('button', { name: `View ${databaseTableName}` }).count()) > 0) {
await deleteTable(page, databaseTableName)
@@ -77,8 +82,9 @@ test.describe('Database', () => {
})
test.afterAll(async ({ ref }) => {
+ const wait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=entity-types-public-0')
await page.goto(toUrl(`/project/${ref}/editor`))
- await waitForApiResponse(page, 'pg-meta', ref, 'query?key=entity-types-public-0')
+ await wait
if ((await page.getByRole('button', { name: `View ${databaseTableName}` }).count()) > 0) {
await deleteTable(page, databaseTableName)
}
@@ -86,10 +92,14 @@ test.describe('Database', () => {
test.describe('Schema Visualizer', () => {
test('actions works as expected', async ({ page, ref }) => {
+ const wait = createApiResponseWaiter(
+ page,
+ 'pg-meta',
+ ref,
+ 'tables?include_columns=true&included_schemas=public'
+ )
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/schemas?schema=public`))
-
- // Wait for schema visualizer to load
- await waitForDatabaseToLoad(page, ref)
+ await wait
// validates table and column exists
await page.waitForTimeout(500)
@@ -130,12 +140,16 @@ test.describe('Database', () => {
})
})
- test.describe('Tables', () => {
+ test.describe.serial('Tables', () => {
test('actions works as expected', async ({ page, ref }) => {
+ const wait = createApiResponseWaiter(
+ page,
+ 'pg-meta',
+ ref,
+ 'tables?include_columns=true&included_schemas=public'
+ )
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/tables?schema=public`))
-
- // Wait for database tables to be populated
- await waitForDatabaseToLoad(page, ref)
+ await wait
// check new table button is present in public schema
await expect(page.getByRole('button', { name: 'New table' })).toBeVisible()
@@ -258,7 +272,7 @@ test.describe('Database', () => {
})
test.describe('Tables columns', () => {
- test('everything works as expected', async ({ page, ref }) => {
+ test('can view, create, update, delete, and filter table columns', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/tables?schema=public`))
// Wait for database tables to be populated
@@ -322,7 +336,7 @@ test.describe('Database', () => {
})
})
- test.describe('Triggers', () => {
+ test.describe.serial('Triggers', () => {
test('actions works as expected', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/triggers?schema=public`))
@@ -377,7 +391,7 @@ test.describe('Database', () => {
await page.getByRole('checkbox').nth(2).click()
await page.getByRole('button', { name: 'Choose a function to trigger' }).click()
await page.getByRole('paragraph').filter({ hasText: 'subscription_check_filters' }).click()
- await page.getByRole('button', { name: 'Create trigger' }).click()
+ await page.getByRole('button', { name: /^(Create|Save) trigger$/ }).click()
// validate trigger creation
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=trigger-create')
@@ -395,7 +409,7 @@ test.describe('Database', () => {
await triggerRow.getByRole('button', { name: 'More options' }).click()
await page.getByRole('menuitem', { name: 'Edit trigger' }).click()
await page.getByRole('textbox', { name: 'Name of trigger' }).fill(databaseTriggerNameUpdated)
- await page.getByRole('button', { name: 'Create trigger' }).click()
+ await page.getByRole('button', { name: /^(Create|Save) trigger$/ }).click()
// validate trigger update
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=trigger-update')
@@ -441,7 +455,6 @@ test.describe('Database', () => {
await page.getByTestId('schema-selector').click()
await page.getByPlaceholder('Find schema...').fill('auth')
await page.getByRole('option', { name: 'auth' }).click()
- await waitForApiResponse(page, 'pg-meta', ref, 'query?key=indexes-auth')
await page.waitForTimeout(500)
expect(page.getByText('sso_providers_pkey')).toBeVisible()
expect(page.getByText('confirmation_token_idx')).toBeVisible()
@@ -579,12 +592,13 @@ test.describe('Database', () => {
})
})
-test.describe('Database Enumerated Types', () => {
+test.describe.serial('Database Enumerated Types', () => {
test('actions works as expected', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/types?schema=public`))
// Wait for database enumerated types to be populated
- await waitForApiResponse(page, 'pg-meta', ref, 'query?key=schemas')
+ // await waitForApiResponse(page, 'pg-meta', ref, 'query?key=schemas')
+ await page.waitForLoadState('networkidle')
// create new type button exists in public schema
await expect(page.getByRole('button', { name: 'Create type' })).toBeVisible()
@@ -593,23 +607,26 @@ test.describe('Database Enumerated Types', () => {
await page.getByTestId('schema-selector').click()
await page.getByPlaceholder('Find schema...').fill('auth')
await page.getByRole('option', { name: 'auth' }).click()
- expect(page.getByText('factor_type')).toBeVisible()
- expect(page.getByText('code_challenge_method')).toBeVisible()
+
+ await expect(page.getByText('factor_type')).toBeVisible()
+ await expect(page.getByText('code_challenge_method')).toBeVisible()
// create new type button does not exist in other schemas
- expect(page.getByRole('button', { name: 'Create type' })).not.toBeVisible()
+ await expect(page.getByRole('button', { name: 'Create type' })).not.toBeVisible()
// filter by querying
await page.getByRole('textbox', { name: 'Search for a type' }).fill('code')
- await page.waitForTimeout(500) // wait for enum types to be loaded
- expect(page.getByText('factor_type')).not.toBeVisible()
- expect(page.getByText('code_challenge_method')).toBeVisible()
+ await page.waitForTimeout(1000) // wait for enum types to be loaded
+ await expect(page.getByText('factor_type')).not.toBeVisible()
+ await expect(page.getByText('code_challenge_method')).toBeVisible()
})
test('CRUD operations works as expected', async ({ page, ref }) => {
+ const wait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=schemas')
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/types?schema=public`))
// Wait for database roles list to be populated
- await waitForApiResponse(page, 'pg-meta', ref, 'query?key=schemas')
+ await wait
+ // await page.waitForLoadState('networkidle')
// if enum exists, delete it.
await page.waitForTimeout(500)
@@ -661,12 +678,13 @@ test.describe('Database Enumerated Types', () => {
})
})
-test.describe('Database Functions', () => {
+test.describe.serial('Database Functions', () => {
test('actions works as expected', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/functions?schema=public`))
// Wait for database functions to be populated
- await waitForApiResponse(page, 'pg-meta', ref, 'query?key=database-functions')
+ await page.waitForLoadState('networkidle')
+ // await waitForApiResponse(page, 'pg-meta', ref, 'query?key=database-functions')
// create a new function button exists in public schema
await expect(page.getByRole('button', { name: 'Create a new function' })).toBeVisible()
@@ -691,7 +709,8 @@ test.describe('Database Functions', () => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/functions?schema=public`))
// Wait for database functions to be populated
- await waitForApiResponse(page, 'pg-meta', ref, 'query?key=database-functions')
+ // await waitForApiResponse(page, 'pg-meta', ref, 'query?key=database-functions')
+ await page.waitForLoadState('networkidle')
// delete function if exists
if ((await page.getByRole('button', { name: databaseFunctionName }).count()) > 0) {
@@ -703,7 +722,7 @@ test.describe('Database Functions', () => {
.fill(databaseFunctionName)
await page.getByRole('button', { name: `Delete function ${databaseFunctionName}` }).click()
await expect(
- page.getByText(`Successfully removed ${databaseFunctionName}`),
+ page.getByText(`Successfully removed function ${databaseFunctionName}`),
'Delete confirmation toast should be visible'
).toBeVisible({
timeout: 50000,
@@ -719,7 +738,7 @@ test.describe('Database Functions', () => {
END;`)
await page.waitForTimeout(500) // wait for text content to be visible
expect(await page.getByRole('presentation').textContent()).toBe(`BEGINEND;`)
- await page.getByRole('button', { name: 'Confirm' }).click()
+ await page.getByRole('button', { name: 'Create function' }).click()
// validate function creation
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=functions-create')
@@ -737,7 +756,7 @@ END;`)
await functionRow.getByRole('button', { name: 'More options' }).click()
await page.getByRole('menuitem', { name: 'Edit function', exact: true }).click()
await page.getByRole('textbox', { name: 'Name of function' }).fill(databaseFunctionNameUpdated)
- await page.getByRole('button', { name: 'Confirm' }).click()
+ await page.getByRole('button', { name: 'Save function' }).click()
// validate function update
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=functions-update')
diff --git a/e2e/studio/features/home.spec.ts b/e2e/studio/features/home.spec.ts
index 9acbdf745af48..ad753f8c78b03 100644
--- a/e2e/studio/features/home.spec.ts
+++ b/e2e/studio/features/home.spec.ts
@@ -6,6 +6,8 @@ test.describe('Project', async () => {
test('Can navigate to project home page', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${ref}`))
- await expect(page.getByRole('link', { name: 'Tables' })).toBeVisible()
+ // The home page has 2 variants (classic and new). Both render an H1 heading.
+ // Assert on a stable, variant-agnostic selector.
+ await expect(page.getByRole('heading', { level: 1 })).toBeVisible()
})
})
diff --git a/e2e/studio/features/table-editor.spec.ts b/e2e/studio/features/table-editor.spec.ts
index 5519523c48983..106a928f067e7 100644
--- a/e2e/studio/features/table-editor.spec.ts
+++ b/e2e/studio/features/table-editor.spec.ts
@@ -15,9 +15,36 @@ import { waitForApiResponseWithTimeout } from '../utils/wait-for-response-with-t
const tableNamePrefix = 'pw_table'
const columnName = 'pw_column'
+const dismissToastsIfAny = async (page: Page) => {
+ const closeButtons = page.getByRole('button', { name: 'Close toast' })
+ const count = await closeButtons.count()
+ for (let i = 0; i < count; i++) {
+ await closeButtons.nth(i).click()
+ }
+}
+
const createTable = async (page: Page, ref: string, tableName: string) => {
- await page.getByRole('button', { name: 'New table', exact: true }).click()
- await page.getByTestId('table-name-input').fill(tableName)
+ // Ensure no toast overlays block the dialog trigger
+ await dismissToastsIfAny(page)
+
+ const newTableBtn = page.getByRole('button', { name: 'New table', exact: true })
+ await expect(newTableBtn).toBeVisible()
+ await newTableBtn.click()
+
+ // If the "Unable to find table" error state is showing, dismiss it first
+ // via the "Close tab" or "Head back" button.
+ const closeTabBtn = page.getByRole('button', { name: 'Close tab' })
+ const headBackBtn = page.getByRole('button', { name: 'Head back' })
+ if ((await closeTabBtn.count()) > 0) {
+ await closeTabBtn.click()
+ }
+ if ((await headBackBtn.count()) > 0) {
+ await headBackBtn.click()
+ }
+
+ const nameInput = page.getByTestId('table-name-input')
+ await expect(nameInput).toBeVisible()
+ await nameInput.fill(tableName)
await page.getByTestId('created_at-extra-options').click()
await page.getByText('Is Nullable').click()
await page.getByTestId('created_at-extra-options').click({ force: true })
@@ -26,12 +53,10 @@ const createTable = async (page: Page, ref: string, tableName: string) => {
await page.getByText('Choose a column type...').click()
await page.getByRole('option', { name: 'text Variable-length' }).click()
await page.getByRole('button', { name: 'Save' }).click()
- await waitForApiResponse(
- page,
- 'pg-meta',
- ref,
- 'tables?include_columns=true&included_schemas=public'
- ) // wait for table creation
+ // Wait specifically for tables list refresh instead of generic networkidle
+ await waitForApiResponseWithTimeout(page, (response) =>
+ response.url().includes('tables?include_columns=true&included_schemas=public')
+ )
// wait for tables to load, we don't need to wait here cause this response may complete before the table creation.
await waitForApiResponseWithTimeout(page, (response) =>
response.url().includes('query?key=entity-types-public-')
@@ -43,8 +68,10 @@ const createTable = async (page: Page, ref: string, tableName: string) => {
}
const deleteTable = async (page: Page, ref: string, tableName: string) => {
- await page.getByLabel(`View ${tableName}`).nth(0).click()
- await page.getByLabel(`View ${tableName}`).getByRole('button').nth(1).click()
+ const viewLocator = page.getByLabel(`View ${tableName}`)
+ if ((await viewLocator.count()) === 0) return
+ await viewLocator.nth(0).click()
+ await viewLocator.getByRole('button').nth(1).click({ force: true })
await page.getByText('Delete table').click()
await page.getByRole('checkbox', { name: 'Drop table with cascade?' }).click()
await page.getByRole('button', { name: 'Delete' }).click()
@@ -68,18 +95,13 @@ const deleteEnumIfExist = async (page: Page, ref: string, enumName: string) => {
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=', { method: 'POST' })
}
-test.describe('table editor', () => {
+test.describe.serial('table editor', () => {
let page: Page
test.beforeEach(async ({ ref }) => {
await resetLocalStorage(page, ref)
-
- if (!page.url().includes('/editor')) {
- await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
- }
-
- // wait for all settings to load, and no more network request for 500ms
- await page.waitForLoadState('networkidle')
+ await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
+ await waitForTableToLoad(page, ref)
})
test.beforeAll(async ({ browser, ref }) => {
@@ -87,10 +109,10 @@ test.describe('table editor', () => {
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
await waitForTableToLoad(page, ref)
- // Delete all tables with prefix pw_table
- const tablesToDelete = await (
- await page.getByRole('button', { name: 'View' }).allTextContents()
- ).filter((tableName) => tableName.startsWith(tableNamePrefix))
+ // Delete all tables with prefix pw_table (ensure page is stable first)
+ const viewButtons = page.getByRole('button', { name: /^View / })
+ const names = (await viewButtons.allTextContents()).map((t) => t.replace(/^View\s+/, '').trim())
+ const tablesToDelete = names.filter((tableName) => tableName.startsWith(tableNamePrefix))
for (const tableName of tablesToDelete) {
await deleteTable(page, ref, tableName)
@@ -101,16 +123,14 @@ test.describe('table editor', () => {
test.afterAll(async ({ ref }) => {
await resetLocalStorage(page, ref)
- // Only navigate and wait if not already at /editor
- if (!page.url().includes('/editor')) {
- await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
- await waitForTableToLoad(page, ref) // wait for table data to load
- }
+ // Always navigate explicitly to editor and wait for tables to be loaded
+ await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
+ await waitForTableToLoad(page, ref)
// Delete all tables with prefix pw_table
- const tablesToDelete = await (
- await page.getByRole('button', { name: 'View' }).allTextContents()
- ).filter((tableName) => tableName.startsWith(tableNamePrefix))
+ const viewButtons = page.getByRole('button', { name: /^View / })
+ const names = (await viewButtons.allTextContents()).map((t) => t.replace(/^View\s+/, '').trim())
+ const tablesToDelete = names.filter((tableName) => tableName.startsWith(tableNamePrefix))
for (const tableName of tablesToDelete) {
await deleteTable(page, ref, tableName)
@@ -174,7 +194,7 @@ test.describe('table editor', () => {
})
test('switching schemas work as expected', async ({ ref }) => {
- const authTableSso = 'sso_provider'
+ const authTableSso = 'identities'
const authTableMfa = 'mfa_factors'
// change schema from public to auth
@@ -358,10 +378,16 @@ test.describe('table editor', () => {
.getByRole('button')
.nth(1)
.click()
- await page.getByRole('menuitem', { name: 'Export data' }).click()
- const downloadCsvPromise = page.waitForEvent('download')
- await page.getByRole('menuitem', { name: 'Export table as CSV' }).click()
- const downloadCsv = await downloadCsvPromise
+ // Open the nested export submenu via hover and wait for it to be marked open (more stable in headless)
+ const exportDataItemCsv = page.getByRole('menuitem', { name: 'Export data' })
+ await expect(exportDataItemCsv).toBeVisible()
+ await exportDataItemCsv.hover()
+ await expect(exportDataItemCsv).toHaveAttribute('data-state', /open/)
+ await expect(page.getByRole('menuitem', { name: 'Export table as CSV' })).toBeVisible()
+ const [downloadCsv] = await Promise.all([
+ page.waitForEvent('download'),
+ page.getByRole('menuitem', { name: 'Export table as CSV' }).click(),
+ ])
expect(downloadCsv.suggestedFilename()).toContain('.csv')
const downloadCsvPath = await downloadCsv.path()
@@ -377,16 +403,32 @@ test.describe('table editor', () => {
})
fs.unlinkSync(downloadCsvPath)
+ // Close submenu and parent menu to avoid UI leftovers
+ await page.keyboard.press('Escape')
+ await page.keyboard.press('Escape')
+ await page.waitForTimeout(500)
+
+ // expect to NOT find the Export data menu item
+ await expect(page.getByRole('menuitem', { name: 'Export data' })).not.toBeVisible()
+
// test export data via SQL + verify
await page
.getByRole('button', { name: `View ${tableNameUpdated}`, exact: true })
.getByRole('button')
.nth(1)
.click()
- await page.getByRole('menuitem', { name: 'Export data' }).click()
- const downloadSqlPromise = page.waitForEvent('download')
- await page.getByRole('menuitem', { name: 'Export table as SQL' }).click()
- const downloadSql = await downloadSqlPromise
+ // Open the nested export submenu via forced hover and wait for it to be marked open (more stable in headless)
+ const exportDataItemSql = page.getByRole('menuitem', { name: 'Export data' })
+ await expect(exportDataItemSql).toBeVisible()
+ await exportDataItemSql.hover({
+ force: true,
+ })
+ await expect(exportDataItemSql).toHaveAttribute('data-state', /open/)
+ await expect(page.getByRole('menuitem', { name: 'Export table as SQL' })).toBeVisible()
+ const [downloadSql] = await Promise.all([
+ page.waitForEvent('download'),
+ page.getByRole('menuitem', { name: 'Export table as SQL' }).click(),
+ ])
expect(downloadSql.suggestedFilename()).toContain('.sql')
const downloadSqlPath = await downloadSql.path()
const sqlContent = fs.readFileSync(downloadSqlPath, 'utf-8')
@@ -398,53 +440,128 @@ test.describe('table editor', () => {
expect(sqlContent).toContain('123')
fs.unlinkSync(downloadSqlPath)
+ // Close submenu and parent menu to avoid UI leftovers
+ await page.keyboard.press('Escape')
+ await page.keyboard.press('Escape')
+ await page.waitForTimeout(500)
+
// test export data via CLI
await page
.getByRole('button', { name: `View ${tableNameUpdated}`, exact: true })
.getByRole('button')
.nth(1)
.click()
- await page.getByRole('menuitem', { name: 'Export data' }).click()
+
+ const exportDataItemCli = page.getByRole('menuitem', { name: 'Export data' })
+ await expect(exportDataItemCli).toBeVisible()
+ await exportDataItemCli.hover({
+ force: true,
+ })
+ await expect(page.getByRole('menuitem', { name: 'Export table via CLI' })).toBeVisible()
await page.getByRole('menuitem', { name: 'Export table via CLI' }).click()
await expect(page.getByRole('heading', { name: 'Export table data via CLI' })).toBeVisible()
await page.getByRole('button', { name: 'Close' }).first().click()
- // sort rows
- await page.getByRole('button', { name: 'Sort' }).click()
- await page.getByRole('button', { name: 'Pick a column to sort by' }).click()
- await page.getByRole('menuitem', { name: columnNameUpdated }).click()
- await page.getByRole('button', { name: 'Apply sorting' }).click()
- await waitForGridDataToLoad(page, ref) // wait for sorted table data to load
- await page.getByRole('button', { name: 'Sorted by 1 rule' }).click()
+ // Ensure all menus/dialogs are closed before continuing
+ await page.keyboard.press('Escape')
+ await page.keyboard.press('Escape')
+ await page.waitForTimeout(500)
+ })
- // verify sorted row content
- await page.waitForTimeout(500) // may take some time for sorting to complete
- expect(await page.getByRole('gridcell').nth(3).textContent()).toBe('123')
- expect(await page.getByRole('gridcell').nth(8).textContent()).toBe('456')
- expect(await page.getByRole('gridcell').nth(13).textContent()).toBe('789')
+ test('filtering rows works as expected', async ({ ref }) => {
+ const tableName = 'pw_table_filtering'
+ const colName = 'pw_column'
+
+ if (!page.url().includes('/editor')) {
+ await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
+ await waitForTableToLoad(page, ref)
+ }
+
+ await createTable(page, ref, tableName)
+ await page.getByRole('button', { name: `View ${tableName}`, exact: true }).click()
+ await page.waitForURL(/\/editor\/\d+\?schema=public$/)
+
+ for (const value of ['789', '456', '123']) {
+ await page.getByTestId('table-editor-insert-new-row').click()
+ await page.getByRole('menuitem', { name: 'Insert row Insert a new row' }).click()
+ await page.getByTestId(`${colName}-input`).fill(value)
+ await page.getByTestId('action-bar-save-row').click()
+ await waitForApiResponse(page, 'pg-meta', ref, 'query?key=', { method: 'POST' })
+ }
- // filter rows
await page.getByRole('button', { name: 'Filter', exact: true }).click()
await page.getByRole('button', { name: 'Add filter' }).click()
await page.getByRole('button', { name: 'id' }).click()
- await page.getByRole('menuitem', { name: columnNameUpdated }).click()
+ await page.getByRole('menuitem', { name: colName }).click()
await page.getByRole('textbox', { name: 'Enter a value' }).fill('789')
await page.getByRole('button', { name: 'Apply filter' }).click()
- await waitForGridDataToLoad(page, ref) // wait for filtered table data to load
- await page.waitForTimeout(500) // may take some time for filtering to complete
- await page.getByRole('button', { name: 'Filtered by 1 rule' }).click()
+ await waitForGridDataToLoad(page, ref)
+ await page.waitForTimeout(500)
+ await page.getByRole('button', { name: 'Filtered by 1 rule' }).click({ force: true })
await expect(page.getByRole('gridcell', { name: '789' })).toBeVisible()
await expect(page.getByRole('gridcell', { name: '456' })).not.toBeVisible()
await expect(page.getByRole('gridcell', { name: '123' })).not.toBeVisible()
- // view table definition
+ await deleteTable(page, ref, tableName)
+ })
+
+ test('view table definition works as expected', async ({ ref }) => {
+ const tableName = 'pw_table_definition'
+ const colName = 'pw_column'
+ if (!page.url().includes('/editor')) {
+ await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
+ await waitForTableToLoad(page, ref)
+ }
+ await createTable(page, ref, tableName)
+ await page.getByRole('button', { name: `View ${tableName}`, exact: true }).click()
+ await page.waitForURL(/\/editor\/\d+\?schema=public$/)
await page.getByText('definition', { exact: true }).click()
- await waitForApiResponse(page, 'pg-meta', ref, 'query?key=table-definition-') // wait for table definition
+ await waitForApiResponse(page, 'pg-meta', ref, 'query?key=table-definition-')
await expect(page.locator('.view-lines')).toContainText(
- `create table public.${tableNameUpdated} ( id bigint generated by default as identity not null, created_at timestamp with time zone null default now(), ${columnNameUpdated} text null, constraint ${tableNameGridEditor}_pkey primary key (id)) TABLESPACE pg_default;`
+ `create table public.${tableName} ( id bigint generated by default as identity not null, created_at timestamp with time zone null default now(), ${colName} text null, constraint ${tableName}_pkey primary key (id)) TABLESPACE pg_default;`
)
+ await deleteTable(page, ref, tableName)
+ })
+
+ test('sorting rows works as expected', async ({ ref }) => {
+ const tableName = 'pw_table_sorting'
+ const colName = 'pw_column'
+
+ // Ensure we're on editor
+ if (!page.url().includes('/editor')) {
+ await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
+ await waitForTableToLoad(page, ref)
+ }
+
+ // Create a small table and three rows
+ await createTable(page, ref, tableName)
+ await page.getByRole('button', { name: `View ${tableName}`, exact: true }).click()
+ await page.waitForURL(/\/editor\/\d+\?schema=public$/)
+
+ for (const value of ['789', '456', '123']) {
+ await page.getByTestId('table-editor-insert-new-row').click()
+ await page.getByRole('menuitem', { name: 'Insert row Insert a new row' }).click()
+ await page.getByTestId(`${colName}-input`).fill(value)
+ await page.getByTestId('action-bar-save-row').click()
+ await waitForApiResponse(page, 'pg-meta', ref, 'query?key=', { method: 'POST' })
+ }
+
+ // Apply sorting
+ await page.getByRole('button', { name: 'Sort', exact: true }).click()
+ await page.getByRole('button', { name: 'Pick a column to sort by' }).click()
+ await page.getByRole('menuitem', { name: colName }).click()
+ await page.getByRole('button', { name: 'Apply sorting' }).click()
+ await waitForGridDataToLoad(page, ref)
+ await page.getByRole('button', { name: 'Sorted by 1 rule' }).click()
+
+ // Verify rows are sorted ascending (lexicographically, since the column is text)
+ await page.waitForTimeout(500)
+ expect(await page.getByRole('gridcell').nth(3).textContent()).toBe('123')
+ expect(await page.getByRole('gridcell').nth(8).textContent()).toBe('456')
+ expect(await page.getByRole('gridcell').nth(13).textContent()).toBe('789')
- await deleteTable(page, ref, tableNameUpdated)
+ // Cleanup
+ await deleteTable(page, ref, tableName)
})
test('importing, pagination and large data actions works as expected', async ({ ref }) => {
@@ -524,9 +641,10 @@ test.describe('table editor', () => {
await page.getByRole('row', { name: 'value 6 to export' }).getByRole('checkbox').click()
await page.getByRole('button', { name: 'Export' }).click()
- const downloadSqlPromise = page.waitForEvent('download')
- await page.getByRole('menuitem', { name: 'Export as SQL' }).click()
- const downloadSql = await downloadSqlPromise
+ const [downloadSql] = await Promise.all([
+ page.waitForEvent('download'),
+ page.getByRole('menuitem', { name: 'Export as SQL' }).click(),
+ ])
expect(downloadSql.suggestedFilename()).toContain('.sql')
const downloadSqlPath = await downloadSql.path()
const sqlContent = fs.readFileSync(downloadSqlPath, 'utf-8')
@@ -536,10 +654,16 @@ test.describe('table editor', () => {
await page.waitForTimeout(1000) // wait for event processing to complete
fs.unlinkSync(downloadSqlPath)
+ // Close menu to prevent overlap with next export
+ await page.keyboard.press('Escape')
+ await page.keyboard.press('Escape')
+ await page.waitForTimeout(500)
+
await page.getByRole('button', { name: 'Export' }).click()
- const downloadJsonPromise = page.waitForEvent('download')
- await page.getByRole('menuitem', { name: 'Export as JSON' }).click()
- const downloadJson = await downloadJsonPromise
+ const [downloadJson] = await Promise.all([
+ page.waitForEvent('download'),
+ page.getByRole('menuitem', { name: 'Export as JSON' }).click(),
+ ])
expect(downloadJson.suggestedFilename()).toContain('.json')
const downloadJsonPath = await downloadJson.path()
const jsonContent = fs.readFileSync(downloadJsonPath, 'utf-8')
@@ -549,10 +673,16 @@ test.describe('table editor', () => {
await page.waitForTimeout(1000) // wait for event processing to complete
fs.unlinkSync(downloadJsonPath)
+ // Close menu to prevent overlap with next export
+ await page.keyboard.press('Escape')
+ await page.keyboard.press('Escape')
+ await page.waitForTimeout(500)
+
await page.getByRole('button', { name: 'Export' }).click()
- const downloadCsvPromise = page.waitForEvent('download')
- await page.getByRole('menuitem', { name: 'Export as CSV' }).click()
- const downloadCsv = await downloadCsvPromise
+ const [downloadCsv] = await Promise.all([
+ page.waitForEvent('download'),
+ page.getByRole('menuitem', { name: 'Export as CSV' }).click(),
+ ])
expect(downloadCsv.suggestedFilename()).toContain('.csv')
const downloadCsvPath = await downloadCsv.path()
const csvContent = fs.readFileSync(downloadCsvPath, 'utf-8').replace(/\r?\n/g, '\n')
@@ -573,6 +703,11 @@ test.describe('table editor', () => {
await page.waitForTimeout(1000) // wait for event processing to complete
fs.unlinkSync(downloadCsvPath)
+ // Close menu to avoid leaving it open
+ await page.keyboard.press('Escape')
+ await page.keyboard.press('Escape')
+ await page.waitForTimeout(500)
+
// select all actions works (delete action)
await page.getByRole('checkbox', { name: 'Select All' }).click()
await page.getByRole('button', { name: 'Delete 98 rows' }).click()
diff --git a/e2e/studio/package.json b/e2e/studio/package.json
index 41d49dfa76a17..e88c363639a55 100644
--- a/e2e/studio/package.json
+++ b/e2e/studio/package.json
@@ -3,7 +3,8 @@
"version": "1.0.0",
"main": "index.js",
"scripts": {
- "e2e": "playwright test"
+ "e2e": "playwright test",
+ "e2e:ui": "playwright test --ui"
},
"keywords": [],
"author": "",
diff --git a/e2e/studio/playwright.config.ts b/e2e/studio/playwright.config.ts
index 3eca664c46473..eec7e9d939d68 100644
--- a/e2e/studio/playwright.config.ts
+++ b/e2e/studio/playwright.config.ts
@@ -8,18 +8,24 @@ dotenv.config({ path: path.resolve(__dirname, '.env.local') })
const IS_CI = !!process.env.CI
export default defineConfig({
- timeout: 60 * 1000,
+ timeout: 90 * 1000,
testDir: './features',
testMatch: /.*\.spec\.ts/,
forbidOnly: IS_CI,
retries: IS_CI ? 3 : 0,
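+ // Abort the run after 3 test failures; fullyParallel lets tests within a file run in parallel.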
+ maxFailures: 3,
+ fullyParallel: true,
use: {
baseURL: env.STUDIO_URL,
screenshot: 'off',
video: 'retain-on-failure',
- headless: IS_CI,
+ headless: true,
trace: 'retain-on-failure',
permissions: ['clipboard-read', 'clipboard-write'],
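+ // Vercel Protection Bypass for Automation: send the bypass token with every request; the second header asks Vercel to also set a bypass cookie.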
+ extraHTTPHeaders: {
+ 'x-vercel-protection-bypass': process.env.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO,
+ 'x-vercel-set-bypass-cookie': 'true',
+ },
},
projects: [
{
@@ -34,7 +40,9 @@ export default defineConfig({
use: {
browserName: 'chromium',
screenshot: 'off',
- storageState: STORAGE_STATE_PATH,
+ // Only use storage state if authentication is enabled. When AUTHENTICATION=false
+ // we should not require a pre-generated storage state file.
+ storageState: env.AUTHENTICATION ? STORAGE_STATE_PATH : undefined,
},
},
],
diff --git a/e2e/studio/supabase/.gitignore b/e2e/studio/supabase/.gitignore
new file mode 100644
index 0000000000000..ad9264f0b14b2
--- /dev/null
+++ b/e2e/studio/supabase/.gitignore
@@ -0,0 +1,8 @@
+# Supabase
+.branches
+.temp
+
+# dotenvx
+.env.keys
+.env.local
+.env.*.local
diff --git a/e2e/studio/supabase/config.toml b/e2e/studio/supabase/config.toml
new file mode 100644
index 0000000000000..4450397aae594
--- /dev/null
+++ b/e2e/studio/supabase/config.toml
@@ -0,0 +1,335 @@
+# For detailed configuration reference documentation, visit:
+# https://supabase.com/docs/guides/local-development/cli/config
+# A string used to distinguish different Supabase projects on the same host. Defaults to the
+# working directory name when running `supabase init`.
+project_id = "studio"
+
+[api]
+enabled = true
+# Port to use for the API URL.
+port = 54321
+# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API
+# endpoints. `public` and `graphql_public` schemas are included by default.
+schemas = ["public", "graphql_public"]
+# Extra schemas to add to the search_path of every request.
+extra_search_path = ["public", "extensions"]
+# The maximum number of rows returned from a view, table, or stored procedure. Limits payload size
+# for accidental or malicious requests.
+max_rows = 1000
+
+[api.tls]
+# Enable HTTPS endpoints locally using a self-signed certificate.
+enabled = false
+
+[db]
+# Port to use for the local database URL.
+port = 54322
+# Port used by db diff command to initialize the shadow database.
+shadow_port = 54320
+# The database major version to use. This has to be the same as your remote database's. Run `SHOW
+# server_version;` on the remote database to check.
+major_version = 17
+
+[db.pooler]
+enabled = false
+# Port to use for the local connection pooler.
+port = 54329
+# Specifies when a server connection can be reused by other clients.
+# Configure one of the supported pooler modes: `transaction`, `session`.
+pool_mode = "transaction"
+# How many server connections to allow per user/database pair.
+default_pool_size = 20
+# Maximum number of client connections allowed.
+max_client_conn = 100
+
+# [db.vault]
+# secret_key = "env(SECRET_VALUE)"
+
+[db.migrations]
+# If disabled, migrations will be skipped during a db push or reset.
+enabled = true
+# Specifies an ordered list of schema files that describe your database.
+# Supports glob patterns relative to supabase directory: "./schemas/*.sql"
+schema_paths = []
+
+[db.seed]
+# If enabled, seeds the database after migrations during a db reset.
+enabled = true
+# Specifies an ordered list of seed files to load during db reset.
+# Supports glob patterns relative to supabase directory: "./seeds/*.sql"
+sql_paths = ["./seed.sql"]
+
+[db.network_restrictions]
+# Enable management of network restrictions.
+enabled = false
+# List of IPv4 CIDR blocks allowed to connect to the database.
+# Defaults to allow all IPv4 connections. Set empty array to block all IPs.
+allowed_cidrs = ["0.0.0.0/0"]
+# List of IPv6 CIDR blocks allowed to connect to the database.
+# Defaults to allow all IPv6 connections. Set empty array to block all IPs.
+allowed_cidrs_v6 = ["::/0"]
+
+[realtime]
+enabled = true
+# Bind realtime via either IPv4 or IPv6. (default: IPv4)
+# ip_version = "IPv6"
+# The maximum length in bytes of HTTP request headers. (default: 4096)
+# max_header_length = 4096
+
+[studio]
+enabled = true
+# Port to use for Supabase Studio.
+port = 54323
+# External URL of the API server that frontend connects to.
+api_url = "http://127.0.0.1"
+# OpenAI API Key to use for Supabase AI in the Supabase Studio.
+openai_api_key = "env(OPENAI_API_KEY)"
+
+# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they
+# are monitored, and you can view the emails that would have been sent from the web interface.
+[inbucket]
+enabled = true
+# Port to use for the email testing server web interface.
+port = 54324
+# Uncomment to expose additional ports for testing user applications that send emails.
+# smtp_port = 54325
+# pop3_port = 54326
+# admin_email = "admin@email.com"
+# sender_name = "Admin"
+
+[storage]
+enabled = true
+# The maximum file size allowed (e.g. "5MB", "500KB").
+file_size_limit = "50MiB"
+
+# Image transformation API is available to Supabase Pro plan.
+# [storage.image_transformation]
+# enabled = true
+
+# Uncomment to configure local storage buckets
+# [storage.buckets.images]
+# public = false
+# file_size_limit = "50MiB"
+# allowed_mime_types = ["image/png", "image/jpeg"]
+# objects_path = "./images"
+
+[auth]
+enabled = true
+# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
+# in emails.
+site_url = "http://127.0.0.1:3000"
+# A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
+additional_redirect_urls = ["https://127.0.0.1:3000"]
+# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week).
+jwt_expiry = 3600
+# Path to JWT signing key. DO NOT commit your signing keys file to git.
+# signing_keys_path = "./signing_keys.json"
+# If disabled, the refresh token will never expire.
+enable_refresh_token_rotation = true
+# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds.
+# Requires enable_refresh_token_rotation = true.
+refresh_token_reuse_interval = 10
+# Allow/disallow new user signups to your project.
+enable_signup = true
+# Allow/disallow anonymous sign-ins to your project.
+enable_anonymous_sign_ins = false
+# Allow/disallow testing manual linking of accounts
+enable_manual_linking = false
+# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more.
+minimum_password_length = 6
+# Passwords that do not meet the following requirements will be rejected as weak. Supported values
+# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols`
+password_requirements = ""
+
+[auth.rate_limit]
+# Number of emails that can be sent per hour. Requires auth.email.smtp to be enabled.
+email_sent = 2
+# Number of SMS messages that can be sent per hour. Requires auth.sms to be enabled.
+sms_sent = 30
+# Number of anonymous sign-ins that can be made per hour per IP address. Requires enable_anonymous_sign_ins = true.
+anonymous_users = 30
+# Number of sessions that can be refreshed in a 5 minute interval per IP address.
+token_refresh = 150
+# Number of sign up and sign-in requests that can be made in a 5 minute interval per IP address (excludes anonymous users).
+sign_in_sign_ups = 30
+# Number of OTP / Magic link verifications that can be made in a 5 minute interval per IP address.
+token_verifications = 30
+# Number of Web3 logins that can be made in a 5 minute interval per IP address.
+web3 = 30
+
+# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`.
+# [auth.captcha]
+# enabled = true
+# provider = "hcaptcha"
+# secret = ""
+
+[auth.email]
+# Allow/disallow new user signups via email to your project.
+enable_signup = true
+# If enabled, a user will be required to confirm any email change on both the old, and new email
+# addresses. If disabled, only the new email is required to confirm.
+double_confirm_changes = true
+# If enabled, users need to confirm their email address before signing in.
+enable_confirmations = false
+# If enabled, users will need to reauthenticate or have logged in recently to change their password.
+secure_password_change = false
+# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
+max_frequency = "1s"
+# Number of characters used in the email OTP.
+otp_length = 6
+# Number of seconds before the email OTP expires (defaults to 1 hour).
+otp_expiry = 3600
+
+# Use a production-ready SMTP server
+# [auth.email.smtp]
+# enabled = true
+# host = "smtp.sendgrid.net"
+# port = 587
+# user = "apikey"
+# pass = "env(SENDGRID_API_KEY)"
+# admin_email = "admin@email.com"
+# sender_name = "Admin"
+
+# Uncomment to customize email template
+# [auth.email.template.invite]
+# subject = "You have been invited"
+# content_path = "./supabase/templates/invite.html"
+
+[auth.sms]
+# Allow/disallow new user signups via SMS to your project.
+enable_signup = false
+# If enabled, users need to confirm their phone number before signing in.
+enable_confirmations = false
+# Template for sending OTP to users
+template = "Your code is {{ .Code }}"
+# Controls the minimum amount of time that must pass before sending another sms otp.
+max_frequency = "5s"
+
+# Use pre-defined map of phone number to OTP for testing.
+# [auth.sms.test_otp]
+# 4152127777 = "123456"
+
+# Configure logged in session timeouts.
+# [auth.sessions]
+# Force log out after the specified duration.
+# timebox = "24h"
+# Force log out if the user has been inactive longer than the specified duration.
+# inactivity_timeout = "8h"
+
+# This hook runs before a new user is created and allows developers to reject the request based on the incoming user object.
+# [auth.hook.before_user_created]
+# enabled = true
+# uri = "pg-functions://postgres/auth/before-user-created-hook"
+
+# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
+# [auth.hook.custom_access_token]
+# enabled = true
+# uri = "pg-functions:////"
+
+# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
+[auth.sms.twilio]
+enabled = false
+account_sid = ""
+message_service_sid = ""
+# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
+auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
+
+# Multi-factor-authentication is available to Supabase Pro plan.
+[auth.mfa]
+# Control how many MFA factors can be enrolled at once per user.
+max_enrolled_factors = 10
+
+# Control MFA via App Authenticator (TOTP)
+[auth.mfa.totp]
+enroll_enabled = false
+verify_enabled = false
+
+# Configure MFA via Phone Messaging
+[auth.mfa.phone]
+enroll_enabled = false
+verify_enabled = false
+otp_length = 6
+template = "Your code is {{ .Code }}"
+max_frequency = "5s"
+
+# Configure MFA via WebAuthn
+# [auth.mfa.web_authn]
+# enroll_enabled = true
+# verify_enabled = true
+
+# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
+# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
+# `twitter`, `slack`, `spotify`, `workos`, `zoom`.
+[auth.external.apple]
+enabled = false
+client_id = ""
+# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
+secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
+# Overrides the default auth redirectUrl.
+redirect_uri = ""
+# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
+# or any other third-party OIDC providers.
+url = ""
+# If enabled, the nonce check will be skipped. Required for local sign in with Google auth.
+skip_nonce_check = false
+
+# Allow Solana wallet holders to sign in to your project via the Sign in with Solana (SIWS, EIP-4361) standard.
+# You can configure "web3" rate limit in the [auth.rate_limit] section and set up [auth.captcha] if self-hosting.
+[auth.web3.solana]
+enabled = false
+
+# Use Firebase Auth as a third-party provider alongside Supabase Auth.
+[auth.third_party.firebase]
+enabled = false
+# project_id = "my-firebase-project"
+
+# Use Auth0 as a third-party provider alongside Supabase Auth.
+[auth.third_party.auth0]
+enabled = false
+# tenant = "my-auth0-tenant"
+# tenant_region = "us"
+
+# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth.
+[auth.third_party.aws_cognito]
+enabled = false
+# user_pool_id = "my-user-pool-id"
+# user_pool_region = "us-east-1"
+
+# Use Clerk as a third-party provider alongside Supabase Auth.
+[auth.third_party.clerk]
+enabled = false
+# Obtain from https://clerk.com/setup/supabase
+# domain = "example.clerk.accounts.dev"
+
+[edge_runtime]
+enabled = true
+# Supported request policies: `oneshot`, `per_worker`.
+# `per_worker` (default) — enables hot reload during local development.
+# `oneshot` — fallback mode if hot reload causes issues (e.g. in large repos or with symlinks).
+policy = "per_worker"
+# Port to attach the Chrome inspector for debugging edge functions.
+inspector_port = 8083
+# The Deno major version to use.
+deno_version = 2
+
+# [edge_runtime.secrets]
+# secret_key = "env(SECRET_VALUE)"
+
+[analytics]
+enabled = true
+port = 54327
+# Configure one of the supported backends: `postgres`, `bigquery`.
+backend = "postgres"
+
+# Experimental features may be deprecated any time
+[experimental]
+# Configures Postgres storage engine to use OrioleDB (S3)
+orioledb_version = ""
+# Configures S3 bucket URL, eg. .s3-.amazonaws.com
+s3_host = "env(S3_HOST)"
+# Configures S3 bucket region, eg. us-east-1
+s3_region = "env(S3_REGION)"
+# Configures AWS_ACCESS_KEY_ID for S3 bucket
+s3_access_key = "env(S3_ACCESS_KEY)"
+# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
+s3_secret_key = "env(S3_SECRET_KEY)"
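+# Illustrative shell exports for the env() placeholders above; set them before starting the local stack:
+#   export S3_HOST="<bucket_name>.s3-<region>.amazonaws.com"
+#   export S3_REGION="us-east-1"
+#   export S3_ACCESS_KEY="<access-key-id>"
+#   export S3_SECRET_KEY="<secret-access-key>"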
diff --git a/e2e/studio/utils/test.ts b/e2e/studio/utils/test.ts
index 2d96f75db9c7d..0aa9754ed7e0a 100644
--- a/e2e/studio/utils/test.ts
+++ b/e2e/studio/utils/test.ts
@@ -16,6 +16,6 @@ export interface TestOptions {
export const test = base.extend<TestOptions>({
env: env.STUDIO_URL,
- ref: env.PROJECT_REF,
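+  // Self-hosted Studio exposes a single project whose ref is always 'default', so no PROJECT_REF secret is needed.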
+ ref: 'default',
apiUrl: env.API_URL,
})
diff --git a/e2e/studio/utils/wait-for-response.ts b/e2e/studio/utils/wait-for-response.ts
index b434da4fad6bd..cade0849525e9 100644
--- a/e2e/studio/utils/wait-for-response.ts
+++ b/e2e/studio/utils/wait-for-response.ts
@@ -17,22 +17,75 @@ export async function waitForApiResponse(
action: string,
options?: Options
): Promise<void> {
- // regex trims "/" both start and end.
+ return createApiResponseWaiter(page, basePath, ref, action, options)
+}
+
+function buildUrlMatcher(basePath: string, ref: string, action: string, method?: HttpMethod) {
+ // Normalize inputs and build a tolerant matcher that works across environments
const trimmedBasePath = basePath.replace(/^\/+|\/+$/g, '')
- const httpMethod = options?.method
+ const refAlternatives = [ref, 'default']
+
+ return (response: any) => {
+ const url = response.url()
+ const requestMethod = response.request().method()
+
+ // Must include base path and one of the ref alternatives
+ const hasBasePath = url.includes(`${trimmedBasePath}/`)
+ const hasRef = refAlternatives.some((r) => url.includes(`/${r}/`))
- await page.waitForResponse((response) => {
- const urlMatches =
- response.url().includes(`${trimmedBasePath}/${ref}/${action}`) ||
- response.url().includes(`${trimmedBasePath}/default/${action}`)
+    // Action match should be tolerant of extra query params and their ordering
+ const hasAction = url.includes(action)
+
+ const urlMatches = hasBasePath && hasRef && hasAction
+ if (method) return urlMatches && requestMethod === method
+ return urlMatches
+ }
+}
+
+/**
+ * Starts listening for a specific API response and returns a promise you can await later.
+ * Use this to avoid races by creating the waiter BEFORE triggering navigation/clicks.
+ *
+ * Example:
+ * const wait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=schemas')
+ * await page.goto(...)
+ * await wait
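+ *
+ * A soft wait that won't fail the test if the response never arrives (action string is illustrative):
+ *   const softWait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=tables', {
+ *     soft: true,
+ *     fallbackWaitMs: 2000,
+ *   })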
+ */
+export function createApiResponseWaiter(
+ page: Page,
+ basePath: string,
+ ref: string,
+ action: string,
+ options?: Options
+): Promise<void> {
+ const matcher = buildUrlMatcher(basePath, ref, action, options?.method)
- // checks HTTP method if exists
- return httpMethod ? urlMatches && response.request().method() === httpMethod : urlMatches
- })
+ return page
+ .waitForResponse(matcher, { timeout: options?.timeout })
+ .then(() => {})
+ .catch((error) => {
+ const trimmedBasePath = basePath.replace(/^\/+|\/+$/g, '')
+ const message = `Error waiting for response: ${error}. Method: ${options?.method}, URL contains: ${trimmedBasePath}/(default|${ref})/${action}`
+ if (options?.soft) {
+ console.warn(`[soft-wait] ${message}`)
+ const fallback = options?.fallbackWaitMs ?? 0
+ if (fallback > 0) {
+ return page.waitForTimeout(fallback).then(() => {})
+ }
+ return
+ } else {
+ console.error(message)
+ throw error
+ }
+ })
}
type Options = {
method?: HttpMethod
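+  // Max time in ms to wait for the response; when omitted, Playwright's default timeout applies.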
+ timeout?: number
+ // When true, do not throw on timeout/error; optionally wait fallbackWaitMs and continue
+ soft?: boolean
+ fallbackWaitMs?: number
}
export async function waitForTableToLoad(page: Page, ref: string, schema?: string) {
diff --git a/packages/api-types/package.json b/packages/api-types/package.json
index e69d0b6063ce1..e52ae5cd19b42 100644
--- a/packages/api-types/package.json
+++ b/packages/api-types/package.json
@@ -13,6 +13,7 @@
"license": "MIT",
"devDependencies": {
"openapi-typescript": "^7.4.3",
- "prettier": "3.2.4"
+ "prettier": "3.2.4",
+ "typescript": "~5.5.0"
}
}
diff --git a/packages/config/package.json b/packages/config/package.json
index 1d5d26c284747..4976450ada9f1 100644
--- a/packages/config/package.json
+++ b/packages/config/package.json
@@ -17,7 +17,8 @@
"tailwindcss-radix": "^2.0.0"
},
"devDependencies": {
- "tailwindcss": "^3.4.1",
- "tailwindcss-animate": "^1.0.6"
+ "tailwindcss": "catalog:",
+ "tailwindcss-animate": "^1.0.6",
+ "typescript": "~5.5.0"
}
}
diff --git a/packages/eslint-config-supabase/package.json b/packages/eslint-config-supabase/package.json
index ff4a28f167cbe..ad473e5d296a8 100644
--- a/packages/eslint-config-supabase/package.json
+++ b/packages/eslint-config-supabase/package.json
@@ -11,5 +11,8 @@
"eslint-config-next": "15.3.1",
"eslint-config-prettier": "^9.1.0",
"eslint-config-turbo": "^2.0.4"
+ },
+ "devDependencies": {
+ "typescript": "~5.5.0"
}
}
diff --git a/packages/ui/package.json b/packages/ui/package.json
index 48cab5d0e95da..2ce39e2db26aa 100644
--- a/packages/ui/package.json
+++ b/packages/ui/package.json
@@ -76,7 +76,7 @@
"recharts": "^2.12.7",
"sonner": "^1.5.0",
"tailwind-merge": "^1.13.2",
- "tailwindcss": "^3.4.1",
+ "tailwindcss": "catalog:",
"vaul": "^0.9.9"
},
"devDependencies": {
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 6a59749143a07..48cfa1c5e3d74 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -33,6 +33,9 @@ catalogs:
react-dom:
specifier: ^18.3.0
version: 18.3.1
+ tailwindcss:
+ specifier: 3.4.1
+ version: 3.4.1
valtio:
specifier: ^1.12.0
version: 1.12.0
@@ -324,7 +327,7 @@ importers:
specifier: ^1.1.7
version: 1.6.0
tailwindcss:
- specifier: ^3.3.0
+ specifier: 'catalog:'
version: 3.4.1(ts-node@10.9.2(@types/node@22.13.14)(typescript@5.5.2))
tsconfig:
specifier: workspace:*
@@ -1211,7 +1214,7 @@ importers:
specifier: ^7.5.2
version: 7.5.2(supports-color@8.1.1)
tailwindcss:
- specifier: ^3.4.1
+ specifier: 'catalog:'
version: 3.4.1(ts-node@10.9.2(@types/node@22.13.14)(typescript@5.5.2))
tsx:
specifier: ^4.19.3
@@ -1494,7 +1497,7 @@ importers:
specifier: ^1.1.7
version: 1.6.0
tailwindcss:
- specifier: ^3.3.0
+ specifier: 'catalog:'
version: 3.4.1(ts-node@10.9.2(@types/node@22.13.14)(typescript@5.5.2))
tsconfig:
specifier: workspace:*
@@ -1843,6 +1846,9 @@ importers:
prettier:
specifier: 3.2.4
version: 3.2.4
+ typescript:
+ specifier: ~5.5.0
+ version: 5.5.2
packages/build-icons:
devDependencies:
@@ -1963,11 +1969,14 @@ importers:
version: 2.8.0
devDependencies:
tailwindcss:
- specifier: ^3.4.1
+ specifier: 'catalog:'
version: 3.4.1(ts-node@10.9.2(@types/node@22.13.14)(typescript@5.5.2))
tailwindcss-animate:
specifier: ^1.0.6
version: 1.0.7(tailwindcss@3.4.1(ts-node@10.9.2(@types/node@22.13.14)(typescript@5.5.2)))
+ typescript:
+ specifier: ~5.5.0
+ version: 5.5.2
packages/eslint-config-supabase:
dependencies:
@@ -1980,6 +1989,10 @@ importers:
eslint-config-turbo:
specifier: ^2.0.4
version: 2.0.4(eslint@8.57.0(supports-color@8.1.1))
+ devDependencies:
+ typescript:
+ specifier: ~5.5.0
+ version: 5.5.2
packages/generator:
devDependencies:
@@ -2233,7 +2246,7 @@ importers:
specifier: ^1.13.2
version: 1.14.0
tailwindcss:
- specifier: ^3.4.1
+ specifier: 'catalog:'
version: 3.4.1(ts-node@10.9.2(@types/node@22.13.14)(typescript@5.5.2))
vaul:
specifier: ^0.9.9
@@ -4278,10 +4291,6 @@ packages:
resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
- '@jridgewell/gen-mapping@0.3.5':
- resolution: {integrity: sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==}
- engines: {node: '>=6.0.0'}
-
'@jridgewell/gen-mapping@0.3.8':
resolution: {integrity: sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==}
engines: {node: '>=6.0.0'}
@@ -13013,10 +13022,6 @@ packages:
resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
engines: {node: '>= 10.13.0'}
- jiti@1.20.0:
- resolution: {integrity: sha512-3TV69ZbrvV6U5DfQimop50jE9Dl6J8O1ja1dvBbMba/sZ3YBEQqJ2VZRoQPVnhlzjNtU1vaXRZVrVjU4qtm8yA==}
- hasBin: true
-
jiti@1.21.7:
resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==}
hasBin: true
@@ -14974,9 +14979,6 @@ packages:
phenomenon@1.6.0:
resolution: {integrity: sha512-7h9/fjPD3qNlgggzm88cY58l9sudZ6Ey+UmZsizfhtawO6E3srZQXywaNm2lBwT72TbpHYRPy7ytIHeBUD/G0A==}
- picocolors@1.0.1:
- resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==}
-
picocolors@1.1.1:
resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
@@ -21423,12 +21425,6 @@ snapshots:
dependencies:
'@sinclair/typebox': 0.27.8
- '@jridgewell/gen-mapping@0.3.5':
- dependencies:
- '@jridgewell/set-array': 1.2.1
- '@jridgewell/sourcemap-codec': 1.5.0
- '@jridgewell/trace-mapping': 0.3.25
-
'@jridgewell/gen-mapping@0.3.8':
dependencies:
'@jridgewell/set-array': 1.2.1
@@ -27012,7 +27008,7 @@ snapshots:
'@ts-morph/common@0.23.0':
dependencies:
- fast-glob: 3.3.2
+ fast-glob: 3.3.3
minimatch: 9.0.5
mkdirp: 3.0.1
path-browserify: 1.0.1
@@ -32234,8 +32230,6 @@ snapshots:
merge-stream: 2.0.0
supports-color: 8.1.1
- jiti@1.20.0: {}
-
jiti@1.21.7: {}
jiti@2.4.2: {}
@@ -35026,8 +35020,6 @@ snapshots:
phenomenon@1.6.0: {}
- picocolors@1.0.1: {}
-
picocolors@1.1.1: {}
picomatch@2.3.1: {}
@@ -37400,7 +37392,7 @@ snapshots:
sucrase@3.34.0:
dependencies:
- '@jridgewell/gen-mapping': 0.3.5
+ '@jridgewell/gen-mapping': 0.3.8
commander: 4.1.1
glob: 7.1.6
lines-and-columns: 1.2.4
@@ -37497,25 +37489,25 @@ snapshots:
dependencies:
'@alloc/quick-lru': 5.2.0
arg: 5.0.2
- chokidar: 3.5.3
+ chokidar: 3.6.0
didyoumean: 1.2.2
dlv: 1.1.3
- fast-glob: 3.3.2
+ fast-glob: 3.3.3
glob-parent: 6.0.2
is-glob: 4.0.3
- jiti: 1.20.0
+ jiti: 1.21.7
lilconfig: 2.1.0
micromatch: 4.0.8
normalize-path: 3.0.0
object-hash: 3.0.0
- picocolors: 1.0.1
+ picocolors: 1.1.1
postcss: 8.5.3
postcss-import: 15.1.0(postcss@8.5.3)
postcss-js: 4.0.1(postcss@8.5.3)
postcss-load-config: 4.0.1(postcss@8.5.3)(ts-node@10.9.2(@types/node@22.13.14)(typescript@5.5.2))
postcss-nested: 6.0.1(postcss@8.5.3)
postcss-selector-parser: 6.0.13
- resolve: 1.22.8
+ resolve: 1.22.10
sucrase: 3.34.0
transitivePeerDependencies:
- ts-node
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
index 1fe15fdc9284f..fae691358f489 100644
--- a/pnpm-workspace.yaml
+++ b/pnpm-workspace.yaml
@@ -16,6 +16,7 @@ catalog:
'valtio': '^1.12.0'
'vite': '^6.2.7'
'zod': '^3.25.76'
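+  # Pinned so every workspace package that declares "tailwindcss": "catalog:" resolves the same version.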
+ 'tailwindcss': '3.4.1'
minimumReleaseAge: 10080
minimumReleaseAgeExclude: