diff --git a/.cursor/rules/docs-graphql.mdc b/.cursor/rules/docs-graphql.mdc new file mode 100644 index 0000000000000..d8bcbc5d932bc --- /dev/null +++ b/.cursor/rules/docs-graphql.mdc @@ -0,0 +1,267 @@ +--- +description: Docs GraphQL Architecture +globs: apps/docs/resources/**/*.ts +alwaysApply: false +--- + +# Docs GraphQL Architecture + +## Overview + +The `/apps/docs/resources` folder contains the GraphQL endpoint architecture for the docs GraphQL endpoint at `/api/graphql`. It follows a modular pattern where each top-level query is organized into its own folder with consistent file structure. + +## Architecture Pattern + +Each GraphQL query follows this structure: + +``` +resources/ +├── queryObject/ +│ ├── queryObjectModel.ts # Data models and business logic +│ ├── queryObjectSchema.ts # GraphQL type definitions +│ ├── queryObjectResolver.ts # Query resolver and arguments +│ ├── queryObjectTypes.ts # TypeScript interfaces (optional) +│ └── queryObjectSync.ts # Functions for syncing repo content to the database (optional) +├── utils/ +│ ├── connections.ts # GraphQL connection/pagination utilities +│ └── fields.ts # GraphQL field selection utilities +├── rootSchema.ts # Main GraphQL schema with all queries +└── rootSync.ts # Root sync script for syncing to database +``` + +## Example queries + +1. **searchDocs** (`globalSearch/`) - Vector-based search across all docs content +2. **error** (`error/`) - Error code lookup for Supabase services +3. 
**schema** - GraphQL schema introspection + +## Key Files + +### `rootSchema.ts` +- Main GraphQL schema definition +- Imports all resolvers and combines them into the root query +- Defines the `RootQueryType` with all top-level fields + +### `utils/connections.ts` +- Provides `createCollectionType()` for paginated collections +- `GraphQLCollectionBuilder` for building collection responses +- Standard pagination arguments and edge/node patterns + +### `utils/fields.ts` +- `graphQLFields()` utility to analyze requested fields in resolvers +- Used for optimizing data fetching based on what fields are actually requested + +## Creating a New Top-Level Query + +To add a new GraphQL query, follow these steps: + +### 1. Create Query Folder Structure +```bash +mkdir resources/newQuery +touch resources/newQuery/newQueryModel.ts +touch resources/newQuery/newQuerySchema.ts +touch resources/newQuery/newQueryResolver.ts +``` + +### 2. Define GraphQL Schema (`newQuerySchema.ts`) +```typescript +import { GraphQLObjectType, GraphQLString } from 'graphql' + +export const GRAPHQL_FIELD_NEW_QUERY = 'newQuery' as const + +export const GraphQLObjectTypeNewQuery = new GraphQLObjectType({ + name: 'NewQuery', + description: 'Description of what this query returns', + fields: { + id: { + type: GraphQLString, + description: 'Unique identifier', + }, + // Add other fields... + }, +}) +``` + +### 3. Create Data Model (`newQueryModel.ts`) + +> [!NOTE] +> The data model should be agnostic to GraphQL. It may import argument types +> from `~/__generated__/graphql`, but otherwise all functions and classes +> should be unaware of whether they are called for GraphQL resolution. + +> [!TIP] +> The types in `~/__generated__/graphql` for a new endpoint will not exist +> until the code generation is run in the next step. 
+ +```typescript +import { type RootQueryTypeNewQueryArgs } from '~/__generated__/graphql' +import { convertPostgrestToApiError, type ApiErrorGeneric } from '~/app/api/utils' +import { Result } from '~/features/helpers.fn' +import { supabase } from '~/lib/supabase' + +export class NewQueryModel { + constructor(public readonly data: { + id: string + // other properties... + }) {} + + static async loadData( + args: RootQueryTypeNewQueryArgs, + requestedFields: Array + ): Promise> { + // Implement data fetching logic + const result = new Result( + await supabase() + .from('your_table') + .select('*') + // Add filters based on args + ) + .map((data) => data.map((item) => new NewQueryModel(item))) + .mapError(convertPostgrestToApiError) + + return result + } +} +``` + +### 4. Create Resolver (`newQueryResolver.ts`) +```typescript +import { GraphQLError, GraphQLNonNull, GraphQLString, type GraphQLResolveInfo } from 'graphql' +import { type RootQueryTypeNewQueryArgs } from '~/__generated__/graphql' +import { convertUnknownToApiError } from '~/app/api/utils' +import { Result } from '~/features/helpers.fn' +import { graphQLFields } from '../utils/fields' +import { NewQueryModel } from './newQueryModel' +import { GRAPHQL_FIELD_NEW_QUERY, GraphQLObjectTypeNewQuery } from './newQuerySchema' + +async function resolveNewQuery( + _parent: unknown, + args: RootQueryTypeNewQueryArgs, + _context: unknown, + info: GraphQLResolveInfo +): Promise { + return ( + await Result.tryCatchFlat( + resolveNewQueryImpl, + convertUnknownToApiError, + args, + info + ) + ).match( + (data) => data, + (error) => { + console.error(`Error resolving ${GRAPHQL_FIELD_NEW_QUERY}:`, error) + return new GraphQLError(error.isPrivate() ? 
'Internal Server Error' : error.message) + } + ) +} + +async function resolveNewQueryImpl( + args: RootQueryTypeNewQueryArgs, + info: GraphQLResolveInfo +): Promise> { + const fieldsInfo = graphQLFields(info) + const requestedFields = Object.keys(fieldsInfo) + return await NewQueryModel.loadData(args, requestedFields) +} + +export const newQueryRoot = { + [GRAPHQL_FIELD_NEW_QUERY]: { + description: 'Description of what this query does', + args: { + id: { + type: new GraphQLNonNull(GraphQLString), + description: 'Required argument description', + }, + // Add other arguments... + }, + type: GraphQLObjectTypeNewQuery, // or createCollectionType() for lists + resolve: resolveNewQuery, + }, +} +``` + +### 5. Register in Root Schema +In `rootSchema.ts`, add your resolver: + +```typescript +// Import your resolver +import { newQueryRoot } from './newQuery/newQueryResolver' + +// Add to the query fields +export const rootGraphQLSchema = new GraphQLSchema({ + query: new GraphQLObjectType({ + name: 'RootQueryType', + fields: { + ...introspectRoot, + ...searchRoot, + ...errorRoot, + ...newQueryRoot, // Add this line + }, + }), + types: [ + GraphQLObjectTypeGuide, + GraphQLObjectTypeReferenceCLICommand, + GraphQLObjectTypeReferenceSDKFunction, + GraphQLObjectTypeTroubleshooting, + ], +}) +``` + +### 6. Update TypeScript Types +Run the GraphQL codegen to update TypeScript types: +```bash +pnpm run -F docs codegen:graphql +``` + +## Best Practices + +1. **Error Handling**: Error handling always uses the Result class, defined in apps/docs/features/helpers.fn.ts +2. **Field Optimization**: Use `graphQLFields()` to only fetch requested data +3. **Collections**: Use `createCollectionType()` for paginated lists +4. **Naming**: Use `GRAPHQL_FIELD_*` constants for field names +5. **Documentation**: Add GraphQL descriptions to all fields and types +6. 
**Database**: Use `supabase()` client for database operations with `convertPostgrestToApiError` + +## Testing + +Tests are located in apps/docs/app/api/graphql/tests. Each top-level query +should have its own test file, located at `<queryName>.test.ts`. + +### Test data + +Test data uses a local database, seeded with the file at supabase/seed.sql. Add +any data required for running your new query. + +### Integration tests + +Integration tests import the POST function defined in +apps/docs/app/api/graphql/route.ts, then make a request to this function. + +For example: + +```ts +import { POST } from '../route' + +it('test name', async () => { + const query = ` + query { + ... + } + ` + const request = new Request('http://localhost/api/graphql', { + method: 'POST', + body: JSON.stringify({ query }), + }) + + const result = await POST(request) +}) +``` + +Include at least the following tests: + +1. A test that requests all fields (including nested fields) on the new query + object, and asserts that there are no errors, and the requested fields are + properly returned. +2. A test that triggers an error, and asserts that a GraphQL error is properly + returned. diff --git a/.cursor/rules/docs-test-requirements.mdc b/.cursor/rules/docs-test-requirements.mdc new file mode 100644 index 0000000000000..f76ca2ba7cff6 --- /dev/null +++ b/.cursor/rules/docs-test-requirements.mdc @@ -0,0 +1,71 @@ +--- +description: Docs Testing Procedure +globs: apps/docs/**/*.test.ts +alwaysApply: false +--- + +# Docs Test Requirements + +Rules for running tests in the docs application, ensuring proper Supabase setup and test execution.
+ + +name: docs_test_requirements +description: Standards for running tests in the docs application with proper Supabase setup +filters: + # Match test files in the docs app + - type: file_extension + pattern: "\\.(test|spec)\\.(ts|tsx)$" + - type: path + pattern: "^apps/docs/.*" + # Match test execution events + - type: event + pattern: "test_execution" + +actions: + - type: suggest + message: | + Before running tests in the docs app: + + 1. Check Supabase status: + ```bash + pnpm supabase status + ``` + + 2. If Supabase is not running: + ```bash + pnpm supabase start + ``` + + 3. Reset the database to ensure clean state: + ```bash + pnpm supabase db reset --local + ``` + + 4. Run the tests: + ```bash + pnpm run -F docs test:local:unwatch + ``` + + Important notes: + - Always ensure Supabase is running before tests + - Database must be reset to ensure clean state + - Use test:local:unwatch to run tests without watch mode + - Tests are located in apps/docs/**/*.{test,spec}.{ts,tsx} + +examples: + - input: | + # Bad: Running tests without proper setup + pnpm run -F docs test + pnpm run -F docs test:local + + # Good: Proper test execution sequence + pnpm supabase status + pnpm supabase start # if not running + pnpm supabase db reset --local + pnpm run -F docs test:local:unwatch + output: "Correctly executed docs tests with proper Supabase setup" + +metadata: + priority: high + version: 1.0 + diff --git a/.cursor/rules/unit-integration-testing.mdc b/.cursor/rules/unit-integration-testing.mdc new file mode 100644 index 0000000000000..e3a659a7c53dd --- /dev/null +++ b/.cursor/rules/unit-integration-testing.mdc @@ -0,0 +1,6 @@ +--- +description: +globs: apps/studio/**/*.test.ts,apps/studio/**/*.test.tsx +alwaysApply: false +--- +Make sure to follow the guidelines in this file to write tests: [README.md](mdc:apps/studio/tests/README.md) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 92882cec37692..436f6aeeff6dc 100644 --- a/.github/CODEOWNERS +++ 
b/.github/CODEOWNERS @@ -1,5 +1,7 @@ +/packages/ui/ @supabase/design /packages/shared-data/pricing.ts @roryw10 @kevcodez /packages/shared-data/plans.ts @roryw10 @kevcodez +/packages/common/telemetry-constants.ts @4L3k51 @supabase/growth-eng /apps/studio/ @supabase/Dashboard @@ -17,5 +19,3 @@ /apps/studio/components/interfaces/Organization/BillingSettings/ @supabase/security /apps/studio/components/interfaces/Organization/Documents/ @supabase/security /apps/studio/pages/new/index.tsx @supabase/security - -/apps/studio/lib/constants/telemetry.ts @4L3k51 @loong @pamelachia \ No newline at end of file diff --git a/.github/workflows/ai-tests.yml b/.github/workflows/ai-tests.yml index 357b2600c328c..6684dc0949c72 100644 --- a/.github/workflows/ai-tests.yml +++ b/.github/workflows/ai-tests.yml @@ -17,6 +17,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: test: runs-on: ubuntu-latest @@ -33,14 +36,18 @@ jobs: with: sparse-checkout: | packages + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false - name: Use Node.js uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - cache: 'npm' + cache: 'pnpm' - name: Install deps - run: npm ci + run: pnpm i - name: Type check - run: npm run typecheck + run: pnpm run typecheck - name: Run tests - run: npm run test + run: pnpm run test diff --git a/.github/workflows/auto-label-issues.yml b/.github/workflows/auto-label-issues.yml index d5b441349a484..59dfb2f88c3d4 100644 --- a/.github/workflows/auto-label-issues.yml +++ b/.github/workflows/auto-label-issues.yml @@ -42,4 +42,31 @@ jobs: run: | echo "Applying triage label for new issue" gh issue edit "$NUMBER" --add-label "$LABELS" - + spam-detection: + runs-on: ubuntu-latest + permissions: + issues: write + steps: + - name: Check GitHub Issue for spam + env: + POST_URL: ${{ secrets.POST_URL }} + BEARER_TOKEN: ${{ secrets.BEARER_TOKEN }} + 
NUMBER: ${{ github.event.issue.number }} + run: | + RESPONSE=$(curl -s -X POST "$POST_URL" \ + -H "Authorization: Bearer $BEARER_TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"issue_id\": $NUMBER}") + echo "SPAM_RESPONSE=$RESPONSE" >> "$GITHUB_ENV" + - name: Use spam detector output to label issue + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPO: ${{ github.repository }} + NUMBER: ${{ github.event.issue.number }} + LABEL: flagged + run: | + IS_SPAM=$(echo "$SPAM_RESPONSE" | jq -r '.spam') + if [ "$IS_SPAM" == "true" ]; then + echo "Applying flagged label for new issue suspected of spam" + gh issue edit "$NUMBER" --add-label "$LABEL" + fi diff --git a/.github/workflows/autofix_linters.yml b/.github/workflows/autofix_linters.yml index d181e750d8051..9b2f1d5c6b552 100644 --- a/.github/workflows/autofix_linters.yml +++ b/.github/workflows/autofix_linters.yml @@ -5,44 +5,50 @@ on: pull_request: types: - labeled -permissions: - contents: write - # Cancel old builds on new commit for same workflow + branch/PR concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: autofix: runs-on: ubuntu-latest + permissions: + contents: write if: ${{ github.event_name == 'pull_request' && (github.event.label.name == 'autofix') }} steps: + - name: Calculate number of commits + run: echo "PR_FETCH_DEPTH=$(( ${{ github.event.pull_request.commits }} + 1 ))" >> "${GITHUB_ENV}" + - uses: actions/checkout@v4 with: - sparse-checkout: apps ref: ${{ github.head_ref }} + token: ${{ secrets.PAT_AUTOFIX }} + fetch-depth: ${{ env.PR_FETCH_DEPTH }} + sparse-checkout: | + packages + apps + + - uses: pnpm/action-setup@v4 + name: Install pnpm - uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' + cache: pnpm - name: Install required dependencies - run: npm ci + run: pnpm i - name: Run Prettier in fix mode - run: npm run format + run: pnpm run format - name: Commit changes and push to
existing branch - run: | - git config --global user.name 'github-tidy-bot' - git config --global user.email 'github-tidy-bot@supabase.com' - if [[ `git status --porcelain` ]]; then - echo "[bot] Changes detected, committing." - - echo "[bot] Running in non-squash mode." - git commit -am "ci: Autofix updates from GitHub workflow" - git push - else - echo "[bot] No changes detected, nothing to commit." - fi + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "ci: Autofix updates from GitHub workflow" + commit_user_name: "kevcodez" + commit_user_email: "k.grueneberg1994@gmail.com" diff --git a/.github/workflows/avoid-typos.yml b/.github/workflows/avoid-typos.yml index 8b4f6acaf440e..3a03edd58f56b 100644 --- a/.github/workflows/avoid-typos.yml +++ b/.github/workflows/avoid-typos.yml @@ -8,6 +8,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: misspell: name: runner / misspell @@ -25,7 +28,7 @@ jobs: exclude: | *.css **/package.json - **/package-lock.json + **/pnpm-lock.yaml ./.git/* *.ipynb ./i18n/README.*.md diff --git a/.github/workflows/docs-last-changed.yml b/.github/workflows/docs-last-changed.yml index 71764de9626b9..5025e4b0d1051 100644 --- a/.github/workflows/docs-last-changed.yml +++ b/.github/workflows/docs-last-changed.yml @@ -10,13 +10,16 @@ on: required: false type: boolean +permissions: + contents: read + jobs: deploy: runs-on: ubuntu-latest env: NEXT_PUBLIC_SUPABASE_URL: ${{ secrets.SEARCH_SUPABASE_URL }} - SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SEARCH_SUPABASE_SERVICE_ROLE_KEY }} + SUPABASE_SECRET_KEY: ${{ secrets.SEARCH_SUPABASE_SERVICE_ROLE_KEY }} steps: - name: Check out repo @@ -26,20 +29,25 @@ jobs: sparse-checkout: | apps/docs + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + - name: Setup node uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - name: Download 
dependencies - run: npm ci + run: pnpm i - name: Update last-changed dates working-directory: ./apps/docs if: ${{ !inputs.reset }} - run: npm run last-changed + run: pnpm run last-changed - name: Reset last-changed dates working-directory: ./apps/docs if: ${{ inputs.reset }} - run: npm run last-changed:reset + run: pnpm run last-changed:reset diff --git a/.github/workflows/docs-lint-v2-comment.yml b/.github/workflows/docs-lint-v2-comment.yml new file mode 100644 index 0000000000000..cd882ce80bc7c --- /dev/null +++ b/.github/workflows/docs-lint-v2-comment.yml @@ -0,0 +1,67 @@ +name: docs_lint_comment_external + +# This is a continuation of ./docs-lint-v2.yml, to write comments on external +# PRs. +# +# SECURITY: +# This workflow runs with write permissions, in the context of code from an +# external PR. This is safe because no external code is executed. The +# stringified Markdown output from the linter (downloaded as an artifact) is +# directly written as the body of a PR comment. + +on: + workflow_run: + workflows: [docs_lint] + types: + - completed + +permissions: + pull-requests: write + +jobs: + comment_on_pr: + runs-on: ubuntu-latest + if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'failure' + steps: + - id: download_artifact + name: 'Download artifact' + uses: actions/github-script@v7 + with: + script: | + const artifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{ github.event.workflow_run.id }} + }); + const matchingArtifact = artifacts?.data?.artifacts?.find( + (artifact) => artifact.name == 'lint_results' + ); + if (matchingArtifact) { + core.setOutput('contains_results', 'true') + const download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchingArtifact.id, + archive_format: 'zip', + }); + const fs = require('fs'); + fs.writeFileSync('${{ 
github.workspace }}/lint_results.zip', Buffer.from(download.data)); + } + - id: unzip_results + name: Unzip results file + if: steps.download_artifact.outputs.contains_results == 'true' + run: unzip lint_results.zip + - name: 'Comment on PR' + if: steps.download_artifact.outputs.contains_results == 'true' + uses: actions/github-script@v7 + with: + script: | + const fs = require('fs'); + const prNumber = Number(fs.readFileSync('./pr_number.txt')); + const lintResults = fs.readFileSync('./lint_results.txt', 'utf8'); + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + body: lintResults + }); diff --git a/.github/workflows/docs-lint-v2-scheduled.yml b/.github/workflows/docs-lint-v2-scheduled.yml new file mode 100644 index 0000000000000..f476dfcf92094 --- /dev/null +++ b/.github/workflows/docs-lint-v2-scheduled.yml @@ -0,0 +1,61 @@ +name: '[Docs] Lint v2 (scheduled)' +on: + schedule: + - cron: '0 0 * * *' + workflow_dispatch: + +env: + CARGO_NET_GIT_FETCH_WITH_CLI: true + +permissions: + contents: write + pull-requests: write + +jobs: + lint-all: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + sparse-checkout: | + supa-mdx-lint.config.toml + supa-mdx-lint + apps/docs/content + - name: cache cargo + id: cache-cargo + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + key: b848086c298be920a40aa9b26c65c7575ae8deca + - name: install linter + if: steps.cache-cargo.outputs.cache-hit != 'true' + run: cargo install --locked --git https://github.com/supabase-community/supa-mdx-lint --rev b848086c298be920a40aa9b26c65c7575ae8deca + - name: run linter + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + supa-mdx-lint apps/docs/content || { + echo "Linter failed, attempting to fix errors..." 
+ git config --global user.name 'github-docs-bot' + git config --global user.email 'github-docs-bot@supabase.com' + BRANCH_NAME="bot/docs-lint-fixes" + EXISTING_BRANCH=$(git ls-remote --heads origin $BRANCH_NAME) + if [[ -n "$EXISTING_BRANCH" ]]; then + git push origin --delete $BRANCH_NAME + fi + git checkout -b $BRANCH_NAME + supa-mdx-lint apps/docs/content --fix || FIX_FAILED=1 + git add . + git commit -m '[bot] fix lint errors' || true + git push origin $BRANCH_NAME + gh pr create --title '[bot] fix lint errors' --body 'This PR fixes lint errors in the documentation.' --head $BRANCH_NAME + if [ "${FIX_FAILED:-0}" -eq 1 ]; then + echo "Fix did not correct all errors." + exit 1 + fi + } diff --git a/.github/workflows/docs-lint-v2.yml b/.github/workflows/docs-lint-v2.yml index 5ec6e0d312745..4ceb1f821026c 100644 --- a/.github/workflows/docs-lint-v2.yml +++ b/.github/workflows/docs-lint-v2.yml @@ -1,20 +1,26 @@ -name: '[Docs] Lint v2' +name: docs_lint + +# Runs the docs linter on PRs that edit docs content. +# There are two branches of this workflow for internal and external PRs, due +# to the security design of GitHub Actions. +# +# Internal PRs: +# Have write permissions, so comments are written directly by reviewdog. +# +# External PRs: +# Have read-only permissions, so lint results are uploaded as an artifact, to +# be written to the PR in a subsequent workflow_run action that has write +# permissions. See ./docs-lint-v2-comment.yml.
+# +# See https://securitylab.github.com/resources/github-actions-preventing-pwn-requests/ + on: pull_request: - paths: - - '.github/workflows/docs-lint-v2.yml' - - 'supa-mdx-lint.config.toml' - - 'apps/docs/content/**' - push: - branches: - - master - workflow_dispatch: env: CARGO_NET_GIT_FETCH_WITH_CLI: true permissions: - contents: write pull-requests: write jobs: @@ -22,15 +28,24 @@ jobs: name: supa-mdx-lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 sparse-checkout: | supa-mdx-lint.config.toml supa-mdx-lint apps/docs/content + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 #v3.0.2 + id: filter + with: + filters: | + docs: + - 'apps/docs/content/**' + - 'supa-mdx-lint/**' + - 'supa-mdx-lint.config.toml' - name: cache cargo id: cache-cargo + if: steps.filter.outputs.docs == 'true' uses: actions/cache@v4 with: path: | @@ -38,50 +53,55 @@ jobs: ~/.cargo/registry/index/ ~/.cargo/registry/cache/ ~/.cargo/git/db/ - key: 6435a4cd1eeea7c2bbd343731de7e8a5127cb2d1 + key: b848086c298be920a40aa9b26c65c7575ae8deca - name: install linter - if: steps.cache-cargo.outputs.cache-hit != 'true' - run: cargo install --locked --git https://github.com/supabase-community/supa-mdx-lint --rev 6435a4cd1eeea7c2bbd343731de7e8a5127cb2d1 + if: steps.filter.outputs.docs == 'true' && steps.cache-cargo.outputs.cache-hit != 'true' + run: cargo install --locked --git https://github.com/supabase-community/supa-mdx-lint --rev b848086c298be920a40aa9b26c65c7575ae8deca - name: install reviewdog + if: steps.filter.outputs.docs == 'true' uses: reviewdog/action-setup@3f401fe1d58fe77e10d665ab713057375e39b887 # v1.3.0 with: reviewdog_version: v0.20.2 - - name: run linter on PR - if: github.event_name == 'pull_request' + - name: run linter (internal) + if: steps.filter.outputs.docs == 'true' && github.event.pull_request.head.repo.full_name == github.repository env: BASE_REF: ${{ github.base_ref }} 
REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | set -o pipefail - git diff --name-only origin/$BASE_REF HEAD \ - | { grep -E "^apps/docs/content/guides/" || test $? = 1; } \ + git diff --name-only "origin/$BASE_REF" HEAD \ + | { grep -E "^apps/docs/content/" || test $? = 1; } \ | xargs -r supa-mdx-lint --format rdf \ - | reviewdog -f=rdjsonl -reporter=github-pr-review - - name: run linter on push or workflow dispatch - if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' + | reviewdog -f=rdjsonl -reporter=github-pr-review -tee + - id: external_lint + name: run linter (external) + if: steps.filter.outputs.docs == 'true' && github.event.pull_request.head.repo.full_name != github.repository env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BASE_REF: ${{ github.base_ref }} + PR_NUMBER: ${{ github.event.pull_request.number }} run: | - supa-mdx-lint apps/docs/content || { - echo "Linter failed, attempting to fix errors..." - git config --global user.name 'github-docs-bot' - git config --global user.email 'github-docs-bot@supabase.com' - BRANCH_NAME="bot/docs-lint-fixes" - EXISTING_BRANCH=$(git ls-remote --heads origin $BRANCH_NAME) - if [ -n "$EXISTING_BRANCH" ]; then - git checkout $BRANCH_NAME - else - git checkout -b $BRANCH_NAME - fi - supa-mdx-lint apps/docs/content --fix || FIX_FAILED=1 - git add . - git commit -m '[bot] fix lint errors' || true - git push origin $BRANCH_NAME - if [ -z "$EXISTING_BRANCH" ]; then - gh pr create --title '[bot] fix lint errors' --body 'This PR fixes lint errors in the documentation.' --head $BRANCH_NAME - fi - if [ "${FIX_FAILED:-0}" -eq 1 ]; then - echo "Fix did not correct all errors." - exit 1 - fi + set -o pipefail + run_lints() { + git diff --name-only "origin/$BASE_REF" HEAD \ + | { grep -E "^apps/docs/content/" || test $? = 1; } \ + | xargs -rx -n 1000000000 supa-mdx-lint --format markdown } + set +e + LINT_RESULTS=$(run_lints) + LINT_EXIT_CODE=$? 
+ set -e + echo "LINT_EXIT_CODE=$LINT_EXIT_CODE" >> $GITHUB_OUTPUT + if [[ $LINT_EXIT_CODE -ne 0 ]]; then + mkdir -p ./__github_actions__pr + echo "${{ github.event.number }}" > ./__github_actions__pr/pr_number.txt + echo "$LINT_RESULTS" > ./__github_actions__pr/lint_results.txt + fi + - name: save results as artifact (external) + if: steps.filter.outputs.docs == 'true' && github.event.pull_request.head.repo.full_name != github.repository && steps.external_lint.outputs.LINT_EXIT_CODE != 0 + uses: actions/upload-artifact@v4 + with: + name: lint_results + path: __github_actions__pr/ + - name: fail if linter fails (external) + if: steps.filter.outputs.docs == 'true' && github.event.pull_request.head.repo.full_name != github.repository && steps.external_lint.outputs.LINT_EXIT_CODE != 0 + run: exit 1 diff --git a/.github/workflows/docs-mgmt-api-update.yml b/.github/workflows/docs-mgmt-api-update.yml index bfb12b7214bfb..7272f722f363e 100644 --- a/.github/workflows/docs-mgmt-api-update.yml +++ b/.github/workflows/docs-mgmt-api-update.yml @@ -6,36 +6,45 @@ on: - cron: '0 0 * * 1' workflow_dispatch: +permissions: + pull-requests: write + contents: read + jobs: update-docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - with: - ref: master - sparse-checkout: | - apps/docs - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - cache: 'npm' - - - name: Install deps - run: npm ci - - - name: Change to apps/docs/spec directory and run make command - working-directory: apps/docs/spec - run: make download.api.v1 dereference.api.v1 generate.sections.api.v1 format - - - name: Create pull request - uses: peter-evans/create-pull-request@v6 - with: - token: ${{ secrets.GITHUB_TOKEN }} - commit-message: 'feat: update mgmt api docs' - title: 'feat: update mgmt api docs' - body: 'This PR updates mgmt api docs automatically.' 
- branch: 'gha/auto-update-mgmt-api-docs' - base: 'master' + - uses: actions/checkout@v4 + with: + ref: master + sparse-checkout: | + apps/docs + + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache: 'pnpm' + + - name: Install deps + run: pnpm i + + - name: Change to apps/docs/spec directory and run make command + working-directory: apps/docs/spec + run: make download.api.v1 dereference.api.v1 generate.sections.api.v1 format + + - name: Create pull request + uses: peter-evans/create-pull-request@v6 + with: + token: ${{ secrets.GITHUB_TOKEN }} + commit-message: 'feat: update mgmt api docs' + title: 'feat: update mgmt api docs' + body: 'This PR updates mgmt api docs automatically.' + branch: 'gha/auto-update-mgmt-api-docs' + base: 'master' diff --git a/.github/workflows/docs-sync-troubleshooting.yml b/.github/workflows/docs-sync-troubleshooting.yml index 0b2d3d04e862c..52c0d793e7c10 100644 --- a/.github/workflows/docs-sync-troubleshooting.yml +++ b/.github/workflows/docs-sync-troubleshooting.yml @@ -17,9 +17,9 @@ jobs: runs-on: ubuntu-latest env: - SEARCH_GITHUB_APP_ID: ${{ secrets.SEARCH_GITHUB_APP_ID }} - SEARCH_GITHUB_APP_INSTALLATION_ID: ${{ secrets.SEARCH_GITHUB_APP_INSTALLATION_ID }} - SEARCH_GITHUB_APP_PRIVATE_KEY: ${{ secrets.SEARCH_GITHUB_APP_PRIVATE_KEY }} + DOCS_GITHUB_APP_ID: ${{ secrets.SEARCH_GITHUB_APP_ID }} + DOCS_GITHUB_APP_INSTALLATION_ID: ${{ secrets.SEARCH_GITHUB_APP_INSTALLATION_ID }} + DOCS_GITHUB_APP_PRIVATE_KEY: ${{ secrets.SEARCH_GITHUB_APP_PRIVATE_KEY }} NEXT_PUBLIC_SUPABASE_URL: ${{ secrets.SEARCH_SUPABASE_URL }} SUPABASE_SECRET_KEY: ${{ secrets.SEARCH_SUPABASE_SERVICE_ROLE_KEY }} @@ -29,17 +29,22 @@ jobs: sparse-checkout: | apps/docs - - name: Set up Node.js + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + + - name: Use Node.js uses: actions/setup-node@v4 with: node-version-file: 
'.nvmrc' - cache: 'npm' + cache: 'pnpm' - name: Install dependencies - run: npm ci + run: pnpm i - name: Run Troubleshooting script - run: npm --prefix=apps/docs run troubleshooting:sync + run: pnpm run -F docs troubleshooting:sync - name: Create Pull Request uses: peter-evans/create-pull-request@5e914681df9dc83aa4e4905692ca88beb2f9e91f # v7.0.5 diff --git a/.github/workflows/docs-sync.yml b/.github/workflows/docs-sync.yml new file mode 100644 index 0000000000000..aa0ba2f714ebd --- /dev/null +++ b/.github/workflows/docs-sync.yml @@ -0,0 +1,49 @@ +# All syncs from the repo to the DB will eventually be consolidated in a single +# script run by this workflow. + +name: docs_sync + +on: + push: + branches: + - master + paths: + # Resync if the content changes + - 'apps/docs/content/**' + # Resync if the resource definition or sync scripts change + - 'apps/docs/resources/**' + workflow_dispatch: + +permissions: + contents: read + +jobs: + sync: + runs-on: ubuntu-latest + + env: + NEXT_PUBLIC_SUPABASE_URL: ${{ secrets.SEARCH_SUPABASE_URL }} + SUPABASE_SECRET_KEY: ${{ secrets.SEARCH_SUPABASE_SERVICE_ROLE_KEY }} + + steps: + - uses: actions/checkout@v4 + with: + sparse-checkout: | + apps/docs + + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache: 'pnpm' + + - name: Install dependencies + run: pnpm i + + - name: Run sync script + run: pnpm run -F docs sync diff --git a/.github/workflows/docs-tests-smoke.yml b/.github/workflows/docs-tests-smoke.yml index 0ebf7512c1976..8dac770f6df4a 100644 --- a/.github/workflows/docs-tests-smoke.yml +++ b/.github/workflows/docs-tests-smoke.yml @@ -11,6 +11,9 @@ concurrency: group: ${{ github.workflow }} cancel-in-progress: true +permissions: + contents: read + jobs: build: runs-on: ubuntu-latest @@ -22,14 +25,19 @@ jobs: apps/docs packages + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: 
false + - name: Use Node.js uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - cache: 'npm' + cache: 'pnpm' - name: Install deps - run: npm ci + run: pnpm i - name: Run tests - run: npm --prefix="apps/docs" run test:smoke + run: pnpm --prefix="apps/docs" run test:smoke diff --git a/.github/workflows/docs-tests.yml b/.github/workflows/docs-tests.yml index 9f89a86c660f8..dc029e56c5b86 100644 --- a/.github/workflows/docs-tests.yml +++ b/.github/workflows/docs-tests.yml @@ -11,6 +11,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: build: runs-on: ubuntu-latest @@ -22,15 +25,21 @@ jobs: apps/docs examples packages + supabase + + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false - name: Use Node.js uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - cache: 'npm' + cache: 'pnpm' - name: Install deps - run: npm ci + run: pnpm i - name: Run tests - run: npm run test:docs + run: pnpm run test:docs diff --git a/.github/workflows/external-pr-comment.yml b/.github/workflows/external-pr-comment.yml new file mode 100644 index 0000000000000..5e3c36dd35884 --- /dev/null +++ b/.github/workflows/external-pr-comment.yml @@ -0,0 +1,31 @@ +name: Comment on external PRs + +on: + pull_request_target: + types: [opened] + +permissions: + pull-requests: write + +jobs: + comment-on-external-pr: + runs-on: ubuntu-latest + if: github.event.pull_request.head.repo.full_name != github.repository + steps: + - name: Comment on PR + uses: actions/github-script@v7 + with: + script: | + const commentBody = ` + Thanks for contributing to Supabase! ❤️ Our team will review your PR. + + A few tips for a smoother review process: + - If you have a local version of the repo, run \`pnpm run format\` to make sure formatting checks pass. 
+ - Once we've reviewed your PR, please don't trivially merge master (don't click \`Update branch\` if there are no merge conflicts to be fixed). This invalidates any pre-merge checks we've run. + `; + github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body: commentBody + }); diff --git a/.github/workflows/fix-typos.yml b/.github/workflows/fix-typos.yml index afded9429c59d..84c9bbf4d3468 100644 --- a/.github/workflows/fix-typos.yml +++ b/.github/workflows/fix-typos.yml @@ -9,6 +9,10 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + pull-requests: write + jobs: build: runs-on: ubuntu-latest diff --git a/.github/workflows/lint-prose.yml b/.github/workflows/lint-prose.yml deleted file mode 100644 index a652acb37c33e..0000000000000 --- a/.github/workflows/lint-prose.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: reviewdog -on: - pull_request: - paths: - - '.vale.ini' - - 'vale/**' - - 'apps/docs/content/**' - -jobs: - vale: - name: runner / vale - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - sparse-checkout: | - .vale.ini - vale - apps/docs/content - - name: Get changed files - id: changed-files - env: - BASE_REF: ${{ github.base_ref }} - SHA: ${{ github.sha }} - run: | - git fetch origin $BASE_REF - echo "CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRT origin/$BASE_REF $SHA | grep '^apps/docs/content/' | tr '\n' ',' | sed 's/,$//')" >> $GITHUB_OUTPUT - - uses: errata-ai/vale-action@reviewdog - with: - files: '${{ steps.changed-files.outputs.CHANGED_FILES }}' - separator: ',' diff --git a/.github/workflows/og_images.yml b/.github/workflows/og_images.yml index afec7f34149d3..6c2166e755894 100644 --- a/.github/workflows/og_images.yml +++ b/.github/workflows/og_images.yml @@ -8,6 +8,9 @@ on: - 'supabase/functions/og-images/**' workflow_dispatch: 
+permissions: + contents: read + jobs: deploy: runs-on: ubuntu-latest @@ -18,7 +21,7 @@ jobs: steps: - name: Check out repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup the Supabase CLI uses: supabase/setup-cli@v1 diff --git a/.github/workflows/pg-meta-tests.yml b/.github/workflows/pg-meta-tests.yml new file mode 100644 index 0000000000000..7dda782e0f9f5 --- /dev/null +++ b/.github/workflows/pg-meta-tests.yml @@ -0,0 +1,54 @@ +name: PG Meta Tests + +on: + push: + branches: ['master'] + paths: + - 'packages/pg-meta/**/*' + pull_request: + branches: ['master'] + paths: + - 'packages/pg-meta/**/*' + +# Cancel old builds on new commit for same workflow + branch/PR +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + id-token: write + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + sparse-checkout: | + packages/pg-meta + packages/tsconfig + + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache: 'pnpm' + + - name: Install deps + run: pnpm i + + - name: Run tests + run: pnpm --filter=@supabase/pg-meta run test + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + directory: packages/pg-meta/coverage + flags: pg-meta diff --git a/.github/workflows/playwright.yml b/.github/workflows/playwright.yml deleted file mode 100644 index 2508ef1331581..0000000000000 --- a/.github/workflows/playwright.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: Playwright Tests -on: - push: - branches: [master] - paths: - - 'apps/studio/**' - - 'package-lock.json' - pull_request: - branches: [master] - paths: - - 'apps/studio/**' - - 'package-lock.json' - -# Cancel old builds on new commit for same workflow + branch/PR -concurrency: - group: ${{ github.workflow }}-${{ 
github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - test: - timeout-minutes: 60 - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - cache: 'npm' - - uses: supabase/setup-cli@v1 - - name: Install dependencies - run: npm ci - - name: Install Playwright Browsers - run: npx playwright install --with-deps - - name: Run Playwright tests - run: npm run test:playwright - # mark the action as succeeded even if the tests failed. This is temporarily until we make the tests more stable. - # continue-on-error: true - - uses: actions/upload-artifact@v4 - if: always() - with: - name: playwright-report - path: playwright-tests/playwright-report/ - retention-days: 7 diff --git a/.github/workflows/prettier.yml b/.github/workflows/prettier.yml index 9d7053db16be8..913d1d07cdbd4 100644 --- a/.github/workflows/prettier.yml +++ b/.github/workflows/prettier.yml @@ -10,6 +10,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: format: runs-on: ubuntu-latest @@ -18,17 +21,21 @@ jobs: uses: actions/checkout@v4 with: sparse-checkout: apps + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false - name: Setup node uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - # Installing all dependencies takes up to three minutes, hacking around to only installing prettier+deps + cache: 'pnpm' - name: Download dependencies run: | - npm ci + pnpm i - name: Run prettier run: |- - npm run test:prettier + pnpm run test:prettier # i18n is not a node package, so we handle that one separately format-i18n: @@ -39,19 +46,21 @@ jobs: with: sparse-checkout: | i18n + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false - name: Setup node uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - # 
Installing all dependencies takes up to three minutes, hacking around to only installing prettier+deps + cache: 'pnpm' - name: Download dependencies run: | - rm package.json - rm package-lock.json - npm i prettier@4.0.0-alpha.8 prettier-plugin-sql-cst + pnpm i - name: Run prettier run: |- - npx prettier -c 'i18n/**/*.{js,jsx,ts,tsx,css,md,mdx,json}' + pnpm exec prettier -c 'i18n/**/*.{js,jsx,ts,tsx,css,md,mdx,json}' format-sql: runs-on: ubuntu-latest @@ -62,13 +71,19 @@ jobs: sparse-checkout: | apps/docs/pages apps/docs/content - # Installing all dependencies takes up to three minutes, hacking around to only installing prettier+deps + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache: 'pnpm' - name: Download dependencies run: | - rm package.json - rm package-lock.json - npm i prettier@4.0.0-alpha.8 prettier-plugin-sql-cst + pnpm i - name: Run prettier run: |- # Check mdx files which contain sql code blocks - grep -lr '```sql' apps/docs/{pages,content}/**/*.mdx | xargs npx prettier -c + grep -lr '```sql' apps/docs/{pages,content}/**/*.mdx | xargs pnpm exec prettier -c diff --git a/.github/workflows/publish_image.yml b/.github/workflows/publish_image.yml index 2caf2f5d2d7c2..8d7a0291ad186 100644 --- a/.github/workflows/publish_image.yml +++ b/.github/workflows/publish_image.yml @@ -20,8 +20,8 @@ jobs: flavor: | latest=true tags: | - type=sha,prefix={{date 'YYYYMMDD'}}-,enable=${{ github.event_name == 'schedule' }} - type=sha,prefix={{date 'YYYYMMDD'}}-,enable=${{ github.event_name == 'workflow_dispatch' }} + type=sha,prefix={{date 'YYYY.MM.DD'}}-sha-,enable=${{ github.event_name == 'schedule' }} + type=sha,prefix={{date 'YYYY.MM.DD'}}-sha-,enable=${{ github.event_name == 'workflow_dispatch' }} release_x86: needs: settings diff --git a/.github/workflows/search.yml b/.github/workflows/search.yml index 3986992f2f6fa..9d5aa59959ae6 100644 --- 
a/.github/workflows/search.yml +++ b/.github/workflows/search.yml @@ -17,17 +17,26 @@ on: schedule: - cron: '0 0 * * *' +concurrency: + group: ${{ github.workflow }}-prod + cancel-in-progress: false + +permissions: + contents: read + jobs: deploy: runs-on: ubuntu-latest env: - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + DOCS_GITHUB_APP_ID: ${{ secrets.SEARCH_GITHUB_APP_ID }} + DOCS_GITHUB_APP_INSTALLATION_ID: ${{ secrets.SEARCH_GITHUB_APP_INSTALLATION_ID }} + DOCS_GITHUB_APP_PRIVATE_KEY: ${{ secrets.SEARCH_GITHUB_APP_PRIVATE_KEY }} + NEXT_PUBLIC_MISC_URL: ${{ secrets.NEXT_PUBLIC_MISC_URL}} + NEXT_PUBLIC_MISC_ANON_KEY: ${{ secrets.NEXT_PUBLIC_MISC_ANON_KEY }} NEXT_PUBLIC_SUPABASE_URL: ${{ secrets.SEARCH_SUPABASE_URL }} - SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SEARCH_SUPABASE_SERVICE_ROLE_KEY }} - SEARCH_GITHUB_APP_ID: ${{ secrets.SEARCH_GITHUB_APP_ID }} - SEARCH_GITHUB_APP_INSTALLATION_ID: ${{ secrets.SEARCH_GITHUB_APP_INSTALLATION_ID }} - SEARCH_GITHUB_APP_PRIVATE_KEY: ${{ secrets.SEARCH_GITHUB_APP_PRIVATE_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + SUPABASE_SECRET_KEY: ${{ secrets.SEARCH_SUPABASE_SERVICE_ROLE_KEY }} steps: - name: Check out repo @@ -38,24 +47,25 @@ jobs: apps/www/.env.local.example supabase + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + - name: Setup node uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - name: Download dependencies - run: npm ci - - # Need the miscellaneous use API, which is available publicly (by design) in www - - name: Copy environment variables - run: cp apps/www/.env.local.example apps/docs/.env + run: pnpm i - name: Update embeddings working-directory: ./apps/docs if: ${{ !inputs.refresh }} - run: npm run embeddings + run: pnpm run embeddings - name: Refresh embeddings working-directory: ./apps/docs if: ${{ inputs.refresh }} - run: npm run embeddings:refresh + run: pnpm run embeddings:refresh diff --git a/.github/workflows/self-host-tests-smoke.yml 
b/.github/workflows/self-host-tests-smoke.yml index dd3d1b76a0c35..f5f6066ec3554 100644 --- a/.github/workflows/self-host-tests-smoke.yml +++ b/.github/workflows/self-host-tests-smoke.yml @@ -11,6 +11,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: build: runs-on: ubuntu-latest diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 8281d285e2a2f..b059f21708527 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -3,6 +3,11 @@ on: schedule: - cron: '30 1 * * *' +permissions: + issues: write + pull-requests: write + contents: read + jobs: build: runs-on: ubuntu-latest diff --git a/.github/workflows/studio-e2e-tests.yml b/.github/workflows/studio-e2e-tests.yml new file mode 100644 index 0000000000000..1a57cd0b0df89 --- /dev/null +++ b/.github/workflows/studio-e2e-tests.yml @@ -0,0 +1,91 @@ +name: Studio E2E Tests +on: + push: + branches: [master] + paths: + - 'packages/pg-meta/**/*' + - 'apps/studio/**' + - 'e2e/studio/**' + - 'pnpm-lock.yaml' + pull_request: + branches: [master] + paths: + - 'packages/pg-meta/**/*' + - 'apps/studio/**' + - 'e2e/studio/**' + - 'pnpm-lock.yaml' + +# Cancel old builds on new commit for same workflow + branch/PR +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + test: + timeout-minutes: 60 + runs-on: ubuntu-latest + # Make the job non-blocking + continue-on-error: true + + env: + EMAIL: ${{ secrets.CI_EMAIL }} + PASSWORD: ${{ secrets.CI_PASSWORD }} + PROJECT_REF: ${{ secrets.CI_PROJECT_REF }} + NEXT_PUBLIC_IS_PLATFORM: true + NEXT_PUBLIC_API_URL: https://api.supabase.green + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ secrets.VERCEL_STUDIO_HOSTED_PROJECT_ID }} + NEXT_PUBLIC_HCAPTCHA_SITE_KEY: 10000000-ffff-ffff-ffff-000000000001 + + 
steps: + - uses: actions/checkout@v4 + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache: 'pnpm' + + - name: Install dependencies + run: pnpm i + + - name: Install Vercel CLI + run: pnpm add --global vercel@latest + + - name: Pull Vercel Environment Information (Preview) + run: vercel pull --yes --environment=preview --token=${{ secrets.VERCEL_TOKEN }} + + - name: Build Project Artifacts for Vercel + run: vercel build --token=${{ secrets.VERCEL_TOKEN }} + + - name: Deploy Project to Vercel and Get URL + id: deploy_vercel + run: | + DEPLOY_URL=$(vercel deploy --prebuilt --token=${{ secrets.VERCEL_TOKEN }}) + echo "Vercel Preview URL: $DEPLOY_URL" + echo "DEPLOY_URL=$DEPLOY_URL" >> $GITHUB_OUTPUT + + - name: Install Playwright Browsers + run: pnpm -C e2e/studio exec playwright install --with-deps + + - name: Run Playwright tests + id: playwright + env: + AUTHENTICATION: true + STUDIO_URL: ${{ steps.deploy_vercel.outputs.DEPLOY_URL }}/dashboard + run: pnpm e2e + + - uses: actions/upload-artifact@v4 + if: always() + with: + name: playwright-artifacts + path: | + e2e/studio/playwright-report/ + e2e/studio/test-results/ + retention-days: 7 diff --git a/.github/workflows/studio-tests.yml b/.github/workflows/studio-tests.yml deleted file mode 100644 index 1dbc0bd8dd638..0000000000000 --- a/.github/workflows/studio-tests.yml +++ /dev/null @@ -1,47 +0,0 @@ -# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions - -name: Studio Unit Tests & Build Check - -on: - push: - branches: [master, studio] - paths: - - 'apps/studio/**' - - 'package-lock.json' - pull_request: - branches: [master, studio] - paths: - - 'apps/studio/**' - - 
'package-lock.json' - -# Cancel old builds on new commit for same workflow + branch/PR -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - check: - # Uses larger hosted runner as it significantly decreases build times - runs-on: [larger-runner-4cpu] - - steps: - - uses: actions/checkout@v4 - with: - sparse-checkout: | - apps/studio - packages - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - cache: 'npm' - - name: Install deps - run: npm ci - working-directory: ./ - - name: Run Tests - env: - # Default is 2 GB, increase to have less frequent OOM errors - NODE_OPTIONS: '--max_old_space_size=3072' - run: npm run test:studio - working-directory: ./ diff --git a/.github/workflows/studio-unit-tests.yml b/.github/workflows/studio-unit-tests.yml new file mode 100644 index 0000000000000..f82f29127b5f2 --- /dev/null +++ b/.github/workflows/studio-unit-tests.yml @@ -0,0 +1,78 @@ +# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions + +name: Studio Unit Tests & Build Check + +on: + push: + branches: [master, studio] + paths: + - 'apps/studio/**' + - 'pnpm-lock.yaml' + pull_request: + branches: [master, studio] + paths: + - 'apps/studio/**' + - 'pnpm-lock.yaml' + +# Cancel old builds on new commit for same workflow + branch/PR +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + test: + # Uses larger hosted runner as it significantly decreases build times + runs-on: + group: Default Larger Runners + strategy: + matrix: + test_number: [1] + + steps: + - uses: actions/checkout@v4 + with: + sparse-checkout: | + 
apps/studio + packages + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache: 'pnpm' + - name: Install deps + run: pnpm i + working-directory: ./ + - name: Run Tests + env: + # Default is 2 GB, increase to have less frequent OOM errors + NODE_OPTIONS: '--max_old_space_size=3072' + run: pnpm run test:ci + working-directory: ./apps/studio + + - name: Upload coverage results to Coveralls + uses: coverallsapp/github-action@master + with: + parallel: true + flag-name: studio-tests + github-token: ${{ secrets.GITHUB_TOKEN }} + path-to-lcov: ./apps/studio/coverage/lcov.info + base-path: './apps/studio' + + finish: + needs: test + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@master + with: + parallel-finished: true + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/typecheck.yml b/.github/workflows/typecheck.yml index 70481bb7bb3de..1ea8e4613028e 100644 --- a/.github/workflows/typecheck.yml +++ b/.github/workflows/typecheck.yml @@ -10,26 +10,35 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: typecheck: # Uses larger hosted runner as it significantly decreases build times - runs-on: [larger-runner-4cpu] + runs-on: + group: Default Larger Runners steps: - name: Checkout uses: actions/checkout@v4 + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + - name: Use Node.js uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - cache: 'npm' + cache: 'pnpm' - name: Install deps - run: npm ci + run: pnpm i - name: Run TypeScript type check - run: npx turbo run typecheck + run: pnpm exec turbo run typecheck - name: Run Lint - run: npx turbo run lint + run: pnpm exec turbo run lint diff --git 
a/.github/workflows/ui-patterns-tests.yml b/.github/workflows/ui-patterns-tests.yml index a2fe4dc775f2a..766466ee0a88c 100644 --- a/.github/workflows/ui-patterns-tests.yml +++ b/.github/workflows/ui-patterns-tests.yml @@ -11,6 +11,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: build: runs-on: ubuntu-latest @@ -21,14 +24,19 @@ jobs: sparse-checkout: | packages + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + - name: Use Node.js uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - cache: 'npm' + cache: 'pnpm' - name: Install deps - run: npm ci + run: pnpm i - name: Run tests - run: npm run test:ui-patterns + run: pnpm run test:ui-patterns diff --git a/.github/workflows/ui-tests.yml b/.github/workflows/ui-tests.yml index 126edd5b9a578..6284f0cae2b51 100644 --- a/.github/workflows/ui-tests.yml +++ b/.github/workflows/ui-tests.yml @@ -11,9 +11,15 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: - build: + test: runs-on: ubuntu-latest + strategy: + matrix: + test_number: [1] steps: - uses: actions/checkout@v4 @@ -21,14 +27,40 @@ jobs: sparse-checkout: | packages + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + run_install: false + - name: Use Node.js uses: actions/setup-node@v4 with: node-version-file: '.nvmrc' - cache: 'npm' + cache: 'pnpm' - name: Install deps - run: npm ci + run: pnpm i - name: Run tests - run: npm run test:ui + run: pnpm run test:ci + working-directory: ./packages/ui + + - name: Upload coverage results to Coveralls + uses: coverallsapp/github-action@master + with: + parallel: true + flag-name: ui-tests + github-token: ${{ secrets.GITHUB_TOKEN }} + path-to-lcov: ./packages/ui/coverage/lcov.info + base-path: './packages/ui' + + finish: + needs: test + if: ${{ 
always() }} + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@master + with: + parallel-finished: true + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index b6ce235b350ce..979f53fef92d6 100644 --- a/.gitignore +++ b/.gitignore @@ -133,3 +133,13 @@ gcloud.json # sitemaps # apps/www/public/*.xml # apps/docs/public/*.xml + +# CLI version file +.temp/cli-latest + +.pnpm-store/* + +# Sentry CLI config +**/.sentryclirc + +keys.json \ No newline at end of file diff --git a/.npmrc b/.npmrc index 4fd021952d5a1..19c081e2d4f63 100644 --- a/.npmrc +++ b/.npmrc @@ -1 +1,3 @@ -engine-strict=true \ No newline at end of file +engine-strict=true +update-notifier=false +@jsr:registry=https://npm.jsr.io diff --git a/.prettierignore b/.prettierignore index 37dd5d06e3c19..3fb187bebd90b 100644 --- a/.prettierignore +++ b/.prettierignore @@ -9,11 +9,22 @@ apps/**/out apps/www/schema.sql apps/www/public/images/* apps/docs/**/generated/* +apps/docs/examples/* examples/slack-clone/nextjs-slack-clone/full-schema.sql # ignore files with custom js formatting -apps/docs/pages/guides/auth/*.mdx -apps/docs/pages/guides/integrations/*.mdx apps/studio/public apps/**/.turbo apps/docs/CONTRIBUTING.md -apps/design-system/__registry__ \ No newline at end of file +apps/design-system/__registry__ +packages/icons/__registry__ +packages/icons/src/icons/*.ts +apps/ui-library/__registry__ +apps/ui-library/public/r +apps/**/.contentlayer +# invalid JSON file +packages/ui/src/components/Form/examples/PhoneProvidersSchema.json +apps/cms/config/api.ts +# files auto-generated by payload cms +apps/cms/src/app/* +apps/cms/src/migrations/* +apps/cms/src/payload-types.ts \ No newline at end of file diff --git a/DEVELOPERS.md b/DEVELOPERS.md index 0c39efba50230..dd9828d7625c6 100644 --- a/DEVELOPERS.md +++ b/DEVELOPERS.md @@ -12,7 +12,7 @@ - [Shared components](#shared-components) - [Installing packages](#installing-packages) - [Running 
Docker for Supabase Studio](#running-docker-for-supabase-studio) - - [Prerequsites](#prerequsites) + - [Prerequisites](#prerequisites) - [Get Started](#get-started) - [Create a pull request](#create-a-pull-request) - [Issue assignment](#issue-assignment) @@ -22,8 +22,6 @@ - [Community channels](#community-channels) - [Contributors](#contributors) -- [Common tasks](#common-tasks) - - [Add a redirect](#add-a-redirect) - [Community channels](#community-channels) ## Getting started @@ -36,9 +34,9 @@ To ensure a positive and inclusive environment, please read our [code of conduct You will need to install and configure the following dependencies on your machine to build [Supabase](https://supabase.com): -- [Git](http://git-scm.com/) -- [Node.js v20.x (LTS)](http://nodejs.org) -- [npm](https://www.npmjs.com/) version 10.x.x or higher +- [Git](https://git-scm.com/) +- [Node.js v20.x (LTS)](https://nodejs.org) +- [pnpm](https://pnpm.io/) version 9.x.x or higher - [make](https://www.gnu.org/software/make/) or the equivalent to `build-essentials` for your OS - [Docker](https://docs.docker.com/get-docker/) (to run studio locally) @@ -70,7 +68,7 @@ To contribute code to [Supabase](https://supabase.com), you must fork the [Supab 1. Install the dependencies in the root of the repo. ```sh - npm install # install dependencies + pnpm install # install dependencies ``` 2. Copy the example `.env.local.example` to `.env.local` @@ -81,7 +79,7 @@ To contribute code to [Supabase](https://supabase.com), you must fork the [Supab 3. After that you can run the apps simultaneously with the following. ```sh - npm run dev # start all the applications + pnpm dev # start all the applications ``` Then visit, and edit, any of the following sites: @@ -97,7 +95,7 @@ Then visit, and edit, any of the following sites: You can run any of the sites individually by using the scope name. 
For example: ```sh -npm run dev:www +pnpm dev:www ``` Note: Particularly for `www` make sure you have copied `apps/www/.env.local.example` to `apps/www/.env.local` @@ -115,17 +113,13 @@ The monorepo has a set of shared components under `/packages`: #### Installing packages -Installing a package with NPM workspaces requires you to add the `-w` flag to tell NPM which workspace you want to install into. Do not install dependencies in their local folder, install them from the route using the `-w` flag. - -The format is: `npm install -w=`. +Installing a package in a specific workspace requires you to move to the workspace and then run the install command. For example: -- `npm install react -w common`: installs into `./packages/common` -- `npm install react -w www`: installs into `./apps/www` -- `npm install react -w studio`: installs into `./apps/studio` +1. `cd apps/studio`: move to the `studio` workspace +2. `pnpm add react`: installs `react` into `studio` workspace -You do not need to install `devDependencies` in each workspace. These can all be installed in the root package. --- @@ -133,7 +127,7 @@ You do not need to install `devDependencies` in each workspace. These can all be To run Studio locally, you'll need to setup Docker in addition to your NextJS frontend. -#### Prerequsites +#### Prerequisites First, make sure you have the Docker installed on your device. You can download and install it from [here](https://docs.docker.com/get-docker/). diff --git a/README.md b/README.md index 437878278b6e1..e74a9fd281484 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ # Supabase -[Supabase](https://supabase.com) is an open source Firebase alternative. We're building the features of Firebase using enterprise-grade open source tools. +[Supabase](https://supabase.com) is the Postgres development platform. We're building the features of Firebase using enterprise-grade open source tools. - [x] Hosted Postgres Database. 
[Docs](https://supabase.com/docs/guides/database) - [x] Authentication and Authorization. [Docs](https://supabase.com/docs/guides/auth) @@ -52,10 +52,10 @@ You can also [self-host](https://supabase.com/docs/guides/hosting/overview) and - [Postgres](https://www.postgresql.org/) is an object-relational database system with over 30 years of active development that has earned it a strong reputation for reliability, feature robustness, and performance. - [Realtime](https://github.com/supabase/realtime) is an Elixir server that allows you to listen to PostgreSQL inserts, updates, and deletes using websockets. Realtime polls Postgres' built-in replication functionality for database changes, converts changes to JSON, then broadcasts the JSON over websockets to authorized clients. -- [PostgREST](http://postgrest.org/) is a web server that turns your PostgreSQL database directly into a RESTful API -- [GoTrue](https://github.com/supabase/gotrue) is a JWT based API for managing users and issuing JWT tokens. -- [Storage](https://github.com/supabase/storage-api) provides a RESTful interface for managing Files stored in S3, using Postgres to manage permissions. -- [pg_graphql](http://github.com/supabase/pg_graphql/) a PostgreSQL extension that exposes a GraphQL API +- [PostgREST](http://postgrest.org/) is a web server that turns your PostgreSQL database directly into a RESTful API. +- [GoTrue](https://github.com/supabase/gotrue) is a JWT-based authentication API that simplifies user sign-ups, logins, and session management in your applications. +- [Storage](https://github.com/supabase/storage-api) a RESTful API for managing files in S3, with Postgres handling permissions. +- [pg_graphql](http://github.com/supabase/pg_graphql/) a PostgreSQL extension that exposes a GraphQL API. - [postgres-meta](https://github.com/supabase/postgres-meta) is a RESTful API for managing your Postgres, allowing you to fetch tables, add roles, and run queries, etc. 
- [Kong](https://github.com/Kong/kong) is a cloud-native API gateway. diff --git a/apps/cms/.env.example b/apps/cms/.env.example new file mode 100644 index 0000000000000..b8b497ca4769c --- /dev/null +++ b/apps/cms/.env.example @@ -0,0 +1,13 @@ +DATABASE_URI=postgres://postgres:@127.0.0.1:5432/your-database-name +PAYLOAD_SECRET=YOUR_SECRET_HERE + +S3_BUCKET= +S3_ACCESS_KEY_ID= +S3_SECRET_ACCESS_KEY= +S3_REGION= +S3_ENDPOINT= + +NEXT_PUBLIC_SERVER_URL=http://localhost:3000/blog +CRON_SECRET=YOUR_CRON_SECRET_HERE +PREVIEW_SECRET=YOUR_SECRET_HERE +BLOG_APP_URL=http://localhost:3000 \ No newline at end of file diff --git a/apps/docs/.eslintrc.js b/apps/cms/.eslintrc.js similarity index 100% rename from apps/docs/.eslintrc.js rename to apps/cms/.eslintrc.js diff --git a/apps/cms/.gitignore b/apps/cms/.gitignore new file mode 100644 index 0000000000000..3d53a40bcb8cf --- /dev/null +++ b/apps/cms/.gitignore @@ -0,0 +1,43 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js +.yarn/install-state.gz + +/.idea/* +!/.idea/runConfigurations + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts + +.env + +/media diff --git a/apps/cms/Dockerfile b/apps/cms/Dockerfile new file mode 100644 index 0000000000000..93465cfa57acc --- /dev/null +++ b/apps/cms/Dockerfile @@ -0,0 +1,71 @@ +# To use this Dockerfile, you have to set `output: 'standalone'` in your next.config.mjs file. 
+# From https://github.com/vercel/next.js/blob/canary/examples/with-docker/Dockerfile + +FROM node:22.12.0-alpine AS base + +# Install dependencies only when needed +FROM base AS deps +# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed. +RUN apk add --no-cache libc6-compat +WORKDIR /app + +# Install dependencies based on the preferred package manager +COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./ +RUN \ + if [ -f yarn.lock ]; then yarn --frozen-lockfile; \ + elif [ -f package-lock.json ]; then npm ci; \ + elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm i --frozen-lockfile; \ + else echo "Lockfile not found." && exit 1; \ + fi + + +# Rebuild the source code only when needed +FROM base AS builder +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY . . + +# Next.js collects completely anonymous telemetry data about general usage. +# Learn more here: https://nextjs.org/telemetry +# Uncomment the following line in case you want to disable telemetry during the build. +# ENV NEXT_TELEMETRY_DISABLED 1 + +RUN \ + if [ -f yarn.lock ]; then yarn run build; \ + elif [ -f package-lock.json ]; then npm run build; \ + elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm run build; \ + else echo "Lockfile not found." && exit 1; \ + fi + +# Production image, copy all the files and run next +FROM base AS runner +WORKDIR /app + +ENV NODE_ENV production +# Uncomment the following line in case you want to disable telemetry during runtime. 
+# ENV NEXT_TELEMETRY_DISABLED 1 + +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs + +# Remove this line if you do not have this folder +COPY --from=builder /app/public ./public + +# Set the correct permission for prerender cache +RUN mkdir .next +RUN chown nextjs:nodejs .next + +# Automatically leverage output traces to reduce image size +# https://nextjs.org/docs/advanced-features/output-file-tracing +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static + +USER nextjs + +EXPOSE 3000 + +ENV PORT 3000 + +# server.js is created by next build from the standalone output +# https://nextjs.org/docs/pages/api-reference/next-config-js/output +CMD HOSTNAME="0.0.0.0" node server.js diff --git a/apps/cms/README.md b/apps/cms/README.md new file mode 100644 index 0000000000000..f910262ecbd71 --- /dev/null +++ b/apps/cms/README.md @@ -0,0 +1,23 @@ +# Payload CMS + +### Local Development + +1. run `cd apps/cms && supabase start` to start the local supabase project +2. run `cp .env.example .env` to copy the example environment variables and update the variables. You'll need to add the `S3_` variables to your `.env` to use Supabase Storage +3. `pnpm install && pnpm generate:importmap` to install dependencies and start the dev server +4. run `pnpm dev` in the apps/cms folder or `pnpm dev:cms` from the root +5. open `http://localhost:3030` to open the app in your browser + +Follow the on-screen instructions to login and create the first admin user. + +### Collections + +Collections are what data looks like in the Payload cms schema. The following are the collections currently configured in the app. 
+ +- Authors +- Categories +- Events +- Media +- Posts +- Tags +- Users diff --git a/apps/cms/docker-compose.yml b/apps/cms/docker-compose.yml new file mode 100644 index 0000000000000..dc5ddababa24e --- /dev/null +++ b/apps/cms/docker-compose.yml @@ -0,0 +1,30 @@ +version: '3' + +services: + payload: + image: node:18-alpine + ports: + - '3030:3030' + volumes: + - .:/home/node/app + - node_modules:/home/node/app/node_modules + working_dir: /home/node/app/ + command: sh -c "corepack enable && corepack prepare pnpm@latest --activate && pnpm install && pnpm dev" + depends_on: + - postgres + env_file: + - .env + + # Uncomment the following to use postgres + postgres: + restart: always + image: postgres:latest + volumes: + - pgdata:/var/lib/postgresql/data + ports: + - "5432:5432" + +volumes: + data: + # pgdata: + node_modules: diff --git a/apps/cms/next.config.mjs b/apps/cms/next.config.mjs new file mode 100644 index 0000000000000..cc7ee7f02ba40 --- /dev/null +++ b/apps/cms/next.config.mjs @@ -0,0 +1,31 @@ +import { withPayload } from '@payloadcms/next/withPayload' + +import redirects from './redirects.js' + +const NEXT_PUBLIC_SERVER_URL = process.env.VERCEL_PROJECT_PRODUCTION_URL + ? 
`https://${process.env.VERCEL_PROJECT_PRODUCTION_URL}` + : undefined || process.env.NEXT_PUBLIC_SERVER_URL || 'http://localhost:3000' + +/** @type {import('next').NextConfig} */ +const nextConfig = { + images: { + remotePatterns: [ + ...[NEXT_PUBLIC_SERVER_URL /* 'https://example.com' */].map((item) => { + const url = new URL(item) + + return { + hostname: url.hostname, + protocol: url.protocol.replace(':', ''), + } + }), + ], + }, + reactStrictMode: true, + redirects, + eslint: { + // We are already running linting via GH action, this will skip linting during production build on Vercel + ignoreDuringBuilds: true, + }, +} + +export default withPayload(nextConfig, { devBundleServerPackages: false }) diff --git a/apps/cms/package.json b/apps/cms/package.json new file mode 100644 index 0000000000000..56250c8af6365 --- /dev/null +++ b/apps/cms/package.json @@ -0,0 +1,47 @@ +{ + "name": "cms", + "version": "1.0.0", + "description": "Payload CMS for Supabase", + "license": "MIT", + "scripts": { + "build": "cross-env NODE_OPTIONS=--no-deprecation next build --turbopack", + "dev": "cross-env NODE_OPTIONS=--no-deprecation next dev --turbopack --port 3030", + "devsafe": "rm -rf .next && cross-env NODE_OPTIONS=--no-deprecation next dev", + "generate:importmap": "cross-env NODE_OPTIONS=--no-deprecation payload generate:importmap", + "generate:types": "cross-env NODE_OPTIONS=--no-deprecation payload generate:types", + "lint": "cross-env NODE_OPTIONS=--no-deprecation next lint", + "payload": "cross-env NODE_OPTIONS=--no-deprecation payload", + "start": "cross-env NODE_OPTIONS=--no-deprecation next start", + "ci": "cross-env NODE_OPTIONS=--no-deprecation payload migrate && pnpm build", + "clean": "rimraf node_modules .next", + "typecheck_IGNORED": "tsc --noEmit" + }, + "dependencies": { + "@payloadcms/db-postgres": "3.33.0", + "@payloadcms/live-preview-react": "^3.33.0", + "@payloadcms/next": "3.33.0", + "@payloadcms/payload-cloud": "3.33.0", + "@payloadcms/plugin-form-builder": 
"3.33.0", + "@payloadcms/plugin-nested-docs": "3.33.0", + "@payloadcms/plugin-seo": "3.33.0", + "@payloadcms/richtext-lexical": "3.33.0", + "@payloadcms/storage-s3": "3.33.0", + "@payloadcms/ui": "3.33.0", + "common": "workspace:*", + "config": "workspace:*", + "cross-env": "^7.0.3", + "eslint-config-supabase": "workspace:*", + "graphql": "^16.8.1", + "next": "catalog:", + "payload": "3.33.0", + "react": "catalog:", + "react-dom": "catalog:", + "sharp": "0.32.6" + }, + "devDependencies": { + "@types/node": "catalog:", + "@types/react": "catalog:", + "@types/react-dom": "catalog:", + "typescript": "5.7.3" + } +} diff --git a/apps/cms/redirects.js b/apps/cms/redirects.js new file mode 100644 index 0000000000000..21b76ecc1b86e --- /dev/null +++ b/apps/cms/redirects.js @@ -0,0 +1,20 @@ +const redirects = async () => { + const internetExplorerRedirect = { + destination: '/ie-incompatible.html', + has: [ + { + type: 'header', + key: 'user-agent', + value: '(.*Trident.*)', // all ie browsers + }, + ], + permanent: false, + source: '/:path((?!ie-incompatible.html$).*)', // all pages except the incompatibility page + } + + const redirects = [internetExplorerRedirect] + + return redirects +} + +export default redirects diff --git a/apps/cms/src/access/isAdmin.ts b/apps/cms/src/access/isAdmin.ts new file mode 100644 index 0000000000000..67922028caa9e --- /dev/null +++ b/apps/cms/src/access/isAdmin.ts @@ -0,0 +1,14 @@ +import type { AccessArgs, FieldAccess } from 'payload' +import type { User } from '@/payload-types' + +type isAdmin = (args: AccessArgs) => boolean + +export const isAdmin: isAdmin = ({ req: { user } }) => { + // Return true or false based on if the user has an admin role + return Boolean(user?.roles?.includes('admin')) +} + +export const isAdminFieldLevel: FieldAccess<{ id: string }, User> = ({ req: { user } }) => { + // Return true or false based on if the user has an admin role + return Boolean(user?.roles?.includes('admin')) +} diff --git 
a/apps/cms/src/access/isAdminOrSelf.ts b/apps/cms/src/access/isAdminOrSelf.ts new file mode 100644 index 0000000000000..ad0c05e8abb4d --- /dev/null +++ b/apps/cms/src/access/isAdminOrSelf.ts @@ -0,0 +1,21 @@ +import type { Access } from 'payload' + +export const isAdminOrSelf: Access = ({ req: { user } }) => { + // Need to be logged in + if (user) { + // If user has role of 'admin' + if (user.roles?.includes('admin')) { + return true + } + + // If any other type of user, only provide access to themselves + return { + id: { + equals: user.id, + }, + } + } + + // Reject everyone else + return false +} diff --git a/apps/cms/src/access/isAnyone.ts b/apps/cms/src/access/isAnyone.ts new file mode 100644 index 0000000000000..beb086b46c3ee --- /dev/null +++ b/apps/cms/src/access/isAnyone.ts @@ -0,0 +1,3 @@ +import type { Access } from 'payload' + +export const isAnyone: Access = () => true diff --git a/apps/cms/src/access/isAuthenticated.ts b/apps/cms/src/access/isAuthenticated.ts new file mode 100644 index 0000000000000..4112452bc3db8 --- /dev/null +++ b/apps/cms/src/access/isAuthenticated.ts @@ -0,0 +1,9 @@ +import type { AccessArgs } from 'payload' + +import type { User } from '@/payload-types' + +type isAuthenticated = (args: AccessArgs) => boolean + +export const isAuthenticated: isAuthenticated = ({ req: { user } }) => { + return Boolean(user) +} diff --git a/apps/cms/src/app/(frontend)/layout.tsx b/apps/cms/src/app/(frontend)/layout.tsx new file mode 100644 index 0000000000000..8d03e9142969e --- /dev/null +++ b/apps/cms/src/app/(frontend)/layout.tsx @@ -0,0 +1,19 @@ +import React from 'react' +import './styles.css' + +export const metadata = { + description: 'Content Management System for the Supabase website', + title: 'Supabase CMS', +} + +export default async function RootLayout(props: { children: React.ReactNode }) { + const { children } = props + + return ( + + +
{children}
+ + + ) +} diff --git a/apps/cms/src/app/(frontend)/page.tsx b/apps/cms/src/app/(frontend)/page.tsx new file mode 100644 index 0000000000000..a136f82f8b193 --- /dev/null +++ b/apps/cms/src/app/(frontend)/page.tsx @@ -0,0 +1,5 @@ +import { redirect } from 'next/navigation' + +export default async function HomePage() { + redirect('/admin') +} diff --git a/apps/cms/src/app/(frontend)/styles.css b/apps/cms/src/app/(frontend)/styles.css new file mode 100644 index 0000000000000..d1fb9419d6f8c --- /dev/null +++ b/apps/cms/src/app/(frontend)/styles.css @@ -0,0 +1,164 @@ +:root { + --font-mono: 'Roboto Mono', monospace; +} + +* { + box-sizing: border-box; +} + +html { + font-size: 18px; + line-height: 32px; + + background: rgb(0, 0, 0); + -webkit-font-smoothing: antialiased; +} + +html, +body, +#app { + height: 100%; +} + +body { + font-family: system-ui; + font-size: 18px; + line-height: 32px; + + margin: 0; + color: rgb(1000, 1000, 1000); + + @media (max-width: 1024px) { + font-size: 15px; + line-height: 24px; + } +} + +img { + max-width: 100%; + height: auto; + display: block; +} + +h1 { + margin: 40px 0; + font-size: 64px; + line-height: 70px; + font-weight: bold; + + @media (max-width: 1024px) { + margin: 24px 0; + font-size: 42px; + line-height: 42px; + } + + @media (max-width: 768px) { + font-size: 38px; + line-height: 38px; + } + + @media (max-width: 400px) { + font-size: 32px; + line-height: 32px; + } +} + +p { + margin: 24px 0; + + @media (max-width: 1024px) { + margin: calc(var(--base) * 0.75) 0; + } +} + +a { + color: currentColor; + + &:focus { + opacity: 0.8; + outline: none; + } + + &:active { + opacity: 0.7; + outline: none; + } +} + +svg { + vertical-align: middle; +} + +.home { + display: flex; + flex-direction: column; + justify-content: space-between; + align-items: center; + height: 100vh; + padding: 45px; + max-width: 1024px; + margin: 0 auto; + overflow: hidden; + + @media (max-width: 400px) { + padding: 24px; + } + + .content { + display: flex; + 
flex-direction: column; + align-items: center; + justify-content: center; + flex-grow: 1; + + h1 { + text-align: center; + } + } + + .links { + display: flex; + align-items: center; + gap: 12px; + + a { + text-decoration: none; + padding: 0.25rem 0.5rem; + border-radius: 4px; + } + + .admin { + color: rgb(0, 0, 0); + background: rgb(1000, 1000, 1000); + border: 1px solid rgb(0, 0, 0); + } + + .docs { + color: rgb(1000, 1000, 1000); + background: rgb(0, 0, 0); + border: 1px solid rgb(1000, 1000, 1000); + } + } + + .footer { + display: flex; + align-items: center; + gap: 8px; + + @media (max-width: 1024px) { + flex-direction: column; + gap: 6px; + } + + p { + margin: 0; + } + + .codeLink { + text-decoration: none; + padding: 0 0.5rem; + background: rgb(60, 60, 60); + border-radius: 4px; + } + } +} diff --git a/apps/cms/src/app/(payload)/admin/[[...segments]]/not-found.tsx b/apps/cms/src/app/(payload)/admin/[[...segments]]/not-found.tsx new file mode 100644 index 0000000000000..64108365fd93c --- /dev/null +++ b/apps/cms/src/app/(payload)/admin/[[...segments]]/not-found.tsx @@ -0,0 +1,24 @@ +/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */ +/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. 
*/ +import type { Metadata } from 'next' + +import config from '@payload-config' +import { NotFoundPage, generatePageMetadata } from '@payloadcms/next/views' +import { importMap } from '../importMap' + +type Args = { + params: Promise<{ + segments: string[] + }> + searchParams: Promise<{ + [key: string]: string | string[] + }> +} + +export const generateMetadata = ({ params, searchParams }: Args): Promise => + generatePageMetadata({ config, params, searchParams }) + +const NotFound = ({ params, searchParams }: Args) => + NotFoundPage({ config, params, searchParams, importMap }) + +export default NotFound diff --git a/apps/cms/src/app/(payload)/admin/[[...segments]]/page.tsx b/apps/cms/src/app/(payload)/admin/[[...segments]]/page.tsx new file mode 100644 index 0000000000000..0de685cd62bc1 --- /dev/null +++ b/apps/cms/src/app/(payload)/admin/[[...segments]]/page.tsx @@ -0,0 +1,24 @@ +/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */ +/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. 
*/ +import type { Metadata } from 'next' + +import config from '@payload-config' +import { RootPage, generatePageMetadata } from '@payloadcms/next/views' +import { importMap } from '../importMap' + +type Args = { + params: Promise<{ + segments: string[] + }> + searchParams: Promise<{ + [key: string]: string | string[] + }> +} + +export const generateMetadata = ({ params, searchParams }: Args): Promise => + generatePageMetadata({ config, params, searchParams }) + +const Page = ({ params, searchParams }: Args) => + RootPage({ config, params, searchParams, importMap }) + +export default Page diff --git a/apps/cms/src/app/(payload)/admin/importMap.js b/apps/cms/src/app/(payload)/admin/importMap.js new file mode 100644 index 0000000000000..ab697bf4c47ab --- /dev/null +++ b/apps/cms/src/app/(payload)/admin/importMap.js @@ -0,0 +1,43 @@ +import { SlugComponent as SlugComponent_92cc057d0a2abb4f6cf0307edf59f986 } from '@/fields/slug/SlugComponent' +import { RscEntryLexicalCell as RscEntryLexicalCell_44fe37237e0ebf4470c9990d8cb7b07e } from '@payloadcms/richtext-lexical/rsc' +import { RscEntryLexicalField as RscEntryLexicalField_44fe37237e0ebf4470c9990d8cb7b07e } from '@payloadcms/richtext-lexical/rsc' +import { LexicalDiffComponent as LexicalDiffComponent_44fe37237e0ebf4470c9990d8cb7b07e } from '@payloadcms/richtext-lexical/rsc' +import { HorizontalRuleFeatureClient as HorizontalRuleFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { InlineToolbarFeatureClient as InlineToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { FixedToolbarFeatureClient as FixedToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { BlocksFeatureClient as BlocksFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { ParagraphFeatureClient as ParagraphFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 
} from '@payloadcms/richtext-lexical/client' +import { UnderlineFeatureClient as UnderlineFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { BoldFeatureClient as BoldFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { ItalicFeatureClient as ItalicFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { LinkFeatureClient as LinkFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { HeadingFeatureClient as HeadingFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client' +import { OverviewComponent as OverviewComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client' +import { MetaTitleComponent as MetaTitleComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client' +import { MetaImageComponent as MetaImageComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client' +import { MetaDescriptionComponent as MetaDescriptionComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client' +import { PreviewComponent as PreviewComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client' +import { S3ClientUploadHandler as S3ClientUploadHandler_f97aa6c64367fa259c5bc0567239ef24 } from '@payloadcms/storage-s3/client' + +export const importMap = { + "@/fields/slug/SlugComponent#SlugComponent": SlugComponent_92cc057d0a2abb4f6cf0307edf59f986, + "@payloadcms/richtext-lexical/rsc#RscEntryLexicalCell": RscEntryLexicalCell_44fe37237e0ebf4470c9990d8cb7b07e, + "@payloadcms/richtext-lexical/rsc#RscEntryLexicalField": RscEntryLexicalField_44fe37237e0ebf4470c9990d8cb7b07e, + "@payloadcms/richtext-lexical/rsc#LexicalDiffComponent": LexicalDiffComponent_44fe37237e0ebf4470c9990d8cb7b07e, + "@payloadcms/richtext-lexical/client#HorizontalRuleFeatureClient": 
HorizontalRuleFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#InlineToolbarFeatureClient": InlineToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#FixedToolbarFeatureClient": FixedToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#BlocksFeatureClient": BlocksFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#ParagraphFeatureClient": ParagraphFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#UnderlineFeatureClient": UnderlineFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#BoldFeatureClient": BoldFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#ItalicFeatureClient": ItalicFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#LinkFeatureClient": LinkFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/richtext-lexical/client#HeadingFeatureClient": HeadingFeatureClient_e70f5e05f09f93e00b997edb1ef0c864, + "@payloadcms/plugin-seo/client#OverviewComponent": OverviewComponent_a8a977ebc872c5d5ea7ee689724c0860, + "@payloadcms/plugin-seo/client#MetaTitleComponent": MetaTitleComponent_a8a977ebc872c5d5ea7ee689724c0860, + "@payloadcms/plugin-seo/client#MetaImageComponent": MetaImageComponent_a8a977ebc872c5d5ea7ee689724c0860, + "@payloadcms/plugin-seo/client#MetaDescriptionComponent": MetaDescriptionComponent_a8a977ebc872c5d5ea7ee689724c0860, + "@payloadcms/plugin-seo/client#PreviewComponent": PreviewComponent_a8a977ebc872c5d5ea7ee689724c0860, + "@payloadcms/storage-s3/client#S3ClientUploadHandler": S3ClientUploadHandler_f97aa6c64367fa259c5bc0567239ef24 +} diff --git a/apps/cms/src/app/(payload)/api/[...slug]/route.ts b/apps/cms/src/app/(payload)/api/[...slug]/route.ts new file mode 100644 index 0000000000000..e58c50f50cade --- /dev/null +++ 
b/apps/cms/src/app/(payload)/api/[...slug]/route.ts @@ -0,0 +1,19 @@ +/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */ +/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */ +import config from '@payload-config' +import '@payloadcms/next/css' +import { + REST_DELETE, + REST_GET, + REST_OPTIONS, + REST_PATCH, + REST_POST, + REST_PUT, +} from '@payloadcms/next/routes' + +export const GET = REST_GET(config) +export const POST = REST_POST(config) +export const DELETE = REST_DELETE(config) +export const PATCH = REST_PATCH(config) +export const PUT = REST_PUT(config) +export const OPTIONS = REST_OPTIONS(config) diff --git a/apps/cms/src/app/(payload)/api/graphql-playground/route.ts b/apps/cms/src/app/(payload)/api/graphql-playground/route.ts new file mode 100644 index 0000000000000..17d2954ca2d25 --- /dev/null +++ b/apps/cms/src/app/(payload)/api/graphql-playground/route.ts @@ -0,0 +1,7 @@ +/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */ +/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */ +import config from '@payload-config' +import '@payloadcms/next/css' +import { GRAPHQL_PLAYGROUND_GET } from '@payloadcms/next/routes' + +export const GET = GRAPHQL_PLAYGROUND_GET(config) diff --git a/apps/cms/src/app/(payload)/api/graphql/route.ts b/apps/cms/src/app/(payload)/api/graphql/route.ts new file mode 100644 index 0000000000000..2069ff86b0aaf --- /dev/null +++ b/apps/cms/src/app/(payload)/api/graphql/route.ts @@ -0,0 +1,8 @@ +/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */ +/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. 
*/ +import config from '@payload-config' +import { GRAPHQL_POST, REST_OPTIONS } from '@payloadcms/next/routes' + +export const POST = GRAPHQL_POST(config) + +export const OPTIONS = REST_OPTIONS(config) diff --git a/packages/ui/src/components/Form/Form.css b/apps/cms/src/app/(payload)/custom.scss similarity index 100% rename from packages/ui/src/components/Form/Form.css rename to apps/cms/src/app/(payload)/custom.scss diff --git a/apps/cms/src/app/(payload)/layout.tsx b/apps/cms/src/app/(payload)/layout.tsx new file mode 100644 index 0000000000000..8df141aeb2b24 --- /dev/null +++ b/apps/cms/src/app/(payload)/layout.tsx @@ -0,0 +1,31 @@ +/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */ +/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */ +import config from '@payload-config' +import '@payloadcms/next/css' +import type { ServerFunctionClient } from 'payload' +import { handleServerFunctions, RootLayout } from '@payloadcms/next/layouts' +import React from 'react' + +import { importMap } from './admin/importMap.js' +import './custom.scss' + +type Args = { + children: React.ReactNode +} + +const serverFunction: ServerFunctionClient = async function (args) { + 'use server' + return handleServerFunctions({ + ...args, + config, + importMap, + }) +} + +const Layout = ({ children }: Args) => ( + + {children} + +) + +export default Layout diff --git a/apps/cms/src/app/my-route/route.ts b/apps/cms/src/app/my-route/route.ts new file mode 100644 index 0000000000000..a6422f3733485 --- /dev/null +++ b/apps/cms/src/app/my-route/route.ts @@ -0,0 +1,14 @@ +import configPromise from '@payload-config' +import { getPayload } from 'payload' + +export const GET = async () => { + const payload = await getPayload({ + config: configPromise, + }) + + const data = await payload.find({ + collection: 'users', + }) + + return Response.json(data) +} diff --git a/apps/cms/src/blocks/ArchiveBlock/Component.tsx b/apps/cms/src/blocks/ArchiveBlock/Component.tsx new file mode 100644 
index 0000000000000..48a57ec5d692a --- /dev/null +++ b/apps/cms/src/blocks/ArchiveBlock/Component.tsx @@ -0,0 +1,65 @@ +import type { Post, ArchiveBlock as ArchiveBlockProps } from '@/payload-types' + +import configPromise from '@payload-config' +import { getPayload } from 'payload' +import React from 'react' +import RichText from '@/components/RichText' + +import { CollectionArchive } from '@/components/CollectionArchive' + +export const ArchiveBlock: React.FC< + ArchiveBlockProps & { + id?: string + } +> = async (props) => { + const { id, categories, introContent, limit: limitFromProps, populateBy, selectedDocs } = props + + const limit = limitFromProps || 3 + + let posts: Post[] = [] + + if (populateBy === 'collection') { + const payload = await getPayload({ config: configPromise }) + + const flattenedCategories = categories?.map((category) => { + if (typeof category === 'object') return category.id + else return category + }) + + const fetchedPosts = await payload.find({ + collection: 'posts', + depth: 1, + limit, + ...(flattenedCategories && flattenedCategories.length > 0 + ? { + where: { + categories: { + in: flattenedCategories, + }, + }, + } + : {}), + }) + + posts = fetchedPosts.docs + } else { + if (selectedDocs?.length) { + const filteredSelectedPosts = selectedDocs.map((post) => { + if (typeof post.value === 'object') return post.value + }) as Post[] + + posts = filteredSelectedPosts + } + } + + return ( +
+ {introContent && ( +
+ +
+ )} + +
+ ) +} diff --git a/apps/cms/src/blocks/ArchiveBlock/config.ts b/apps/cms/src/blocks/ArchiveBlock/config.ts new file mode 100644 index 0000000000000..f87a376bd51f3 --- /dev/null +++ b/apps/cms/src/blocks/ArchiveBlock/config.ts @@ -0,0 +1,94 @@ +import type { Block } from 'payload' + +import { + FixedToolbarFeature, + HeadingFeature, + InlineToolbarFeature, + lexicalEditor, +} from '@payloadcms/richtext-lexical' + +export const Archive: Block = { + slug: 'archive', + interfaceName: 'ArchiveBlock', + fields: [ + { + name: 'introContent', + type: 'richText', + editor: lexicalEditor({ + features: ({ rootFeatures }) => { + return [ + ...rootFeatures, + HeadingFeature({ enabledHeadingSizes: ['h1', 'h2', 'h3', 'h4'] }), + FixedToolbarFeature(), + InlineToolbarFeature(), + ] + }, + }), + label: 'Intro Content', + }, + { + name: 'populateBy', + type: 'select', + defaultValue: 'collection', + options: [ + { + label: 'Collection', + value: 'collection', + }, + { + label: 'Individual Selection', + value: 'selection', + }, + ], + }, + { + name: 'relationTo', + type: 'select', + admin: { + condition: (_, siblingData) => siblingData.populateBy === 'collection', + }, + defaultValue: 'posts', + label: 'Collections To Show', + options: [ + { + label: 'Posts', + value: 'posts', + }, + ], + }, + { + name: 'categories', + type: 'relationship', + admin: { + condition: (_, siblingData) => siblingData.populateBy === 'collection', + }, + hasMany: true, + label: 'Categories To Show', + relationTo: 'categories', + }, + { + name: 'limit', + type: 'number', + admin: { + condition: (_, siblingData) => siblingData.populateBy === 'collection', + step: 1, + }, + defaultValue: 10, + label: 'Limit', + }, + { + name: 'selectedDocs', + type: 'relationship', + admin: { + condition: (_, siblingData) => siblingData.populateBy === 'selection', + }, + hasMany: true, + label: 'Selection', + relationTo: ['posts'], + }, + ], + labels: { + plural: 'Archives', + singular: 'Archive', + }, +} diff --git 
a/apps/cms/src/blocks/Banner/Component.tsx b/apps/cms/src/blocks/Banner/Component.tsx new file mode 100644 index 0000000000000..6abb8ad09a6e1 --- /dev/null +++ b/apps/cms/src/blocks/Banner/Component.tsx @@ -0,0 +1,26 @@ +import type { BannerBlock as BannerBlockProps } from 'src/payload-types' + +import { cn } from '@/utilities/ui' +import React from 'react' +import RichText from '@/components/RichText' + +type Props = { + className?: string +} & BannerBlockProps + +export const BannerBlock: React.FC = ({ className, content, style }) => { + return ( +
+
+ +
+
+ ) +} diff --git a/apps/cms/src/blocks/Banner/config.ts b/apps/cms/src/blocks/Banner/config.ts new file mode 100644 index 0000000000000..53e46b5cbc4ed --- /dev/null +++ b/apps/cms/src/blocks/Banner/config.ts @@ -0,0 +1,37 @@ +import type { Block } from 'payload' + +import { + FixedToolbarFeature, + InlineToolbarFeature, + lexicalEditor, +} from '@payloadcms/richtext-lexical' + +export const Banner: Block = { + slug: 'banner', + fields: [ + { + name: 'style', + type: 'select', + defaultValue: 'info', + options: [ + { label: 'Info', value: 'info' }, + { label: 'Warning', value: 'warning' }, + { label: 'Error', value: 'error' }, + { label: 'Success', value: 'success' }, + ], + required: true, + }, + { + name: 'content', + type: 'richText', + editor: lexicalEditor({ + features: ({ rootFeatures }) => { + return [...rootFeatures, FixedToolbarFeature(), InlineToolbarFeature()] + }, + }), + label: false, + required: true, + }, + ], + interfaceName: 'BannerBlock', +} diff --git a/apps/cms/src/blocks/CallToAction/Component.tsx b/apps/cms/src/blocks/CallToAction/Component.tsx new file mode 100644 index 0000000000000..6b3771cf232ea --- /dev/null +++ b/apps/cms/src/blocks/CallToAction/Component.tsx @@ -0,0 +1,23 @@ +import React from 'react' + +import type { CallToActionBlock as CTABlockProps } from '@/payload-types' + +import RichText from '@/components/RichText' +import { CMSLink } from '@/components/Link' + +export const CallToActionBlock: React.FC = ({ links, richText }) => { + return ( +
+
+
+ {richText && } +
+
+ {(links || []).map(({ link }, i) => { + return + })} +
+
+
+ ) +} diff --git a/apps/cms/src/blocks/CallToAction/config.ts b/apps/cms/src/blocks/CallToAction/config.ts new file mode 100644 index 0000000000000..f4ffa77c6a17e --- /dev/null +++ b/apps/cms/src/blocks/CallToAction/config.ts @@ -0,0 +1,42 @@ +import type { Block } from 'payload' + +import { + FixedToolbarFeature, + HeadingFeature, + InlineToolbarFeature, + lexicalEditor, +} from '@payloadcms/richtext-lexical' + +import { linkGroup } from '../../fields/linkGroup' + +export const CallToAction: Block = { + slug: 'cta', + interfaceName: 'CallToActionBlock', + fields: [ + { + name: 'richText', + type: 'richText', + editor: lexicalEditor({ + features: ({ rootFeatures }) => { + return [ + ...rootFeatures, + HeadingFeature({ enabledHeadingSizes: ['h1', 'h2', 'h3', 'h4'] }), + FixedToolbarFeature(), + InlineToolbarFeature(), + ] + }, + }), + label: false, + }, + linkGroup({ + appearances: ['default', 'outline'], + overrides: { + maxRows: 2, + }, + }), + ], + labels: { + plural: 'Calls to Action', + singular: 'Call to Action', + }, +} diff --git a/apps/cms/src/blocks/Code/Component.client.tsx b/apps/cms/src/blocks/Code/Component.client.tsx new file mode 100644 index 0000000000000..fc8fb62545a74 --- /dev/null +++ b/apps/cms/src/blocks/Code/Component.client.tsx @@ -0,0 +1,33 @@ +'use client' +import { Highlight, themes } from 'prism-react-renderer' +import React from 'react' +import { CopyButton } from './CopyButton' + +type Props = { + code: string + language?: string +} + +export const Code: React.FC = ({ code, language = '' }) => { + if (!code) return null + + return ( + + {({ getLineProps, getTokenProps, tokens }) => ( +
+          {tokens.map((line, i) => (
+            
+ {i + 1} + + {line.map((token, key) => ( + + ))} + +
+ ))} + +
+ )} +
+ ) +} diff --git a/apps/cms/src/blocks/Code/Component.tsx b/apps/cms/src/blocks/Code/Component.tsx new file mode 100644 index 0000000000000..7f776d74de339 --- /dev/null +++ b/apps/cms/src/blocks/Code/Component.tsx @@ -0,0 +1,21 @@ +import React from 'react' + +import { Code } from './Component.client' + +export type CodeBlockProps = { + code: string + language?: string + blockType: 'code' +} + +type Props = CodeBlockProps & { + className?: string +} + +export const CodeBlock: React.FC = ({ className, code, language }) => { + return ( +
+ +
+ ) +} diff --git a/apps/cms/src/blocks/Code/CopyButton.tsx b/apps/cms/src/blocks/Code/CopyButton.tsx new file mode 100644 index 0000000000000..8d204bfffd680 --- /dev/null +++ b/apps/cms/src/blocks/Code/CopyButton.tsx @@ -0,0 +1,33 @@ +'use client' +import { Button } from '@/components/ui/button' +import { CopyIcon } from '@payloadcms/ui/icons/Copy' +import { useState } from 'react' + +export function CopyButton({ code }: { code: string }) { + const [text, setText] = useState('Copy') + + function updateCopyStatus() { + if (text === 'Copy') { + setText(() => 'Copied!') + setTimeout(() => { + setText(() => 'Copy') + }, 1000) + } + } + + return ( +
+ +
+ ) +} diff --git a/apps/cms/src/blocks/Code/config.ts b/apps/cms/src/blocks/Code/config.ts new file mode 100644 index 0000000000000..7b26f805db2ef --- /dev/null +++ b/apps/cms/src/blocks/Code/config.ts @@ -0,0 +1,33 @@ +import type { Block } from 'payload' + +export const Code: Block = { + slug: 'code', + interfaceName: 'CodeBlock', + fields: [ + { + name: 'language', + type: 'select', + defaultValue: 'typescript', + options: [ + { + label: 'Typescript', + value: 'typescript', + }, + { + label: 'Javascript', + value: 'javascript', + }, + { + label: 'CSS', + value: 'css', + }, + ], + }, + { + name: 'code', + type: 'code', + label: false, + required: true, + }, + ], +} diff --git a/apps/cms/src/blocks/Content/Component.tsx b/apps/cms/src/blocks/Content/Component.tsx new file mode 100644 index 0000000000000..2c2550b7598c6 --- /dev/null +++ b/apps/cms/src/blocks/Content/Component.tsx @@ -0,0 +1,43 @@ +import { cn } from '@/utilities/ui' +import React from 'react' +import RichText from '@/components/RichText' + +import type { ContentBlock as ContentBlockProps } from '@/payload-types' + +import { CMSLink } from '../../components/Link' + +export const ContentBlock: React.FC = (props) => { + const { columns } = props + + const colsSpanClasses = { + full: '12', + half: '6', + oneThird: '4', + twoThirds: '8', + } + + return ( +
+
+ {columns && + columns.length > 0 && + columns.map((col, index) => { + const { enableLink, link, richText, size } = col + + return ( +
+ {richText && } + + {enableLink && } +
+ ) + })} +
+
+ ) +} diff --git a/apps/cms/src/blocks/Content/config.ts b/apps/cms/src/blocks/Content/config.ts new file mode 100644 index 0000000000000..5c2fb0771d3ca --- /dev/null +++ b/apps/cms/src/blocks/Content/config.ts @@ -0,0 +1,79 @@ +import type { Block, Field } from 'payload' + +import { + FixedToolbarFeature, + HeadingFeature, + InlineToolbarFeature, + lexicalEditor, +} from '@payloadcms/richtext-lexical' + +import { link } from '@/fields/link' + +const columnFields: Field[] = [ + { + name: 'size', + type: 'select', + defaultValue: 'oneThird', + options: [ + { + label: 'One Third', + value: 'oneThird', + }, + { + label: 'Half', + value: 'half', + }, + { + label: 'Two Thirds', + value: 'twoThirds', + }, + { + label: 'Full', + value: 'full', + }, + ], + }, + { + name: 'richText', + type: 'richText', + editor: lexicalEditor({ + features: ({ rootFeatures }) => { + return [ + ...rootFeatures, + HeadingFeature({ enabledHeadingSizes: ['h2', 'h3', 'h4'] }), + FixedToolbarFeature(), + InlineToolbarFeature(), + ] + }, + }), + label: false, + }, + { + name: 'enableLink', + type: 'checkbox', + }, + link({ + overrides: { + admin: { + condition: (_data, siblingData) => { + return Boolean(siblingData?.enableLink) + }, + }, + }, + }), +] + +export const Content: Block = { + slug: 'content', + interfaceName: 'ContentBlock', + fields: [ + { + name: 'columns', + type: 'array', + admin: { + initCollapsed: true, + }, + fields: columnFields, + }, + ], +} diff --git a/apps/cms/src/blocks/Form/Checkbox/index.tsx b/apps/cms/src/blocks/Form/Checkbox/index.tsx new file mode 100644 index 0000000000000..633d5db0d8400 --- /dev/null +++ b/apps/cms/src/blocks/Form/Checkbox/index.tsx @@ -0,0 +1,45 @@ +import type { CheckboxField } from '@payloadcms/plugin-form-builder/types' +import type { FieldErrorsImpl, FieldValues, UseFormRegister } from 'react-hook-form' + +import { useFormContext } from 'react-hook-form' + +import { Checkbox as CheckboxUi } from '@/components/ui/checkbox' +import { Label } 
from '@/components/ui/label' +import React from 'react' + +import { Error } from '../Error' +import { Width } from '../Width' + +export const Checkbox: React.FC< + CheckboxField & { + errors: Partial + register: UseFormRegister + } +> = ({ name, defaultValue, errors, label, register, required, width }) => { + const props = register(name, { required: required }) + const { setValue } = useFormContext() + + return ( + +
+ { + setValue(props.name, checked) + }} + /> + +
+ {errors[name] && } +
+ ) +} diff --git a/apps/cms/src/blocks/Form/Component.tsx b/apps/cms/src/blocks/Form/Component.tsx new file mode 100644 index 0000000000000..7cae8e52e6e67 --- /dev/null +++ b/apps/cms/src/blocks/Form/Component.tsx @@ -0,0 +1,162 @@ +'use client' +import type { FormFieldBlock, Form as FormType } from '@payloadcms/plugin-form-builder/types' + +import { useRouter } from 'next/navigation' +import React, { useCallback, useState } from 'react' +import { useForm, FormProvider } from 'react-hook-form' +import RichText from '@/components/RichText' +import { Button } from '@/components/ui/button' +import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical' + +import { fields } from './fields' +import { getClientSideURL } from '@/utilities/getURL' + +export type FormBlockType = { + blockName?: string + blockType?: 'formBlock' + enableIntro: boolean + form: FormType + introContent?: SerializedEditorState +} + +export const FormBlock: React.FC< + { + id?: string + } & FormBlockType +> = (props) => { + const { + enableIntro, + form: formFromProps, + form: { id: formID, confirmationMessage, confirmationType, redirect, submitButtonLabel } = {}, + introContent, + } = props + + const formMethods = useForm({ + defaultValues: formFromProps.fields, + }) + const { + control, + formState: { errors }, + handleSubmit, + register, + } = formMethods + + const [isLoading, setIsLoading] = useState(false) + const [hasSubmitted, setHasSubmitted] = useState() + const [error, setError] = useState<{ message: string; status?: string } | undefined>() + const router = useRouter() + + const onSubmit = useCallback( + (data: FormFieldBlock[]) => { + let loadingTimerID: ReturnType + const submitForm = async () => { + setError(undefined) + + const dataToSend = Object.entries(data).map(([name, value]) => ({ + field: name, + value, + })) + + // delay loading indicator by 1s + loadingTimerID = setTimeout(() => { + setIsLoading(true) + }, 1000) + + try { + const req = await 
fetch(`${getClientSideURL()}/api/form-submissions`, { + body: JSON.stringify({ + form: formID, + submissionData: dataToSend, + }), + headers: { + 'Content-Type': 'application/json', + }, + method: 'POST', + }) + + const res = await req.json() + + clearTimeout(loadingTimerID) + + if (req.status >= 400) { + setIsLoading(false) + + setError({ + message: res.errors?.[0]?.message || 'Internal Server Error', + status: res.status, + }) + + return + } + + setIsLoading(false) + setHasSubmitted(true) + + if (confirmationType === 'redirect' && redirect) { + const { url } = redirect + + const redirectUrl = url + + if (redirectUrl) router.push(redirectUrl) + } + } catch (err) { + console.warn(err) + setIsLoading(false) + setError({ + message: 'Something went wrong.', + }) + } + } + + void submitForm() + }, + [router, formID, redirect, confirmationType] + ) + + return ( +
+ {enableIntro && introContent && !hasSubmitted && ( + + )} +
+ + {!isLoading && hasSubmitted && confirmationType === 'message' && ( + + )} + {isLoading && !hasSubmitted &&

Loading, please wait...

} + {error &&
{`${error.status || '500'}: ${error.message || ''}`}
} + {!hasSubmitted && ( +
+
+ {formFromProps && + formFromProps.fields && + formFromProps.fields?.map((field, index) => { + const Field: React.FC = fields?.[field.blockType as keyof typeof fields] + if (Field) { + return ( +
+ +
+ ) + } + return null + })} +
+ + +
+ )} +
+
+
+ ) +} diff --git a/apps/cms/src/blocks/Form/Country/index.tsx b/apps/cms/src/blocks/Form/Country/index.tsx new file mode 100644 index 0000000000000..9c85b753972b6 --- /dev/null +++ b/apps/cms/src/blocks/Form/Country/index.tsx @@ -0,0 +1,65 @@ +import type { CountryField } from '@payloadcms/plugin-form-builder/types' +import type { Control, FieldErrorsImpl } from 'react-hook-form' + +import { Label } from '@/components/ui/label' +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from '@/components/ui/select' +import React from 'react' +import { Controller } from 'react-hook-form' + +import { Error } from '../Error' +import { Width } from '../Width' +import { countryOptions } from './options' + +export const Country: React.FC< + CountryField & { + control: Control + errors: Partial + } +> = ({ name, control, errors, label, required, width }) => { + return ( + + + { + const controlledValue = countryOptions.find((t) => t.value === value) + + return ( + + ) + }} + rules={{ required }} + /> + {errors[name] && } + + ) +} diff --git a/apps/cms/src/blocks/Form/Country/options.ts b/apps/cms/src/blocks/Form/Country/options.ts new file mode 100644 index 0000000000000..f952c1df89001 --- /dev/null +++ b/apps/cms/src/blocks/Form/Country/options.ts @@ -0,0 +1,982 @@ +export const countryOptions = [ + { + label: 'Afghanistan', + value: 'AF', + }, + { + label: 'Åland Islands', + value: 'AX', + }, + { + label: 'Albania', + value: 'AL', + }, + { + label: 'Algeria', + value: 'DZ', + }, + { + label: 'American Samoa', + value: 'AS', + }, + { + label: 'Andorra', + value: 'AD', + }, + { + label: 'Angola', + value: 'AO', + }, + { + label: 'Anguilla', + value: 'AI', + }, + { + label: 'Antarctica', + value: 'AQ', + }, + { + label: 'Antigua and Barbuda', + value: 'AG', + }, + { + label: 'Argentina', + value: 'AR', + }, + { + label: 'Armenia', + value: 'AM', + }, + { + label: 'Aruba', + value: 'AW', + }, + { + label: 'Australia', + value: 'AU', + }, + { + 
label: 'Austria', + value: 'AT', + }, + { + label: 'Azerbaijan', + value: 'AZ', + }, + { + label: 'Bahamas', + value: 'BS', + }, + { + label: 'Bahrain', + value: 'BH', + }, + { + label: 'Bangladesh', + value: 'BD', + }, + { + label: 'Barbados', + value: 'BB', + }, + { + label: 'Belarus', + value: 'BY', + }, + { + label: 'Belgium', + value: 'BE', + }, + { + label: 'Belize', + value: 'BZ', + }, + { + label: 'Benin', + value: 'BJ', + }, + { + label: 'Bermuda', + value: 'BM', + }, + { + label: 'Bhutan', + value: 'BT', + }, + { + label: 'Bolivia', + value: 'BO', + }, + { + label: 'Bosnia and Herzegovina', + value: 'BA', + }, + { + label: 'Botswana', + value: 'BW', + }, + { + label: 'Bouvet Island', + value: 'BV', + }, + { + label: 'Brazil', + value: 'BR', + }, + { + label: 'British Indian Ocean Territory', + value: 'IO', + }, + { + label: 'Brunei Darussalam', + value: 'BN', + }, + { + label: 'Bulgaria', + value: 'BG', + }, + { + label: 'Burkina Faso', + value: 'BF', + }, + { + label: 'Burundi', + value: 'BI', + }, + { + label: 'Cambodia', + value: 'KH', + }, + { + label: 'Cameroon', + value: 'CM', + }, + { + label: 'Canada', + value: 'CA', + }, + { + label: 'Cape Verde', + value: 'CV', + }, + { + label: 'Cayman Islands', + value: 'KY', + }, + { + label: 'Central African Republic', + value: 'CF', + }, + { + label: 'Chad', + value: 'TD', + }, + { + label: 'Chile', + value: 'CL', + }, + { + label: 'China', + value: 'CN', + }, + { + label: 'Christmas Island', + value: 'CX', + }, + { + label: 'Cocos (Keeling) Islands', + value: 'CC', + }, + { + label: 'Colombia', + value: 'CO', + }, + { + label: 'Comoros', + value: 'KM', + }, + { + label: 'Congo', + value: 'CG', + }, + { + label: 'Congo, The Democratic Republic of the', + value: 'CD', + }, + { + label: 'Cook Islands', + value: 'CK', + }, + { + label: 'Costa Rica', + value: 'CR', + }, + { + label: "Cote D'Ivoire", + value: 'CI', + }, + { + label: 'Croatia', + value: 'HR', + }, + { + label: 'Cuba', + value: 'CU', + }, + { + 
label: 'Cyprus', + value: 'CY', + }, + { + label: 'Czech Republic', + value: 'CZ', + }, + { + label: 'Denmark', + value: 'DK', + }, + { + label: 'Djibouti', + value: 'DJ', + }, + { + label: 'Dominica', + value: 'DM', + }, + { + label: 'Dominican Republic', + value: 'DO', + }, + { + label: 'Ecuador', + value: 'EC', + }, + { + label: 'Egypt', + value: 'EG', + }, + { + label: 'El Salvador', + value: 'SV', + }, + { + label: 'Equatorial Guinea', + value: 'GQ', + }, + { + label: 'Eritrea', + value: 'ER', + }, + { + label: 'Estonia', + value: 'EE', + }, + { + label: 'Ethiopia', + value: 'ET', + }, + { + label: 'Falkland Islands (Malvinas)', + value: 'FK', + }, + { + label: 'Faroe Islands', + value: 'FO', + }, + { + label: 'Fiji', + value: 'FJ', + }, + { + label: 'Finland', + value: 'FI', + }, + { + label: 'France', + value: 'FR', + }, + { + label: 'French Guiana', + value: 'GF', + }, + { + label: 'French Polynesia', + value: 'PF', + }, + { + label: 'French Southern Territories', + value: 'TF', + }, + { + label: 'Gabon', + value: 'GA', + }, + { + label: 'Gambia', + value: 'GM', + }, + { + label: 'Georgia', + value: 'GE', + }, + { + label: 'Germany', + value: 'DE', + }, + { + label: 'Ghana', + value: 'GH', + }, + { + label: 'Gibraltar', + value: 'GI', + }, + { + label: 'Greece', + value: 'GR', + }, + { + label: 'Greenland', + value: 'GL', + }, + { + label: 'Grenada', + value: 'GD', + }, + { + label: 'Guadeloupe', + value: 'GP', + }, + { + label: 'Guam', + value: 'GU', + }, + { + label: 'Guatemala', + value: 'GT', + }, + { + label: 'Guernsey', + value: 'GG', + }, + { + label: 'Guinea', + value: 'GN', + }, + { + label: 'Guinea-Bissau', + value: 'GW', + }, + { + label: 'Guyana', + value: 'GY', + }, + { + label: 'Haiti', + value: 'HT', + }, + { + label: 'Heard Island and Mcdonald Islands', + value: 'HM', + }, + { + label: 'Holy See (Vatican City State)', + value: 'VA', + }, + { + label: 'Honduras', + value: 'HN', + }, + { + label: 'Hong Kong', + value: 'HK', + }, + { + label: 
'Hungary', + value: 'HU', + }, + { + label: 'Iceland', + value: 'IS', + }, + { + label: 'India', + value: 'IN', + }, + { + label: 'Indonesia', + value: 'ID', + }, + { + label: 'Iran, Islamic Republic Of', + value: 'IR', + }, + { + label: 'Iraq', + value: 'IQ', + }, + { + label: 'Ireland', + value: 'IE', + }, + { + label: 'Isle of Man', + value: 'IM', + }, + { + label: 'Israel', + value: 'IL', + }, + { + label: 'Italy', + value: 'IT', + }, + { + label: 'Jamaica', + value: 'JM', + }, + { + label: 'Japan', + value: 'JP', + }, + { + label: 'Jersey', + value: 'JE', + }, + { + label: 'Jordan', + value: 'JO', + }, + { + label: 'Kazakhstan', + value: 'KZ', + }, + { + label: 'Kenya', + value: 'KE', + }, + { + label: 'Kiribati', + value: 'KI', + }, + { + label: "Democratic People's Republic of Korea", + value: 'KP', + }, + { + label: 'Korea, Republic of', + value: 'KR', + }, + { + label: 'Kosovo', + value: 'XK', + }, + { + label: 'Kuwait', + value: 'KW', + }, + { + label: 'Kyrgyzstan', + value: 'KG', + }, + { + label: "Lao People's Democratic Republic", + value: 'LA', + }, + { + label: 'Latvia', + value: 'LV', + }, + { + label: 'Lebanon', + value: 'LB', + }, + { + label: 'Lesotho', + value: 'LS', + }, + { + label: 'Liberia', + value: 'LR', + }, + { + label: 'Libyan Arab Jamahiriya', + value: 'LY', + }, + { + label: 'Liechtenstein', + value: 'LI', + }, + { + label: 'Lithuania', + value: 'LT', + }, + { + label: 'Luxembourg', + value: 'LU', + }, + { + label: 'Macao', + value: 'MO', + }, + { + label: 'Macedonia, The Former Yugoslav Republic of', + value: 'MK', + }, + { + label: 'Madagascar', + value: 'MG', + }, + { + label: 'Malawi', + value: 'MW', + }, + { + label: 'Malaysia', + value: 'MY', + }, + { + label: 'Maldives', + value: 'MV', + }, + { + label: 'Mali', + value: 'ML', + }, + { + label: 'Malta', + value: 'MT', + }, + { + label: 'Marshall Islands', + value: 'MH', + }, + { + label: 'Martinique', + value: 'MQ', + }, + { + label: 'Mauritania', + value: 'MR', + }, + { + 
label: 'Mauritius', + value: 'MU', + }, + { + label: 'Mayotte', + value: 'YT', + }, + { + label: 'Mexico', + value: 'MX', + }, + { + label: 'Micronesia, Federated States of', + value: 'FM', + }, + { + label: 'Moldova, Republic of', + value: 'MD', + }, + { + label: 'Monaco', + value: 'MC', + }, + { + label: 'Mongolia', + value: 'MN', + }, + { + label: 'Montenegro', + value: 'ME', + }, + { + label: 'Montserrat', + value: 'MS', + }, + { + label: 'Morocco', + value: 'MA', + }, + { + label: 'Mozambique', + value: 'MZ', + }, + { + label: 'Myanmar', + value: 'MM', + }, + { + label: 'Namibia', + value: 'NA', + }, + { + label: 'Nauru', + value: 'NR', + }, + { + label: 'Nepal', + value: 'NP', + }, + { + label: 'Netherlands', + value: 'NL', + }, + { + label: 'Netherlands Antilles', + value: 'AN', + }, + { + label: 'New Caledonia', + value: 'NC', + }, + { + label: 'New Zealand', + value: 'NZ', + }, + { + label: 'Nicaragua', + value: 'NI', + }, + { + label: 'Niger', + value: 'NE', + }, + { + label: 'Nigeria', + value: 'NG', + }, + { + label: 'Niue', + value: 'NU', + }, + { + label: 'Norfolk Island', + value: 'NF', + }, + { + label: 'Northern Mariana Islands', + value: 'MP', + }, + { + label: 'Norway', + value: 'NO', + }, + { + label: 'Oman', + value: 'OM', + }, + { + label: 'Pakistan', + value: 'PK', + }, + { + label: 'Palau', + value: 'PW', + }, + { + label: 'Palestinian Territory, Occupied', + value: 'PS', + }, + { + label: 'Panama', + value: 'PA', + }, + { + label: 'Papua New Guinea', + value: 'PG', + }, + { + label: 'Paraguay', + value: 'PY', + }, + { + label: 'Peru', + value: 'PE', + }, + { + label: 'Philippines', + value: 'PH', + }, + { + label: 'Pitcairn', + value: 'PN', + }, + { + label: 'Poland', + value: 'PL', + }, + { + label: 'Portugal', + value: 'PT', + }, + { + label: 'Puerto Rico', + value: 'PR', + }, + { + label: 'Qatar', + value: 'QA', + }, + { + label: 'Reunion', + value: 'RE', + }, + { + label: 'Romania', + value: 'RO', + }, + { + label: 'Russian Federation', 
+ value: 'RU', + }, + { + label: 'Rwanda', + value: 'RW', + }, + { + label: 'Saint Helena', + value: 'SH', + }, + { + label: 'Saint Kitts and Nevis', + value: 'KN', + }, + { + label: 'Saint Lucia', + value: 'LC', + }, + { + label: 'Saint Pierre and Miquelon', + value: 'PM', + }, + { + label: 'Saint Vincent and the Grenadines', + value: 'VC', + }, + { + label: 'Samoa', + value: 'WS', + }, + { + label: 'San Marino', + value: 'SM', + }, + { + label: 'Sao Tome and Principe', + value: 'ST', + }, + { + label: 'Saudi Arabia', + value: 'SA', + }, + { + label: 'Senegal', + value: 'SN', + }, + { + label: 'Serbia', + value: 'RS', + }, + { + label: 'Seychelles', + value: 'SC', + }, + { + label: 'Sierra Leone', + value: 'SL', + }, + { + label: 'Singapore', + value: 'SG', + }, + { + label: 'Slovakia', + value: 'SK', + }, + { + label: 'Slovenia', + value: 'SI', + }, + { + label: 'Solomon Islands', + value: 'SB', + }, + { + label: 'Somalia', + value: 'SO', + }, + { + label: 'South Africa', + value: 'ZA', + }, + { + label: 'South Georgia and the South Sandwich Islands', + value: 'GS', + }, + { + label: 'Spain', + value: 'ES', + }, + { + label: 'Sri Lanka', + value: 'LK', + }, + { + label: 'Sudan', + value: 'SD', + }, + { + label: 'Suriname', + value: 'SR', + }, + { + label: 'Svalbard and Jan Mayen', + value: 'SJ', + }, + { + label: 'Swaziland', + value: 'SZ', + }, + { + label: 'Sweden', + value: 'SE', + }, + { + label: 'Switzerland', + value: 'CH', + }, + { + label: 'Syrian Arab Republic', + value: 'SY', + }, + { + label: 'Taiwan', + value: 'TW', + }, + { + label: 'Tajikistan', + value: 'TJ', + }, + { + label: 'Tanzania, United Republic of', + value: 'TZ', + }, + { + label: 'Thailand', + value: 'TH', + }, + { + label: 'Timor-Leste', + value: 'TL', + }, + { + label: 'Togo', + value: 'TG', + }, + { + label: 'Tokelau', + value: 'TK', + }, + { + label: 'Tonga', + value: 'TO', + }, + { + label: 'Trinidad and Tobago', + value: 'TT', + }, + { + label: 'Tunisia', + value: 'TN', + }, + { + 
label: 'Turkey', + value: 'TR', + }, + { + label: 'Turkmenistan', + value: 'TM', + }, + { + label: 'Turks and Caicos Islands', + value: 'TC', + }, + { + label: 'Tuvalu', + value: 'TV', + }, + { + label: 'Uganda', + value: 'UG', + }, + { + label: 'Ukraine', + value: 'UA', + }, + { + label: 'United Arab Emirates', + value: 'AE', + }, + { + label: 'United Kingdom', + value: 'GB', + }, + { + label: 'United States', + value: 'US', + }, + { + label: 'United States Minor Outlying Islands', + value: 'UM', + }, + { + label: 'Uruguay', + value: 'UY', + }, + { + label: 'Uzbekistan', + value: 'UZ', + }, + { + label: 'Vanuatu', + value: 'VU', + }, + { + label: 'Venezuela', + value: 'VE', + }, + { + label: 'Viet Nam', + value: 'VN', + }, + { + label: 'Virgin Islands, British', + value: 'VG', + }, + { + label: 'Virgin Islands, U.S.', + value: 'VI', + }, + { + label: 'Wallis and Futuna', + value: 'WF', + }, + { + label: 'Western Sahara', + value: 'EH', + }, + { + label: 'Yemen', + value: 'YE', + }, + { + label: 'Zambia', + value: 'ZM', + }, + { + label: 'Zimbabwe', + value: 'ZW', + }, +] diff --git a/apps/cms/src/blocks/Form/Email/index.tsx b/apps/cms/src/blocks/Form/Email/index.tsx new file mode 100644 index 0000000000000..fc9fd2804cdde --- /dev/null +++ b/apps/cms/src/blocks/Form/Email/index.tsx @@ -0,0 +1,38 @@ +import type { EmailField } from '@payloadcms/plugin-form-builder/types' +import type { FieldErrorsImpl, FieldValues, UseFormRegister } from 'react-hook-form' + +import { Input } from '@/components/ui/input' +import { Label } from '@/components/ui/label' +import React from 'react' + +import { Error } from '../Error' +import { Width } from '../Width' + +export const Email: React.FC< + EmailField & { + errors: Partial + register: UseFormRegister + } +> = ({ name, defaultValue, errors, label, register, required, width }) => { + return ( + + + + + {errors[name] && } + + ) +} diff --git a/apps/cms/src/blocks/Form/Error/index.tsx b/apps/cms/src/blocks/Form/Error/index.tsx new 
file mode 100644 index 0000000000000..a7b9e47e805c6 --- /dev/null +++ b/apps/cms/src/blocks/Form/Error/index.tsx @@ -0,0 +1,15 @@ +'use client' + +import * as React from 'react' +import { useFormContext } from 'react-hook-form' + +export const Error = ({ name }: { name: string }) => { + const { + formState: { errors }, + } = useFormContext() + return ( +
+ {(errors[name]?.message as string) || 'This field is required'} +
+ ) +} diff --git a/apps/cms/src/blocks/Form/Message/index.tsx b/apps/cms/src/blocks/Form/Message/index.tsx new file mode 100644 index 0000000000000..5924cf95f40b0 --- /dev/null +++ b/apps/cms/src/blocks/Form/Message/index.tsx @@ -0,0 +1,13 @@ +import RichText from '@/components/RichText' +import React from 'react' + +import { Width } from '../Width' +import { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical' + +export const Message: React.FC<{ message: SerializedEditorState }> = ({ message }) => { + return ( + + {message && } + + ) +} diff --git a/apps/cms/src/blocks/Form/Number/index.tsx b/apps/cms/src/blocks/Form/Number/index.tsx new file mode 100644 index 0000000000000..f26e54a44860c --- /dev/null +++ b/apps/cms/src/blocks/Form/Number/index.tsx @@ -0,0 +1,36 @@ +import type { TextField } from '@payloadcms/plugin-form-builder/types' +import type { FieldErrorsImpl, FieldValues, UseFormRegister } from 'react-hook-form' + +import { Input } from '@/components/ui/input' +import { Label } from '@/components/ui/label' +import React from 'react' + +import { Error } from '../Error' +import { Width } from '../Width' +export const Number: React.FC< + TextField & { + errors: Partial + register: UseFormRegister + } +> = ({ name, defaultValue, errors, label, register, required, width }) => { + return ( + + + + {errors[name] && } + + ) +} diff --git a/apps/cms/src/blocks/Form/Select/index.tsx b/apps/cms/src/blocks/Form/Select/index.tsx new file mode 100644 index 0000000000000..30c0e8312acbf --- /dev/null +++ b/apps/cms/src/blocks/Form/Select/index.tsx @@ -0,0 +1,63 @@ +import type { SelectField } from '@payloadcms/plugin-form-builder/types' +import type { Control, FieldErrorsImpl } from 'react-hook-form' + +import { Label } from '@/components/ui/label' +import { + Select as SelectComponent, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from '@/components/ui/select' +import React from 'react' +import { Controller } from 'react-hook-form' + 
+import { Error } from '../Error' +import { Width } from '../Width' + +export const Select: React.FC< + SelectField & { + control: Control + errors: Partial + } +> = ({ name, control, errors, label, options, required, width, defaultValue }) => { + return ( + + + { + const controlledValue = options.find((t) => t.value === value) + + return ( + onChange(val)} value={controlledValue?.value}> + + + + + {options.map(({ label, value }) => { + return ( + + {label} + + ) + })} + + + ) + }} + rules={{ required }} + /> + {errors[name] && } + + ) +} diff --git a/apps/cms/src/blocks/Form/State/index.tsx b/apps/cms/src/blocks/Form/State/index.tsx new file mode 100644 index 0000000000000..29e49cae0c322 --- /dev/null +++ b/apps/cms/src/blocks/Form/State/index.tsx @@ -0,0 +1,64 @@ +import type { StateField } from '@payloadcms/plugin-form-builder/types' +import type { Control, FieldErrorsImpl } from 'react-hook-form' + +import { Label } from '@/components/ui/label' +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from '@/components/ui/select' +import React from 'react' +import { Controller } from 'react-hook-form' + +import { Error } from '../Error' +import { Width } from '../Width' +import { stateOptions } from './options' + +export const State: React.FC< + StateField & { + control: Control + errors: Partial + } +> = ({ name, control, errors, label, required, width }) => { + return ( + + + { + const controlledValue = stateOptions.find((t) => t.value === value) + + return ( + + ) + }} + rules={{ required }} + /> + {errors[name] && } + + ) +} diff --git a/apps/cms/src/blocks/Form/State/options.ts b/apps/cms/src/blocks/Form/State/options.ts new file mode 100644 index 0000000000000..8dff991e7abf2 --- /dev/null +++ b/apps/cms/src/blocks/Form/State/options.ts @@ -0,0 +1,52 @@ +export const stateOptions = [ + { label: 'Alabama', value: 'AL' }, + { label: 'Alaska', value: 'AK' }, + { label: 'Arizona', value: 'AZ' }, + { label: 'Arkansas', value: 'AR' 
}, + { label: 'California', value: 'CA' }, + { label: 'Colorado', value: 'CO' }, + { label: 'Connecticut', value: 'CT' }, + { label: 'Delaware', value: 'DE' }, + { label: 'Florida', value: 'FL' }, + { label: 'Georgia', value: 'GA' }, + { label: 'Hawaii', value: 'HI' }, + { label: 'Idaho', value: 'ID' }, + { label: 'Illinois', value: 'IL' }, + { label: 'Indiana', value: 'IN' }, + { label: 'Iowa', value: 'IA' }, + { label: 'Kansas', value: 'KS' }, + { label: 'Kentucky', value: 'KY' }, + { label: 'Louisiana', value: 'LA' }, + { label: 'Maine', value: 'ME' }, + { label: 'Maryland', value: 'MD' }, + { label: 'Massachusetts', value: 'MA' }, + { label: 'Michigan', value: 'MI' }, + { label: 'Minnesota', value: 'MN' }, + { label: 'Mississippi', value: 'MS' }, + { label: 'Missouri', value: 'MO' }, + { label: 'Montana', value: 'MT' }, + { label: 'Nebraska', value: 'NE' }, + { label: 'Nevada', value: 'NV' }, + { label: 'New Hampshire', value: 'NH' }, + { label: 'New Jersey', value: 'NJ' }, + { label: 'New Mexico', value: 'NM' }, + { label: 'New York', value: 'NY' }, + { label: 'North Carolina', value: 'NC' }, + { label: 'North Dakota', value: 'ND' }, + { label: 'Ohio', value: 'OH' }, + { label: 'Oklahoma', value: 'OK' }, + { label: 'Oregon', value: 'OR' }, + { label: 'Pennsylvania', value: 'PA' }, + { label: 'Rhode Island', value: 'RI' }, + { label: 'South Carolina', value: 'SC' }, + { label: 'South Dakota', value: 'SD' }, + { label: 'Tennessee', value: 'TN' }, + { label: 'Texas', value: 'TX' }, + { label: 'Utah', value: 'UT' }, + { label: 'Vermont', value: 'VT' }, + { label: 'Virginia', value: 'VA' }, + { label: 'Washington', value: 'WA' }, + { label: 'West Virginia', value: 'WV' }, + { label: 'Wisconsin', value: 'WI' }, + { label: 'Wyoming', value: 'WY' }, +] diff --git a/apps/cms/src/blocks/Form/Text/index.tsx b/apps/cms/src/blocks/Form/Text/index.tsx new file mode 100644 index 0000000000000..be1e0ff12bdfa --- /dev/null +++ b/apps/cms/src/blocks/Form/Text/index.tsx @@ -0,0 
+1,32 @@ +import type { TextField } from '@payloadcms/plugin-form-builder/types' +import type { FieldErrorsImpl, FieldValues, UseFormRegister } from 'react-hook-form' + +import { Input } from '@/components/ui/input' +import { Label } from '@/components/ui/label' +import React from 'react' + +import { Error } from '../Error' +import { Width } from '../Width' + +export const Text: React.FC< + TextField & { + errors: Partial + register: UseFormRegister + } +> = ({ name, defaultValue, errors, label, register, required, width }) => { + return ( + + + + {errors[name] && } + + ) +} diff --git a/apps/cms/src/blocks/Form/Textarea/index.tsx b/apps/cms/src/blocks/Form/Textarea/index.tsx new file mode 100644 index 0000000000000..ecb6e21afdb6b --- /dev/null +++ b/apps/cms/src/blocks/Form/Textarea/index.tsx @@ -0,0 +1,40 @@ +import type { TextField } from '@payloadcms/plugin-form-builder/types' +import type { FieldErrorsImpl, FieldValues, UseFormRegister } from 'react-hook-form' + +import { Label } from '@/components/ui/label' +import { Textarea as TextAreaComponent } from '@/components/ui/textarea' +import React from 'react' + +import { Error } from '../Error' +import { Width } from '../Width' + +export const Textarea: React.FC< + TextField & { + errors: Partial + register: UseFormRegister + rows?: number + } +> = ({ name, defaultValue, errors, label, register, required, rows = 3, width }) => { + return ( + + + + + + {errors[name] && } + + ) +} diff --git a/apps/cms/src/blocks/Form/Width/index.tsx b/apps/cms/src/blocks/Form/Width/index.tsx new file mode 100644 index 0000000000000..bcc51a3333365 --- /dev/null +++ b/apps/cms/src/blocks/Form/Width/index.tsx @@ -0,0 +1,13 @@ +import * as React from 'react' + +export const Width: React.FC<{ + children: React.ReactNode + className?: string + width?: number | string +}> = ({ children, className, width }) => { + return ( +
+ {children} +
+ ) +} diff --git a/apps/cms/src/blocks/Form/config.ts b/apps/cms/src/blocks/Form/config.ts new file mode 100644 index 0000000000000..5334289f3a0bf --- /dev/null +++ b/apps/cms/src/blocks/Form/config.ts @@ -0,0 +1,51 @@ +import type { Block } from 'payload' + +import { + FixedToolbarFeature, + HeadingFeature, + InlineToolbarFeature, + lexicalEditor, +} from '@payloadcms/richtext-lexical' + +export const FormBlock: Block = { + slug: 'formBlock', + interfaceName: 'FormBlock', + fields: [ + { + name: 'form', + type: 'relationship', + relationTo: 'forms', + required: true, + }, + { + name: 'enableIntro', + type: 'checkbox', + label: 'Enable Intro Content', + }, + { + name: 'introContent', + type: 'richText', + admin: { + condition: (_, { enableIntro }) => Boolean(enableIntro), + }, + editor: lexicalEditor({ + features: ({ rootFeatures }) => { + return [ + ...rootFeatures, + HeadingFeature({ enabledHeadingSizes: ['h1', 'h2', 'h3', 'h4'] }), + FixedToolbarFeature(), + InlineToolbarFeature(), + ] + }, + }), + label: 'Intro Content', + }, + ], + graphQL: { + singularName: 'FormBlock', + }, + labels: { + plural: 'Form Blocks', + singular: 'Form Block', + }, +} diff --git a/apps/cms/src/blocks/Form/fields.tsx b/apps/cms/src/blocks/Form/fields.tsx new file mode 100644 index 0000000000000..fa660f7e39861 --- /dev/null +++ b/apps/cms/src/blocks/Form/fields.tsx @@ -0,0 +1,21 @@ +import { Checkbox } from './Checkbox' +import { Country } from './Country' +import { Email } from './Email' +import { Message } from './Message' +import { Number } from './Number' +import { Select } from './Select' +import { State } from './State' +import { Text } from './Text' +import { Textarea } from './Textarea' + +export const fields = { + checkbox: Checkbox, + country: Country, + email: Email, + message: Message, + number: Number, + select: Select, + state: State, + text: Text, + textarea: Textarea, +} diff --git a/apps/cms/src/blocks/MediaBlock/Component.tsx 
b/apps/cms/src/blocks/MediaBlock/Component.tsx new file mode 100644 index 0000000000000..013fd87f849ff --- /dev/null +++ b/apps/cms/src/blocks/MediaBlock/Component.tsx @@ -0,0 +1,67 @@ +import type { StaticImageData } from 'next/image' + +import { cn } from '@/utilities/ui' +import React from 'react' +import RichText from '@/components/RichText' + +import type { MediaBlock as MediaBlockProps } from '@/payload-types' + +import { Media } from '../../components/Media' + +type Props = MediaBlockProps & { + breakout?: boolean + captionClassName?: string + className?: string + enableGutter?: boolean + imgClassName?: string + staticImage?: StaticImageData + disableInnerContainer?: boolean +} + +export const MediaBlock: React.FC = (props) => { + const { + captionClassName, + className, + enableGutter = true, + imgClassName, + media, + staticImage, + disableInnerContainer, + } = props + + let caption + if (media && typeof media === 'object') caption = media.caption + + return ( +
+ {(media || staticImage) && ( + + )} + {caption && ( +
+ +
+ )} +
+ ) +} diff --git a/apps/cms/src/blocks/MediaBlock/config.ts b/apps/cms/src/blocks/MediaBlock/config.ts new file mode 100644 index 0000000000000..7beb79b7eeb7b --- /dev/null +++ b/apps/cms/src/blocks/MediaBlock/config.ts @@ -0,0 +1,14 @@ +import type { Block } from 'payload' + +export const MediaBlock: Block = { + slug: 'mediaBlock', + interfaceName: 'MediaBlock', + fields: [ + { + name: 'media', + type: 'upload', + relationTo: 'media', + required: true, + }, + ], +} diff --git a/apps/cms/src/blocks/Quote/Component.tsx b/apps/cms/src/blocks/Quote/Component.tsx new file mode 100644 index 0000000000000..90717fa1e6da0 --- /dev/null +++ b/apps/cms/src/blocks/Quote/Component.tsx @@ -0,0 +1,16 @@ +import type { QuoteBlock as QuoteBlockProps } from 'src/payload-types' + +import React from 'react' + +type Props = { + className?: string +} & QuoteBlockProps + +export const QuoteBlock: React.FC = ({ className, img, caption, text }) => { + return ` + +{${text}} + + +` +} diff --git a/apps/cms/src/blocks/Quote/config.ts b/apps/cms/src/blocks/Quote/config.ts new file mode 100644 index 0000000000000..e3495fe190d48 --- /dev/null +++ b/apps/cms/src/blocks/Quote/config.ts @@ -0,0 +1,27 @@ +import type { Block } from 'payload' + +export const Quote: Block = { + slug: 'quote', + fields: [ + { + name: 'img', + type: 'upload', + relationTo: 'media', + label: 'Avatar', + required: false, + }, + { + name: 'caption', + type: 'text', + label: 'Caption', + required: false, + }, + { + name: 'text', + type: 'textarea', + label: 'Quote Text', + required: true, + }, + ], + interfaceName: 'QuoteBlock', +} diff --git a/apps/cms/src/blocks/RelatedPosts/Component.tsx b/apps/cms/src/blocks/RelatedPosts/Component.tsx new file mode 100644 index 0000000000000..8d8c9798b207a --- /dev/null +++ b/apps/cms/src/blocks/RelatedPosts/Component.tsx @@ -0,0 +1,32 @@ +import clsx from 'clsx' +import React from 'react' +import RichText from '@/components/RichText' + +import type { Post } from '@/payload-types' + 
+import { Card } from '../../components/Card' +import { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical' + +export type RelatedPostsProps = { + className?: string + docs?: Post[] + introContent?: SerializedEditorState +} + +export const RelatedPosts: React.FC = (props) => { + const { className, docs, introContent } = props + + return ( +
+ {introContent && } + +
+ {docs?.map((doc, index) => { + if (typeof doc === 'string') return null + + return + })} +
+
+ ) +} diff --git a/apps/cms/src/blocks/RenderBlocks.tsx b/apps/cms/src/blocks/RenderBlocks.tsx new file mode 100644 index 0000000000000..c84634a64adbc --- /dev/null +++ b/apps/cms/src/blocks/RenderBlocks.tsx @@ -0,0 +1,51 @@ +import React, { Fragment } from 'react' + +import type { Page } from '@/payload-types' + +import { ArchiveBlock } from '@/blocks/ArchiveBlock/Component' +import { CallToActionBlock } from '@/blocks/CallToAction/Component' +import { ContentBlock } from '@/blocks/Content/Component' +import { FormBlock } from '@/blocks/Form/Component' +import { MediaBlock } from '@/blocks/MediaBlock/Component' + +const blockComponents = { + archive: ArchiveBlock, + content: ContentBlock, + cta: CallToActionBlock, + formBlock: FormBlock, + mediaBlock: MediaBlock, +} + +export const RenderBlocks: React.FC<{ + blocks: Page['layout'][0][] +}> = (props) => { + const { blocks } = props + + const hasBlocks = blocks && Array.isArray(blocks) && blocks.length > 0 + + if (hasBlocks) { + return ( + + {blocks.map((block, index) => { + const { blockType } = block + + if (blockType && blockType in blockComponents) { + const Block = blockComponents[blockType] + + if (Block) { + return ( +
+ {/* @ts-expect-error there may be some mismatch between the expected types here */} + +
+ ) + } + } + return null + })} +
+ ) + } + + return null +} diff --git a/apps/cms/src/blocks/YouTube/Component.tsx b/apps/cms/src/blocks/YouTube/Component.tsx new file mode 100644 index 0000000000000..0704f2f775726 --- /dev/null +++ b/apps/cms/src/blocks/YouTube/Component.tsx @@ -0,0 +1,22 @@ +import type { YouTubeBlock as YouTubeBlockProps } from 'src/payload-types' + +import { cn } from '@/utilities/ui' +import React from 'react' + +type Props = { + className?: string +} & YouTubeBlockProps + +export const YouTubeBlock: React.FC = ({ className, youtubeId }) => { + return ( +
+
-As we mentioned, the Vault uses `pgsodium`'s Transparent Column Encryption (TCE) to store secrets in an authenticated encrypted form. There are some details around that you may be curious about, what does authenticated mean, and where are encryption keys store? This section explains those details. +As we mentioned, Vault uses Transparent Column Encryption (TCE) to store secrets in an authenticated encrypted form. There are some details around that you may be curious about. What does authenticated mean? Where is the encryption key stored? This section explains those details. ### Authenticated encryption with associated data @@ -218,34 +185,11 @@ The first important feature of TCE is that it uses an [Authenticated Encryption **Associated Data** means that you can include any other columns from the same row as part of the signature computation. This doesn't encrypt those other columns - rather it ensures that your encrypted value is only associated with columns from that row. If an attacker were to copy an encrypted value from another row to the current one, the signature would be rejected (assuming you used a unique column in the associated data). -Another important feature of `pgsodium` is that the encryption keys are never stored in the database alongside the encrypted data. Instead, only a **Key ID** is stored, which is a reference to the key that is only accessible outside of SQL. Even if an attacker can capture a dump of your entire database, they will see only encrypted data and key IDs, _never the raw key itself_. +Another important feature is that the encryption key is never stored in the database alongside the encrypted data. Even if an attacker can capture a dump of your entire database, they will see only encrypted data, _never the encryption key itself_. This is an important safety precaution - there is little value in storing the encryption key in the database itself as this would be like locking your front door but leaving the key in the lock! 
Storing the key outside the database fixes this issue. -Where are the keys stored? Supabase creates and manages the root keys (from which all key IDs are derived) in our secured backend systems. We keep this root key safe and separate from your data. You remain in control of your keys - a separate API endpoint is available that you can use to access the key if you want to decrypt your data outside of Supabase. - -### Internal details - -To encrypt data, you need a _key id_. You can use the default key id created automatically for every project, or create your own key ids Using the `pgsodium.create_key()` function. Key ids are used to internally derive the encryption key used to encrypt secrets in the vault. Vault users typically do not have access to the key itself, only the key id. - -Both `vault.create_secret()` and `vault.update_secret()` take an optional fourth `new_key_id` argument. This argument can be used to store a different key id for the secret instead of the default value. - -{/* prettier-ignore */} -```sql -select vault.create_secret( - 'another_s3kre3t_key', - 'another_unique_name', - 'This is another description', - (pgsodium.create_key()).id -); -``` - -Result: - -```sh --[ RECORD 1 ]-+------------------------------------- -create_secret | cec9e005-a44d-4b19-86e1-febf3cd40619 -``` +Where is the key stored? Supabase creates and manages the encryption key in our secured backend systems. We keep this key safe and separate from your data. You remain in control of your key - a separate API endpoint is available that you can use to access the key if you want to decrypt your data outside of Supabase. Which roles should have access to the `vault.secrets` table should be carefully considered. There are two ways to grant access. The first is that the `postgres` user can explicitly grant access to the vault table itself.
diff --git a/apps/docs/content/guides/database/webhooks.mdx b/apps/docs/content/guides/database/webhooks.mdx index 93a17de9ed515..ab35a734d6ad8 100644 --- a/apps/docs/content/guides/database/webhooks.mdx +++ b/apps/docs/content/guides/database/webhooks.mdx @@ -27,7 +27,7 @@ This video demonstrates how you can create a new customer in Stripe each time a ## Creating a webhook -1. Create a new [Database Webhook](https://supabase.com/dashboard/project/_/integrations/hooks) in the Dashboard. +1. Create a new [Database Webhook](https://supabase.com/dashboard/project/_/integrations/webhooks/overview) in the Dashboard. 1. Give your Webhook a name. 1. Select the table you want to hook into. 1. Select one or more events (table inserts, updates, or deletes) you want to hook into. @@ -38,7 +38,7 @@ Since webhooks are just database triggers, you can also create one from SQL stat create trigger "my_webhook" after insert on "public"."my_table" for each row execute function "supabase_functions"."http_request"( - 'http://localhost:3000', + 'http://host.docker.internal:3000', 'POST', '{"Content-Type":"application/json"}', '{}', @@ -80,6 +80,20 @@ type DeletePayload = { Logging history of webhook calls is available under the `net` schema of your database. For more info, see the [GitHub Repo](https://github.com/supabase/pg_net). +## Local development + +When using Database Webhooks on your local Supabase instance, you need to be aware that the Postgres database runs inside a Docker container. This means that `localhost` or `127.0.0.1` in your webhook URL will refer to the container itself, not your host machine where your application is running. + +To target services running on your host machine, use `host.docker.internal`. If that doesn't work, you may need to use your machine's local IP address instead. 
+ +For example, if you want to trigger an edge function when a webhook fires, your webhook URL would be: + +``` +http://host.docker.internal:54321/functions/v1/my-function-name +``` + +If you're experiencing connection issues with webhooks locally, verify you're using the correct hostname instead of `localhost`. + ## Resources -- [pg_net](/docs/guides/database/extensions/pgnet): an async networking extension for PostgreSQL +- [pg_net](/docs/guides/database/extensions/pgnet): an async networking extension for Postgres diff --git a/apps/docs/content/guides/deployment/branching.mdx b/apps/docs/content/guides/deployment/branching.mdx index 783b8cf29b468..2f107c17a013c 100644 --- a/apps/docs/content/guides/deployment/branching.mdx +++ b/apps/docs/content/guides/deployment/branching.mdx @@ -4,7 +4,7 @@ description: 'Use Supabase Branches to test and preview changes.' subtitle: 'Use Supabase Branches to test and preview changes' --- -Use branching to safely and easily experiment with changes to your Supabase project. +Use branching to safely experiment with changes to your Supabase project. Supabase branches work like Git branches. They let you create and test changes like new configurations, database schemas, or features in a separate, temporary instance without affecting your production setup. @@ -14,9 +14,24 @@ If you understand Git, you already understand Supabase Branching. ## How branching works -Supabase Branching works with Git. You can test changes in a separate, temporary environment without affecting your production setup. When you're ready to ship your changes, merge your branch to update your production instance with the new changes. +- **Separate Environments**: Each branch is a separate environment with its own Supabase instance and API credentials. +- **Git Integration**: Branching works with Git, currently supporting GitHub repositories. +- **Preview Branches**: You can create multiple Preview Branches for testing. 
+- **Migrations and Seeding**: Branches run migrations from your repository and can seed data using a `seed.sql` file. -You can run multiple Preview Branches for every Supabase project. Branches contain all the Supabase features with their own API credentials. Preview Environments pause automatically after branching.inactivity_period_in_minutes minutes of inactivity. Note that `pg_cron` executions will be impacted by inactivity related pausing. +## Prerequisites + +- **Supabase Project**: You need an existing Supabase project. +- **GitHub Repository**: Your project must be connected to a GitHub repository containing your Supabase directory. + +You can run multiple Preview Branches for every Supabase project. Branches contain all the Supabase features with their own API credentials. + +Preview Environments auto-pause after branching.inactivity_period_in_minutes minutes of inactivity. Upon receiving a new request to your database or REST API, the paused branch will automatically resume to serve the request. The implications of this architecture mean: + +- `pg_cron` jobs will not execute in an auto-paused database. + +- Larger variance in request latency due to database cold starts. + +If you need higher performance guarantees on your Preview Environment, you can switch individual branches to [persistent](/docs/guides/deployment/branching#persistent-branches) so they are not auto-paused. ) for GitLab, BitBucket, and non-Git based Branching. +To manage code changes, your Supabase project must be connected to a Git repository. At this stage, we only support [GitHub](#branching-with-github). If you are interested in other Git providers, join the [discussion](https://github.com/orgs/supabase/discussions/18936) for GitLab, Bitbucket, and non-Git based Branching. ### Branching with GitHub Supabase Branching uses the Supabase GitHub integration to read files from your GitHub repository.
With this integration, Supabase watches all commits, branches, and pull requests of your GitHub repository. -In Git, you have a Production Branch (typically this is `main`, `master`, `prod`, etc). This should also be your Supabase project's Production Branch. - -You can create a corresponding Preview Branch for any Git branch in your repository. Each time a new Preview Branch is created, the migrations in the Git branch of that Preview Branch are run on the Preview Branch. +You can create a corresponding Preview Branch for any Git branch in your repository. Each time a new Preview Branch is created and configured based on the [`config.toml`](/docs/guides/local-development/cli/config) configuration on this branch, the migrations from the corresponding Git branch are run on the Preview Branch. -The Preview Branch is also seeded with sample data based on `./supabase/seed.sql` by default, if that file exists. +The Preview Branch is also [seeded](/docs/guides/local-development/seeding-your-database) with sample data based on `./supabase/seed.sql` by default, if that file exists. Supabase Branching follows the [Trunk Based Development](https://trunkbaseddevelopment.com/) workflow, with one main Production branch and multiple development branches: @@ -60,47 +73,10 @@ Supabase Branching follows the [Trunk Based Development](https://trunkbaseddevel }} /> -### Production branch - -In Git, you have a Production Branch (typically this is `main`, `master`, `prod`, etc). This should also be your Supabase project's Production Branch. - -### Preview branches - -After connecting your Supabase project to one of the supported [Git providers](#git-providers), a corresponding Supabase Preview will be created whenever a new Git branch is created. - -The Git integration can read files from your Git provider, watching every commit and pull request. 
Each time a commit is pushed with new migrations in the `./supabase/migrations` directory, the migrations are run on the matching Supabase Preview environment: - -New migration files trigger migrations on the preview instance. - -### Data changes - -The Preview Branch is seeded with sample data based on `./supabase/seed.sql` by default, if that file exists. - -For security reasons, Preview Branches do not contain production data. Future versions of Branching may allow for automated data cloning after we are confident that we can provide safe data masking. +When you merge your Git branch into the production branch, all new migrations will be applied to your Production environment. If you have declared Storage buckets or Edge Functions in `config.toml`, they will also be deployed automatically. All other configurations, including API, Auth, and seed files, will be ignored by default. -### Merging production changes - -When you merge your Git branch into the production branch, all new migrations will be applied to your Production environment. - -### Git providers - -We currently support [GitHub](#branching-with-github). If you are interested in other Git providers, join the [discussion](<(https://github.com/orgs/supabase/discussions/18936)>) for GitLab, BitBucket, and non-Git based Branching. - -## How to use Supabase branching - -Supabase Branching requires a hosted [Git provider](#git-providers). Follow these steps to connect your Supabase project to a Git provider, and enable branching. - ### Preparing your Git repository You can use the [Supabase CLI](/docs/guides/cli) to manage changes inside a local `./supabase` directory: @@ -120,31 +96,27 @@ You can use the [Supabase CLI](/docs/guides/cli) to manage changes inside a loca If you don't have a `./supabase` directory, you can create one: - - ```markdown supabase init ``` - - Pull your database changes using `supabase db pull`. 
You can find your database URL in your [database settings](https://supabase.com/dashboard/project/_/settings/database), under the URI tab of the Connection String settings panel. Make sure **Use connection pooling** is checked so you can use the IPv4-enabled connection pooler. (Without connection pooling, your database is only accessible over IPv6, which isn't yet supported by all network providers.) - - + Pull your database changes using `supabase db pull`. To get your database connection string, go to your project dashboard, click [Connect](https://supabase.com/dashboard/project/_?showConnect=true) and look for the Session pooler connection string. ```markdown - supabase db pull --db-url + supabase db pull --db-url - # Your Database URL looks something like: + # Your Database connection string will look like this: # postgres://postgres.xxxx:password@xxxx.pooler.supabase.com:6543/postgres ``` - - + + If you're in an [IPv6 environment](https://github.com/orgs/supabase/discussions/27034) or have the IPv4 Add-On, you can use the direct connection string instead of Supavisor in Session mode. + @@ -154,16 +126,12 @@ You can use the [Supabase CLI](/docs/guides/cli) to manage changes inside a loca Commit the `supabase` directory to Git, and push your changes to your remote repository. - - ```bash git add supabase git commit -m "Initial migration" git push ``` - - @@ -197,11 +165,9 @@ If your repository doesn't have all the migration files, your production branch -
- +
-
@@ -253,14 +219,40 @@ If your repository doesn't have all the migration files, your production branch - -
-### Create your first preview branch +### Open a pull request + +When you open a pull request on GitHub, the Supabase integration automatically checks for a matching preview branch. If one doesn't exist, it gets created. + +A comment is added to your PR with the deployment status of your preview branch. Statuses are shown separately for Database, Services, and APIs. + +GitHub view of the deployment status of your preview branch + +Every time a new commit is pushed that changes the migration files in `./supabase/migrations`, the new migrations are run against the preview branch. You can check the status of these runs in the comment's Tasks table. + +### Preventing migration failures + +We highly recommend turning on a 'required check' for the Supabase integration. You can do this from your GitHub repository settings. This prevents PRs from being merged when migration checks fail, and stops invalid migrations from being merged into your production branch. + +Check the "Require status checks to pass before merging" option. + +### Manually create a preview branch Preview branches are automatically created for each pull request, but you can also manually create one. @@ -274,15 +266,12 @@ Preview branches are automatically created for each pull request, but you can al
You can use the GitHub dashboard or command line to create a new branch. In this example, the new branch is called `feat/add-members`.
- - You can use the GitHub dashboard or command line to create a new branch. In this example, the new branch is called `feat/add-members`. - - In the Supabase dashboard, look for the branch dropdown on the right-hand side of the top bar. It should be set to your production branch by default. Open the dropdown and click `Manage branches`. + In the Supabase dashboard, look for the branch dropdown on the right-hand side of the top bar. It should be set to your production branch by default. Open the dropdown and click [`Manage branches`](/dashboard/project/_/branches).
@@ -309,7 +298,7 @@ Preview branches are automatically created for each pull request, but you can al
- +
@@ -317,8 +306,6 @@ Preview branches are automatically created for each pull request, but you can al -### Make changes to your branch - The Git integration watches for changes in the `supabase` directory. This includes: - All SQL migration files, under the subdirectory `migrations` @@ -342,16 +329,12 @@ The Supabase CLI provides two options: [manual migrations](https://supabase.com/ - Start supabase locally: - - + Start Supabase locally: ```bash supabase start ``` - - Then proceed to [localhost:54323](http://localhost:54323) to access your local Supabase dashboard. You can make changes in either the [Table Editor](http://localhost:54323/project/default/editor) or the [SQL Editor]((http://localhost:54323/project/default/sql)). @@ -364,26 +347,18 @@ The Supabase CLI provides two options: [manual migrations](https://supabase.com/ Once you are finished making database changes, run `supabase db diff` to create a new migration file. For example: - - ```bash supabase db diff -f "add_employees_table" ``` - - This will create a SQL file called `./supabase/migrations/[timestamp]add_employees_table.sql`. This file will reflect the changes that you made in your local dashboard. If you want to continue making changes, you can manually edit this migration file, then use the `db reset` command to pick up your edits: - - ```bash supabase db reset ``` - - This will reset the database and run all the migrations again. The local dashboard at [localhost:54323](http://localhost:54323) will reflect the new changes you made. @@ -394,16 +369,12 @@ The Supabase CLI provides two options: [manual migrations](https://supabase.com/ Commit and push your migration file to your remote GitHub repository. For example: - - ```bash git add supabase/migrations git commit -m "Add employees table" git push --set-upstream origin new-employee ``` - - The Supabase integration detects the new migration and runs it on the remote Preview Branch. It can take up to 10 minutes for migrations to be applied. 
If you have a PR for your branch, errors are reflected in the GitHub check run status and in a PR comment. If you need to reset your database to a clean state (that is, discard any changes that aren't reflected in the migration files), run `supabase db reset` locally. Then, delete the preview branch and recreate it by closing, and reopening your pull request. @@ -442,15 +413,15 @@ Dashboard changes aren't automatically reflected in your Git repository. If you' - Make changes to your schema with either with the [Table Editor](https://supabase.com/dashboard/project/_/editor) or the [SQL Editor]((https://supabase.com/dashboard/project/_/sql)). + Make changes to your schema with either the [Table Editor](https://supabase.com/dashboard/project/_/editor) or the [SQL Editor]((https://supabase.com/dashboard/project/_/sql)). - + - If you don't know the password, you must Reset the database password so you know the password. Go to the [database setting page](https://supabase.com/dashboard/project/_/settings/database) and click `Reset database password`. + If you don't know the password, you must reset the database password so you know the password. Go to the [database setting page](https://supabase.com/dashboard/project/_/settings/database) and click `Reset database password`. Save the new password securely for future use. @@ -464,14 +435,10 @@ Dashboard changes aren't automatically reflected in your Git repository. If you' Make sure to use the database URL for your branch: - - ```bash supabase db pull --db-url "postgres://postgres.xxxx:password@xxxx.pooler.supabase.com:6543/postgres" ``` - - @@ -488,100 +455,232 @@ Dashboard changes aren't automatically reflected in your Git repository. If you' -### Open a pull request +### Disable branching -When you open a pull request on GitHub, the Supabase integration automatically checks for a matching preview branch. If one doesn't exist, it gets created. +You can disable branching at any time. 
Navigate to the [Branches](/dashboard/project/_/branches) page, which can be found via the Branches dropdown menu on the top navigation, then click "Manage Branches" in the menu. Click the 'Disable branching' button at the top of the Overview section. -A comment is added to your PR with the deployment status of your preview branch. Statuses are shown separately for Database, Services, and APIs. +### Persistent branches -GitHub view of the deployment status of your preview branch +Persistent branches are the type of branches that will remain active even after the underlying PR is closed. Any PR based on a persistent branch will also have a corresponding preview branch created automatically. -Every time a new commit is pushed that changes the migration files in `./supabase/migrations`, the new migrations are run against the preview branch. You can check the status of these runs in the comment's Tasks table. +You can change any branch to be persistent on the [Branches](/dashboard/project/_/branches) page by clicking the triple dots icon next to the branch you want to modify, and selecting "Switch to persistent". -### Preventing migration failures +All persistent branches can be toggled back to be an ephemeral branch in the exact same way. -We highly recommend turning on a 'required check' for the Supabase integration. You can do this from your GitHub repository settings. This prevents PRs from being merged when migration checks fail, and stops invalid migrations from being merged into your production branch. +## Migration and seeding behavior -Check the "Require status checks to pass before merging" option. +Migrations are run in sequential order. Each migration builds upon the previous one. -### Disable branching +The preview branch has a record of which migrations have been applied, and only applies new migrations for each commit. This can create an issue when rolling back migrations. -You can disable branching at any time. 
Navigate to the [Branches](/dashboard/project/_/branches) page, which can be found via the Branches dropdown menu on the top navigation, then click "Manage Branches" in the menu. Click the 'Disable branching' button at the top of the Overview section. +### Using ORM or custom seed scripts + +If you want to use your own ORM for managing migrations and seed scripts, you will need to run them in GitHub Actions after the preview branch is ready. The branch credentials can be fetched using the following example GHA workflow. + +```yaml +on: + pull_request: + types: + - opened + - reopened + - synchronize + branches: + - main + paths: + - 'supabase/**' + +jobs: + wait: + runs-on: ubuntu-latest + outputs: + status: ${{ steps.check.outputs.conclusion }} + steps: + - uses: fountainhead/action-wait-for-check@v1.2.0 + id: check + with: + checkName: Supabase Preview + ref: ${{ github.event.pull_request.head.sha }} + token: ${{ secrets.GITHUB_TOKEN }} + + migrate: + needs: + - wait + if: ${{ needs.wait.outputs.status == 'success' }} + runs-on: ubuntu-latest + steps: + - uses: supabase/setup-cli@v1 + with: + version: latest + - run: supabase --experimental branches get "$GITHUB_HEAD_REF" -o env >> $GITHUB_ENV + - name: Custom ORM migration + run: psql "$POSTGRES_URL_NON_POOLING" -c 'select 1' +``` -### Persistent branches +## Branch configuration with remotes -Persistent branches are the type of branches that will remain active even after the underlying PR is closed. -You can change any branch to be persistent on [Branches](/dashboard/project/_/branches) page by clicking triple dots icon next to the branch you want to modify, and selecting "Switch to persistent". -All persistent branches can be toggled back to be an ephemeral branch in the exact same way. +When Branching is enabled, your `config.toml` settings automatically sync to all ephemeral branches through a one-to-one mapping between your Git and Supabase branches. 
-## Migration and seeding behavior +### Basic configuration -Migrations are run in sequential order. Each migration builds upon the previous one. +To update configuration for a Supabase branch, modify `config.toml` and push to git. The Supabase integration will detect the changes and apply them to the corresponding branch. -The preview branch has a record of which migrations have been applied, and only applies new migrations for each commit. This can create an issue when rolling back migrations. +### Remote-specific configuration -### Rolling back migrations +For persistent branches that need specific settings, you can use the `[remotes]` block in your `config.toml`. Each remote configuration must reference an existing project ID. -You might want to roll back changes you've made in an earlier migration change. For example, you may have pushed a migration file containing schema changes you no longer want. +Here's an example of configuring a separate seed script for a staging environment: -To fix this, push your latest changes, then delete the preview branch in Supabase and reopen it. +```toml +[remotes.staging] +project_id = "your-project-ref" -The new preview branch is reseeded from your `./supabase/seed.sql` file by default. Any additional data changes you made on the old preview branch are lost. This is equivalent to running `supabase db reset` locally. All migrations are rerun in sequential order. +[remotes.staging.db.seed] +sql_paths = ["./seeds/staging.sql"] +``` -### Seeding behavior +Since the `project_id` field must reference an existing branch, you need to create the persistent branch before adding its configuration. Use the CLI to create a persistent branch first: -Your Preview Branches are seeded with sample data from the file `./supabase/seed.sql` by default. +```bash +supabase --experimental branches create --persistent +# Do you want to create a branch named develop? [Y/n] +``` -The database is only seeded once, when the preview branch is created. 
To rerun seeding, delete the preview branch and recreate it by closing, and reopening your pull request. +### Configuration merging -## Troubleshooting +When merging a PR into a persistent branch, the Supabase integration: -### Migrations are failing +1. Checks for configuration changes +2. Logs the changes +3. Applies them to the target remote -The GitHub integration automatically checks for new migrations on every commit. It runs any new migrations found in `./supabase/migrations`. +If no remote is declared or the project ID is incorrect, the configuration step is skipped. -A migration might fail for various reasons, including invalid SQL statements, and schema conflicts. If a migration fails, the Supabase integration check is shown as failing. +### Available configuration options -To check the error message, see the Supabase integration comment on your PR. +All standard configuration options are available in the `[remotes]` block. This includes: -### Schemas drift between preview branches +- Database settings +- API configurations +- Authentication settings +- Edge Functions configuration +- And more -If you have multiple preview branches, each preview branch might contain different schema changes. This is similar to Git branches, where each branch might contain different code changes. +You can use this to maintain different configurations for different environments while keeping them all in version control. -When a preview branch is merged into the production branch, it creates a schema drift between the production branch and the preview branches you haven't merged yet. +### Managing secrets for branches -You can solve these conflicts the way you would solve normal Git Conflicts: merge or rebase from your production Git branch to your preview Git branch. Since migrations are applied sequentially, ensure that migration files are timestamped correctly after the rebase. Changes that build on top of earlier changes should always have later timestamps. 
+For sensitive configuration like SMTP credentials or API keys, you can use the Supabase CLI to manage secrets for your branches. This is especially useful for custom SMTP setup or other services that require secure credentials. -### Changing production branch +To set secrets for a persistent branch: -It's not possible to change the Git branch used as the Production branch for Supabase Branching. The only way to change it is to disable and re-enable branching. See [Disable Branching](#disable-branching). +```bash +# Set secrets from a .env file +supabase secrets set --env-file ./supabase/.env -## Branching and hosting providers +# Or set individual secrets +supabase secrets set SMTP_HOST=smtp.example.com +supabase secrets set SMTP_USER=your-username +supabase secrets set SMTP_PASSWORD=your-password +``` -Branching works with hosting providers that support preview deployments. +These secrets will be available to your branch's services and can be used in your configuration. For example, in your `config.toml`: -With the Supabase branching integration, you can sync the Git branch used by the hosting provider with the corresponding Supabase preview branch. This means that the preview deployment built by your hosting provider is matched to the correct database schema, edge functions, and other Supabase configurations. +```toml +[auth.smtp] +host = "env(SMTP_HOST)" +user = "env(SMTP_USER)" +password = "env(SMTP_PASSWORD)" +``` -### Vercel + + Secrets set for one branch are not automatically available in other branches. You'll need to set + them separately for each branch that needs them. + + +#### Using dotenvx for git-based workflow + +For managing environment variables across different branches, you can use [dotenvx](https://dotenvx.com/) to securely manage your configurations. This approach is particularly useful for teams working with Git branches and preview deployments. 
+ +##### Environment file structure + +Following the conventions used in the [example repository](https://github.com/supabase/supabase/blob/master/examples/slack-clone/nextjs-slack-clone-dotenvx/README.md), environments are configured using dotenv files in the `supabase` directory: + +| File | Environment | `.gitignore` it? | Encrypted | +| --------------- | ----------- | ---------------- | --------- | +| .env.keys | All | Yes | No | +| .env.local | Local | Yes | No | +| .env.production | Production | No | Yes | +| .env.preview | Branches | No | Yes | +| .env | Any | Maybe | Yes | + +##### Setting up encrypted secrets + +1. Generate key pair and encrypt your secrets: + +```bash +npx @dotenvx/dotenvx set SUPABASE_AUTH_EXTERNAL_GITHUB_SECRET "" -f supabase/.env.preview +``` - +This creates a new encryption key in `supabase/.env.preview` and a new decryption key in `supabase/.env.keys`. - The Vercel Integration for Supabase branching is working only with Supabase managed projects. There is currently no support for Vercel Marketplace managed resources, however the support is planned in the future. +2. Update project secrets: +```bash +npx supabase secrets set --env-file supabase/.env.keys +``` + +3. Choose your configuration approach in `config.toml`: + +Option A: Use encrypted values directly: + +```toml +[auth.external.github] +enabled = true +secret = "encrypted:" +``` + +Option B: Use environment variables: + +```toml +[auth.external.github] +enabled = true +client_id = "env(SUPABASE_AUTH_EXTERNAL_GITHUB_CLIENT_ID)" +secret = "env(SUPABASE_AUTH_EXTERNAL_GITHUB_SECRET)" +``` + + + The `encrypted:` syntax only works for designated "secret" fields in the configuration (like + `secret` in auth providers). Using encrypted values in other fields will not be automatically + decrypted and may cause issues. For non-secret fields, use environment variables with the `env()` + syntax instead. 
+##### Using with preview branches + +When you commit your `.env.preview` file with encrypted values, the branching executor will automatically retrieve and use these values when deploying your branch. This allows you to maintain different configurations for different branches while keeping sensitive information secure. + +### Rolling back migrations + +You might want to roll back changes you've made in an earlier migration change. For example, you may have pushed a migration file containing schema changes you no longer want. + +To fix this, push the latest changes, then delete the preview branch in Supabase and reopen it. + +The new preview branch is reseeded from the `./supabase/seed.sql` file by default. Any additional data changes made on the old preview branch are lost. This is equivalent to running `supabase db reset` locally. All migrations are rerun in sequential order. + +### Seeding behavior + +Your Preview Branches are seeded with sample data using the same [local seeding behavior](/docs/guides/local-development/seeding-your-database). + +The database is only seeded once, when the preview branch is created. To rerun seeding, delete the preview branch and recreate it by closing and reopening your pull request. + +## Branching and hosting providers + +Branching works with hosting providers that support preview deployments. + +With the Supabase branching integration, you can sync the Git branch used by the hosting provider with the corresponding Supabase preview branch. This means that the preview deployment built by your hosting provider is matched to the correct database schema, edge functions, and other Supabase configurations. 
+ +### Vercel + Install the Vercel integration: - From the [Vercel marketplace](https://vercel.com/integrations/supabase) or @@ -593,8 +692,10 @@ For branching to work with Vercel, you also need the [Vercel GitHub integration] -Supabase automatically updates your Vercel project with the correct environment variables for the corresponding preview branches. -The synchronization happens at the time of Pull Request being opened, not at the time of branch creation. +Also make sure you have [connected](/dashboard/org/_/integrations) your Supabase project to your Vercel project. + +Supabase automatically updates your Vercel project with the correct environment variables for the corresponding preview branches. The synchronization happens when the Pull Request is opened, not when the branch is created. + As branching integration is tied to the Preview Deployments feature in Vercel, there are possible race conditions between Supabase setting correct variables, and Vercel running a deployment process. Because of that, Supabase is always automatically re-deploying the most recent deployment of the given pull request. ## Other Git providers @@ -603,31 +704,67 @@ There are multiple alternative Git providers under consideration. If you're inte ## Alternatives to branching -If you don't turn on branching, your Supabase project continues to work as a single branch, on a single instance. You have a single set of API keys for each project, and no preview instances are created. It's the Git equivalent of working directly on the `main` branch. +Under the hood, you can see Supabase branching as a way to programmatically "duplicate" your Supabase project via git flow. This allows spawning a new configured (via [`config.toml`](/docs/guides/local-development/cli/config)) and seeded instance of the database and the adjacent Supabase services (buckets, edge functions, etc.). + +1. 
A new project is deployed on behalf of the user on the Supabase side as the new "branch" if it doesn't already exist. This includes the database, storage, edge-function, and all Supabase-related services. +2. The branch is cloned and the new project is configured based on the [`config.toml`](/docs/guides/local-development/cli/config) committed into this project branch. +3. Migrations are applied and seeding scripts are run (the first time) for this branch. + +You can make a similar setup with a distinct project for each environment. Or just have two environments, the localhost and the production one. + +## Pricing + +Branching is available on the Pro Plan and above. The price is: -If you prefer not to use branching, you can manage your environments and tests in other ways: +- Each Preview branch costs per day +- Each Preview branch is billed until it is removed -1. ##### Host a project per environment, and test against a staging project +## Troubleshooting - Create multiple projects on Supabase with the same schema. Use one project as a staging environment to test any changes. Then migrate tested changes to the production project. +### Rolling back migrations -2. ##### Host a single production project, and test locally +You might want to roll back changes you've made in an earlier migration change. For example, you may have pushed a migration file containing schema changes you no longer want. - Create a single project to host your production instance. Test any changes locally, then run the migrations against your hosted production project. +To fix this, push the latest changes, then delete the preview branch in Supabase and reopen it. -You can also combine both strategies to perform both local and staging tests. +The new preview branch is reseeded from the `./supabase/seed.sql` file by default. Any additional data changes made on the old preview branch are lost. This is equivalent to running `supabase db reset` locally. All migrations are rerun in sequential order. 
-## Pricing +### Deployment failures -Branching is available on the Pro Plan and above. The price is: +A deployment might fail for various reasons, including invalid SQL statements and schema conflicts in migrations, errors within the `config.toml` config, or something else. -- Each Preview branch costs $0.32 per day -- Each Preview branch is billed until it is removed +To check the error message, see the Supabase workflow run for your branch under the [View logs](/dashboard/project/_/branches) section. + +### Network restrictions -Prices listed are subject to change. +If you enable [network restrictions](/docs/guides/platform/network-restrictions) on your project, the branching cluster will be blocked from connecting to your project by default. This often results in database connection failures when migrating your production project after merging a development branch. + +The workaround is to explicitly allow the IPv6 CIDR range of the branching cluster in your project's [database settings](https://supabase.com/dashboard/project/_/settings/database) page: `2600:1f18:2b7d:f600::/56` + +Network restrictions to allow connections from branching cluster + +### Schema drift between preview branches + +If multiple preview branches exist, each preview branch might contain different schema changes. This is similar to Git branches, where each branch might contain different code changes. + +When a preview branch is merged into the production branch, it creates a schema drift between the production branch and the preview branches that haven't been merged yet. + +These conflicts can be resolved in the same way as normal Git Conflicts: merge or rebase from the production Git branch to the preview Git branch. Since migrations are applied sequentially, ensure that migration files are timestamped correctly after the rebase. Changes that build on top of earlier changes should always have later timestamps. 
+ +### Changing production branch + +It's not possible to change the Git branch used as the Production branch for Supabase Branching. The only way to change it is to disable and re-enable branching. See [Disable Branching](#disable-branching). ## Feedback -Supabase branching is a new and exciting new part of the Supabase development ecosystem. We're monitoring its success and open to any feedback. +Supabase branching is a new and exciting part of the Supabase development ecosystem. Feedback is welcome. You can join the [conversation over in GitHub discussions](https://github.com/orgs/supabase/discussions/18937). diff --git a/apps/docs/content/guides/deployment/database-migrations.mdx b/apps/docs/content/guides/deployment/database-migrations.mdx index 11059b17bd895..6495069cbf5ec 100644 --- a/apps/docs/content/guides/deployment/database-migrations.mdx +++ b/apps/docs/content/guides/deployment/database-migrations.mdx @@ -24,7 +24,7 @@ You will need to [install](/docs/guides/local-development#quickstart) the Supaba -```bash Terminal +```bash name=Terminal supabase migration new create_employees_table ``` @@ -44,7 +44,7 @@ supabase migration new create_employees_table -```sql supabase/migrations/_create_employees_table.sql +```sql name=supabase/migrations/_create_employees_table.sql create table if not exists employees ( id bigint primary key generated always as identity, name text not null, @@ -69,7 +69,7 @@ create table if not exists employees ( -```bash Terminal +```bash name=Terminal supabase migration up ``` @@ -87,7 +87,7 @@ supabase migration up -```bash Terminal +```bash name=Terminal supabase migration new add_department_column ``` @@ -105,7 +105,7 @@ supabase migration new add_department_column -```sql supabase/migrations/_add_department_column.sql +```sql name=supabase/migrations/_add_department_column.sql alter table if exists public.employees add department text default 'Hooli'; ``` @@ -124,7 +124,7 @@ add department text default 'Hooli'; -```bash Terminal 
+```bash name=Terminal supabase migration up ``` @@ -135,7 +135,7 @@ supabase migration up Finally, you should see the `department` column added to your `employees` table in the local Dashboard. - + View the [complete code](https://github.com/supabase/supabase/tree/master/examples/database/employees) for this example on GitHub. @@ -156,7 +156,7 @@ Now that you are managing your database with migrations scripts, it would be gre -```sql supabase/seed.sql +```sql name=supabase/seed.sql insert into public.employees (name) values @@ -179,7 +179,7 @@ values -```bash Terminal +```bash name=Terminal supabase db reset ``` @@ -205,7 +205,7 @@ This workflow is great if you know SQL and are comfortable creating tables and c -```bash Terminal +```bash name=Terminal supabase db diff -f create_cities_table ``` @@ -225,7 +225,7 @@ supabase db diff -f create_cities_table -```sql supabase/migrations/_create_cities_table.sql +```sql name=supabase/migrations/_create_cities_table.sql create table "public"."cities" ( "id" bigint primary key generated always as identity, "name" text, @@ -247,7 +247,7 @@ create table "public"."cities" ( -```bash Terminal +```bash name=Terminal supabase db reset ``` @@ -273,7 +273,7 @@ Head over to [Supabase](https://supabase.com/dashboard) and create a new project -```bash Terminal +```bash name=Terminal supabase login ``` @@ -291,7 +291,7 @@ supabase login -```bash Terminal +```bash name=Terminal supabase link ``` @@ -303,13 +303,13 @@ supabase link - + [Push](/docs/reference/cli/supabase-db-push) your migrations to the remote database. -```bash Terminal +```bash name=Terminal supabase db push ``` @@ -318,4 +318,22 @@ supabase db push + + + + + [Push](/docs/reference/cli/supabase-db-push) your migrations and seed the remote database. 
+ + + + +```bash name=Terminal +supabase db push --include-seed +``` + + + + + + Visiting your live project on [Supabase](https://supabase.com/dashboard/project/_), you'll see a new `employees` table, complete with the `department` column you added in the second migration above. diff --git a/apps/docs/content/guides/deployment/going-into-prod.mdx b/apps/docs/content/guides/deployment/going-into-prod.mdx index dba01cb4e90b8..8d36e73e709aa 100644 --- a/apps/docs/content/guides/deployment/going-into-prod.mdx +++ b/apps/docs/content/guides/deployment/going-into-prod.mdx @@ -42,7 +42,7 @@ After developing your project and deciding it's Production Ready, you should run - Tools like [k6](https://k6.io/) can simulate traffic from many different users. - Upgrade your database if you require more resources. If you need anything beyond what is listed, contact enterprise@supabase.io. - If you are expecting a surge in traffic (for a big launch) and are on a Team or Enterprise Plan, [contact support](https://supabase.com/dashboard/support/new) with more details about your launch and we'll help keep an eye on your project. -- If you expect your database size to be > 4 GB, [enable](https://supabase.com/dashboard/project/_/settings/addons?panel=pitr) the Point in Time Recovery (PITR) addon. Daily backups can take up resources from your database when the backup is in progress. PITR is more resource efficient, since only the changes to the database are backed up. +- If you expect your database size to be > 4 GB, [enable](https://supabase.com/dashboard/project/_/settings/addons?panel=pitr) the Point in Time Recovery (PITR) add-on. Daily backups can take up resources from your database when the backup is in progress. PITR is more resource efficient, since only the changes to the database are backed up. - Check and review issues in your database using [Performance Advisor](https://supabase.com/dashboard/project/_/database/performance-advisor). 
## Availability @@ -50,7 +50,7 @@ After developing your project and deciding it's Production Ready, you should run - Use your own SMTP credentials so that you have full control over the deliverability of your transactional auth emails (see Settings > Auth) - you can grab SMTP credentials from any major email provider such as SendGrid, AWS SES, etc. You can refer to our [SMTP guide](/docs/guides/auth/auth-smtp) for more details. - The default rate limit for auth emails when using a custom SMTP provider is 30 new users per hour, if doing a major public announcement you will likely require more than this. -- If your application is on the Free Plan and is **not** expected to be queried at least once every 7 days, then it may be paused by Supabase to save on server resources. +- Applications on the Free Plan that exhibit extremely low activity in a 7 day period may be paused by Supabase to save on server resources. - You can restore paused projects from the Supabase dashboard. - Upgrade to Pro to guarantee that your project will not be paused for inactivity. - Database backups are not available for download on the Free Plan. @@ -75,7 +75,7 @@ After developing your project and deciding it's Production Ready, you should run | ------------------------------------------------ | -------------------------------------------------------------- | ------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | All endpoints that send emails | `/auth/v1/signup` `/auth/v1/recover` `/auth/v1/user`[^1] | Sum of combined requests | As of 3 Sep 2024, this has been updated to auth.rate_limits.email.inbuilt_smtp_per_hour.value emails per hour. You can only change this with your own [custom SMTP setup](/docs/guides/auth/auth-smtp). 
| | All endpoints that send One-Time-Passwords (OTP) | `/auth/v1/otp` | Sum of combined requests | Defaults to 360 OTPs per hour. Is customizable. | -| Send OTPs or magiclinks | `/auth/v1/otp` | Last request | Defaults to 60 seconds window before a new request is allowed. Is customizable. | +| Send OTPs or magic links | `/auth/v1/otp` | Last request | Defaults to 60 seconds window before a new request is allowed. Is customizable. | | Signup confirmation request | `/auth/v1/signup` | Last request | Defaults to 60 seconds window before a new request is allowed. Is customizable. | | Password Reset Request | `/auth/v1/recover` | Last request | Defaults to 60 seconds window before a new request is allowed. Is customizable. | | Verification requests | `/auth/v1/verify` | IP Address | 360 requests per hour (with bursts up to 30 requests) | @@ -90,14 +90,36 @@ After developing your project and deciding it's Production Ready, you should run ### Abuse prevention -- Supabase provides CAPTCHA protection on the signup, sign-in and password reset endpoints. Please refer to [our guide](/docs/guides/auth/auth-captcha) on how to protect against abuse using this method. +- Supabase provides CAPTCHA protection on the signup, sign-in and password reset endpoints. Refer to [our guide](/docs/guides/auth/auth-captcha) on how to protect against abuse using this method. ### Email link validity - When working with enterprise systems, email scanners may scan and make a `GET` request to the reset password link or sign up link in your email. Since links in Supabase Auth are single use, a user who opens an email post-scan to click on a link will receive an error. To get around this problem, consider altering the email template to replace the original magic link with a link to a domain you control. The domain can present the user with a "Sign-in" button which redirect the user to the original magic link URL when clicked. 
-- When using a custom SMTP service, some services might have link tracking enabled which may overwrite or malform the email confirmation links sent by Supabase Auth. To prevent this from happening, we recommend that you disable link tracking when using a custom SMTP service. +- When using a custom SMTP service, some services might have link tracking enabled which may overwrite or mangle the email confirmation links sent by Supabase Auth. To prevent this from happening, we recommend that you disable link tracking when using a custom SMTP service. + +## Subscribe to Supabase status page + +Stay informed about Supabase service status by subscribing to the [Status Page](https://status.supabase.com/). We recommend setting up Slack notifications through an RSS feed to ensure your team receives timely updates about service status changes. + +### Setting up Slack notifications + +1. Install the RSS app in Slack: + + - Visit the [RSS app page](https://slack.com/marketplace/A0F81R7U7-rss) in the Slack marketplace + - Click `Add to Slack` if not already installed + - Otherwise you will go straight to the next step; no need to reinstall the app + +2. Configure the Supabase status feed: + + - Create a channel (e.g., `#supabase-status-alerts`) for status updates + - On the [RSS app page](https://slack.com/marketplace/A0F81R7U7-rss) go to the _Add a Feed_ section and set Feed URL to `https://status.supabase.com/history.rss` + - Select your designated channel and click "Subscribe to this feed" + +Once configured, your team will receive automatic notifications in Slack whenever the Supabase Status Page is updated. + +For detailed setup instructions, see the [Add RSS feeds to Slack](https://slack.com/intl/en-nz/help/articles/218688467-Add-RSS-feeds-to-Slack) guide. 
## Next steps diff --git a/apps/docs/content/guides/deployment/managing-environments.mdx b/apps/docs/content/guides/deployment/managing-environments.mdx index bde4fefce16e7..b2319c1cbca02 100644 --- a/apps/docs/content/guides/deployment/managing-environments.mdx +++ b/apps/docs/content/guides/deployment/managing-environments.mdx @@ -146,7 +146,7 @@ Commit the new migration script to git and you are ready to deploy. -Alternatively, you may pass in the `--use-migra` experimental flag to generate a more concise migration using [migra](https://github.com/djrobstep/migra). +Alternatively, you may pass in the `--use-migra` experimental flag to generate a more concise migration using [`migra`](https://github.com/djrobstep/migra). Without the `-f` file flag, the output is written to stdout by default. @@ -212,7 +212,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: supabase/setup-cli@v1 with: @@ -253,7 +253,7 @@ jobs: SUPABASE_PROJECT_ID: ${{ secrets.STAGING_PROJECT_ID }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: supabase/setup-cli@v1 with: @@ -285,7 +285,7 @@ jobs: SUPABASE_PROJECT_ID: ${{ secrets.PRODUCTION_PROJECT_ID }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: supabase/setup-cli@v1 with: @@ -360,7 +360,7 @@ Once pushed, check that the migration version is up to date for both local and r supabase migration list ``` -### Permission denied on db pull +### Permission denied on `db pull` If you have been using Supabase hosted projects for a long time, you might encounter the following permission error when executing `db pull`. 
@@ -378,7 +378,7 @@ grant all on all functions in schema graphql to postgres, anon, authenticated, s grant all on all sequences in schema graphql to postgres, anon, authenticated, service_role; ``` -### Permission denied on db push +### Permission denied on `db push` If you created a table through Supabase dashboard, and your new migration script contains `ALTER TABLE` statements, you might run into permission error when applying them on staging or production databases. diff --git a/apps/docs/content/guides/deployment/shared-responsibility-model.mdx b/apps/docs/content/guides/deployment/shared-responsibility-model.mdx index 1e57ad99edf26..01ae5b3671499 100644 --- a/apps/docs/content/guides/deployment/shared-responsibility-model.mdx +++ b/apps/docs/content/guides/deployment/shared-responsibility-model.mdx @@ -48,7 +48,7 @@ None of these are right or wrong. It depends on the stage of your project. You _ Supabase isn't a silver-bullet for bad architectural decisions. A poorly designed database will run poorly, no matter where it’s hosted. -You can get away with a poorly-designed database for a while by simply adding compute. After a while, things will start to break. The database schema is the area you want to spend _the most_ time thinking about. That’s the benefit of Supabase - you can spend more time designing a scalable database system and less time thinking about the mundane tasks like implementing CRUD APIs. +You can get away with a poorly-designed database for a while by adding compute. After a while, things will start to break. The database schema is the area you want to spend _the most_ time thinking about. That’s the benefit of Supabase - you can spend more time designing a scalable database system and less time thinking about the mundane tasks like implementing CRUD APIs. If you don’t want to implement logic inside your database, that is 100% fine. You can use _any_ tools which work with Postgres. 
@@ -61,7 +61,7 @@ Supabase offers a lot of opportunities for flexibly integrating with third-party - Calls to external APIs within Postgres functions or triggers - Calls to external APIs within Edge Functions -You are free to use and integrate with any service, but you're also responsible for ensuring that the performance, availability, and security of the services you use match up with your application's requirements. We do not monitor for outages or performance issues within integrations with third-party services. Depending on the implementation, an issue with such an integration could also result in performance degradataion or an outage for your Supabase project. +You are free to use and integrate with any service, but you're also responsible for ensuring that the performance, availability, and security of the services you use match up with your application's requirements. We do not monitor for outages or performance issues within integrations with third-party services. Depending on the implementation, an issue with such an integration could also result in performance degradation or an outage for your Supabase project. If your application architecture relies on such integrations, you should monitor the relevant logs and metrics to ensure optimal performance. @@ -81,6 +81,10 @@ You are responsible of provisioning enough compute to run the workload that your We recommend reviewing and applying the recommendations offered in our [Production Checklist](/docs/guides/platform/going-into-prod). This checklist covers the responsibilities discussed here and a few additional general production readiness best practices. +## SOC 2 and compliance + +Supabase provides a SOC 2 compliant environment for hosting and managing sensitive data. We recommend reviewing the [SOC 2 compliance responsibilities document](/docs/guides/security/soc-2-compliance) alongside the aforementioned production checklist. 
+ ## Managing healthcare data You can use Supabase to store and process Protected Health Information (PHI). You are responsible for the following @@ -97,3 +101,5 @@ You can use Supabase to store and process Protected Health Information (PHI). Yo - Not using [Edge functions](/docs/guides/functions) to process PHI. - Not storing PHI in [public Storage buckets](/docs/guides/storage/buckets/fundamentals#public-buckets). - Not [transferring projects](/docs/guides/platform/project-transfer) to a non-HIPAA organization. + +For more information on the shared responsibilities and rules under HIPAA, review the [HIPAA compliance responsibilities document](/docs/guides/security/hipaa-compliance). diff --git a/apps/docs/content/guides/functions/ai-models.mdx b/apps/docs/content/guides/functions/ai-models.mdx index 60b9df79bcde5..dcf300e1d04c6 100644 --- a/apps/docs/content/guides/functions/ai-models.mdx +++ b/apps/docs/content/guides/functions/ai-models.mdx @@ -165,7 +165,7 @@ curl --get "http://localhost:54321/functions/v1/ollama-test" \ -Follow the [Llamafile Quickstart](https://github.com/Mozilla-Ocho/llamafile?tab=readme-ov-file#quickstart) to download an run a llamafile locally on your machine. +Follow the [Llamafile Quickstart](https://github.com/Mozilla-Ocho/llamafile?tab=readme-ov-file#quickstart) to download and run a Llamafile locally on your machine. Since Llamafile provides an OpenAI API compatible server, you can either use it with `@supabase/functions-js` or with the official OpenAI Deno SDK. @@ -190,7 +190,7 @@ Create a new function with the following code supabase functions new llamafile-test ``` - + Note that the model parameter doesn't have any effect here! The model depends on which Llamafile is currently running! @@ -246,7 +246,7 @@ Create a new function with the following code supabase functions new llamafile-test ``` - + Note that the model parameter doesn't have any effect here! The model depends on which Llamafile is currently running! 
diff --git a/apps/docs/content/guides/functions/auth.mdx b/apps/docs/content/guides/functions/auth.mdx index 639e667ff34bb..0ade89bb3ac17 100644 --- a/apps/docs/content/guides/functions/auth.mdx +++ b/apps/docs/content/guides/functions/auth.mdx @@ -12,16 +12,26 @@ Edge Functions work seamlessly with [Supabase Auth](/docs/guides/auth). When a user makes a request to an Edge Function, you can use the Authorization header to set the Auth context in the Supabase client: ```js -import { createClient } from 'jsr:@supabase/supabase-js@2' +import { createClient } from 'npm:@supabase/supabase-js@2' Deno.serve(async (req: Request) => { - const authHeader = req.headers.get('Authorization')! const supabaseClient = createClient( Deno.env.get('SUPABASE_URL') ?? '', Deno.env.get('SUPABASE_ANON_KEY') ?? '', - { global: { headers: { Authorization: authHeader } } } - ) + // Create client with Auth context of the user that called the function. + // This way your row-level-security (RLS) policies are applied. + { + global: { + headers: { Authorization: req.headers.get('Authorization')! }, + }, + } + ); + + // Get the session or user object + const authHeader = req.headers.get('Authorization')!; + const token = authHeader.replace('Bearer ', ''); + const { data } = await supabaseClient.auth.getUser(token); }) ``` @@ -30,16 +40,21 @@ Importantly, this is done _inside_ the `Deno.serve()` callback argument, so that ## Fetching the user -After initializing a Supabase client with the Auth context, you can use `getUser()` to fetch the user object, and run queries in the context of the user with [Row Level Security (RLS)](/docs/guides/database/postgres/row-level-security) policies enforced. +By getting the JWT from the `Authorization` header, you can provide the token to `getUser()` to fetch the user object to obtain metadata for the logged in user. 
```js -import { createClient } from 'jsr:@supabase/supabase-js@2' +import { createClient } from 'npm:@supabase/supabase-js@2' Deno.serve(async (req: Request) => { const supabaseClient = createClient( Deno.env.get('SUPABASE_URL') ?? '', Deno.env.get('SUPABASE_ANON_KEY') ?? '', + { + global: { + headers: { Authorization: req.headers.get('Authorization') }, + }, + } ) // Get the session or user object @@ -61,18 +76,27 @@ Deno.serve(async (req: Request) => { After initializing a Supabase client with the Auth context, all queries will be executed with the context of the user. For database queries, this means [Row Level Security](/docs/guides/database/postgres/row-level-security) will be enforced. ```js -import { createClient } from 'jsr:@supabase/supabase-js@2' +import { createClient } from 'npm:@supabase/supabase-js@2' Deno.serve(async (req: Request) => { const supabaseClient = createClient( Deno.env.get('SUPABASE_URL') ?? '', Deno.env.get('SUPABASE_ANON_KEY') ?? '', - { global: { headers: { Authorization: req.headers.get('Authorization')! } } } - ) + // Create client with Auth context of the user that called the function. + // This way your row-level-security (RLS) policies are applied. + { + global: { + headers: { Authorization: req.headers.get('Authorization')! 
}, + }, + } + ); - // Database queries will have RLS policies enforced - const { data, error } = await supabaseClient.from('profiles').select('*') + // Get the session or user object + const authHeader = req.headers.get('Authorization')!; + const token = authHeader.replace('Bearer ', ''); + const { data: userData } = await supabaseClient.auth.getUser(token); + const { data, error } = await supabaseClient.from('profiles').select('*'); return new Response(JSON.stringify({ data }), { headers: { 'Content-Type': 'application/json' }, diff --git a/apps/docs/content/guides/functions/cicd-workflow.mdx b/apps/docs/content/guides/functions/cicd-workflow.mdx index ea127827fb90f..d47e4768913a2 100644 --- a/apps/docs/content/guides/functions/cicd-workflow.mdx +++ b/apps/docs/content/guides/functions/cicd-workflow.mdx @@ -32,7 +32,7 @@ jobs: PROJECT_ID: your-project-id steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: supabase/setup-cli@v1 with: diff --git a/apps/docs/content/guides/functions/compression.mdx b/apps/docs/content/guides/functions/compression.mdx index 77964769461b3..c735106fb9971 100644 --- a/apps/docs/content/guides/functions/compression.mdx +++ b/apps/docs/content/guides/functions/compression.mdx @@ -43,7 +43,7 @@ Deno.serve(async (req) => { }) ``` - + Edge functions have a runtime memory limit of 150MB. Overly large compressed payloads may result in an out-of-memory error. 
diff --git a/apps/docs/content/guides/functions/connect-to-postgres.mdx b/apps/docs/content/guides/functions/connect-to-postgres.mdx index 22b0ada259bfd..4dda5fef52f1c 100644 --- a/apps/docs/content/guides/functions/connect-to-postgres.mdx +++ b/apps/docs/content/guides/functions/connect-to-postgres.mdx @@ -14,7 +14,7 @@ You can also use other Postgres clients like [Deno Postgres](https://deno.land/x The `supabase-js` client is a great option for connecting to your Supabase database since it handles authorization with Row Level Security, and it automatically formats your response as JSON. ```ts index.ts -import { createClient } from 'jsr:@supabase/supabase-js@2' +import { createClient } from 'npm:@supabase/supabase-js@2' Deno.serve(async (req) => { try { diff --git a/apps/docs/content/guides/functions/dart-edge.mdx b/apps/docs/content/guides/functions/dart-edge.mdx index 3f022d5c988f5..4071734d6df13 100644 --- a/apps/docs/content/guides/functions/dart-edge.mdx +++ b/apps/docs/content/guides/functions/dart-edge.mdx @@ -6,10 +6,10 @@ description: 'Write your functions using Dart.' -Please be aware that the Dart Edge project is currently not actively maintained due to numerous breaking changes in Dart's development of (WASM) support. +Be aware that the Dart Edge project is currently not actively maintained due to numerous breaking changes in Dart's development of (WASM) support. [Dart Edge](https://docs.dartedge.dev/) is an experimental project that enables you to write Supabase Edge Functions using Dart. It's built and maintained by [Invertase](https://invertase.io/). -For detailed information on how to set up and use Dart Edge with Supabase, please refer to the [official Dart Edge documentation for Supabase](https://invertase.docs.page/dart_edge/platform/supabase). +For detailed information on how to set up and use Dart Edge with Supabase, refer to the [official Dart Edge documentation for Supabase](https://invertase.docs.page/dart_edge/platform/supabase). 
diff --git a/apps/docs/content/guides/functions/deno2.mdx b/apps/docs/content/guides/functions/deno2.mdx new file mode 100644 index 0000000000000..4d762ca1c12b1 --- /dev/null +++ b/apps/docs/content/guides/functions/deno2.mdx @@ -0,0 +1,103 @@ +--- +id: 'deno2' +title: 'Using Deno 2' +description: 'Everything you need to know about the Deno 2 runtime' +subtitle: 'Everything you need to know about the Deno 2 runtime' +--- + + + +This feature is in Public Alpha. [Submit a support ticket](https://supabase.help) if you have any issues. + + + +### What is Deno 2? + +Deno 2 is a major upgrade to the Deno runtime that powers Supabase Edge Functions. It focuses on scalability and seamless ecosystem compatibility while maintaining Deno's core principles of security, simplicity, and developer experience. + +**Key improvements include** + +- **Node.js and npm compatibility**: Dramatically improved support for npm packages and Node.js code +- **Better dependency management**: New tools like `deno install`, `deno add`, and `deno remove` for simplified package management +- **Improved performance**: Enhanced runtime execution and startup times +- **Workspace and monorepo support**: Better handling of complex project structures +- **Framework compatibility**: Support for Next.js, SvelteKit, Remix, and other popular frameworks +- **Full package.json support**: Works seamlessly with existing Node.js projects and npm workspaces + +While these improvements are exciting, they come with some changes that may affect your existing functions. We'll support Deno 1.x functions for a limited time, but we recommend migrating to Deno 2 within the next few months to ensure continued functionality. + +### How to use Deno 2 + +Deno 2 will soon become the default choice for creating new functions. For now, Deno 2 is available in preview mode for local development. 
+ +Here's how you can build and deploy a function with Deno 2: + +- [Install Deno 2.1](https://docs.deno.com/runtime/getting_started/installation/) or a newer version on your machine + +- Go to your Supabase project. `cd my-supabase-project` + +- Open `supabase/config.toml` and set `deno_version = 2` + +```toml +[edge_runtime] +deno_version = 2 +``` + +- All your existing functions should work as before. + +To scaffold a new function as a Deno 2 project: + +```bash +deno init --serve hello-world +``` + +- Open `supabase/config.toml` and add the following: + +``` +[functions.hello-world] +entrypoint = "./functions/hello-world/main.ts" +``` + +- Open `supabase/functions/hello-world/main.ts` and modify line 10 to: + +```typescript +if (url.pathname === "/hello-world") { +``` + +- Use `npx supabase@beta functions serve --no-verify-jwt` to start the dev server. + +- Visit http://localhost:54321/functions/v1/hello-world. + +- To run built-in tests, `cd supabase/functions/hello-world; deno test` + +### How to migrate existing functions from Deno 1 to Deno 2 + +For a comprehensive migration guide, see the [official Deno 1.x to 2.x migration guide](https://docs.deno.com/runtime/reference/migration_guide/#content). + +Most Deno 1 Edge Functions will be compatible out of the box with Deno 2, and no action needs to be taken. When we upgrade our hosted runtime, your functions will automatically be deployed on a Deno 2 cluster. + +However, for a small number of functions, this may break existing functionality. + +The most common issue to watch for is that some Deno 1 API calls are incompatible with Deno 2 runtime. + +For instance, if you are using: + +- `Deno.Closer` + +Use [`Closer`](https://jsr.io/@std/io/doc/types/~/Closer) from the Standard Library instead. + +```tsx ++ import type { Closer } from "jsr:@std/io/types"; +- function foo(closer: Deno.Closer) { ++ function foo(closer: Closer) { + // ... 
+} +``` + +The best way to validate your APIs are up to date is to use the Deno lint, which has [rules to disallow deprecated APIs](https://docs.deno.com/lint/rules/no-deprecated-deno-api/). + + ```bash + deno lint + ``` + +For a full list of API changes, see the [official Deno 2 list](https://docs.deno.com/runtime/reference/migration_guide/#api-changes). diff --git a/apps/docs/content/guides/functions/dependencies.mdx b/apps/docs/content/guides/functions/dependencies.mdx new file mode 100644 index 0000000000000..d13bd7fa1878c --- /dev/null +++ b/apps/docs/content/guides/functions/dependencies.mdx @@ -0,0 +1,216 @@ +--- +id: 'functions-import-maps' +title: 'Managing dependencies' +description: 'Managing packages and dependencies.' +subtitle: 'Managing packages and dependencies.' +tocVideo: 'ILr3cneZuFk' +--- + +## Importing dependencies + +Supabase Edge Functions support several ways to import dependencies: + +- JavaScript modules from npm (https://docs.deno.com/examples/npm/) +- Built-in [Node APIs](https://docs.deno.com/runtime/manual/node/compatibility) +- Modules published to [JSR](https://jsr.io/) or [deno.land/x](https://deno.land/x) + +### NPM modules + +You can import npm modules using the `npm:` specifier: + +```ts +import { createClient } from 'npm:@supabase/supabase-js@2' +``` + +### Node.js built-ins + +For Node.js built-in APIs, use the `node:` specifier: + +```ts +import process from 'node:process' +``` + +Learn more about npm specifiers and Node built-in APIs in [Deno's documentation](https://docs.deno.com/runtime/manual/node/npm_specifiers). + +### JSR + +You can import JS modules published to [JSR](https://jsr.io/) (e.g.: Deno's standard library), using the `jsr:` specifier: + +```ts +import path from 'jsr:@std/path@1.0.8' +``` + +## Managing dependencies + +Developing with Edge Functions is similar to developing with Node.js, but with a few key differences. 
+ +In the Deno ecosystem, each function should be treated as an independent project with its own set of dependencies and configurations. This "isolation by design" approach: + +- Ensures each function has explicit control over its dependencies +- Prevents unintended side effects between functions +- Makes deployments more predictable and maintainable +- Allows for different versions of the same dependency across functions + +For these reasons, we recommend maintaining separate configuration files (`deno.json`, `.npmrc`, or `import_map.json`) within each function's directory, even if it means duplicating some configurations. + +There are two ways to manage your dependencies in Supabase Edge Functions: + +### Using deno.json (recommended) + + + +This feature requires Supabase CLI version 1.215.0 or higher. + + + +Each function should have its own `deno.json` file to manage dependencies and configure Deno-specific settings. This ensures proper isolation between functions and is the recommended approach for deployment. For a complete list of supported options, see the [official Deno configuration documentation](https://docs.deno.com/runtime/manual/getting_started/configuration_file). + +```json supabase/functions/my-function/deno.json +{ + "imports": { + "lodash": "https://cdn.skypack.dev/lodash" + } +} +``` + +The recommended file structure for deployment: + +```bash +└── supabase + ├── functions + │ ├── function-one + │ │ ├── index.ts + │ │ ├─- deno.json # Function-specific Deno configuration + │ │ └── .npmrc # Function-specific npm configuration (if needed) + │ └── function-two + │ ├── index.ts + │ ├─- deno.json # Function-specific Deno configuration + │ └── .npmrc # Function-specific npm configuration (if needed) + └── config.toml +``` + + + While it's possible to use a global `deno.json` in the `/supabase/functions` directory for local + development, this approach is not recommended for deployment. 
Each function should maintain its + own configuration to ensure proper isolation and dependency management. + + +### Using import maps (legacy) + +Import Maps are a legacy way to manage dependencies, similar to a `package.json` file. While still supported, we recommend using `deno.json`. If both exist, `deno.json` takes precedence. + +Each function should have its own `import_map.json` file for proper isolation: + +```json supabase/functions/my-function/import_map.json +{ + "imports": { + "lodash": "https://cdn.skypack.dev/lodash" + } +} +``` + +The recommended file structure: + +```bash +└── supabase + ├── functions + │ ├── function-one + │ │ ├── index.ts + │ │ └── import_map.json # Function-specific import map + │ └── function-two + │ ├── index.ts + │ └── import_map.json # Function-specific import map + └── config.toml +``` + + + While it's possible to use a global `import_map.json` in the `/supabase/functions` directory for + local development, this approach is not recommended for deployment. Each function should maintain + its own import map to ensure proper isolation. + + +If using import maps with VSCode, update your `.vscode/settings.json` to point to your function-specific import map: + +```json settings.json +{ + "deno.enable": true, + "deno.unstable": [ + "bare-node-builtins", + "byonm" + // ... other flags ... + ], + "deno.importMap": "./supabase/functions/my-function/import_map.json" +} +``` + +You can override the default import map location using the `--import-map ` flag with `serve` and `deploy` commands, or by setting the `import_map` property in your `config.toml` file: + +```toml supabase/config.toml +[functions.my-function] +import_map = "./supabase/functions/my-function/import_map.json" +``` + +### Importing from private registries + +This feature requires Supabase CLI version 1.207.9 or higher. + +To use private npm packages, create a `.npmrc` file within your function directory. 
This ensures proper isolation and dependency management for each function. + +```bash +└── supabase + └── functions + └── my-function + ├── index.ts + ├── deno.json + └── .npmrc # Function-specific npm configuration +``` + +Add your registry details in the `.npmrc` file. Follow [this guide](https://docs.npmjs.com/cli/v10/configuring-npm/npmrc) to learn more about the syntax of npmrc files. + +```plaintext +@myorg:registry=https://npm.registryhost.com +//npm.registryhost.com/:_authToken=VALID_AUTH_TOKEN +``` + + + While it's possible to use a global `.npmrc` in the `/supabase/functions` directory for local + development, we recommend using function-specific `.npmrc` files for deployment to maintain proper + isolation. + + +After configuring your `.npmrc`, you can import the private package in your function code: + +```ts +import MyPackage from 'npm:@myorg/private-package@v1.0.1' + +// use MyPackage +``` + +### Using a custom NPM registry + +This feature requires Supabase CLI version 2.2.8 or higher. + +Some organizations require a custom NPM registry for security and compliance purposes. In such instances, you can specify the custom NPM registry to use via `NPM_CONFIG_REGISTRY` environment variable. + +You can define it in the project's `.env` file or directly specify it when running the deploy command: + +```bash +NPM_CONFIG_REGISTRY=https://custom-registry/ supabase functions deploy my-function +``` + +## Importing types + +If your [environment is set up properly](/docs/guides/functions/local-development) and the module you're importing is exporting types, the import will have types and autocompletion support. + +Some npm packages may not ship out of the box types and you may need to import them from a separate package. 
You can specify their types with a `@deno-types` directive: + +```ts +// @deno-types="npm:@types/express@^4.17" +import express from 'npm:express@^4.17' +``` + +To include types for built-in Node APIs, add the following line to the top of your imports: + +```ts +/// <reference types="npm:@types/node" /> +``` diff --git a/apps/docs/content/guides/functions/deploy.mdx b/apps/docs/content/guides/functions/deploy.mdx index 9e4c6eebef1d8..51c24baf7ba1b 100644 --- a/apps/docs/content/guides/functions/deploy.mdx +++ b/apps/docs/content/guides/functions/deploy.mdx @@ -76,16 +76,16 @@ Be careful when using this flag, as it will allow anyone to invoke your Edge Fun You can now invoke your Edge Function using the project's `ANON_KEY`, which can be found in the [API settings](https://supabase.com/dashboard/project/_/settings/api) of the Supabase Dashboard. - +<$CodeTabs> -```bash cURL +```bash name=cURL curl --request POST 'https://.supabase.co/functions/v1/hello-world' \ --header 'Authorization: Bearer ANON_KEY' \ --header 'Content-Type: application/json' \ --data '{ "name":"Functions" }' ``` -```js JavaScript +```js name=JavaScript import { createClient } from '@supabase/supabase-js' // Create a single supabase client for interacting with your database @@ -96,6 +96,6 @@ const { data, error } = await supabase.functions.invoke('hello-world', { }) ``` - + You should receive the response `{ "message":"Hello Functions!" }`. diff --git a/apps/docs/content/guides/functions/development-tips.mdx b/apps/docs/content/guides/functions/development-tips.mdx new file mode 100644 index 0000000000000..eb4e34841bf27 --- /dev/null +++ b/apps/docs/content/guides/functions/development-tips.mdx @@ -0,0 +1,114 @@ +--- +id: 'functions-development-tips' +title: 'Development tips' +description: 'Tips for getting started with Edge Functions.' +subtitle: 'Tips for getting started with Edge Functions.' +--- + +Here are a few recommendations when you first start developing Edge Functions. 
+ +### Skipping authorization checks + +By default, Edge Functions require a valid JWT in the authorization header. If you want to use Edge Functions without Authorization checks (commonly used for Stripe webhooks), you can pass the `--no-verify-jwt` flag when serving your Edge Functions locally. + +```bash +supabase functions serve hello-world --no-verify-jwt +``` + +Be careful when using this flag, as it will allow anyone to invoke your Edge Function without a valid JWT. The Supabase client libraries automatically handle authorization. + +### Using HTTP methods + +Edge Functions support `GET`, `POST`, `PUT`, `PATCH`, `DELETE`, and `OPTIONS`. A Function can be designed to perform different actions based on a request's HTTP method. See the [example on building a RESTful service](https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/restful-tasks) to learn how to handle different HTTP methods in your Function. + + + +HTML content is not supported. `GET` requests that return `text/html` will be rewritten to `text/plain`. + + + +### Naming Edge Functions + +We recommend using hyphens to name functions because hyphens are the most URL-friendly of all the naming conventions (snake_case, camelCase, PascalCase). + +### Organizing your Edge Functions + +We recommend developing "fat functions". This means that you should develop few large functions, rather than many small functions. One common pattern when developing Functions is that you need to share code between two or more Functions. To do this, you can store any shared code in a folder prefixed with an underscore (`_`). We also recommend a separate folder for [Unit Tests](/docs/guides/functions/unit-test) including the name of the function followed by a `-test` suffix. +We recommend this folder structure: + +```bash +└── supabase + ├── functions + │ ├── import_map.json # A top-level import map to use across functions. 
+ │ ├── _shared + │ │ ├── supabaseAdmin.ts # Supabase client with SERVICE_ROLE key. + │ │ └── supabaseClient.ts # Supabase client with ANON key. + │ │ └── cors.ts # Reusable CORS headers. + │ ├── function-one # Use hyphens to name functions. + │ │ └── index.ts + │ └── function-two + │ │ └── index.ts + │ └── tests + │ └── function-one-test.ts + │ └── function-two-test.ts + ├── migrations + └── config.toml +``` + +### Using config.toml + +Individual function configuration like [JWT verification](/docs/guides/cli/config#functions.function_name.verify_jwt) and [import map location](/docs/guides/cli/config#functions.function_name.import_map) can be set via the `config.toml` file. + +```toml supabase/config.toml +[functions.hello-world] +verify_jwt = false +import_map = './import_map.json' +``` + +### Not using TypeScript + +When you create a new Edge Function, it will use TypeScript by default. However, it is possible to write and deploy Edge Functions using pure JavaScript. + +Save your Function as a JavaScript file (e.g. `index.js`) and then update the `supabase/config.toml` as follows: + + + +`entrypoint` is available only in Supabase CLI version 1.215.0 or higher. + + + +```toml supabase/config.toml +[functions.hello-world] +# other entries +entrypoint = './functions/hello-world/index.js' # path must be relative to config.toml +``` + +You can use any `.ts`, `.js`, `.tsx`, `.jsx` or `.mjs` file as the `entrypoint` for a Function. 
+ +### Error handling + +The `supabase-js` library provides several error types that you can use to handle errors that might occur when invoking Edge Functions: + +```js +import { FunctionsHttpError, FunctionsRelayError, FunctionsFetchError } from '@supabase/supabase-js' + +const { data, error } = await supabase.functions.invoke('hello', { + headers: { 'my-custom-header': 'my-custom-header-value' }, + body: { foo: 'bar' }, +}) + +if (error instanceof FunctionsHttpError) { + const errorMessage = await error.context.json() + console.log('Function returned an error', errorMessage) +} else if (error instanceof FunctionsRelayError) { + console.log('Relay error:', error.message) +} else if (error instanceof FunctionsFetchError) { + console.log('Fetch error:', error.message) +} +``` + +### Database Functions vs Edge Functions + +For data-intensive operations we recommend using [Database Functions](/docs/guides/database/functions), which are executed within your database and can be called remotely using the [REST and GraphQL API](/docs/guides/api). + +For use-cases which require low-latency we recommend [Edge Functions](/docs/guides/functions), which are globally-distributed and can be written in TypeScript. diff --git a/apps/docs/content/guides/functions/ephemeral-storage.mdx b/apps/docs/content/guides/functions/ephemeral-storage.mdx index 55bc58780ece9..4d5672309778d 100644 --- a/apps/docs/content/guides/functions/ephemeral-storage.mdx +++ b/apps/docs/content/guides/functions/ephemeral-storage.mdx @@ -14,7 +14,7 @@ Ephemeral storage will reset on each function invocation. 
This means the files y Here are some use cases where ephemeral storage can be useful: - Unzip an archive of CSVs and then add them as records to the DB -- Custom image manipulation workflows (using [MagickWasm](https://supabase.com/docs/guides/functions/examples/image-manipulation)) +- Custom image manipulation workflows (using [`magick-wasm`](https://supabase.com/docs/guides/functions/examples/image-manipulation)) You can use [Background Tasks](https://supabase.com/docs/guides/functions/background-tasks) to handle slow file processing outside of a request. @@ -45,9 +45,9 @@ Deno.serve(async (req) => { ### Unavailable APIs -Currently, the synchronous APIs (eg: `Deno.writeFileSync` or `Deno.mkdirSync`) for creating or writing files are not supported. +Currently, the synchronous APIs (e.g. `Deno.writeFileSync` or `Deno.mkdirSync`) for creating or writing files are not supported. -You can use sync variations of read APIs (eg: `Deno.readFileSync`). +You can use sync variations of read APIs (e.g. `Deno.readFileSync`). ### Limits diff --git a/apps/docs/content/guides/functions/examples/amazon-bedrock-image-generator.mdx b/apps/docs/content/guides/functions/examples/amazon-bedrock-image-generator.mdx index 7b3c2b107301f..b9b3003f01e0e 100644 --- a/apps/docs/content/guides/functions/examples/amazon-bedrock-image-generator.mdx +++ b/apps/docs/content/guides/functions/examples/amazon-bedrock-image-generator.mdx @@ -147,4 +147,4 @@ supabase functions deploy amazon-bedrock supabase secrets set --env-file supabase/.env ``` -That's it, you've now deployed a serverless function that uses AI to generate and upload images to your Supabase storage bucket. +You've now deployed a serverless function that uses AI to generate and upload images to your Supabase storage bucket. 
diff --git a/apps/docs/content/guides/functions/examples/auth-send-email-hook-react-email-resend.mdx b/apps/docs/content/guides/functions/examples/auth-send-email-hook-react-email-resend.mdx index f7c7ef537bf30..aa095e5c6184a 100644 --- a/apps/docs/content/guides/functions/examples/auth-send-email-hook-react-email-resend.mdx +++ b/apps/docs/content/guides/functions/examples/auth-send-email-hook-react-email-resend.mdx @@ -6,7 +6,7 @@ tocVideo: 'tlA7BomSCgU' Use the [send email hook](/docs/guides/auth/auth-hooks/send-email-hook?queryGroups=language&language=http) to send custom auth emails with [React Email](https://react.email/) and [Resend](https://resend.com/) in Supabase Edge Functions. - + Prefer to jump straight to the code? [Check out the example on GitHub](https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/auth-hook-react-email-resend). @@ -251,7 +251,7 @@ const code = { } ``` - + You can find a selection of React Email templates in the [React Email Examples](https://react.email/examples). @@ -282,7 +282,7 @@ RESEND_API_KEY=your_resend_api_key SEND_EMAIL_HOOK_SECRET= ``` - + You can generate the secret in the [Auth Hooks](/dashboard/project/_/auth/hooks) section of the Supabase dashboard. Make sure to remove the `v1,whsec_` prefix! @@ -294,7 +294,7 @@ Set the secrets from the `.env` file: supabase secrets set --env-file supabase/functions/.env ``` -That's it, now your Supabase Edge Function will be triggered anytime an Auth Email needs to be send to the user! +Now your Supabase Edge Function will be triggered anytime an Auth Email needs to be sent to the user! 
## More resources diff --git a/apps/docs/content/guides/functions/examples/discord-bot.mdx b/apps/docs/content/guides/functions/examples/discord-bot.mdx index 48b0481bbc3af..aa00105e557a8 100644 --- a/apps/docs/content/guides/functions/examples/discord-bot.mdx +++ b/apps/docs/content/guides/functions/examples/discord-bot.mdx @@ -21,7 +21,7 @@ video: 'https://www.youtube.com/v/J24Bvo_m7DM' 3. Name your application and click on **Create**. 4. Go to **Bot** section, click on **Add Bot**, and finally on **Yes, do it!** to confirm. -That's it. A new application is created which will hold our Slash Command. Don't close the tab as we need information from this application page throughout our development. +A new application is created which will hold our Slash Command. Don't close the tab as we need information from this application page throughout our development. Before we can write some code, we need to curl a discord endpoint to register a Slash Command in our app. diff --git a/apps/docs/content/guides/functions/examples/elevenlabs-generate-speech-stream.mdx b/apps/docs/content/guides/functions/examples/elevenlabs-generate-speech-stream.mdx new file mode 100644 index 0000000000000..b54da3e98ce9c --- /dev/null +++ b/apps/docs/content/guides/functions/examples/elevenlabs-generate-speech-stream.mdx @@ -0,0 +1,245 @@ +--- +title: Streaming Speech with ElevenLabs +subtitle: Generate and stream speech through Supabase Edge Functions. Store speech in Supabase Storage and cache responses via built-in CDN. +tocVideo: '4Roog4PAmZ8' +--- + +## Introduction + +In this tutorial you will learn how to build an edge API to generate, stream, store, and cache speech using Supabase Edge Functions, Supabase Storage, and [ElevenLabs text to speech API](https://elevenlabs.io/text-to-speech). + + + Find the [example project on + GitHub](https://github.com/elevenlabs/elevenlabs-examples/tree/main/examples/text-to-speech/supabase/stream-and-cache-storage). 
+ + +## Requirements + +- An ElevenLabs account with an [API key](/app/settings/api-keys). +- A [Supabase](https://supabase.com) account (you can sign up for a free account via [database.new](https://database.new)). +- The [Supabase CLI](https://supabase.com/docs/guides/local-development) installed on your machine. +- The [Deno runtime](https://docs.deno.com/runtime/getting_started/installation/) installed on your machine and optionally [set up in your favourite IDE](https://docs.deno.com/runtime/getting_started/setup_your_environment). + +## Setup + +### Create a Supabase project locally + +After installing the [Supabase CLI](https://supabase.com/docs/guides/local-development), run the following command to create a new Supabase project locally: + +```bash +supabase init +``` + +### Configure the storage bucket + +You can configure the Supabase CLI to automatically generate a storage bucket by adding this configuration in the `config.toml` file: + +```toml ./supabase/config.toml +[storage.buckets.audio] +public = false +file_size_limit = "50MiB" +allowed_mime_types = ["audio/mp3"] +objects_path = "./audio" +``` + + + Upon running `supabase start`, this will create a new storage bucket in your local Supabase + project. Should you want to push this to your hosted Supabase project, you can run `supabase seed + buckets --linked`. + + +### Configure background tasks for Supabase Edge Functions + +To use background tasks in Supabase Edge Functions when developing locally, you need to add the following configuration in the `config.toml` file: + +```toml ./supabase/config.toml +[edge_runtime] +policy = "per_worker" +``` + + + When running with the `per_worker` policy, the function won't auto-reload on edits. You will need to + manually restart it by running `supabase functions serve`. 
+ + +### Create a Supabase Edge Function for speech generation + +Create a new Edge Function by running the following command: + +```bash +supabase functions new text-to-speech +``` + +If you're using VS Code or Cursor, select `y` when the CLI prompts "Generate VS Code settings for Deno? [y/N]"! + +### Set up the environment variables + +Within the `supabase/functions` directory, create a new `.env` file and add the following variables: + +```env supabase/functions/.env +# Find / create an API key at https://elevenlabs.io/app/settings/api-keys +ELEVENLABS_API_KEY=your_api_key +``` + +### Dependencies + +The project uses a couple of dependencies: + +- The [@supabase/supabase-js](https://supabase.com/docs/reference/javascript) library to interact with the Supabase database. +- The ElevenLabs [JavaScript SDK](/docs/quickstart) to interact with the text-to-speech API. +- The open-source [object-hash](https://www.npmjs.com/package/object-hash) to generate a hash from the request parameters. + +Since Supabase Edge Function uses the [Deno runtime](https://deno.land/), you don't need to install the dependencies, rather you can [import](https://docs.deno.com/examples/npm/) them via the `npm:` prefix. + +## Code the Supabase Edge Function + +In your newly created `supabase/functions/text-to-speech/index.ts` file, add the following code: + +```ts supabase/functions/text-to-speech/index.ts +// Setup type definitions for built-in Supabase Runtime APIs +import 'jsr:@supabase/functions-js/edge-runtime.d.ts' +import { createClient } from 'npm:@supabase/supabase-js@2' +import { ElevenLabsClient } from 'npm:elevenlabs@1.52.0' +import * as hash from 'npm:object-hash' + +const supabase = createClient( + Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')! 
+) + +const client = new ElevenLabsClient({ + apiKey: Deno.env.get('ELEVENLABS_API_KEY'), +}) + +// Upload audio to Supabase Storage in a background task +async function uploadAudioToStorage(stream: ReadableStream, requestHash: string) { + const { data, error } = await supabase.storage + .from('audio') + .upload(`${requestHash}.mp3`, stream, { + contentType: 'audio/mp3', + }) + + console.log('Storage upload result', { data, error }) +} + +Deno.serve(async (req) => { + // To secure your function for production, you can for example validate the request origin, + // or append a user access token and validate it with Supabase Auth. + console.log('Request origin', req.headers.get('host')) + const url = new URL(req.url) + const params = new URLSearchParams(url.search) + const text = params.get('text') + const voiceId = params.get('voiceId') ?? 'JBFqnCBsd6RMkjVDRZzb' + + const requestHash = hash.MD5({ text, voiceId }) + console.log('Request hash', requestHash) + + // Check storage for existing audio file + const { data } = await supabase.storage.from('audio').createSignedUrl(`${requestHash}.mp3`, 60) + + if (data) { + console.log('Audio file found in storage', data) + const storageRes = await fetch(data.signedUrl) + if (storageRes.ok) return storageRes + } + + if (!text) { + return new Response(JSON.stringify({ error: 'Text parameter is required' }), { + status: 400, + headers: { 'Content-Type': 'application/json' }, + }) + } + + try { + console.log('ElevenLabs API call') + const response = await client.textToSpeech.convertAsStream(voiceId, { + output_format: 'mp3_44100_128', + model_id: 'eleven_multilingual_v2', + text, + }) + + const stream = new ReadableStream({ + async start(controller) { + for await (const chunk of response) { + controller.enqueue(chunk) + } + controller.close() + }, + }) + + // Branch stream to Supabase Storage + const [browserStream, storageStream] = stream.tee() + + // Upload to Supabase Storage in the background + 
EdgeRuntime.waitUntil(uploadAudioToStorage(storageStream, requestHash)) + + // Return the streaming response immediately + return new Response(browserStream, { + headers: { + 'Content-Type': 'audio/mpeg', + }, + }) + } catch (error) { + console.log('error', { error }) + return new Response(JSON.stringify({ error: error.message }), { + status: 500, + headers: { 'Content-Type': 'application/json' }, + }) + } +}) +``` + +## Run locally + +To run the function locally, run the following commands: + +```bash +supabase start +``` + +Once the local Supabase stack is up and running, run the following command to start the function and observe the logs: + +```bash +supabase functions serve +``` + +### Try it out + +Navigate to `http://127.0.0.1:54321/functions/v1/text-to-speech?text=hello%20world` to hear the function in action. + +Afterwards, navigate to `http://127.0.0.1:54323/project/default/storage/buckets/audio` to see the audio file in your local Supabase Storage bucket. + +## Deploy to Supabase + +If you haven't already, create a new Supabase account at [database.new](https://database.new) and link the local project to your Supabase account: + +```bash +supabase link +``` + +Once done, run the following command to deploy the function: + +```bash +supabase functions deploy +``` + +### Set the function secrets + +Now that you have all your secrets set locally, you can run the following command to set the secrets in your Supabase project: + +```bash +supabase secrets set --env-file supabase/functions/.env +``` + +## Test the function + +The function is designed in a way that it can be used directly as a source for an ` Firebase Cloud Messaging (FCM) is a push notification service offered by Google that allows you to send push notifications to your users' devices on iOS, Android, and Web. - + This guide will show you how to send push notifications to your app when a new row is inserted into a table using FCM, Supabase Edge Functions, and database web hooks. 
## Supabase setup diff --git a/apps/docs/content/guides/functions/examples/screenshots.mdx b/apps/docs/content/guides/functions/examples/screenshots.mdx index 6fb9f4e0d2af2..2e346f9ec9ec3 100644 --- a/apps/docs/content/guides/functions/examples/screenshots.mdx +++ b/apps/docs/content/guides/functions/examples/screenshots.mdx @@ -12,6 +12,6 @@ description: 'Take screenshots in Edge Functions with Puppeteer and Browserless. > -[Puppeteer](https://pptr.dev/) is a handy tool to programmatically take screenshots and generate PDFs. However, trying to do so in Edge Functions can be challenging due to the size restrictions. Luckily there is a [serverless browser offering available](https://www.browserless.io/) that we can connect to via websockets. +[Puppeteer](https://pptr.dev/) is a handy tool to programmatically take screenshots and generate PDFs. However, trying to do so in Edge Functions can be challenging due to the size restrictions. Luckily there is a [serverless browser offering available](https://www.browserless.io/) that we can connect to via WebSockets. Find the code on [GitHub](https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/puppeteer). diff --git a/apps/docs/content/guides/functions/examples/semantic-search.mdx b/apps/docs/content/guides/functions/examples/semantic-search.mdx index 7534d6bafaea0..043721ca96c5e 100644 --- a/apps/docs/content/guides/functions/examples/semantic-search.mdx +++ b/apps/docs/content/guides/functions/examples/semantic-search.mdx @@ -8,12 +8,12 @@ tocVideo: 'w4Rr_1whU-U' [Semantic search](/docs/guides/ai/semantic-search) interprets the meaning behind user queries rather than exact [keywords](/docs/guides/ai/keyword-search). It uses machine learning to capture the intent and context behind the query, handling language nuances like synonyms, phrasing variations, and word relationships. 
-Since Supabase Edge Runtime [v1.36.0](https://github.com/supabase/edge-runtime/releases/tag/v1.36.0) you can run the [`gte-small` model](https://huggingface.co/Supabase/gte-small) natively within Supabase Edge Functions without any external dependencies! This allows you to easily generate text embeddings without calling any external APIs! +Since Supabase Edge Runtime [v1.36.0](https://github.com/supabase/edge-runtime/releases/tag/v1.36.0) you can run the [`gte-small` model](https://huggingface.co/Supabase/gte-small) natively within Supabase Edge Functions without any external dependencies! This allows you to generate text embeddings without calling any external APIs! In this tutorial you're implementing three parts: 1. A [`generate-embedding`](https://github.com/supabase/supabase/tree/master/examples/ai/edge-functions/supabase/functions/generate-embedding/index.ts) database webhook edge function which generates embeddings when a content row is added (or updated) in the [`public.embeddings`](https://github.com/supabase/supabase/tree/master/examples/ai/edge-functions/supabase/migrations/20240408072601_embeddings.sql) table. -2. A [`query_embeddings` Postgres function](https://github.com/supabase/supabase/tree/master/examples/ai/edge-functions/supabase/migrations/20240410031515_vector-search.sql) which allows us to perform similarity search from an egde function via [Remote Procedure Call (RPC)](https://supabase.com/docs/guides/database/functions?language=js). +2. A [`query_embeddings` Postgres function](https://github.com/supabase/supabase/tree/master/examples/ai/edge-functions/supabase/migrations/20240410031515_vector-search.sql) which allows us to perform similarity search from an Edge Function via [Remote Procedure Call (RPC)](https://supabase.com/docs/guides/database/functions?language=js). 3. 
A [`search` edge function](https://github.com/supabase/supabase/tree/master/examples/ai/edge-functions/supabase/functions/search/index.ts) which generates the embedding for the search term, performs the similarity search via RPC function call, and returns the result. You can find the complete example code on [GitHub](https://github.com/supabase/supabase/tree/master/examples/ai/edge-functions) @@ -126,4 +126,4 @@ Deno.serve(async (req) => { }) ``` -That's it, you now have AI powered semantic search set up without any external dependencies! Just you, pgvector, and Supabase Edge Functions! +You now have AI powered semantic search set up without any external dependencies! Just you, pgvector, and Supabase Edge Functions! diff --git a/apps/docs/content/guides/functions/examples/send-emails.mdx b/apps/docs/content/guides/functions/examples/send-emails.mdx index 4a28a8285ef27..8dc64ae068321 100644 --- a/apps/docs/content/guides/functions/examples/send-emails.mdx +++ b/apps/docs/content/guides/functions/examples/send-emails.mdx @@ -77,7 +77,7 @@ Deploy function to Supabase: supabase functions deploy resend --no-verify-jwt ``` - + When you deploy to Supabase, make sure that your `RESEND_API_KEY` is set in [Edge Function Secrets Management](https://supabase.com/dashboard/project/_/settings/functions) diff --git a/apps/docs/content/guides/functions/examples/sentry-monitoring.mdx b/apps/docs/content/guides/functions/examples/sentry-monitoring.mdx index 29bf251619383..5547de7496cf0 100644 --- a/apps/docs/content/guides/functions/examples/sentry-monitoring.mdx +++ b/apps/docs/content/guides/functions/examples/sentry-monitoring.mdx @@ -3,7 +3,7 @@ title: 'Monitoring with Sentry' description: 'Monitor Edge Functions with the Sentry Deno SDK.' --- -Add the [Sentry Deno SDK](https://docs.sentry.io/platforms/javascript/guides/deno/) to your Supabase Edge Functions to easily track exceptions and get notified of errors or performance issues. 
+Add the [Sentry Deno SDK](https://docs.sentry.io/platforms/javascript/guides/deno/) to your Supabase Edge Functions to track exceptions and get notified of errors or performance issues. ### Prerequisites diff --git a/apps/docs/content/guides/functions/examples/stripe-webhooks.mdx b/apps/docs/content/guides/functions/examples/stripe-webhooks.mdx index d95378ffdc0d6..60bcf64cf3da5 100644 --- a/apps/docs/content/guides/functions/examples/stripe-webhooks.mdx +++ b/apps/docs/content/guides/functions/examples/stripe-webhooks.mdx @@ -13,3 +13,9 @@ description: 'Handling signed Stripe Webhooks with Edge Functions.' Handling signed Stripe Webhooks with Edge Functions. [View on GitHub](https://github.com/supabase/supabase/blob/master/examples/edge-functions/supabase/functions/stripe-webhooks/index.ts). + +<$CodeSample +path="edge-functions/supabase/functions/stripe-webhooks/index.ts" +lines={[[1, -1]]} +meta="index.ts" +/> diff --git a/apps/docs/content/guides/functions/import-maps.mdx b/apps/docs/content/guides/functions/import-maps.mdx deleted file mode 100644 index 176aa73d4f272..0000000000000 --- a/apps/docs/content/guides/functions/import-maps.mdx +++ /dev/null @@ -1,173 +0,0 @@ ---- -id: 'functions-import-maps' -title: 'Managing dependencies' -description: 'Managing packages and dependencies.' -subtitle: 'Managing packages and dependencies.' -tocVideo: 'ILr3cneZuFk' ---- - -Developing with Edge Functions is similar to developing with Node.js, but with a few key differences. This guide will help you understand how to manage your dependencies. - -## Managing dependencies - -There are two ways to manage your dependencies in Supabase Edge Functions: - -### Using deno.json (recommended) - - - -This feature requires Supabase CLI version 1.215.0 or higher. - - - -Each function can have its own `deno.json` or `deno.jsonc` file to manage dependencies and configure Deno-specific settings. 
For a complete list of supported options, see the [official Deno configuration documentation](https://docs.deno.com/runtime/manual/getting_started/configuration_file). - -```json supabase/functions/my-function/deno.json -{ - "imports": { - "lodash": "https://cdn.skypack.dev/lodash" - } -} -``` - -This allows you to use simplified imports: - -```ts -import lodash from 'lodash' -``` - -The recommended file structure when using `deno.json`: - -```bash -└── supabase - ├── functions - │ ├── function-one - │ │ ├── index.ts - │ │ └── deno.json # Function-specific Deno configuration - │ └── function-two - │ ├── index.ts - │ └── deno.json # Function-specific Deno configuration - └── config.toml -``` - -### Using import maps (legacy) - -Import Maps are a legacy way to manage dependencies, similar to a `package.json` file. While still supported, we recommend using `deno.json`. If both exist, `deno.json` takes precedence. - -```json supabase/functions/import_map.json -{ - "imports": { - "lodash": "https://cdn.skypack.dev/lodash" - } -} -``` - -The import map should be placed in the `/supabase/functions` folder and will be applied to all functions: - -```bash -└── supabase - ├── functions - │ ├── import_map.json # Top-level import map for all functions - │ ├── function-one - │ │ └── index.ts - │ └── function-two - │ └── index.ts - └── config.toml -``` - -You can override the import map location using the `--import-map ` flag with `serve` and `deploy` commands. - -If using import maps with VSCode, update your `.vscode/settings.json`: - -```json settings.json -{ - "deno.enable": true, - "deno.unstable": [ - "bare-node-builtins", - "byonm" - // ... other flags ... - ], - "deno.importMap": "./supabase/functions/import_map.json" -} -``` - -You can override the default import map location in two ways: - -1. Using the `--import-map ` flag with `serve` and `deploy` commands -2. 
Setting the `import_map` property in your `config.toml` file for specific functions: - -```toml supabase/config.toml -[functions.my-function] -import_map = "./supabase/functions/my-function/custom_import_map.json" -``` - -For more configuration options, see the [CLI Configuration documentation](/docs/guides/local-development/cli/config#functions.function_name.import_map). - -## Importing dependencies - -Supabase Edge Functions support several ways to import dependencies: - -- The Deno [standard library](https://deno.land/std) -- JavaScript modules from npm (https://docs.deno.com/examples/npm/) -- Built-in [Node APIs](https://docs.deno.com/runtime/manual/node/compatibility) -- Third party modules published to [JSR](https://jsr.io/) or [deno.land/x](https://deno.land/x) - -### NPM modules - -You can import npm modules using the `npm:` specifier: - -```ts -import { createClient } from 'npm:@supabase/supabase-js@2' -``` - -### Node.js built-ins - -For Node.js built-in APIs, use the `node:` specifier: - -```ts -import process from 'node:process' -``` - -Learn more about npm specifiers and Node built-in APIs in [Deno's documentation](https://docs.deno.com/runtime/manual/node/npm_specifiers). - -### Importing from private registries - - - -This feature requires Supabase CLI version 1.207.9 or higher. - - - -Create a `.npmrc` file within `supabase/functions`. This will allow you to import the private packages into multiple functions. Alternatively, you can place the `.npmrc` file directly inside `supabase/functions/function-name` directory.) - -Add your registry details in the `.npmrc` file. Follow [this guide](https://docs.npmjs.com/cli/v10/configuring-npm/npmrc) to learn more about the syntax of npmrc files. 
- -``` -@myorg:registry=https://npm.registryhost.com -//npm.registryhost.com/:_authToken=VALID_AUTH_TOKEN -``` - -After that, you can import the package directly in your function code or add it to the import_map.json (/docs/guides/functions/import-maps#using-import-maps). - -```ts -import MyPackage from 'npm:@myorg/private-package@v1.0.1' - -// use MyPackage -``` - -## Importing types - -If your [environment is set up properly](/docs/guides/functions/local-development) and the module you're importing is exporting types, the import will have types and autocompletion support. - -Some npm packages may not ship out of the box types and you may need to import them from a separate package. You can specify their types with a `@deno-types` directive: - -```ts -// @deno-types="npm:@types/express@^4.17" -import express from 'npm:express@^4.17' -``` - -To include types for built-in Node APIs, add the following line to the top of your imports: - -```ts -/// -``` diff --git a/apps/docs/content/guides/functions/limits.mdx b/apps/docs/content/guides/functions/limits.mdx index 9a41065188b8d..0fd34dfa685aa 100644 --- a/apps/docs/content/guides/functions/limits.mdx +++ b/apps/docs/content/guides/functions/limits.mdx @@ -14,7 +14,15 @@ subtitle: "Limits applied Edge Functions in Supabase's hosted platform." - Paid plans: 400s - Maximum CPU Time: 2s (Amount of actual time spent on the CPU per request - does not include async I/O.) - Request idle timeout: 150s (If an Edge Function doesn't send a response before the timeout, 504 Gateway Timeout will be returned) + +## Platform limits + - Maximum Function Size: 20MB (After bundling using CLI) +- Maximum no. of Functions per project: + - Free: 100 + - Pro: 500 + - Team: 1000 + - Enterprise: Unlimited - Maximum log message length: 10,000 characters - Log event threshold: 100 events per 10 seconds @@ -23,4 +31,4 @@ subtitle: "Limits applied Edge Functions in Supabase's hosted platform." 
- Outgoing connections to ports `25` and `587` are not allowed. - Serving of HTML content is only supported with [custom domains](/docs/reference/cli/supabase-domains) (Otherwise `GET` requests that return `text/html` will be rewritten to `text/plain`). - Web Worker API (or Node `vm` API) are not available. -- Node Libraries that require multithreading are not supported. Examples: [libvips](https://github.com/libvips/libvips), [sharp](https://github.com/lovell/sharp). +- Node Libraries that require multithreading are not supported. Examples: [`libvips`](https://github.com/libvips/libvips), [sharp](https://github.com/lovell/sharp). diff --git a/apps/docs/content/guides/functions/local-development.mdx b/apps/docs/content/guides/functions/local-development.mdx index 1da210dd2b6ef..90f19fedf4cc4 100644 --- a/apps/docs/content/guides/functions/local-development.mdx +++ b/apps/docs/content/guides/functions/local-development.mdx @@ -13,11 +13,11 @@ You can follow the [Deno guide](https://deno.com/manual@v1.32.5/getting_started/ ## Deno with Visual Studio Code -When using VSCode, you should install both the Deno CLI and the the Deno language server [via this link](vscode:extension/denoland.vscode-deno) or by browsing the extensions in vscode and choosing to install the _Deno_ extension. +When using VSCode, you should install both the Deno CLI and the Deno language server [via this link](vscode:extension/denoland.vscode-deno) or by browsing the extensions in VSCode and choosing to install the _Deno_ extension. -The Supabase CLI can automatically create helpful Deno settings when running `supabase init`. Simply select `y` when prompted "Generate VS Code settings for Deno? [y/N]"! +The Supabase CLI can automatically create helpful Deno settings when running `supabase init`. Select `y` when prompted "Generate VS Code settings for Deno? [y/N]"! 
diff --git a/apps/docs/content/guides/functions/local-quickstart.mdx b/apps/docs/content/guides/functions/local-quickstart.mdx new file mode 100644 index 0000000000000..4afbc8a83d1e1 --- /dev/null +++ b/apps/docs/content/guides/functions/local-quickstart.mdx @@ -0,0 +1,125 @@ +--- +id: 'functions-local-quickstart' +title: 'Developing Edge Functions locally' +description: 'Get started with Edge Functions on your local machine.' +subtitle: 'Get started with Edge Functions on your local machine.' +tocVideo: '5OWH9c4u68M' +--- + +Let's create a basic Edge Function on your local machine and then invoke it using the Supabase CLI. + +## Initialize a project + +Create a new Supabase project in a folder on your local machine: + +```bash +supabase init +``` + + + +Check out the [CLI Docs](/docs/guides/cli) to learn how to install the Supabase CLI on your local machine. + + + + + +If you're using VS code you can have the CLI automatically create helpful Deno settings when running `supabase init`. Select `y` when prompted "Generate VS Code settings for Deno? [y/N]"! + + + + + +If you're using an IntelliJ IDEA editor such as WebStorm, you can use the `--with-intellij-settings` flag with `supabase init` to create an auto generated Deno config. + + + +## Create an Edge Function + +Let's create a new Edge Function called `hello-world` inside your project: + +```bash +supabase functions new hello-world +``` + +This creates a function stub in your `supabase` folder: + +```bash +└── supabase + ├── functions + │ └── hello-world + │ │ └── index.ts ## Your function code + └── config.toml +``` + +## How to write the code + +The generated function uses native [Deno.serve](https://docs.deno.com/runtime/manual/runtime/http_server_apis) to handle requests. It gives you access to `Request` and `Response` objects. 
+ +Here's the generated Hello World Edge Function, that accepts a name in the `Request` and responds with a greeting: + +```tsx +Deno.serve(async (req) => { + const { name } = await req.json() + const data = { + message: `Hello ${name}!`, + } + + return new Response(JSON.stringify(data), { headers: { 'Content-Type': 'application/json' } }) +}) +``` + +## Running Edge Functions locally + +You can run your Edge Function locally using [`supabase functions serve`](/docs/reference/cli/usage#supabase-functions-serve): + +```bash +supabase start # start the supabase stack +supabase functions serve # start the Functions watcher +``` + +The `functions serve` command has hot-reloading capabilities. It will watch for any changes to your files and restart the Deno server. + +## Invoking Edge Functions locally + +While serving your local Edge Function, you can invoke it using curl or one of the client libraries. +To call the function from a browser you need to handle CORS requests. See [CORS](/docs/guides/functions/cors). + +<$CodeTabs> + +```bash name=cURL +curl --request POST 'http://localhost:54321/functions/v1/hello-world' \ + --header 'Authorization: Bearer SUPABASE_ANON_KEY' \ + --header 'Content-Type: application/json' \ + --data '{ "name":"Functions" }' +``` + +```js name=JavaScript +import { createClient } from '@supabase/supabase-js' + +const supabase = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_ANON_KEY) + +const { data, error } = await supabase.functions.invoke('hello-world', { + body: { name: 'Functions' }, +}) +``` + + + + + +Run `supabase status` to see your local credentials. + + + +You should see the response `{ "message":"Hello Functions!" }`. + +If you execute the function with a different payload, the response will change. + +Modify the `--data '{"name":"Functions"}'` line to `--data '{"name":"World"}'` and try invoking the command again. 
+ +## Next steps + +Check out the [Deploy to Production](/docs/guides/functions/deploy) guide to make your Edge Function available to the world. + +See the [development tips](/docs/guides/functions/development-tips) for best practices. diff --git a/apps/docs/content/guides/functions/logging.mdx b/apps/docs/content/guides/functions/logging.mdx index 62a5cc3cd7093..980ad500f11e3 100644 --- a/apps/docs/content/guides/functions/logging.mdx +++ b/apps/docs/content/guides/functions/logging.mdx @@ -52,7 +52,7 @@ Deno.serve(async (req) => { return new Response(JSON.stringify(data), { headers: { 'Content-Type': 'application/json' } }) } catch (error) { - console.error(`Error processing request: ${error.message}`) + console.error(`Error processing request: ${error}`) return new Response(JSON.stringify({ error: 'Internal Server Error' }), { status: 500, headers: { 'Content-Type': 'application/json' }, @@ -108,7 +108,7 @@ Deno.serve(async (req) => { This results in something like: -```ts index.ts +``` Request headers: { "accept": "*/*", "accept-encoding": "gzip", diff --git a/apps/docs/content/guides/functions/pricing.mdx b/apps/docs/content/guides/functions/pricing.mdx new file mode 100644 index 0000000000000..0ae33b8bddeba --- /dev/null +++ b/apps/docs/content/guides/functions/pricing.mdx @@ -0,0 +1,8 @@ +--- +id: 'functions-pricing' +title: 'Pricing' +--- + +<$Partial path="billing/pricing/pricing_edge_functions.mdx" /> + +For a detailed explanation of how charges are calculated, refer to [Manage Edge Function Invocations usage](/docs/guides/platform/manage-your-usage/edge-function-invocations). 
diff --git a/apps/docs/content/guides/functions/quickstart.mdx b/apps/docs/content/guides/functions/quickstart.mdx index 19aedd7811c41..2d8d15240558f 100644 --- a/apps/docs/content/guides/functions/quickstart.mdx +++ b/apps/docs/content/guides/functions/quickstart.mdx @@ -1,235 +1,168 @@ --- id: 'functions-quickstart' -title: 'Developing Edge Functions locally' -description: 'Get started with Edge Functions on your local machine.' -subtitle: 'Get started with Edge Functions on your local machine.' -tocVideo: '5OWH9c4u68M' +title: 'Developing Edge Functions with Supabase' +description: 'Get started with Edge Functions on the Supabase dashboard.' +subtitle: 'Get started with Edge Functions on the Supabase dashboard.' --- -Let's create a basic Edge Function on your local machine and then invoke it using the Supabase CLI. +In this guide we'll cover how to create a basic Edge Function on the Supabase dashboard, and access it using the Supabase CLI. -## Initialize a project +## Deploy from Dashboard -Create a new Supabase project in a folder on your local machine: +Go to your project > Edge Functions > Deploy a new function > Via Editor -```bash -supabase init -``` +Deploy functions from the dashboard - - -Check out the [CLI Docs](/docs/guides/cli) to learn how to install the Supabase CLI on your local machine. - - - - - -If you're using VS code you can have the CLI automatically create helpful Deno settings when running `supabase init`. Simply select `y` when prompted "Generate VS Code settings for Deno? [y/N]"! - - - - - -If you're using an Intellj IDEA editor such as WebStorm, you can use the `--with-intellij-settings` flag with `supabase init` to create an auto generated deno cofig. 
- - - -## Create an Edge Function - -Let's create a new Edge Function called `hello-world` inside your project: - -```bash -supabase functions new hello-world -``` - -This creates a function stub in your `supabase` folder: - -```bash -└── supabase - ├── functions - │ └── hello-world - │ │ └── index.ts ## Your function code - └── config.toml -``` - -## How to write the code - -The generated function uses native [Deno.serve](https://docs.deno.com/runtime/manual/runtime/http_server_apis) to handle requests. It gives you access to `Request` and `Response` objects. +This will scaffold a new function for you. You can choose from Templates some of the pre-defined functions for common use cases. -Here's the generated Hello World Edge Function, that accepts a name in the `Request` and responds with a greeting: +Scaffold functions through the dashboard editor -```tsx -Deno.serve(async (req) => { - const { name } = await req.json() - const data = { - message: `Hello ${name}!`, - } +Modify the function as needed, name it, and click `Deploy function`. - return new Response(JSON.stringify(data), { headers: { 'Content-Type': 'application/json' } }) -}) -``` +Your function is now active. Navigate to the function's details page, and click on the test button. -## Running Edge Functions locally +You can test your function by providing the expected HTTP method, headers, query parameters, and request body. You can also change the authorization token passed (e.g., anon key or a user key). -You can run your Edge Function locally using [`supabase functions serve`](/docs/reference/cli/usage#supabase-functions-serve): +
+ Provide a request body to test your function +
-```bash -supabase start # start the supabase stack -supabase functions serve # start the Functions watcher -``` +## Access deployed functions via Supabase CLI -The `functions serve` command has hot-reloading capabilities. It will watch for any changes to your files and restart the Deno server. - -## Invoking Edge Functions locally - -While serving your local Edge Function, you can invoke it using curl or one of the client libraries. -To call the function from a browser you need to handle CORS requests. See [CORS](/docs/guides/functions/cors). - - - -```bash cURL -curl --request POST 'http://localhost:54321/functions/v1/hello-world' \ - --header 'Authorization: Bearer SUPABASE_ANON_KEY' \ - --header 'Content-Type: application/json' \ - --data '{ "name":"Functions" }' -``` - -```js JavaScript -import { createClient } from '@supabase/supabase-js' - -const supabase = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_ANON_KEY) - -const { data, error } = await supabase.functions.invoke('hello-world', { - body: { name: 'Functions' }, -}) -``` - - - - + -Run `supabase status` to see your local credentials. +Check out the [CLI Docs](/docs/guides/cli) to learn how to install the Supabase CLI on your local machine. -You should see the response `{ "message":"Hello Functions!" }`. +Now that your function is deployed, you can access it from your local development environment. +Here's how: -If you execute the function with a different payload, the response will change. +1. **Link your project** to your local environment. -Modify the `--data '{"name":"Functions"}'` line to `--data '{"name":"World"}'` and try invoking the command again. + You can find your project reference ID in the URL of your Supabase dashboard or in the project settings. -## Next steps + ```bash + supabase link --project-ref your-project-ref + ``` -Check out the [Deploy to Production](/docs/guides/functions/deploy) guide to make your Edge Function available to the world. +2. 
**List all Functions** in the linked Supabase project. -Read on for some common development tips. + ```bash + supabase functions list + ``` -## Development tips +3. **Access the specific function** you want to work on. -Here are a few recommendations when developing Edge Functions. + ```bash + supabase functions download function-name + ``` -### Skipping authorization checks +4. **Make local edits** to the function code as needed. -By default, Edge Functions require a valid JWT in the authorization header. If you want to use Edge Functions without Authorization checks (commonly used for Stripe webhooks), you can pass the `--no-verify-jwt` flag when serving your Edge Functions locally. +5. **Run your function locally** before redeploying. -```bash -supabase functions serve hello-world --no-verify-jwt -``` + ```bash + supabase functions serve function-name + ``` -Be careful when using this flag, as it will allow anyone to invoke your Edge Function without a valid JWT. The Supabase client libraries automatically handle authorization. +6. **Redeploy** when you're ready with your changes. -### Using HTTP methods + ```bash + supabase functions deploy function-name + ``` -Edge Functions support `GET`, `POST`, `PUT`, `PATCH`, `DELETE`, and `OPTIONS`. A Function can be designed to perform different actions based on a request's HTTP method. See the [example on building a RESTful service](https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/restful-tasks) to learn how to handle different HTTP methods in your Function. +{/* supa-mdx-lint-disable-next-line Rule001HeadingCase */} - +## Deploy via Assistant -HTML content is not supported. `GET` requests that return `text/html` will be rewritten to `text/plain`. +You can also leverage the Supabase Assistant to help you write and deploy edge functions. 
- +Go to your project > Edge Functions > Click on the Assistant icon to Create with Supabase Assistant -### Naming Edge Functions +Open Supabase Assistant -We recommend using hyphens to name functions because hyphens are the most URL-friendly of all the naming conventions (snake_case, camelCase, PascalCase). +This brings up an assistant window with a pre-filled prompt for generating edge functions. +Write up your Edge Function requirement, and let Supabase Assistant do the rest. -### Organizing your Edge Functions +
+ Generate a function with the assistant +
-We recommend developing “fat functions”. This means that you should develop few large functions, rather than many small functions. One common pattern when developing Functions is that you need to share code between two or more Functions. To do this, you can store any shared code in a folder prefixed with an underscore (`_`). We also recommend a separate folder for [Unit Tests](/docs/guides/functions/unit-test) including the name of the function followed by a `-test` suffix. -We recommend this folder structure: +Click Deploy and the Assistant will automatically deploy your function. -```bash -└── supabase - ├── functions - │ ├── import_map.json # A top-level import map to use across functions. - │ ├── _shared - │ │ ├── supabaseAdmin.ts # Supabase client with SERVICE_ROLE key. - │ │ └── supabaseClient.ts # Supabase client with ANON key. - │ │ └── cors.ts # Reusable CORS headers. - │ ├── function-one # Use hyphens to name functions. - │ │ └── index.ts - │ └── function-two - │ │ └── index.ts - │ └── tests - │ └── function-one-test.ts - │ └── function-two-test.ts - ├── migrations - └── config.toml -``` +This function requires an OpenAI API key. You can add the key in your Edge Functions secrets page, or ask Assistant for help. -### Using config.toml +1. Navigate to your Edge Functions > Secrets page. +2. Look for the option to add environment variables. +3. Add a new environment variable with the key `OPENAI_API_KEY` and set its value to your actual OpenAI API key. -Individual function configuration like [JWT verification](/docs/guides/cli/config#functions.function_name.verify_jwt) and [import map location](/docs/guides/cli/config#functions.function_name.import_map) can be set via the `config.toml` file. +Once you've set this environment variable, your edge functions will be able to access the OPENAI_API_KEY securely without hardcoding it into the function code. This is a best practice for keeping sensitive information safe. 
-```toml supabase/config.toml -[functions.hello-world] -verify_jwt = false -import_map = './import_map.json' -``` +With your variable set, you can test by sending a request via the dashboard. Navigate to the function's details page, and click on the test button. Then provide a Request Body your function expects. -### Not using TypeScript +
+ Provide a request body to test your function +
-When you create a new Edge Function, it will use TypeScript by default. However, it is possible to write and deploy Edge Functions using pure JavaScript. +## Editing functions from the Dashboard -Save your Function as a JavaScript file (eg: `index.js`) and then update the `supabase/config.toml` as follows: + - - -`entrypoint` is available only in Supabase CLI version 1.215.0 or higher. +The Dashboard's Edge Function editor currently does not support versioning or rollbacks. We recommend using it only for quick testing and prototypes. When you’re ready to go to production, store Edge Functions code in a source code repository (e.g., git) and deploy it using one of the [CI integrations](https://supabase.com/docs/guides/functions/cicd-workflow). -```toml supabase/config.toml -[functions.hello-world] -# other entries -entrypoint = './functions/hello-world/index.js' # path must be relative to config.toml -``` - -You can use any `.ts`, `.js`, `.tsx`, `.jsx` or `.mjs` file as the `entrypoint` for a Function. +1. From the functions page, click on the function you want to edit. From the function page, click on the Code tab. -### Error handling +2. This opens up a code editor in the dashboard where you can see your deployed function's code. -The `supabase-js` library provides several error types that you can use to handle errors that might occur when invoking Edge Functions: +3. Modify the code as needed, then click Deploy updates. This will overwrite the existing deployment with the newly edited function code. 
-```js -import { FunctionsHttpError, FunctionsRelayError, FunctionsFetchError } from '@supabase/supabase-js' - -const { data, error } = await supabase.functions.invoke('hello', { - headers: { 'my-custom-header': 'my-custom-header-value' }, - body: { foo: 'bar' }, -}) - -if (error instanceof FunctionsHttpError) { - const errorMessage = await error.context.json() - console.log('Function returned an error', errorMessage) -} else if (error instanceof FunctionsRelayError) { - console.log('Relay error:', error.message) -} else if (error instanceof FunctionsFetchError) { - console.log('Fetch error:', error.message) -} -``` - -### Database Functions vs Edge Functions +## Next steps -For data-intensive operations we recommend using [Database Functions](/docs/guides/database/functions), which are executed within your database and can be called remotely using the [REST and GraphQL API](/docs/guides/api). +Check out the [Local development](/docs/guides/functions/local-quickstart) guide for more details on working with Edge Functions. -For use-cases which require low-latency we recommend [Edge Functions](/docs/guides/functions), which are globally-distributed and can be written in TypeScript. +Read on for some [common development tips](/docs/guides/functions/development-tips). diff --git a/apps/docs/content/guides/functions/regional-invocation.mdx b/apps/docs/content/guides/functions/regional-invocation.mdx index 282009205f238..19fef4112b214 100644 --- a/apps/docs/content/guides/functions/regional-invocation.mdx +++ b/apps/docs/content/guides/functions/regional-invocation.mdx @@ -18,9 +18,9 @@ Supabase provides an option to specify the region when invoking the Function. 
Use the `x-region` HTTP header when calling an Edge Function to determine where the Function should be executed: - +<$CodeTabs> -```bash cURL +```bash name=cURL # https://supabase.com/docs/guides/functions/deploy#invoking-remote-functions curl --request POST 'https://.supabase.co/functions/v1/hello-world' \ --header 'Authorization: Bearer ANON_KEY' \ @@ -29,7 +29,7 @@ curl --request POST 'https://.supabase.co/functions/v1/hello-world' --data '{ "name":"Functions" }' ``` -```js JavaScript +```js name=JavaScript // https://supabase.com/docs/reference/javascript/installing import { createClient } from '@supabase/supabase-js' @@ -43,7 +43,7 @@ const { data, error } = await supabase.functions.invoke('hello-world', { }) ``` - + You can verify the execution region by looking at the `x-sb-edge-region` HTTP header in the response. You can also find it as metadata in [Edge Function Logs](/docs/guides/functions/logging). @@ -71,7 +71,9 @@ These are the currently supported region values you can provide for `x-region` h You can also specify the region when invoking a Function using the Supabase client library: ```js -const { createClient, FunctionRegion } = require('@supabase/supabase-js') +import { createClient, FunctionRegion } from '@supabase/supabase-js' +const supabase = createClient('SUPABASE_URL', 'SUPABASE_ANON_KEY') + const { data: ret, error } = await supabase.functions.invoke('my-function-name', { headers: { 'Content-Type': 'application/json' }, method: 'GET', diff --git a/apps/docs/content/guides/functions/routing.mdx b/apps/docs/content/guides/functions/routing.mdx index 2944dacf5000c..023e284739f5b 100644 --- a/apps/docs/content/guides/functions/routing.mdx +++ b/apps/docs/content/guides/functions/routing.mdx @@ -5,8 +5,8 @@ description: 'How to handle custom routing within Edge Functions.' subtitle: 'How to handle custom routing within Edge Functions.' --- -Usually, an Edge Function is written to perform a single action (eg: write a record to the database). 
However, if your app's logic is split into multiple Edge Functions requests to each action may seem slower. -This is because each Edge Function needs to be booted before serving a request (known as cold starts). If an action is performed less frequently (eg: deleting a record), there is a high-chance of that function experiencing a cold-start. +Usually, an Edge Function is written to perform a single action (e.g. write a record to the database). However, if your app's logic is split into multiple Edge Functions requests to each action may seem slower. +This is because each Edge Function needs to be booted before serving a request (known as cold starts). If an action is performed less frequently (e.g. deleting a record), there is a high-chance of that function experiencing a cold-start. One way to reduce the cold starts and increase performance of your app is to combine multiple actions into a single Edge Function. This way only one instance of the Edge Function needs to be booted and it can handle multiple requests to different actions. For example, we can use a single Edge Function to create a typical CRUD API (create, read, update, delete records). @@ -160,10 +160,10 @@ We should see a response printing `Hello Foo!`. ## Using route parameters -We can use route parameters to capture values at specific URL segments (eg: `/tasks/:taskId/notes/:noteId`). +We can use route parameters to capture values at specific URL segments (e.g. `/tasks/:taskId/notes/:noteId`). Here's an example Edge Function implemented using the Framework for managing tasks using route parameters. -Keep in mind paths must be prefixed by function name (ie. `tasks` in this example). Route parameters can only be used after the function name prefix. +Keep in mind paths must be prefixed by function name (i.e. `tasks` in this example). Route parameters can only be used after the function name prefix. 
{ ## URL patterns API -If you prefer not to use a web framework, you can directly use [URLPattern API](https://developer.mozilla.org/en-US/docs/Web/API/URL_Pattern_API) within your Edge Functions to implement routing. +If you prefer not to use a web framework, you can directly use [URL Pattern API](https://developer.mozilla.org/en-US/docs/Web/API/URL_Pattern_API) within your Edge Functions to implement routing. This is ideal for small apps with only couple of routes and you want to have a custom matching algorithm. Here is an example Edge Function using URL Patterns API: https://github.com/supabase/supabase/blob/master/examples/edge-functions/supabase/functions/restful-tasks/index.ts diff --git a/apps/docs/content/guides/functions/schedule-functions.mdx b/apps/docs/content/guides/functions/schedule-functions.mdx index f0d64f319c633..254a37dad9209 100644 --- a/apps/docs/content/guides/functions/schedule-functions.mdx +++ b/apps/docs/content/guides/functions/schedule-functions.mdx @@ -17,10 +17,23 @@ The hosted Supabase Platform supports the [`pg_cron` extension](/docs/guides/dat In combination with the [`pg_net` extension](/docs/guides/database/extensions/pgnet), this allows us to invoke Edge Functions periodically on a set schedule. + + +To access the auth token securely for your Edge Function call, we recommend storing it in [Supabase Vault](/docs/guides/database/vault). 
+ + + ## Examples ### Invoke an Edge Function every minute +Store `project_url` and `anon_key` in Supabase Vault: + +```sql +select vault.create_secret('https://project-ref.supabase.co', 'project_url'); +select vault.create_secret('YOUR_SUPABASE_ANON_KEY', 'anon_key'); +``` + Make a POST request to a Supabase Edge Function every minute: ```sql @@ -31,8 +44,11 @@ select $$ select net.http_post( - url:='https://project-ref.supabase.co/functions/v1/function-name', - headers:='{"Content-Type": "application/json", "Authorization": "Bearer YOUR_ANON_KEY"}'::jsonb, + url:= (select decrypted_secret from vault.decrypted_secrets where name = 'project_url') || '/functions/v1/function-name', + headers:=jsonb_build_object( + 'Content-type', 'application/json', + 'Authorization', 'Bearer ' || (select decrypted_secret from vault.decrypted_secrets where name = 'anon_key') + ), body:=concat('{"time": "', now(), '"}')::jsonb ) as request_id; $$ diff --git a/apps/docs/content/guides/functions/secrets.mdx b/apps/docs/content/guides/functions/secrets.mdx index 722dfc77325b7..5439ff3c8c340 100644 --- a/apps/docs/content/guides/functions/secrets.mdx +++ b/apps/docs/content/guides/functions/secrets.mdx @@ -1,11 +1,13 @@ --- id: 'functions-secrets' -title: 'Managing Environment Variables' +title: 'Managing Secrets (Environment Variables)' description: 'Managing secrets and environment variables.' subtitle: 'Managing secrets and environment variables.' --- -It's common that you will need to use sensitive information or environment-specific variables inside your Edge Functions. You can access these using Deno's built-in handler +It's common that you will need to use environment variables or other sensitive information in Edge Functions. You can manage secrets using the CLI or the Dashboard. 
+ +You can access these using Deno's built-in handler ```js Deno.env.get('MY_SECRET_NAME') @@ -18,7 +20,7 @@ Edge Functions have access to these secrets by default: - `SUPABASE_URL`: The API gateway for your Supabase project. - `SUPABASE_ANON_KEY`: The `anon` key for your Supabase API. This is safe to use in a browser when you have [Row Level Security](/docs/guides/database/postgres/row-level-security) enabled. - `SUPABASE_SERVICE_ROLE_KEY`: The `service_role` key for your Supabase API. This is safe to use in Edge Functions, but it should NEVER be used in a browser. This key will bypass [Row Level Security](/docs/guides/database/postgres/row-level-security). -- `SUPABASE_DB_URL`: The URL for your [PostgreSQL database](/docs/guides/database). You can use this to connect directly to your database. +- `SUPABASE_DB_URL`: The URL for your [Postgres database](/docs/guides/database). You can use this to connect directly to your database. ## Local secrets @@ -57,7 +59,25 @@ When the function starts you should see the name “Yoda” output to the termin ## Production secrets -Let's create a `.env` for production. In this case we'll just use the same as our local secrets: +You will also need to set secrets for your production Edge Functions. You can do this via the Dashboard or using the CLI. + +### Using the Dashboard + +1. Visit [Edge Function Secrets Management](https://supabase.com/dashboard/project/_/settings/functions) page in your Dashboard. +2. Add the Key and Value for your secret and press Save. +3. Note that you can paste multiple secrets at a time. + +Edge Functions Secrets Management + +### Using the CLI + +Let's create a `.env` to help us deploy our secrets to production. In this case we'll just use the same as our local secrets: ```bash cp ./supabase/.env.local ./supabase/.env @@ -67,7 +87,7 @@ This creates a new file `./supabase/.env` for storing your production secrets. -Never check your `.env` files into Git! +Never check your `.env` files into Git! 
You only use the `.env` file to help deploy your secrets to production. Don't commit it to your repository. diff --git a/apps/docs/content/guides/functions/status-codes.mdx b/apps/docs/content/guides/functions/status-codes.mdx index 5925a2ad3bb8b..dfe712f6d3dbd 100644 --- a/apps/docs/content/guides/functions/status-codes.mdx +++ b/apps/docs/content/guides/functions/status-codes.mdx @@ -33,7 +33,7 @@ Edge Functions only support these HTTP methods: 'POST', 'GET', 'PUT', 'PATCH', ' ### 500 Internal Server Error -Edge Function throwed an uncaught exception (`WORKER_ERROR`). Check Edge Function logs to find the cause. +Edge Function threw an uncaught exception (`WORKER_ERROR`). Check Edge Function logs to find the cause. ### 503 Service Unavailable diff --git a/apps/docs/content/guides/functions/troubleshooting.mdx b/apps/docs/content/guides/functions/troubleshooting.mdx index 2d69c1e0867df..74a28ac557e98 100644 --- a/apps/docs/content/guides/functions/troubleshooting.mdx +++ b/apps/docs/content/guides/functions/troubleshooting.mdx @@ -45,7 +45,7 @@ The 546 error response might occur because: - Run the serve command with the `-debug` flag. - Support engineers can then try to run the provided sample code locally and see if they can reproduce the issue. - Search the [Edge Runtime](https://github.com/supabase/edge-runtime) and [CLI](https://github.com/supabase/cli) repos for the error message, to see if it has been reported before. -- If the output from the commands above does not help you to resolve the issue, please open a support ticket via the Supabase Dashboard (by clicking the "Help" button at the top right) and include all output and details about your commands. +- If the output from the commands above does not help you to resolve the issue, open a support ticket via the Supabase Dashboard (by clicking the "Help" button at the top right) and include all output and details about your commands. 
## Advanced techniques diff --git a/apps/docs/content/guides/functions/unit-test.mdx b/apps/docs/content/guides/functions/unit-test.mdx index 5dc3e942fb2a8..523129d33d7c6 100644 --- a/apps/docs/content/guides/functions/unit-test.mdx +++ b/apps/docs/content/guides/functions/unit-test.mdx @@ -34,11 +34,11 @@ The following script is a good example to get started with testing your Edge Fun ```typescript function-one-test.ts // Import required libraries and modules -import { assert, assertEquals } from 'https://deno.land/std@0.192.0/testing/asserts.ts' -import { createClient, SupabaseClient } from 'jsr:@supabase/supabase-js@2' +import { assert, assertEquals } from 'jsr:@std/assert@1' +import { createClient, SupabaseClient } from 'npm:@supabase/supabase-js@2' // Will load the .env file to Deno.env -import 'https://deno.land/x/dotenv@v3.2.2/load.ts' +import 'jsr:@std/dotenv/load' // Set up the configuration for the Supabase client const supabaseUrl = Deno.env.get('SUPABASE_URL') ?? '' @@ -105,9 +105,9 @@ This test case consists of two parts. The first part tests the client library an - The `testHelloWorld` function tests the "Hello-world" Edge Function by invoking it using the Supabase client's `functions.invoke` method. It checks if the response message matches the expected greeting. - We run the tests using the `Deno.test` function, providing a descriptive name for each test case and the corresponding test function. - + -Please make sure to replace the placeholders (`supabaseUrl`, `supabaseKey`, `my_table`) with the actual values relevant to your Supabase setup. +Make sure to replace the placeholders (`supabaseUrl`, `supabaseKey`, `my_table`) with the actual values relevant to your Supabase setup. 
diff --git a/apps/docs/content/guides/functions/wasm.mdx b/apps/docs/content/guides/functions/wasm.mdx new file mode 100644 index 0000000000000..024d585b2104b --- /dev/null +++ b/apps/docs/content/guides/functions/wasm.mdx @@ -0,0 +1,87 @@ +--- +id: 'function-wasm' +title: 'Using Wasm modules' +description: 'How to use WebAssembly in Edge Functions.' +subtitle: 'How to use WebAssembly in Edge Functions.' +--- + +Edge Functions supports running [WebAssembly (Wasm)](https://developer.mozilla.org/en-US/docs/WebAssembly) modules. WebAssembly is useful if you want to optimize code that's slower to run in JavaScript or require low-level manipulation. + +It also gives you the option to port existing libraries written in other languages to be used with JavaScript. For example, [magick-wasm](https://supabase.com/docs/guides/functions/examples/image-manipulation), which does image manipulation and transforms, is a port of an existing C library to WebAssembly. + +### Writing a Wasm module + +You can use different languages and SDKs to write Wasm modules. For this tutorial, we will write a simple Wasm module in Rust that adds two numbers. + +Follow this [guide on writing Wasm modules in Rust](https://developer.mozilla.org/en-US/docs/WebAssembly/Rust_to_Wasm) to setup your dev environment. + +Create a new Edge Function called `wasm-add`. + +```bash +supabase functions new wasm-add +``` + +Create a new Cargo project for the Wasm module inside the function's directory: + +```bash +cd supabase/functions/wasm-add +cargo new --lib add-wasm +``` + +Add the following code to `add-wasm/src/lib.rs`. + +<$CodeSample +path="edge-functions/supabase/functions/wasm-modules/add-wasm/src/lib.rs" +lines={[[1, -1]]} +meta="lib.rs" +/> + +Update the `add-wasm/Cargo.toml` to include the `wasm-bindgen` dependency. 
+ +<$CodeSample +path="edge-functions/supabase/functions/wasm-modules/add-wasm/Cargo.toml" +lines={[[1, -1]]} +meta="Cargo.toml" +/> + +After that we can build the package by running: + +```bash +wasm-pack build --target deno +``` + +This will produce a Wasm binary file inside the `add-wasm/pkg` directory. + +### Calling the Wasm module from the Edge Function + +Now let's update the Edge Function to call `add` from the Wasm module. + +<$CodeSample +path="edge-functions/supabase/functions/wasm-modules/index.ts" +lines={[[1, -1]]} +meta="index.ts" +/> + + + Supabase Edge Functions currently use Deno 1.46. From [Deno 2.1, importing Wasm + modules](https://deno.com/blog/v2.1) will require even less boilerplate code. + + +### Bundle and deploy the Edge Function + +Before deploying the Edge Function, we need to ensure it bundles the Wasm module with it. We can do this by defining it in the `static_files` for the function in `supabase/config.toml`. + + + You will need to update Supabase CLI to 2.7.0 or higher for the `static_files` support. 
+ + +```toml +[functions.wasm-add] +static_files = [ "./functions/wasm-add/add-wasm/pkg/*"] +``` + +Deploy the function by running: + +```bash +supabase functions deploy wasm-add +``` diff --git a/apps/docs/content/guides/functions/websockets.mdx b/apps/docs/content/guides/functions/websockets.mdx index 0c0c82201e88e..14aaa6c116b6f 100644 --- a/apps/docs/content/guides/functions/websockets.mdx +++ b/apps/docs/content/guides/functions/websockets.mdx @@ -1,5 +1,5 @@ --- -id: 'function-websockets' +id: 'function-websockets' title: 'Handling WebSockets' description: 'How to handle WebSocket connections in Edge Functions' subtitle: 'How to handle WebSocket connections in Edge Functions' @@ -21,30 +21,30 @@ Here are some basic examples of setting up WebSocket servers using Deno and Node queryGroup="runtime" > -```ts - Deno.serve(req => { - const upgrade = req.headers.get("upgrade") || ""; - if (upgrade.toLowerCase() != "websocket") { - return new Response("request isn't trying to upgrade to websocket.", { status: 400 }); - } +```ts +Deno.serve((req) => { + const upgrade = req.headers.get('upgrade') || '' - const { socket, response } = Deno.upgradeWebSocket(req); + if (upgrade.toLowerCase() != 'websocket') { + return new Response("request isn't trying to upgrade to WebSocket.", { status: 400 }) + } - socket.onopen = () => console.log("socket opened"); - socket.onmessage = (e) => { - console.log("socket message:", e.data); - socket.send(new Date().toString()); - }; + const { socket, response } = Deno.upgradeWebSocket(req) - socket.onerror = e => console.log("socket errored:", e.message); - socket.onclose = () => console.log("socket closed"); + socket.onopen = () => console.log('socket opened') + socket.onmessage = (e) => { + console.log('socket message:', e.data) + socket.send(new Date().toString()) + } - return response; + socket.onerror = (e) => console.log('socket errored:', e.message) + socket.onclose = () => console.log('socket closed') -}); + return response +}) +``` 
-```` @@ -58,13 +58,13 @@ const server = createServer(); const wss = new WebSocketServer({ noServer: true }); wss.on("connection", ws => { - console.log("socket opened"); - ws.on("message", (data /** Buffer */, isBinary /** bool */) => { - if (isBinary) { - console.log("socket message:", data); - } else { - console.log("socket message:", data.toString()); - } +console.log("socket opened"); +ws.on("message", (data /** Buffer \*/, isBinary /** bool \*/) => { +if (isBinary) { +console.log("socket message:", data); +} else { +console.log("socket message:", data.toString()); +} ws.send(new Date().toString()); }); @@ -74,15 +74,17 @@ wss.on("connection", ws => { }); ws.on("close", () => console.log("socket closed")); + }); server.on("upgrade", (req, socket, head) => { - wss.handleUpgrade(req, socket, head, ws => { - wss.emit("connection", ws, req); - }); +wss.handleUpgrade(req, socket, head, ws => { +wss.emit("connection", ws, req); +}); }); server.listen(8080); + ```` @@ -121,7 +123,7 @@ To authenticate the user making WebSocket requests, you can pass the JWT in URL > ```ts - import { createClient } from "jsr:@supabase/supabase-js@2"; + import { createClient } from "npm:@supabase/supabase-js@2"; const supabase = createClient( Deno.env.get("SUPABASE_URL"), @@ -130,8 +132,8 @@ Deno.env.get("SUPABASE_SERVICE_ROLE_KEY"), Deno.serve(req => { const upgrade = req.headers.get("upgrade") || ""; - if (upgrade.toLowerCase() != "websocket") { - return new Response("request isn't trying to upgrade to websocket.", { status: 400 }); + if (upgrade.toLowerCase() != "websocket") { + return new Response("request isn't trying to upgrade to WebSocket.", { status: 400 }); } // Please be aware query params may be logged in some logging systems. 
@@ -167,38 +169,39 @@ return new Response("User is not authenticated", { status: 403 }); }); ```` + ```ts - import { createClient } from "jsr:@supabase/supabase-js@2"; + import { createClient } from "npm:@supabase/supabase-js@2"; const supabase = createClient( - Deno.env.get("SUPABASE_URL"), - Deno.env.get("SUPABASE_SERVICE_ROLE_KEY"), +Deno.env.get("SUPABASE_URL"), +Deno.env.get("SUPABASE_SERVICE_ROLE_KEY"), ); - Deno.serve(req => { - const upgrade = req.headers.get("upgrade") || ""; +Deno.serve(req => { +const upgrade = req.headers.get("upgrade") || ""; - if (upgrade.toLowerCase() != "websocket") { - return new Response("request isn't trying to upgrade to websocket.", { status: 400 }); + if (upgrade.toLowerCase() != "websocket") { + return new Response("request isn't trying to upgrade to WebSocket.", { status: 400 }); } - // Sec-WebScoket-Protocol may return multiple protocol values `jwt-TOKEN, value1, value 2` - const customProtocols = (req.headers.get("Sec-WebSocket-Protocol") ?? '').split(',').map(p => p.trim()) - const jwt = customProtocols.find(p => p.startsWith('jwt')).replace('jwt-', '') - if (!jwt) { - console.error("Auth token not provided"); - return new Response("Auth token not provided", { status: 403 }); - } - const { error, data } = await supabase.auth.getUser(jwt); - if (error) { - console.error(error); - return new Response("Invalid token provided", { status: 403 }); - } - if (!data.user) { - console.error("user is not authenticated"); - return new Response("User is not authenticated", { status: 403 }); - } +// Sec-WebSocket-Protocol may return multiple protocol values `jwt-TOKEN, value1, value 2` +const customProtocols = (req.headers.get("Sec-WebSocket-Protocol") ?? 
'').split(',').map(p => p.trim()) +const jwt = customProtocols.find(p => p.startsWith('jwt')).replace('jwt-', '') +if (!jwt) { +console.error("Auth token not provided"); +return new Response("Auth token not provided", { status: 403 }); +} +const { error, data } = await supabase.auth.getUser(jwt); +if (error) { +console.error(error); +return new Response("Invalid token provided", { status: 403 }); +} +if (!data.user) { +console.error("user is not authenticated"); +return new Response("User is not authenticated", { status: 403 }); +} const { socket, response } = Deno.upgradeWebSocket(req); @@ -212,7 +215,9 @@ const supabase = createClient( socket.onclose = () => console.log("socket closed"); return response; - }); + +}); + ```` @@ -231,6 +236,6 @@ To prevent that, you can update the `supabase/config.toml` with the following se ```toml [edge_runtime] policy = "per_worker" -``` +```` When running with `per_worker` policy, Function won't auto-reload on edits. You will need to manually restart it by running `supabase functions serve`. 
diff --git a/apps/docs/content/guides/getting-started.mdx b/apps/docs/content/guides/getting-started.mdx index bc458a709d784..3e50b8ff57ba6 100644 --- a/apps/docs/content/guides/getting-started.mdx +++ b/apps/docs/content/guides/getting-started.mdx @@ -111,12 +111,19 @@ hideToc: true hasLightIcon: true, }, { - title: 'NuxtJS', + title: 'Nuxt', href: '/guides/getting-started/quickstarts/nuxtjs', description: - 'Learn how to create a Supabase project, add some sample data to your database, and query the data from a NuxtJS app.', + 'Learn how to create a Supabase project, add some sample data to your database, and query the data from a Nuxt app.', icon: '/docs/img/icons/nuxt-icon', }, + { + title: 'Hono', + href: '/guides/getting-started/quickstarts/hono', + description: + 'Learn how to create a Supabase project, add some sample data to your database, secure it with auth, and query the data from a Hono app.', + icon: '/docs/img/icons/hono-icon', + }, { title: 'RedwoodJS', href: '/guides/getting-started/quickstarts/redwoodjs', diff --git a/apps/docs/content/guides/getting-started/ai-prompts.mdx b/apps/docs/content/guides/getting-started/ai-prompts.mdx index 412364c2bfb62..706a56033ba48 100644 --- a/apps/docs/content/guides/getting-started/ai-prompts.mdx +++ b/apps/docs/content/guides/getting-started/ai-prompts.mdx @@ -3,13 +3,13 @@ title: AI Prompts subtitle: Prompts for working with Supabase using AI-powered IDE tools --- -We've curated a selection of prompts to help you work with Supabase using your favorite AI-powered IDE tools, such as GitHub Copilot or Cursor. +We've curated a selection of prompts to help you work with Supabase using your favorite AI-powered IDE tools, such as Cursor or GitHub Copilot. ## How to use Copy the prompt to a file in your repo. -Use the "include file" feature from your AI tool to include the prompt when chatting with your AI assistant. For example, with GitHub Copilot, use `#`, in Cursor, use `@Files`, and in Zed, use `/file`. 
+Use the "include file" feature from your AI tool to include the prompt when chatting with your AI assistant. For example, in Cursor, add them as [project rules](https://docs.cursor.com/context/rules-for-ai#project-rules-recommended), with GitHub Copilot, use `#`, and in Zed, use `/file`. ## Prompts diff --git a/apps/docs/content/guides/getting-started/architecture.mdx b/apps/docs/content/guides/getting-started/architecture.mdx index 531cce08e214c..970431df96ad3 100644 --- a/apps/docs/content/guides/getting-started/architecture.mdx +++ b/apps/docs/content/guides/getting-started/architecture.mdx @@ -1,7 +1,7 @@ --- -id: 'architecture' title: 'Architecture' description: 'Supabase design and architecture' +tocVideo: 'T-qAtAKjqwc' --- Supabase is open source. We choose open source tools which are scalable and make them simple to use. @@ -27,14 +27,14 @@ Each Supabase project consists of several tools: }} /> -### PostgreSQL (database) +### Postgres (database) -PostgreSQL is the core of Supabase. We do not abstract the PostgreSQL database—you can access it and use it with full privileges. We simply provide tools which makes PostgreSQL as easy to use as Firebase. +Postgres is the core of Supabase. We do not abstract the Postgres database—you can access it and use it with full privileges. We provide tools which makes Postgres as easy to use as Firebase. - Official Docs: [postgresql.org/docs](https://www.postgresql.org/docs/current/index.html) - Source code: [github.com/postgres/postgres](https://github.com/postgres/postgres) (mirror) -- License: [PostgreSQL License](https://www.postgresql.org/about/licence/) -- Language: C + {/* supa-mdx-lint-disable-next-line Rule004ExcludeWords */} +- License: [PostgreSQL License](https://www.postgresql.org/about/licence/)- Language: C ### Studio (dashboard) @@ -56,7 +56,7 @@ A JWT-based API for managing users and issuing access tokens. 
This integrates wi ### PostgREST (API) -A standalone web server that turns your PostgreSQL database directly into a RESTful API. +A standalone web server that turns your Postgres database directly into a RESTful API. We use this with our [`pg_graphql`](https://github.com/supabase/pg_graphql) extension to provide a GraphQL API. - Official Docs: [postgrest.org](https://postgrest.org/) @@ -66,7 +66,7 @@ We use this with our [`pg_graphql`](https://github.com/supabase/pg_graphql) exte ### Realtime (API & multiplayer) -A scalable websocket engine for managing user Presence, broadcasting messages, and streaming database changes. +A scalable WebSocket engine for managing user Presence, broadcasting messages, and streaming database changes. - Official Docs: [Supabase Realtime docs](/docs/guides/realtime) - Source code: [github.com/supabase/realtime](https://github.com/supabase/realtime) @@ -80,7 +80,7 @@ An S3-compatible object storage service that stores metadata in Postgres. - Official Docs: [Supabase Storage reference docs](/docs/reference/storage) - Source code: [github.com/supabase/storage-api](https://github.com/supabase/storage-api) - License: [Apache 2.0](https://github.com/supabase/storage-api/blob/master/LICENSE) -- Language: NodeJS / TypeScript +- Language: Node.js / TypeScript ### Deno (Edge Functions) @@ -98,7 +98,7 @@ A RESTful API for managing your Postgres. 
Fetch tables, add roles, and run queri - Official Docs: [supabase.github.io/postgres-meta](https://supabase.github.io/postgres-meta/) - Source code: [github.com/supabase/postgres-meta](https://github.com/supabase/postgres-meta) - License: [Apache 2.0](https://github.com/supabase/postgres-meta/blob/master/LICENSE) -- Language: NodeJS / TypeScript +- Language: Node.js / TypeScript ### Supavisor diff --git a/apps/docs/content/guides/getting-started/features.mdx b/apps/docs/content/guides/getting-started/features.mdx index fdc5bfda95445..80a3a93924574 100644 --- a/apps/docs/content/guides/getting-started/features.mdx +++ b/apps/docs/content/guides/getting-started/features.mdx @@ -62,10 +62,6 @@ Manage Supabase infrastructure via Terraform, an Infrastructure as Code tool. [D Deploy read-only databases across multiple regions, for lower latency and better resource management. [Docs](/docs/guides/platform/read-replicas). -### Fly Postgres - -Launch the Supabase stack on Fly.io edge network. [Docs](/docs/guides/platform/fly-postgres). - ### Log drains Export Supabase logs at to 3rd party providers and external tooling. [Docs](/docs/guides/platform/log-drains). @@ -82,11 +78,11 @@ Login to the Supabase dashboard via SSO. [Docs](/docs/guides/platform/sso). ### Postgres changes -Receive your database changes through websockets. [Docs](/docs/guides/realtime/postgres-changes). +Receive your database changes through WebSockets. [Docs](/docs/guides/realtime/postgres-changes). ### Broadcast -Send messages between connected users through websockets. [Docs](/docs/guides/realtime/broadcast). +Send messages between connected users through WebSockets. [Docs](/docs/guides/realtime/broadcast). ### Presence @@ -114,9 +110,9 @@ Build passwordless logins via magic links for your application or website. [Docs Control the data each user can access with Postgres Policies. [Docs](/docs/guides/database/postgres/row-level-security). 
-### Captcha protection +### CAPTCHA protection -Add Captcha to your sign-in, sign-up, and password reset forms. [Docs](/docs/guides/auth/auth-captcha). +Add CAPTCHA to your sign-in, sign-up, and password reset forms. [Docs](/docs/guides/auth/auth-captcha). ### Server-Side Auth @@ -215,7 +211,6 @@ In addition to the Beta requirements, features in GA are covered by the [uptime | Platform | Branching | `public alpha` | N/A | | Platform | Terraform Provider | `public alpha` | N/A | | Platform | Read Replicas | `private alpha` | N/A | -| Platform | Fly Postgres | `private alpha` | N/A | | Platform | Log Drains | `public alpha` | ✅ | | Studio | | `GA` | ✅ | | Studio | SSO | `GA` | ✅ | @@ -240,7 +235,7 @@ In addition to the Beta requirements, features in GA are covered by the [uptime | Auth | Passwordless login | `GA` | ✅ | | Auth | SSO with SAML | `GA` | ✅ | | Auth | Authorization via RLS | `GA` | ✅ | -| Auth | Captcha protection | `GA` | ✅ | +| Auth | CAPTCHA protection | `GA` | ✅ | | Auth | Server-side Auth | `beta` | ✅ | | CLI | | `GA` | ✅ Works with self-hosted | | Management API | | `GA` | N/A | diff --git a/apps/docs/content/guides/getting-started/mcp.mdx b/apps/docs/content/guides/getting-started/mcp.mdx new file mode 100644 index 0000000000000..fa87883024167 --- /dev/null +++ b/apps/docs/content/guides/getting-started/mcp.mdx @@ -0,0 +1,157 @@ +--- +id: 'ai-tools-mcp' +title: 'Model context protocol (MCP)' +subtitle: 'Connect your AI tools to Supabase using MCP' +description: 'Connect your AI tools to Supabase using MCP' +sidebar_label: 'Model context protocol (MCP)' +--- + +The [Model Context Protocol](https://modelcontextprotocol.io/introduction) (MCP) is a standard for connecting Large Language Models (LLMs) to platforms like Supabase. 
This guide covers how to connect Supabase to the following AI tools using MCP: + +- [Cursor](#cursor) +- [Windsurf](#windsurf) (Codium) +- [Visual Studio Code](#visual-studio-code-copilot) (Copilot) +- [Cline](#cline) (VS Code extension) +- [Claude desktop](#claude-desktop) +- [Claude code](#claude-code) + +Once connected, your AI assistants can interact with and query your Supabase projects on your behalf. + +## Step 1: Create a personal access token (PAT) + +First, go to your [Supabase settings](https://supabase.com/dashboard/account/tokens) and create a personal access token. Give it a name that describes its purpose, like "Cursor MCP Server". This will be used to authenticate the MCP server with your Supabase account. + +## Step 2: Configure in your AI tool + +MCP compatible tools can connect to Supabase using the [Supabase MCP server](https://github.com/supabase-community/supabase-mcp). Below are instructions for connecting to this server using popular AI tools: + +### Cursor + +1. Open [Cursor](https://www.cursor.com/) and create a `.cursor` directory in your project root if it doesn't exist. +1. Create a `.cursor/mcp.json` file if it doesn't exist and open it. +1. Add the following configuration: + + <$Partial path="mcp_supabase_config.mdx" variables={{ "app": "Cursor" }} /> + +1. Save the configuration file. + +1. Open Cursor and navigate to **Settings/MCP**. You should see a green active status after the server is successfully connected. + +### Windsurf + +1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant. +1. Tap on the hammer (MCP) icon, then **Configure** to open the configuration file. +1. Add the following configuration: + + <$Partial path="mcp_supabase_config.mdx" variables={{ "app": "Windsurf" }} /> + +1. Save the configuration file and reload by tapping **Refresh** in the Cascade assistant. + +1. You should see a green active status after the server is successfully connected. 
+ +### Visual Studio Code (Copilot) + +
+ [![Install with NPX in VS + Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=supabase&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22supabase-access-token%22%2C%22description%22%3A%22Supabase%20personal%20access%20token%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40supabase%2Fmcp-server-supabase%40latest%22%5D%2C%22env%22%3A%7B%22SUPABASE_ACCESS_TOKEN%22%3A%22%24%7Binput%3Asupabase-access-token%7D%22%7D%7D) + [![Install with NPX in VS Code + Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=supabase&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22supabase-access-token%22%2C%22description%22%3A%22Supabase%20personal%20access%20token%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40supabase%2Fmcp-server-supabase%40latest%22%5D%2C%22env%22%3A%7B%22SUPABASE_ACCESS_TOKEN%22%3A%22%24%7Binput%3Asupabase-access-token%7D%22%7D%7D&quality=insiders) +
+ +1. Open [VS Code](https://code.visualstudio.com/) and create a `.vscode` directory in your project root if it doesn't exist. +1. Create a `.vscode/mcp.json` file if it doesn't exist and open it. +1. Add the following configuration: + + <$Partial path="mcp_supabase_vscode_config.mdx" /> + +1. Save the configuration file. +1. Open Copilot chat and switch to "Agent" mode. You should see a tool icon that you can tap to confirm the MCP tools are available. Once you begin using the server, you will be prompted to enter your personal access token. Enter the token that you created earlier. + +For more info on using MCP in VS Code, see the [Copilot documentation](https://code.visualstudio.com/docs/copilot/chat/mcp-servers). + +### Cline + +1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon. +1. Tap **Configure MCP Servers** to open the configuration file. +1. Add the following configuration: + + <$Partial path="mcp_supabase_config.mdx" variables={{ "app": "VS Code" }} /> + +1. Save the configuration file. Cline should automatically reload the configuration. + +1. You should see a green active status after the server is successfully connected. + +### Claude desktop + +1. Open [Claude desktop](https://claude.ai/download) and navigate to **Settings**. +1. Under the **Developer** tab, tap **Edit Config** to open the configuration file. +1. Add the following configuration: + + <$Partial path="mcp_supabase_config.mdx" variables={{ "app": "Claude desktop" }} /> + +1. Save the configuration file and restart Claude desktop. + +1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available. + +### Claude code + +You can add the Supabase MCP server to Claude Code in two ways: + +#### Option 1: Project-scoped server (via .mcp.json file) + +1. Create a `.mcp.json` file in your project root if it doesn't exist. +1. 
For a full list of tools available, see the [GitHub README](https://github.com/supabase-community/supabase-mcp#tools). If you experience any issues, [submit a bug report](https://github.com/supabase-community/supabase-mcp/issues/new?template=1.Bug_report.md).
Copy the `DB URL` field in the output. + +### Step 2: Configure the MCP server + +Configure your client with the following: + +<$Partial path="mcp_postgres_config.mdx" variables={{ "app": "your MCP client" }} /> + +### Next steps + +Your AI tool is now connected to your local Supabase instance using MCP. Try asking the AI tool to query your database using natural language commands. diff --git a/apps/docs/content/guides/getting-started/quickstarts/flutter.mdx b/apps/docs/content/guides/getting-started/quickstarts/flutter.mdx index c9ca79c5b34fa..ec4e0efc14af0 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/flutter.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/flutter.mdx @@ -8,8 +8,8 @@ hideToc: true - - + + <$Partial path="quickstart_db_setup.mdx" /> @@ -23,7 +23,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal flutter create my_app ``` @@ -43,7 +43,7 @@ hideToc: true - ```yaml pubspec.yaml + ```yaml name=pubspec.yaml supabase_flutter: ^2.0.0 ``` @@ -64,7 +64,7 @@ hideToc: true - ```dart lib/main.dart + ```dart name=lib/main.dart import 'package:supabase_flutter/supabase_flutter.dart'; Future main() async { @@ -94,14 +94,14 @@ hideToc: true - ```dart lib/main.dart + ```dart name=lib/main.dart class MyApp extends StatelessWidget { const MyApp({super.key}); @override Widget build(BuildContext context) { return const MaterialApp( - title: 'Countries', + title: 'Instruments', home: HomePage(), ); } @@ -116,7 +116,7 @@ hideToc: true class _HomePageState extends State { final _future = Supabase.instance.client - .from('countries') + .from('instruments') .select(); @override @@ -128,13 +128,13 @@ hideToc: true if (!snapshot.hasData) { return const Center(child: CircularProgressIndicator()); } - final countries = snapshot.data!; + final instruments = snapshot.data!; return ListView.builder( - itemCount: countries.length, + itemCount: instruments.length, itemBuilder: ((context, index) { - final country = countries[index]; + 
final instrument = instruments[index]; return ListTile( - title: Text(country['name']), + title: Text(instrument['name']), ); }), ); @@ -155,13 +155,13 @@ hideToc: true Run your app on a platform of your choosing! By default an app should launch in your web browser. Note that `supabase_flutter` is compatible with web, iOS, Android, macOS, and Windows apps. - Running the app on MacOS requires additional configuration to [set the entitlements](https://docs.flutter.dev/development/platform-integration/macos/building#setting-up-entitlements). + Running the app on macOS requires additional configuration to [set the entitlements](https://docs.flutter.dev/development/platform-integration/macos/building#setting-up-entitlements). - ```bash Terminal + ```bash name=Terminal flutter run ``` @@ -171,6 +171,10 @@ hideToc: true +## Setup deep links + +Many sign in methods require deep links to redirect the user back to your app after authentication. Read more about setting deep links up for all platforms (including web) in the [Flutter Mobile Guide](/docs/guides/getting-started/tutorials/with-flutter#setup-deep-links). + ## Going to production ### Android diff --git a/apps/docs/content/guides/getting-started/quickstarts/hono.mdx b/apps/docs/content/guides/getting-started/quickstarts/hono.mdx new file mode 100644 index 0000000000000..d85823d199822 --- /dev/null +++ b/apps/docs/content/guides/getting-started/quickstarts/hono.mdx @@ -0,0 +1,91 @@ +--- +title: 'Use Supabase with Hono' +subtitle: 'Learn how to create a Supabase project, add some sample data to your database, secure it with auth, and query the data from a Hono app.' +breadcrumb: 'Framework Quickstarts' +hideToc: true +--- + + + + + + + + Bootstrap the Hono example app from the Supabase Samples using the CLI. 
+ + + + + + ```bash name=Terminal + npx supabase@latest bootstrap hono + ``` + + + + + + + + + The `package.json` file in the project includes the necessary dependencies, including `@supabase/supabase-js` and `@supabase/ssr` to help with server-side auth. + + + + + + ```bash name=Terminal + npm install + ``` + + + + + + + + + Copy the `.env.example` file to `.env` and update the values with your Supabase project URL and anon key. + + Lastly, [enable anonymous sign-ins](https://supabase.com/dashboard/project/_/settings/auth) in the Auth settings. + + + + + + + + + ```bash name=Terminal + cp .env.example .env + ``` + + + + + + + + + Start the app, go to http://localhost:5173. + + Learn how [server side auth](/docs/guides/auth/server-side/creating-a-client?queryGroups=framework&framework=hono) works with Hono. + + + + + + ```bash name=Terminal + npm run dev + ``` + + + + + + +## Next steps + +- Learn how [server side auth](/docs/guides/auth/server-side/creating-a-client?queryGroups=framework&framework=hono) works with Hono. 
Create a new `Supabase.swift` file and add a new Supabase instance using your project URL and public API (anon) key:
```swift name=Instrument.swift
@@ -112,7 +112,7 @@ hideToc: true ```kotlin @Serializable - data class Country( + data class Instrument( val id: Int, val name: String, ) @@ -144,7 +144,7 @@ hideToc: true modifier = Modifier.fillMaxSize(), color = MaterialTheme.colorScheme.background ) { - CountriesList() + InstrumentsList() } } } @@ -152,21 +152,21 @@ hideToc: true } @Composable - fun CountriesList() { - var countries by remember { mutableStateOf>(listOf()) } + fun InstrumentsList() { + var instruments by remember { mutableStateOf>(listOf()) } LaunchedEffect(Unit) { withContext(Dispatchers.IO) { - countries = supabase.from("countries") - .select().decodeList() + instruments = supabase.from("instruments") + .select().decodeList() } } LazyColumn { items( - countries, - key = { country -> country.id }, - ) { country -> + instruments, + key = { instrument -> instrument.id }, + ) { instrument -> Text( - country.name, + instrument.name, modifier = Modifier.padding(8.dp), ) } diff --git a/apps/docs/content/guides/getting-started/quickstarts/laravel.mdx b/apps/docs/content/guides/getting-started/quickstarts/laravel.mdx index 3e0f3fd6875cf..134f593cf46b0 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/laravel.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/laravel.mdx @@ -18,7 +18,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal composer create-project laravel/laravel example-app ``` @@ -35,7 +35,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal composer require laravel/breeze --dev php artisan breeze:install ``` @@ -49,21 +49,21 @@ hideToc: true Go to [database.new](https://database.new) and create a new Supabase project. Save your database password securely. - When your project is up and running, navigate to the [database settings](https://supabase.com/dashboard/project/_/settings/database) to find the URI connection string. Make sure **Use connection pooling** is checked and **Session mode** is selected. Then copy the URI. 
You can change the schema of your Laravel application by modifying the `search_path` variable in `app/config/database.php`.
+ - ```php app/config/database.php + ```php name=app/config/database.php 'pgsql' => [ 'driver' => 'pgsql', - 'url' => env('DATABASE_URL'), + 'url' => env('DB_URL'), 'host' => env('DB_HOST', '127.0.0.1'), 'port' => env('DB_PORT', '5432'), - 'database' => env('DB_DATABASE', 'forge'), - 'username' => env('DB_USERNAME', 'forge'), + 'database' => env('DB_DATABASE', 'laravel'), + 'username' => env('DB_USERNAME', 'root'), 'password' => env('DB_PASSWORD', ''), - 'charset' => 'utf8', + 'charset' => env('DB_CHARSET', 'utf8'), 'prefix' => '', 'prefix_indexes' => true, 'search_path' => 'laravel', @@ -113,7 +115,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal php artisan migrate ``` @@ -130,7 +132,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal php artisan serve ``` diff --git a/apps/docs/content/guides/getting-started/quickstarts/nextjs.mdx b/apps/docs/content/guides/getting-started/quickstarts/nextjs.mdx index 88b705e3f90cd..a6633706faa62 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/nextjs.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/nextjs.mdx @@ -9,7 +9,7 @@ hideToc: true - + <$Partial path="quickstart_db_setup.mdx" /> @@ -26,7 +26,7 @@ hideToc: true - ```bash Terminal + ```bash npx create-next-app -e with-supabase ``` @@ -46,10 +46,14 @@ hideToc: true - ```text .env.local - NEXT_PUBLIC_SUPABASE_URL= - NEXT_PUBLIC_SUPABASE_ANON_KEY= - ``` + <$CodeTabs> + + ```text name=.env.local + NEXT_PUBLIC_SUPABASE_URL= + NEXT_PUBLIC_SUPABASE_ANON_KEY= + ``` + + @@ -58,60 +62,60 @@ hideToc: true - Create a new file at `app/countries/page.tsx` and populate with the following. + Create a new file at `app/instruments/page.tsx` and populate with the following. - This will select all the rows from the `countries` table in Supabase and render them on the page. + This will select all the rows from the `instruments` table in Supabase and render them on the page. 
- + <$CodeTabs> - ```ts app/countries/page.tsx - import { createClient } from '@/utils/supabase/server'; + ```ts name=app/instruments/page.tsx + import { createClient } from '@/utils/supabase/server'; - export default async function Countries() { - const supabase = await createClient(); - const { data: countries } = await supabase.from("countries").select(); + export default async function Instruments() { + const supabase = await createClient(); + const { data: instruments } = await supabase.from("instruments").select(); - return
{JSON.stringify(countries, null, 2)}
- } + return
{JSON.stringify(instruments, null, 2)}
+ } ``` - ```ts utils/supabase/server.ts - import { createServerClient } from '@supabase/ssr' - import { cookies } from 'next/headers' - - export async function createClient() { - const cookieStore = await cookies() - - return createServerClient( - process.env.NEXT_PUBLIC_SUPABASE_URL!, - process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!, - { - cookies: { - getAll() { - return cookieStore.getAll() - }, - setAll(cookiesToSet) { - try { - cookiesToSet.forEach(({ name, value, options }) => - cookieStore.set(name, value, options) - ) - } catch { - // The `setAll` method was called from a Server Component. - // This can be ignored if you have middleware refreshing - // user sessions. - } - }, + ```ts name=utils/supabase/server.ts + import { createServerClient } from '@supabase/ssr' + import { cookies } from 'next/headers' + + export async function createClient() { + const cookieStore = await cookies() + + return createServerClient( + process.env.NEXT_PUBLIC_SUPABASE_URL!, + process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!, + { + cookies: { + getAll() { + return cookieStore.getAll() + }, + setAll(cookiesToSet) { + try { + cookiesToSet.forEach(({ name, value, options }) => + cookieStore.set(name, value, options) + ) + } catch { + // The `setAll` method was called from a Server Component. + // This can be ignored if you have middleware refreshing + // user sessions. + } }, - } - ) - } + }, + } + ) + } ``` -
+
@@ -120,7 +124,7 @@ hideToc: true - Run the development server, go to http://localhost:3000/countries in a browser and you should see the list of countries. + Run the development server, go to http://localhost:3000/instruments in a browser and you should see the list of instruments. diff --git a/apps/docs/content/guides/getting-started/quickstarts/nuxtjs.mdx b/apps/docs/content/guides/getting-started/quickstarts/nuxtjs.mdx index 4de7d70237279..bfc6a12f4ccac 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/nuxtjs.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/nuxtjs.mdx @@ -1,6 +1,6 @@ --- -title: 'Use Supabase with NuxtJS' -subtitle: 'Learn how to create a Supabase project, add some sample data to your database, and query the data from a NuxtJS app.' +title: 'Use Supabase with Nuxt' +subtitle: 'Learn how to create a Supabase project, add some sample data to your database, and query the data from a Nuxt app.' breadcrumb: 'Framework Quickstarts' hideToc: true --- @@ -8,22 +8,22 @@ hideToc: true - - + + <$Partial path="quickstart_db_setup.mdx" /> - + - Create a Nuxt.js app using the `npx nuxi` command. + Create a Nuxt app using the `npx nuxi` command. - ```bash Terminal + ```bash name=Terminal npx nuxi@latest init my-app ``` @@ -34,15 +34,15 @@ hideToc: true - The fastest way to get started is to use the `supabase-js` client library which provides a convenient interface for working with Supabase from a NuxtJS app. + The fastest way to get started is to use the `supabase-js` client library which provides a convenient interface for working with Supabase from a Nuxt app. - Navigate to the NuxtJS app and install `supabase-js`. + Navigate to the Nuxt app and install `supabase-js`. 
```ts name=nuxt.config.ts
- ```bash Terminal + ```bash name=Terminal npm run dev ``` diff --git a/apps/docs/content/guides/getting-started/quickstarts/reactjs.mdx b/apps/docs/content/guides/getting-started/quickstarts/reactjs.mdx index dfebf4a9fe8d1..50a5722cd6544 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/reactjs.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/reactjs.mdx @@ -8,8 +8,8 @@ hideToc: true - - + + <$Partial path="quickstart_db_setup.mdx" /> @@ -23,7 +23,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal npm create vite@latest my-app -- --template react ``` @@ -42,7 +42,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal cd my-app && npm install @supabase/supabase-js ``` @@ -51,63 +51,82 @@ hideToc: true - + - In `App.jsx`, create a Supabase client using your project URL and public API (anon) key: + Create a `.env.local` file and populate with your Supabase connection variables: - Add a `getCountries` function to fetch the data and display the query result to the page. - + + <$CodeTabs> + + ```text name=.env.local + VITE_SUPABASE_URL= + VITE_SUPABASE_ANON_KEY= + ``` + + + + + + - ```js src/App.jsx - import { useEffect, useState } from "react"; - import { createClient } from "@supabase/supabase-js"; + + - const supabase = createClient("https://.supabase.co", ""); + In `App.jsx`, add a `getInstruments` function to fetch the data and display the query result to the page using a Supabase client. - function App() { - const [countries, setCountries] = useState([]); + + - useEffect(() => { - getCountries(); - }, []); + ```js name=src/App.jsx + import { useEffect, useState } from "react"; + import { createClient } from "@supabase/supabase-js"; - async function getCountries() { - const { data } = await supabase.from("countries").select(); - setCountries(data); - } + const supabase = createClient(import.meta.env.VITE_SUPABASE_URL, import.meta.env.VITE_SUPABASE_ANON_KEY); - return ( -
    - {countries.map((country) => ( -
  • {country.name}
  • - ))} -
- ); + function App() { + const [instruments, setInstruments] = useState([]); + + useEffect(() => { + getInstruments(); + }, []); + + async function getInstruments() { + const { data } = await supabase.from("instruments").select(); + setInstruments(data); } - export default App; + return ( +
    + {instruments.map((instrument) => ( +
  • {instrument.name}
  • + ))} +
+ ); + } + + export default App; ```
- + - Start the app, go to http://localhost:5173 in a browser, and open the browser console and you should see the list of countries. + Start the app, go to http://localhost:5173 in a browser, and open the browser console and you should see the list of instruments. - ```bash Terminal + ```bash name=Terminal npm run dev ``` diff --git a/apps/docs/content/guides/getting-started/quickstarts/redwoodjs.mdx b/apps/docs/content/guides/getting-started/quickstarts/redwoodjs.mdx index 025a22e5ba296..4d88e8989d719 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/redwoodjs.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/redwoodjs.mdx @@ -11,7 +11,7 @@ hideToc: true [Create a new project](https://supabase.com/dashboard) in the Supabase Dashboard. - + Be sure to make note of the Database Password you used as you will need this later to connect to your database. @@ -64,7 +64,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal yarn create redwood-app my-app --ts ``` @@ -77,7 +77,7 @@ hideToc: true
- ```bash Terminal + ```bash name=Terminal cd my-app code . ``` @@ -97,7 +97,7 @@ hideToc: true
- ```bash .env + ```bash name=.env # Transaction mode connection string used for migrations DATABASE_URL="postgres://postgres.[project-ref]:[db-password]@xxx.pooler.supabase.com:6543/postgres?pgbouncer=true&connection_limit=1" @@ -110,13 +110,13 @@ hideToc: true - By default, RedwoodJS ships with a SQLite database, but we want to use PostgreSQL. + By default, RedwoodJS ships with a SQLite database, but we want to use Postgres. - Update your Prisma schema file `api/db/schema.prisma` to use your Supabase PostgreSQL database connection environment variables you setup in Step 5. + Update your Prisma schema file `api/db/schema.prisma` to use your Supabase Postgres database connection environment variables you setup in Step 5. - ```prisma api/db/schema.prisma + ```prisma name=api/db/schema.prisma datasource db { provider = "postgresql" url = env("DATABASE_URL") @@ -128,13 +128,13 @@ hideToc: true - - Create the Country model in `api/db/schema.prisma` and then run `yarn rw prisma migrate dev` from your terminal to apply the migration. + + Create the Instrument model in `api/db/schema.prisma` and then run `yarn rw prisma migrate dev` from your terminal to apply the migration. - ```prisma api/db/schema.prisma - model Country { + ```prisma name=api/db/schema.prisma + model Instrument { id Int @id @default(autoincrement()) name String @unique } @@ -145,30 +145,30 @@ hideToc: true - Let's seed the database with a few countries. + Let's seed the database with a few instruments. 
Click on `/instruments` to visit http://localhost:8910/instruments where you should see the list of instruments.

You may now edit, delete, and add new instruments using the scaffolded UI.
diff --git a/apps/docs/content/guides/getting-started/quickstarts/refine.mdx b/apps/docs/content/guides/getting-started/quickstarts/refine.mdx index b039f19a5cb54..da4656c9f4276 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/refine.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/refine.mdx @@ -8,8 +8,8 @@ hideToc: true - - + + <$Partial path="quickstart_db_setup.mdx" /> @@ -25,7 +25,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal npm create refine-app@latest -- --preset refine-supabase my-app ``` @@ -42,7 +42,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal cd my-app code . ``` @@ -60,7 +60,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal npm run dev ``` @@ -84,7 +84,7 @@ hideToc: true - ```ts src/utility/supabaseClient.ts + ```ts name=src/utility/supabaseClient.ts import { createClient } from "@refinedev/supabase"; const SUPABASE_URL = YOUR_SUPABASE_URL; @@ -105,13 +105,13 @@ hideToc: true
- + - You have to then configure resources and define pages for `countries` resource. + You have to then configure resources and define pages for `instruments` resource. - Use the following command to automatically add resources and generate code for pages for `countries` using refine Inferencer. + Use the following command to automatically add resources and generate code for pages for `instruments` using refine Inferencer. - This defines pages for `list`, `create`, `show` and `edit` actions inside the `src/pages/countries/` directory with `` component. + This defines pages for `list`, `create`, `show` and `edit` actions inside the `src/pages/instruments/` directory with `` component. The `` component depends on `@refinedev/react-table` and `@refinedev/react-hook-form` packages. In order to avoid errors, you should install them as dependencies with `npm install @refinedev/react-table @refinedev/react-hook-form`. @@ -128,19 +128,19 @@ hideToc: true - ```bash Terminal - npm run refine create-resource countries + ```bash name=Terminal + npm run refine create-resource instruments ``` - + Add routes for the `list`, `create`, `show`, and `edit` pages. - + You should remove the `index` route for the Welcome page presented with the `` component. 
@@ -149,7 +149,7 @@ hideToc: true - ```tsx src/App.tsx + ```tsx name=src/App.tsx import { Refine, WelcomePage } from "@refinedev/core"; import { RefineKbar, RefineKbarProvider } from "@refinedev/kbar"; import routerBindings, { @@ -163,7 +163,7 @@ hideToc: true import "./App.css"; import authProvider from "./authProvider"; import { supabaseClient } from "./utility"; - import { CountriesCreate, CountriesEdit, CountriesList, CountriesShow } from "./pages/countries"; + import { InstrumentsCreate, InstrumentsEdit, InstrumentsList, InstrumentsShow } from "./pages/instruments"; function App() { return ( @@ -179,21 +179,21 @@ hideToc: true warnWhenUnsavedChanges: true, }} resources={[{ - name: "countries", - list: "/countries", - create: "/countries/create", - edit: "/countries/edit/:id", - show: "/countries/show/:id" + name: "instruments", + list: "/instruments", + create: "/instruments/create", + edit: "/instruments/edit/:id", + show: "/instruments/show/:id" }]}> } + element={} /> - - } /> - } /> - } /> - } /> + + } /> + } /> + } /> + } /> @@ -212,16 +212,12 @@ hideToc: true - + - Now you should be able to see the countries pages along the `/countries` routes. You may now edit and add new countries using the Inferencer generated UI. + Now you should be able to see the instruments pages along the `/instruments` routes. You may now edit and add new instruments using the Inferencer generated UI. The Inferencer auto-generated code gives you a good starting point on which to keep building your `list`, `create`, `show` and `edit` pages. They can be obtained by clicking the `Show the auto-generated code` buttons in their respective pages. - - ![refine List Page](/docs/img/refine-qs-countries-ui.png) - -
diff --git a/apps/docs/content/guides/getting-started/quickstarts/ruby-on-rails.mdx b/apps/docs/content/guides/getting-started/quickstarts/ruby-on-rails.mdx index 29da0803dad87..482a1ddcd492b 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/ruby-on-rails.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/ruby-on-rails.mdx @@ -18,7 +18,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal rails new blog -d=postgresql ``` @@ -31,19 +31,19 @@ hideToc: true Go to [database.new](https://database.new) and create a new Supabase project. Save your database password securely. - When your project is up and running, navigate to the [database settings](https://supabase.com/dashboard/project/_/settings/database) to find the URI connection string. Make sure **Use connection pooling** is checked and **Session mode** is selected. Then copy the URI. Replace the password placeholder with your saved database password. + When your project is up and running, navigate to your project dashboard and click on [Connect](https://supabase.com/dashboard/project/_?showConnect=true). - + Look for the Session Pooler connection string and copy the string. You will need to replace the Password with your saved database password. You can reset your database password in your [database settings](https://supabase.com/dashboard/project/_/settings/database) if you do not have it. - If your network supports IPv6 connections, you can also use the direct connection string. Uncheck **Use connection pooling** and copy the new URI. - - + + If you're in an [IPv6 environment](https://github.com/orgs/supabase/discussions/27034) or have the IPv4 Add-On, you can use the direct connection string instead of Supavisor in Session mode. +
- ```bash Terminal + ```bash name=Terminal export DATABASE_URL=postgres://postgres.xxxx:password@xxxx.pooler.supabase.com:5432/postgres ``` @@ -62,7 +62,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal bin/rails generate model Article title:string body:text bin/rails db:migrate ``` @@ -80,11 +80,11 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal bin/rails console ``` - ```rb irb + ```rb name=irb article = Article.new(title: "Hello Rails", body: "I am on Rails!") article.save # Saves the entry to the database @@ -104,7 +104,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal bin/rails server ``` diff --git a/apps/docs/content/guides/getting-started/quickstarts/solidjs.mdx b/apps/docs/content/guides/getting-started/quickstarts/solidjs.mdx index f5c13f9f771c6..88fb3063b58e2 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/solidjs.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/solidjs.mdx @@ -8,8 +8,8 @@ hideToc: true - - + + <$Partial path="quickstart_db_setup.mdx" /> @@ -23,7 +23,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal npx degit solidjs/templates/js my-app ``` @@ -42,7 +42,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal cd my-app && npm install @supabase/supabase-js ``` @@ -51,57 +51,79 @@ hideToc: true
- + - In `App.jsx`, create a Supabase client using your project URL and public API (anon) key: + Create a `.env.local` file and populate with your Supabase connection variables: - Add a `getCountries` function to fetch the data and display the query result to the page. + + + + + <$CodeTabs> + + ```text name=.env.local + VITE_SUPABASE_URL= + VITE_SUPABASE_ANON_KEY= + ``` + + + + + + + + + + + In `App.jsx`, create a Supabase client to fetch the instruments data. + + Add a `getInstruments` function to fetch the data and display the query result to the page. - ```jsx src/App.jsx - import { createClient } from "@supabase/supabase-js"; - import { createResource, For } from "solid-js"; + ```jsx name=src/App.jsx + import { createClient } from "@supabase/supabase-js"; + import { createResource, For } from "solid-js"; - const supabase = createClient('https://.supabase.co', ''); + const supabase = createClient('https://.supabase.co', ''); - async function getCountries() { - const { data } = await supabase.from("countries").select(); - return data; - } + async function getInstruments() { + const { data } = await supabase.from("instruments").select(); + return data; + } - function App() { - const [countries] = createResource(getCountries); + function App() { + const [instruments] = createResource(getInstruments); - return ( -
    - {(country) =>
  • {country.name}
  • }
    -
- ); - } + return ( +
    + {(instrument) =>
  • {instrument.name}
  • }
    +
+ ); + } - export default App; + export default App; ```
- + - Start the app and go to http://localhost:3000 in a browser and you should see the list of countries. + Start the app and go to http://localhost:3000 in a browser and you should see the list of instruments. - ```bash Terminal + ```bash name=Terminal npm run dev ``` diff --git a/apps/docs/content/guides/getting-started/quickstarts/sveltekit.mdx b/apps/docs/content/guides/getting-started/quickstarts/sveltekit.mdx index c78cc43a9b15c..0dcbef0201c25 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/sveltekit.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/sveltekit.mdx @@ -8,8 +8,8 @@ hideToc: true - - + + <$Partial path="quickstart_db_setup.mdx" /> @@ -23,7 +23,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal npx sv create my-app ``` @@ -42,7 +42,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal cd my-app && npm install @supabase/supabase-js ``` @@ -62,7 +62,7 @@ hideToc: true - ```js src/lib/supabaseClient.js + ```js name=src/lib/supabaseClient.js import { createClient } from '@supabase/supabase-js' export const supabase = createClient('https://.supabase.co', '') @@ -83,13 +83,13 @@ hideToc: true - ```js src/routes/+page.server.js + ```js name=src/routes/+page.server.js import { supabase } from "$lib/supabaseClient"; export async function load() { - const { data } = await supabase.from("countries").select(); + const { data } = await supabase.from("instruments").select(); return { - countries: data ?? [], + instruments: data ?? [], }; } ``` @@ -104,14 +104,14 @@ hideToc: true - ```svelte src/routes/+page.svelte + ```svelte name=src/routes/+page.svelte
    - {#each data.countries as country} -
  • {country.name}
  • + {#each data.instruments as instrument} +
  • {instrument.name}
  • {/each}
``` @@ -123,13 +123,13 @@ hideToc: true - Start the app and go to http://localhost:5173 in a browser and you should see the list of countries. + Start the app and go to http://localhost:5173 in a browser and you should see the list of instruments. - ```bash Terminal + ```bash name=Terminal npm run dev ``` diff --git a/apps/docs/content/guides/getting-started/quickstarts/vue.mdx b/apps/docs/content/guides/getting-started/quickstarts/vue.mdx index 4d7443dd468e8..83b5b62d9590c 100644 --- a/apps/docs/content/guides/getting-started/quickstarts/vue.mdx +++ b/apps/docs/content/guides/getting-started/quickstarts/vue.mdx @@ -8,8 +8,8 @@ hideToc: true - - + + <$Partial path="quickstart_db_setup.mdx" /> @@ -23,7 +23,7 @@ hideToc: true - ```sh Terminal + ```sh name=Terminal npm init vue@latest my-app ``` @@ -42,7 +42,7 @@ hideToc: true - ```bash Terminal + ```bash name=Terminal cd my-app && npm install @supabase/supabase-js ``` @@ -50,10 +50,10 @@ hideToc: true - - + + - Create a `/src/lib` directory in your Vue app, create a file called `supabaseClient.js` and add the following code to initialize the Supabase client with your project URL and public API (anon) key: + Create a `.env.local` file and populate with your Supabase connection variables: @@ -62,17 +62,42 @@ hideToc: true - ```js src/lib/supabaseClient.js - import { createClient } from '@supabase/supabase-js' + <$CodeTabs> - export const supabase = createClient('https://.supabase.co', '') - ``` + ```text name=.env.local + VITE_SUPABASE_URL= + VITE_SUPABASE_ANON_KEY= + ``` + + + + + Create a `/src/lib` directory in your Vue app, create a file called `supabaseClient.js` and add the following code to initialize the Supabase client: + + + + + + ```js name=src/lib/supabaseClient.js + import { createClient } from '@supabase/supabase-js' + + const supabaseUrl = import.meta.env.VITE_SUPABASE_URL + const supabaseAnonKey = import.meta.env.VITE_SUPABASE_ANON_KEY + + export const supabase = createClient(supabaseUrl, 
supabaseAnonKey) + ``` + + + + + + Replace the existing content in your `App.vue` file with the following code. @@ -81,44 +106,44 @@ hideToc: true - ```vue src/App.vue - + onMounted(() => { + getInstruments() + }) + - + ``` - + - Start the app and go to http://localhost:5173 in a browser and you should see the list of countries. + Start the app and go to http://localhost:5173 in a browser and you should see the list of instruments. - ```bash Terminal + ```bash name=Terminal npm run dev ``` diff --git a/apps/docs/content/guides/getting-started/tutorials/with-angular.mdx b/apps/docs/content/guides/getting-started/tutorials/with-angular.mdx index da7aa2b7d1c51..384950af5b960 100644 --- a/apps/docs/content/guides/getting-started/tutorials/with-angular.mdx +++ b/apps/docs/content/guides/getting-started/tutorials/with-angular.mdx @@ -3,7 +3,7 @@ title: 'Build a User Management App with Angular' description: 'Learn how to use Supabase in your Angular App.' --- - +<$Partial path="quickstart_intro.mdx" /> ![Supabase User Management example](/docs/img/user-management-demo.png) @@ -13,7 +13,7 @@ If you get stuck while working through this guide, refer to the [full example on
- +<$Partial path="project_setup.mdx" /> ## Building the app @@ -39,7 +39,9 @@ And finally we want to save the environment variables in the `src/environments/e All we need are the API URL and the `anon` key that you copied [earlier](#get-the-api-keys). These variables will be exposed on the browser, and that's completely fine since we have [Row Level Security](/docs/guides/auth#row-level-security) enabled on our Database. -```ts src/environments/environment.ts +<$CodeTabs> + +```ts name=src/environments/environment.ts export const environment = { production: false, supabaseUrl: 'YOUR_SUPABASE_URL', @@ -47,9 +49,13 @@ export const environment = { } ``` -Now that we have the API credentials in place, let's create a **SupabaseService** with `ng g s supabase` to initialize the Supabase client and implement functions to communicate with the Supabase API. + + +Now that we have the API credentials in place, let's create a `SupabaseService` with `ng g s supabase` to initialize the Supabase client and implement functions to communicate with the Supabase API. + +<$CodeTabs> -```ts src/app/supabase.service.ts +```ts name=src/app/supabase.service.ts import { Injectable } from '@angular/core' import { AuthChangeEvent, @@ -125,14 +131,18 @@ export class SupabaseService { } ``` + + Optionally, update [src/styles.css](https://raw.githubusercontent.com/supabase/supabase/master/examples/user-management/angular-user-management/src/styles.css) to style the app. ### Set up a login component Let's set up an Angular component to manage logins and sign ups. We'll use Magic Links, so users can sign in with their email without using passwords. -Create an **AuthComponent** with `ng g c auth` Angular CLI command. +Create an `AuthComponent` with `ng g c auth` Angular CLI command. 
+ +<$CodeTabs> -```ts src/app/auth/auth.component.ts +```ts name=src/app/auth/auth.component.ts import { Component } from '@angular/core' import { FormBuilder } from '@angular/forms' import { SupabaseService } from '../supabase.service' @@ -173,7 +183,7 @@ export class AuthComponent { } ``` -```html src/app/auth/auth.component.html +```html name=src/app/auth/auth.component.html

Supabase + Angular

@@ -199,12 +209,16 @@ export class AuthComponent {
``` + + ### Account page Users also need a way to edit their profile details and manage their accounts after signing in. -Create an **AccountComponent** with the `ng g c account` Angular CLI command. +Create an `AccountComponent` with the `ng g c account` Angular CLI command. + +<$CodeTabs> -```ts src/app/account/account.component.ts +```ts name=src/app/account/account.component.ts import { Component, Input, OnInit } from '@angular/core' import { FormBuilder } from '@angular/forms' import { AuthSession } from '@supabase/supabase-js' @@ -297,7 +311,7 @@ export class AccountComponent implements OnInit { } ``` -```html src/app/account/account.component.html +```html name=src/app/account/account.component.html
@@ -324,11 +338,15 @@ export class AccountComponent implements OnInit { ``` + + ### Launch! -Now that we have all the components in place, let's update **AppComponent**: +Now that we have all the components in place, let's update `AppComponent`: + +<$CodeTabs> -```ts src/app/app.component.ts +```ts name=src/app/app.component.ts import { Component, OnInit } from '@angular/core' import { SupabaseService } from './supabase.service' @@ -350,7 +368,7 @@ export class AppComponent implements OnInit { } ``` -```html src/app/app.component.html +```html name=src/app/app.component.html
@@ -359,9 +377,13 @@ export class AppComponent implements OnInit {
``` + + `app.module.ts` also needs to be modified to include the `ReactiveFormsModule` from the `@angular/forms` package. -```ts src/app/app.module.ts +<$CodeTabs> + +```ts name=src/app/app.module.ts import { NgModule } from '@angular/core' import { BrowserModule } from '@angular/platform-browser' @@ -380,6 +402,8 @@ import { AvatarComponent } from './avatar/avatar.component' export class AppModule {} ``` + + Once that's done, run this in a terminal window: ```bash @@ -397,9 +421,11 @@ Every Supabase project is configured with [Storage](/docs/guides/storage) for ma ### Create an upload widget Let's create an avatar for the user so that they can upload a profile photo. -Create an **AvatarComponent** with `ng g c avatar` Angular CLI command. +Create an `AvatarComponent` with `ng g c avatar` Angular CLI command. -```ts src/app/avatar/avatar.component.ts +<$CodeTabs> + +```ts name=src/app/avatar/avatar.component.ts import { Component, EventEmitter, Input, Output } from '@angular/core' import { SafeResourceUrl, DomSanitizer } from '@angular/platform-browser' import { SupabaseService } from '../supabase.service' @@ -464,7 +490,7 @@ export class AvatarComponent { } ``` -```html src/app/avatar/avatar.component.html +```html name=src/app/avatar/avatar.component.html
``` + + ### Add the new widget -And then we can add the widget on top of the **AccountComponent** HTML template: +And then we can add the widget on top of the `AccountComponent` HTML template: -```html src/app/account.component.html +<$CodeTabs> + +```html name=src/app/account.component.html
``` -And add an `updateAvatar` function along with an `avatarUrl` getter to the **AccountComponent** typescript file: + + +And add an `updateAvatar` function along with an `avatarUrl` getter to the `AccountComponent` typescript file: -```ts src/app/account.component.ts +<$CodeTabs> + +```ts name=src/app/account.component.ts @Component({ selector: 'app-account', templateUrl: './account.component.html', @@ -525,4 +559,6 @@ export class AccountComponent implements OnInit { } ``` + + At this stage you have a fully functional application! diff --git a/apps/docs/content/guides/getting-started/tutorials/with-expo-react-native.mdx b/apps/docs/content/guides/getting-started/tutorials/with-expo-react-native.mdx index 63a270a0ecba8..dea1fad9bd9ff 100644 --- a/apps/docs/content/guides/getting-started/tutorials/with-expo-react-native.mdx +++ b/apps/docs/content/guides/getting-started/tutorials/with-expo-react-native.mdx @@ -4,7 +4,7 @@ description: 'Learn how to use Supabase in your React Native App.' tocVideo: 'AE7dKIKMJy4' --- - +<$Partial path="quickstart_intro.mdx" /> ![Supabase User Management example](/docs/img/supabase-flutter-demo.png) @@ -14,7 +14,7 @@ If you get stuck while working through this guide, refer to the [full example on - +<$Partial path="project_setup.mdx" /> ## Building the app @@ -51,7 +51,9 @@ These variables are safe to expose in your Expo app since Supabase has > - ```ts lib/supabase.ts + <$CodeTabs> + + ```ts name=lib/supabase.ts import AsyncStorage from '@react-native-async-storage/async-storage' import { createClient } from '@supabase/supabase-js' @@ -68,12 +70,14 @@ These variables are safe to expose in your Expo app since Supabase has }) ``` + + If you wish to encrypt the user's session information, you can use `aes-js` and store the encryption key in [Expo SecureStore](https://docs.expo.dev/versions/latest/sdk/securestore). 
The [`aes-js` library](https://github.com/ricmoo/aes-js) is a reputable JavaScript-only implementation of the AES encryption algorithm in CTR mode. A new 256-bit encryption key is generated using the `react-native-get-random-values` library. This key is stored inside Expo's SecureStore, while the value is encrypted and placed inside AsyncStorage. - Please make sure that: + Make sure that: - You keep the `expo-secure-storage`, `aes-js` and `react-native-get-random-values` libraries up-to-date. - Choose the correct [`SecureStoreOptions`](https://docs.expo.dev/versions/latest/sdk/securestore/#securestoreoptions) for your app's needs. E.g. [`SecureStore.WHEN_UNLOCKED`](https://docs.expo.dev/versions/latest/sdk/securestore/#securestorewhen_unlocked) regulates when the data can be accessed. - Carefully consider optimizations or other modifications to the above example, as those can lead to introducing subtle security vulnerabilities. @@ -84,12 +88,15 @@ These variables are safe to expose in your Expo app since Supabase has npm install @supabase/supabase-js npm install @rneui/themed @react-native-async-storage/async-storage npm install aes-js react-native-get-random-values + npm install --save-dev @types/aes-js npx expo install expo-secure-store ``` Implement a `LargeSecureStore` class to pass in as Auth storage for the `supabase-js` client: - ```ts lib/supabase.ts + <$CodeTabs> + + ```ts name=lib/supabase.ts import { createClient } from "@supabase/supabase-js"; import AsyncStorage from "@react-native-async-storage/async-storage"; import * as SecureStore from 'expo-secure-store'; @@ -155,6 +162,8 @@ These variables are safe to expose in your Expo app since Supabase has }); ``` + + @@ -163,7 +172,9 @@ These variables are safe to expose in your Expo app since Supabase has Let's set up a React Native component to manage logins and sign ups. Users would be able to sign in with their email and password. 
-```tsx components/Auth.tsx +<$CodeTabs> + +```tsx name=components/Auth.tsx import React, { useState } from 'react' import { Alert, StyleSheet, View, AppState } from 'react-native' import { supabase } from '../lib/supabase' @@ -261,6 +272,8 @@ const styles = StyleSheet.create({ }) ``` + + By default Supabase Auth requires email verification before a session is created for the users. To support email verification you need to [implement deep link handling](/docs/guides/auth/native-mobile-deep-linking?platform=react-native)! @@ -275,7 +288,9 @@ After a user is signed in we can allow them to edit their profile details and ma Let's create a new component for that called `Account.tsx`. -```tsx components/Account.tsx +<$CodeTabs> + +```tsx name=components/Account.tsx import { useState, useEffect } from 'react' import { supabase } from '../lib/supabase' import { StyleSheet, View, Alert } from 'react-native' @@ -398,11 +413,15 @@ const styles = StyleSheet.create({ }) ``` + + ### Launch! Now that we have all the components in place, let's update `App.tsx`: -```tsx App.tsx +<$CodeTabs> + +```tsx name=App.tsx import { useState, useEffect } from 'react' import { supabase } from './lib/supabase' import Auth from './components/Auth' @@ -431,6 +450,8 @@ export default function App() { } ``` + + Once that's done, run this in a terminal window: ```bash @@ -457,9 +478,9 @@ npx expo install expo-image-picker Let's create an avatar for the user so that they can upload a profile photo. 
We can start by creating a new component: -{/* */} +<$CodeTabs> -```tsx components/Avatar.tsx +```tsx name=components/Avatar.tsx import { useState, useEffect } from 'react' import { supabase } from '../lib/supabase' import { StyleSheet, View, Alert, Image, Button } from 'react-native' @@ -592,13 +613,15 @@ const styles = StyleSheet.create({ }) ``` -{/* */} + ### Add the new widget And then we can add the widget to the Account page: -```tsx components/Account.tsx +<$CodeTabs> + +```tsx name=components/Account.tsx // Import the new component import Avatar from './Avatar' @@ -622,6 +645,8 @@ return ( // ... ``` + + Now you will need to run the prebuild command to get the application working on your chosen platform. ```bash diff --git a/apps/docs/content/guides/getting-started/tutorials/with-flutter.mdx b/apps/docs/content/guides/getting-started/tutorials/with-flutter.mdx index 64b1e2e1a0649..6b19272384be8 100644 --- a/apps/docs/content/guides/getting-started/tutorials/with-flutter.mdx +++ b/apps/docs/content/guides/getting-started/tutorials/with-flutter.mdx @@ -4,7 +4,7 @@ description: 'Learn how to use Supabase in your Flutter App.' tocVideo: 'r7ysVtZ5Row' --- - +<$Partial path="quickstart_intro.mdx" /> ![Supabase User Management example](/docs/img/supabase-flutter-demo.png) @@ -14,7 +14,7 @@ If you get stuck while working through this guide, refer to the [full example on - +<$Partial path="project_setup.mdx" /> ## Building the app @@ -63,9 +63,11 @@ That is it on Supabase's end and the rest are platform specific settings: Edit the `ios/Runner/Info.plist` file. -Add CFBundleURLTypes to enable deep linking: +Add `CFBundleURLTypes` to enable deep linking: -```xml ios/Runner/Info.plist" +<$CodeTabs> + +```xml name=ios/Runner/Info.plist" @@ -88,13 +90,17 @@ Add CFBundleURLTypes to enable deep linking: ``` + + Edit the `android/app/src/main/AndroidManifest.xml` file. 
Add an intent-filter to enable deep linking: -```xml android/app/src/main/AndroidManifest.xml +<$CodeTabs> + +```xml name=android/app/src/main/AndroidManifest.xml @@ -117,6 +123,8 @@ Add an intent-filter to enable deep linking: ``` + + Supabase redirects do not work with Flutter's [default URL strategy](https://docs.flutter.dev/ui/navigation/url-strategies). @@ -138,7 +146,9 @@ void main() { Now that we have deep links ready let's initialize the Supabase client inside our `main` function with the API credentials that you copied [earlier](#get-the-api-keys). These variables will be exposed on the app, and that's completely fine since we have [Row Level Security](/docs/guides/auth#row-level-security) enabled on our Database. -```dart lib/main.dart +<$CodeTabs> + +```dart name=lib/main.dart import 'package:flutter/material.dart'; import 'package:supabase_flutter/supabase_flutter.dart'; @@ -175,6 +185,8 @@ extension ContextExtension on BuildContext { } ``` + + Notice that we have a `showSnackBar` extension method that we will use to show snack bars in the app. You could define this method in a separate file and import it where needed, but for simplicity, we will define it here. ### Set up a login page @@ -183,7 +195,9 @@ Let's create a Flutter widget to manage logins and sign ups. We will use Magic L Notice that this page sets up a listener on the user's auth state using `onAuthStateChange`. A new event will fire when the user comes back to the app by clicking their magic link, which this page can catch and redirect the user accordingly. -```dart lib/pages/login_page.dart +<$CodeTabs> + +```dart name=lib/pages/login_page.dart import 'dart:async'; import 'package:flutter/foundation.dart'; @@ -291,12 +305,16 @@ class _LoginPageState extends State { } ``` + + ### Set up account page After a user is signed in we can allow them to edit their profile details and manage their account. Let's create a new widget called `account_page.dart` for that. 
-```dart lib/pages/account_page.dart" +<$CodeTabs> + +```dart name=lib/pages/account_page.dart" import 'package:flutter/material.dart'; import 'package:supabase_flutter/supabase_flutter.dart'; import 'package:supabase_quickstart/main.dart'; @@ -437,13 +455,17 @@ class _AccountPageState extends State { } ``` + + ### Launch! Now that we have all the components in place, let's update `lib/main.dart`. The `home` of the `MaterialApp`, meaning the initial page shown to the user, will be the `LoginPage` if the user is not authenticated, and the `AccountPage` if the user is authenticated. We also included some theming to make the app look a bit nicer. -```dart lib/main.dart +<$CodeTabs> + +```dart name=lib/main.dart import 'package:flutter/material.dart'; import 'package:supabase_flutter/supabase_flutter.dart'; import 'package:supabase_quickstart/pages/account_page.dart'; @@ -501,6 +523,8 @@ extension ContextExtension on BuildContext { } ``` + + Once that's done, run this in a terminal window to launch on Android or iOS: ```bash @@ -548,7 +572,9 @@ Once you are done with all of the above, it is time to dive into coding. Let's create an avatar for the user so that they can upload a profile photo. We can start by creating a new component: -```dart lib/components/avatar.dart +<$CodeTabs> + +```dart name=lib/components/avatar.dart import 'package:flutter/material.dart'; import 'package:image_picker/image_picker.dart'; import 'package:supabase_flutter/supabase_flutter.dart'; @@ -640,11 +666,15 @@ class _AvatarState extends State { } ``` + + ### Add the new widget And then we can add the widget to the Account page as well as some logic to update the `avatar_url` whenever the user uploads a new avatar. 
-```dart lib/pages/account_page.dart +<$CodeTabs> + +```dart name=lib/pages/account_page.dart import 'package:flutter/material.dart'; import 'package:supabase_flutter/supabase_flutter.dart'; import 'package:supabase_quickstart/components/avatar.dart'; @@ -820,6 +850,8 @@ class _AccountPageState extends State { } ``` + + Congratulations, you've built a fully functional user management app using Flutter and Supabase! ## See also diff --git a/apps/docs/content/guides/getting-started/tutorials/with-ionic-angular.mdx b/apps/docs/content/guides/getting-started/tutorials/with-ionic-angular.mdx index b22f2f867d355..e366f24d0c733 100644 --- a/apps/docs/content/guides/getting-started/tutorials/with-ionic-angular.mdx +++ b/apps/docs/content/guides/getting-started/tutorials/with-ionic-angular.mdx @@ -3,7 +3,7 @@ title: 'Build a User Management App with Ionic Angular' description: 'Learn how to use Supabase in your Ionic Angular App.' --- - +<$Partial path="quickstart_intro.mdx" /> ![Supabase User Management example](/docs/img/ionic-demos/ionic-angular-account.png) @@ -13,7 +13,7 @@ If you get stuck while working through this guide, refer to the [full example on - +<$Partial path="project_setup.mdx" /> ## Building the app @@ -40,7 +40,9 @@ And finally, we want to save the environment variables in the `src/environments/ All we need are the API URL and the `anon` key that you copied [earlier](#get-the-api-keys). These variables will be exposed on the browser, and that's completely fine since we have [Row Level Security](/docs/guides/auth#row-level-security) enabled on our Database. -```ts environment.ts +<$CodeTabs> + +```ts name=environment.ts export const environment = { production: false, supabaseUrl: 'YOUR_SUPABASE_URL', @@ -48,9 +50,13 @@ export const environment = { } ``` -Now that we have the API credentials in place, let's create a **SupabaseService** with `ionic g s supabase` to initialize the Supabase client and implement functions to communicate with the Supabase API. 
+ + +Now that we have the API credentials in place, let's create a `SupabaseService` with `ionic g s supabase` to initialize the Supabase client and implement functions to communicate with the Supabase API. + +<$CodeTabs> -```ts src/app/supabase.service.ts +```ts name=src/app/supabase.service.ts import { Injectable } from '@angular/core' import { LoadingController, ToastController } from '@ionic/angular' import { AuthChangeEvent, createClient, Session, SupabaseClient } from '@supabase/supabase-js' @@ -133,10 +139,12 @@ export class SupabaseService { } ``` + + ### Set up a login route Let's set up a route to manage logins and signups. We'll use Magic Links so users can sign in with their email without using passwords. -Create a **LoginPage** with the `ionic g page login` Ionic CLI command. +Create a `LoginPage` with the `ionic g page login` Ionic CLI command. @@ -144,7 +152,9 @@ This guide will show the template inline, but the example app will have `templat -```ts src/app/login/login.page.ts +<$CodeTabs> + +```ts name=src/app/login/login.page.ts import { Component, OnInit } from '@angular/core' import { SupabaseService } from '../supabase.service' @@ -201,12 +211,16 @@ export class LoginPage { } ``` + + ### Account page After a user is signed in, we can allow them to edit their profile details and manage their account. -Create an **AccountComponent** with `ionic g page account` Ionic CLI command. +Create an `AccountComponent` with `ionic g page account` Ionic CLI command. + +<$CodeTabs> -```ts src/app/account.page.ts +```ts name=src/app/account.page.ts import { Component, OnInit } from '@angular/core' import { Router } from '@angular/router' import { Profile, SupabaseService } from '../supabase.service' @@ -308,11 +322,15 @@ export class AccountPage implements OnInit { } ``` + + ### Launch! 
-Now that we have all the components in place, let's update **AppComponent**: +Now that we have all the components in place, let's update `AppComponent`: + +<$CodeTabs> -```ts src/app/app.component.ts +```ts name=src/app/app.component.ts import { Component } from '@angular/core' import { Router } from '@angular/router' import { SupabaseService } from './supabase.service' @@ -341,9 +359,13 @@ export class AppComponent { } ``` -Then update the **AppRoutingModule** + -```ts src/app/app-routing.module.ts" +Then update the `AppRoutingModule` + +<$CodeTabs> + +```ts name=src/app/app-routing.module.ts" import { NgModule } from '@angular/core' import { PreloadAllModules, RouterModule, Routes } from '@angular/router' @@ -369,6 +391,8 @@ const routes: Routes = [ export class AppRoutingModule {} ``` + + Once that's done, run this in a terminal window: ```bash @@ -393,13 +417,15 @@ First, install two packages in order to interact with the user's camera. npm install @ionic/pwa-elements @capacitor/camera ``` -[CapacitorJS](https://capacitorjs.com) is a cross-platform native runtime from Ionic that enables web apps to be deployed through the app store and provides access to native device API. +[Capacitor](https://capacitorjs.com) is a cross-platform native runtime from Ionic that enables web apps to be deployed through the app store and provides access to native device API. Ionic PWA elements is a companion package that will polyfill certain browser APIs that provide no user interface with custom Ionic UI. With those packages installed, we can update our `main.ts` to include an additional bootstrapping call for the Ionic PWA Elements. 
-```ts src/main.ts +<$CodeTabs> + +```ts name=src/main.ts import { enableProdMode } from '@angular/core' import { platformBrowserDynamic } from '@angular/platform-browser-dynamic' @@ -417,13 +443,17 @@ platformBrowserDynamic() .catch((err) => console.log(err)) ``` -Then create an **AvatarComponent** with this Ionic CLI command: + + +Then create an `AvatarComponent` with this Ionic CLI command: ```bash ionic g component avatar --module=/src/app/account/account.module.ts --create-module ``` -```ts src/app/avatar.component.ts +<$CodeTabs> + +```ts name=src/app/avatar.component.ts import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core' import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser' import { SupabaseService } from '../supabase.service' @@ -534,11 +564,15 @@ export class AvatarComponent { } ``` + + ### Add the new widget -And then, we can add the widget on top of the **AccountComponent** HTML template: +And then, we can add the widget on top of the `AccountComponent` HTML template: -```ts src/app/account.component.ts +<$CodeTabs> + +```ts name=src/app/account.component.ts template: ` @@ -556,6 +590,8 @@ template: ` ` ``` + + At this stage, you have a fully functional application! ## See also diff --git a/apps/docs/content/guides/getting-started/tutorials/with-ionic-react.mdx b/apps/docs/content/guides/getting-started/tutorials/with-ionic-react.mdx index cbfc4db3f49a2..1eeafd561155e 100644 --- a/apps/docs/content/guides/getting-started/tutorials/with-ionic-react.mdx +++ b/apps/docs/content/guides/getting-started/tutorials/with-ionic-react.mdx @@ -3,7 +3,7 @@ title: 'Build a User Management App with Ionic React' description: 'Learn how to use Supabase in your Ionic React App.' 
--- - +<$Partial path="quickstart_intro.mdx" /> ![Supabase User Management example](/docs/img/ionic-demos/ionic-angular-account.png) @@ -13,7 +13,7 @@ If you get stuck while working through this guide, refer to the [full example on - +<$Partial path="project_setup.mdx" /> ## Building the app @@ -39,15 +39,21 @@ npm install @supabase/supabase-js And finally we want to save the environment variables in a `.env`. All we need are the API URL and the `anon` key that you copied [earlier](#get-the-api-keys). -```bash .env +<$CodeTabs> + +```bash name=.env REACT_APP_SUPABASE_URL=YOUR_SUPABASE_URL REACT_APP_SUPABASE_ANON_KEY=YOUR_SUPABASE_ANON_KEY ``` + + Now that we have the API credentials in place, let's create a helper file to initialize the Supabase client. These variables will be exposed on the browser, and that's completely fine since we have [Row Level Security](/docs/guides/auth#row-level-security) enabled on our Database. -```js src/supabaseClient.js +<$CodeTabs> + +```js name=src/supabaseClient.js import { createClient } from '@supabase/supabase-js' const supabaseUrl = process.env.REACT_APP_SUPABASE_URL @@ -56,11 +62,15 @@ const supabaseAnonKey = process.env.REACT_APP_SUPABASE_ANON_KEY export const supabase = createClient(supabaseUrl, supabaseAnonKey) ``` + + ### Set up a login route Let's set up a React component to manage logins and sign ups. We'll use Magic Links, so users can sign in with their email without using passwords. -```jsx /src/pages/Login.tsx +<$CodeTabs> + +```jsx name=/src/pages/Login.tsx import { useState } from 'react'; import { IonButton, @@ -133,13 +143,17 @@ export function LoginPage() { } ``` + + ### Account page After a user is signed in we can allow them to edit their profile details and manage their account. Let's create a new component for that called `Account.tsx`. 
-```jsx src/pages/Account.tsx +<$CodeTabs> + +```jsx name=src/pages/Account.tsx import { IonButton, IonContent, @@ -289,11 +303,15 @@ export function AccountPage() { } ``` + + ### Launch! Now that we have all the components in place, let's update `App.tsx`: -```jsx src/App.tsx +<$CodeTabs> + +```jsx name=src/App.tsx import { Redirect, Route } from 'react-router-dom' import { IonApp, IonRouterOutlet, setupIonicReact } from '@ionic/react' import { IonReactRouter } from '@ionic/react-router' @@ -341,6 +359,8 @@ const App: React.FC = () => { export default App ``` + + Once that's done, run this in a terminal window: ```bash @@ -363,13 +383,15 @@ First install two packages in order to interact with the user's camera. npm install @ionic/pwa-elements @capacitor/camera ``` -[CapacitorJS](https://capacitorjs.com) is a cross platform native runtime from Ionic that enables web apps to be deployed through the app store and provides access to native device API. +[Capacitor](https://capacitorjs.com) is a cross platform native runtime from Ionic that enables web apps to be deployed through the app store and provides access to native device API. Ionic PWA elements is a companion package that will polyfill certain browser APIs that provide no user interface with custom Ionic UI. With those packages installed we can update our `index.tsx` to include an additional bootstrapping call for the Ionic PWA Elements. -```ts src/index.tsx +<$CodeTabs> + +```ts name=src/index.tsx import React from 'react' import ReactDOM from 'react-dom' import App from './App' @@ -390,9 +412,13 @@ serviceWorkerRegistration.unregister() reportWebVitals() ``` -Then create an **AvatarComponent**. + + +Then create an `AvatarComponent`. 
-```jsx src/components/Avatar.tsx +<$CodeTabs> + +```jsx name=src/components/Avatar.tsx import { IonIcon } from '@ionic/react'; import { person } from 'ionicons/icons'; import { Camera, CameraResultType } from '@capacitor/camera'; @@ -471,11 +497,15 @@ export function Avatar({ } ``` + + ### Add the new widget And then we can add the widget to the Account page: -```jsx src/pages/Account.tsx +<$CodeTabs> + +```jsx name=src/pages/Account.tsx // Import the new component import { Avatar } from '../components/Avatar'; @@ -493,4 +523,6 @@ return ( ``` + + At this stage you have a fully functional application! diff --git a/apps/docs/content/guides/getting-started/tutorials/with-ionic-vue.mdx b/apps/docs/content/guides/getting-started/tutorials/with-ionic-vue.mdx index 00b85b6aa6f44..3499a0a2e2792 100644 --- a/apps/docs/content/guides/getting-started/tutorials/with-ionic-vue.mdx +++ b/apps/docs/content/guides/getting-started/tutorials/with-ionic-vue.mdx @@ -3,7 +3,7 @@ title: 'Build a User Management App with Ionic Vue' description: 'Learn how to use Supabase in your Ionic Vue App.' --- - +<$Partial path="quickstart_intro.mdx" /> ![Supabase User Management example](/docs/img/ionic-demos/ionic-angular-account.png) @@ -13,7 +13,7 @@ If you get stuck while working through this guide, refer to the [full example on - +<$Partial path="project_setup.mdx" /> ## Building the app @@ -21,8 +21,7 @@ Let's start building the Vue app from scratch. ### Initialize an Ionic Vue app -We can use the [Ionic CLI](https://ionicframework.com/docs/cli) to initialize -an app called `supabase-ionic-vue`: +We can use the [Ionic CLI](https://ionicframework.com/docs/cli) to initialize an app called `supabase-ionic-vue`: ```bash npm install -g @ionic/cli @@ -37,30 +36,40 @@ npm install @supabase/supabase-js ``` And finally we want to save the environment variables in a `.env`. + All we need are the API URL and the `anon` key that you copied [earlier](#get-the-api-keys). 
-```bash .env -VUE_APP_SUPABASE_URL=YOUR_SUPABASE_URL -VUE_APP_SUPABASE_ANON_KEY=YOUR_SUPABASE_ANON_KEY +<$CodeTabs> + +```bash name=.env +VITE_SUPABASE_URL=YOUR_SUPABASE_URL +VITE_SUPABASE_ANON_KEY=YOUR_SUPABASE_ANON_KEY ``` -Now that we have the API credentials in place, let's create a helper file to initialize the Supabase client. These variables will be exposed -on the browser, and that's completely fine since we have [Row Level Security](/docs/guides/auth#row-level-security) enabled on our Database. + + +Now that we have the API credentials in place, let's create a helper file to initialize the Supabase client. These variables will be exposed on the browser, and that's completely fine since we have [Row Level Security](/docs/guides/auth#row-level-security) enabled on our Database. -```js src/supabase.ts" +<$CodeTabs> + +```js name=src/supabase.ts import { createClient } from '@supabase/supabase-js'; -const supabaseUrl = process.env.VUE_APP_SUPABASE_URL as string; -const supabaseAnonKey = process.env.VUE_APP_SUPABASE_ANON_KEY as string; +const supabaseUrl = import.meta.env.VITE_SUPABASE_URL as string; +const supabaseAnonKey = import.meta.env.VITE_SUPABASE_ANON_KEY as string; export const supabase = createClient(supabaseUrl, supabaseAnonKey); ``` + + ### Set up a login route Let's set up a Vue component to manage logins and sign ups. We'll use Magic Links, so users can sign in with their email without using passwords. -```html /src/views/Login.vue +<$CodeTabs> + +```html name=/src/views/Login.vue @@ -130,7 +139,7 @@ Let's set up a Vue component to manage logins and sign ups. We'll use Magic Link try { await loader.present() - const { error } = await supabase.auth.signIn({ email: email.value }) + const { error } = await supabase.auth.signInWithOtp({ email: email.value }) if (error) throw error @@ -149,13 +158,17 @@ Let's set up a Vue component to manage logins and sign ups. 
We'll use Magic Link ``` + + ### Account page After a user is signed in we can allow them to edit their profile details and manage their account. Let's create a new component for that called `Account.vue`. -```html src/views/Account.vue +<$CodeTabs> + +```html name=src/views/Account.vue ``` + + ### Launch! Now that we have all the components in place, let's update `App.vue` and our routes: -```ts src/router.index.ts +<$CodeTabs> + +```ts name=src/router.index.ts import { createRouter, createWebHistory } from '@ionic/vue-router' import { RouteRecordRaw } from 'vue-router' import LoginPage from '../views/Login.vue' @@ -333,14 +375,14 @@ const routes: Array = [ ] const router = createRouter({ - history: createWebHistory(process.env.BASE_URL), + history: createWebHistory(import.meta.env.BASE_URL), routes, }) export default router ``` -```html src/App.vue +```html name=src/App.vue