diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e58bd32eb..25b00f97a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @supabase/cli +* @supabase/dev-workflows diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b107b1cf4..ce32d176b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,13 +18,16 @@ jobs: go-version-file: go.mod cache: true + # Required by: internal/utils/credentials/keyring_test.go + - uses: t1m0thyj/unlock-keyring@v1 - run: | - go run gotest.tools/gotestsum -- ./... -race -v -count=1 \ - -coverpkg ./cmd/...,./internal/... -coverprofile=coverage.out + go run gotest.tools/gotestsum -- -race -v -count=1 -coverprofile=coverage.out \ + `go list ./... | grep -Ev 'cmd|docs|examples|pkg/api|tools'` - - uses: shogo82148/actions-goveralls@v1 + - uses: coverallsapp/github-action@v2 with: - path-to-profile: coverage.out + file: coverage.out + format: golang lint: name: Lint diff --git a/.github/workflows/deploy-check.yml b/.github/workflows/deploy-check.yml new file mode 100644 index 000000000..44c2806ec --- /dev/null +++ b/.github/workflows/deploy-check.yml @@ -0,0 +1,20 @@ +name: Check Deploy + +on: + pull_request_target: + types: + - opened + - reopened + - synchronize + - edited + branches: + - main + +jobs: + check: + if: github.head_ref != 'develop' + runs-on: ubuntu-latest + steps: + - run: | + echo "Pull requests to main branch are only allowed from develop branch." 
+ exit 1 diff --git a/.github/workflows/mirror-image.yml b/.github/workflows/mirror-image.yml index 99210c98c..b3338a9fa 100644 --- a/.github/workflows/mirror-image.yml +++ b/.github/workflows/mirror-image.yml @@ -26,7 +26,7 @@ jobs: TAG=${{ inputs.image }} echo "image=${TAG##*/}" >> $GITHUB_OUTPUT - name: configure aws credentials - uses: aws-actions/configure-aws-credentials@v4.0.2 + uses: aws-actions/configure-aws-credentials@v4.0.3 with: role-to-assume: ${{ secrets.PROD_AWS_ROLE }} aws-region: us-east-1 diff --git a/.github/workflows/mirror.yml b/.github/workflows/mirror.yml index 85050a7c5..0eb46d868 100644 --- a/.github/workflows/mirror.yml +++ b/.github/workflows/mirror.yml @@ -17,10 +17,6 @@ on: pull_request_review: types: - submitted - paths: - - ".github/workflows/mirror.yml" - - "internal/utils/misc.go" - - "tools/listdep/**" workflow_dispatch: jobs: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 90719b102..8ef8b4383 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -40,17 +40,10 @@ jobs: name: Publish NPM needs: - settings - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: "16.x" - registry-url: "https://registry.npmjs.org" - - run: npm dist-tag add "supabase@${RELEASE_TAG#v}" latest - env: - RELEASE_TAG: ${{ needs.settings.outputs.release_tag }} - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + uses: ./.github/workflows/tag-npm.yml + with: + release: ${{ needs.settings.outputs.release_tag }} + secrets: inherit compose: name: Bump self-hosted versions diff --git a/.github/workflows/tag-npm.yml b/.github/workflows/tag-npm.yml new file mode 100644 index 000000000..9eda44cba --- /dev/null +++ b/.github/workflows/tag-npm.yml @@ -0,0 +1,29 @@ +name: Tag NPM + +on: + workflow_call: + inputs: + release: + required: true + type: string + workflow_dispatch: + inputs: + release: + description: "v1.0.0" + required: true + type: string 
+ +jobs: + tag: + name: Move latest tag + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: "16.x" + registry-url: "https://registry.npmjs.org" + - run: npm dist-tag add "supabase@${RELEASE_TAG#v}" latest + env: + RELEASE_TAG: ${{ inputs.release }} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.golangci.yml b/.golangci.yml index 9d5189836..b91c1afd6 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -20,4 +20,4 @@ linters-settings: stylecheck: checks: ["all", "-ST1003"] dupl: - threshold: 200 + threshold: 250 diff --git a/README.md b/README.md index 63ca688be..7da7ff569 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # Supabase CLI (v1) -[![Coverage Status](https://coveralls.io/repos/github/supabase/cli/badge.svg?branch=main)](https://coveralls.io/github/supabase/cli?branch=main) +[![Coverage Status](https://coveralls.io/repos/github/supabase/cli/badge.svg?branch=main)](https://coveralls.io/github/supabase/cli?branch=main) [![Bitbucket Pipelines](https://img.shields.io/bitbucket/pipelines/supabase-cli/setup-cli/master?style=flat-square&label=Bitbucket%20Canary)](https://bitbucket.org/supabase-cli/setup-cli/pipelines) [![Gitlab Pipeline Status](https://img.shields.io/gitlab/pipeline-status/sweatybridge%2Fsetup-cli?label=Gitlab%20Canary) +](https://gitlab.com/sweatybridge/setup-cli/-/pipelines) [Supabase](https://supabase.io) is an open source Firebase alternative. We're building the features of Firebase using enterprise-grade open source tools. @@ -129,7 +130,7 @@ For Bun versions below v1.0.17, you must add `supabase` as a [trusted dependency Add a symlink to the binary in `$PATH` for easier access: ```sh - ln -s "$(go env GOPATH)/cli" /usr/bin/supabase + ln -s "$(go env GOPATH)/bin/cli" /usr/bin/supabase ``` This works on other non-standard Linux distros. 
diff --git a/api/beta.yaml b/api/beta.yaml index 79f3dc8f0..5163460d6 100644 --- a/api/beta.yaml +++ b/api/beta.yaml @@ -79,6 +79,31 @@ paths: - Environments security: - bearer: [] + /v1/branches/{branch_id}/push: + post: + operationId: v1-push-a-branch + summary: Pushes a database branch + description: Pushes the specified database branch + parameters: + - name: branch_id + required: true + in: path + description: Branch ID + schema: + type: string + responses: + '201': + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/BranchUpdateResponse' + '500': + description: Failed to push database branch + tags: + - Environments + security: + - bearer: [] /v1/branches/{branch_id}/reset: post: operationId: v1-reset-a-branch @@ -97,7 +122,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/BranchResetResponse' + $ref: '#/components/schemas/BranchUpdateResponse' '500': description: Failed to reset database branch tags: @@ -118,7 +143,7 @@ paths: schema: type: array items: - $ref: '#/components/schemas/V1ProjectResponse' + $ref: '#/components/schemas/V1ProjectWithDatabaseResponse' tags: - Projects security: @@ -132,7 +157,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/V1CreateProjectBody' + $ref: '#/components/schemas/V1CreateProjectBodyDto' responses: '201': description: '' @@ -174,7 +199,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/CreateOrganizationBodyV1' + $ref: '#/components/schemas/CreateOrganizationV1Dto' responses: '201': description: '' @@ -272,11 +297,58 @@ paths: security: - oauth2: - write + /v1/oauth/revoke: + post: + operationId: v1-revoke-token + summary: '[Beta] Revoke oauth app authorization and it''s corresponding tokens' + parameters: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthRevokeTokenBodyDto' + responses: + '204': + description: '' + tags: + - OAuth + security: + 
- oauth2: + - write /v1/snippets: get: operationId: v1-list-all-snippets summary: Lists SQL snippets for the logged in user parameters: + - name: cursor + required: false + in: query + schema: + type: string + - name: limit + required: false + in: query + schema: + type: string + minimum: 1 + maximum: 100 + - name: sort_by + required: false + in: query + schema: + enum: + - name + - inserted_at + type: string + - name: sort_order + required: false + in: query + schema: + enum: + - asc + - desc + type: string - name: project_ref required: false in: query @@ -304,6 +376,7 @@ paths: required: true in: path schema: + format: uuid type: string responses: '200': @@ -331,6 +404,11 @@ paths: minLength: 20 maxLength: 20 type: string + - name: reveal + required: true + in: query + schema: + type: boolean responses: '200': description: '' @@ -340,6 +418,146 @@ paths: type: array items: $ref: '#/components/schemas/ApiKeyResponse' + tags: + - Secrets + security: + - bearer: [] + post: + operationId: createApiKey + summary: '[Alpha] Creates a new API key for the project' + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + - name: reveal + required: true + in: query + schema: + type: boolean + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CreateApiKeyBody' + responses: + '201': + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/ApiKeyResponse' + tags: + - Secrets + security: + - bearer: [] + /v1/projects/{ref}/api-keys/{id}: + patch: + operationId: updateApiKey + summary: '[Alpha] Updates an API key for the project' + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + - name: id + required: true + in: path + schema: + type: string + - name: reveal + required: true + in: query + schema: + type: boolean + 
requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateApiKeyBody' + responses: + '200': + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/ApiKeyResponse' + tags: + - Secrets + security: + - bearer: [] + get: + operationId: getApiKey + summary: '[Alpha] Get API key' + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + - name: id + required: true + in: path + schema: + type: string + - name: reveal + required: true + in: query + schema: + type: boolean + responses: + '200': + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/ApiKeyResponse' + tags: + - Secrets + security: + - bearer: [] + delete: + operationId: deleteApiKey + summary: '[Alpha] Deletes an API key for the project' + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + - name: id + required: true + in: path + schema: + type: string + - name: reveal + required: true + in: query + schema: + type: boolean + responses: + '200': + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/ApiKeyResponse' '403': description: '' tags: @@ -817,6 +1035,31 @@ paths: security: - bearer: [] /v1/projects/{ref}: + get: + operationId: v1-get-project + summary: Gets a specific project that belongs to the authenticated user + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + responses: + '200': + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/V1ProjectWithDatabaseResponse' + '500': + description: Failed to retrieve project + tags: + - Projects + security: + - bearer: [] delete: operationId: v1-delete-a-project summary: Deletes the given project @@ -1192,7 +1435,7 @@ 
paths: - bearer: [] /v1/projects/{ref}/upgrade/eligibility: get: - operationId: v1-get-postgrest-upgrade-eligibility + operationId: v1-get-postgres-upgrade-eligibility summary: '[Beta] Returns the project''s eligibility for upgrades' parameters: - name: ref @@ -1220,7 +1463,7 @@ paths: - bearer: [] /v1/projects/{ref}/upgrade/status: get: - operationId: v1-get-postgrest-upgrade-status + operationId: v1-get-postgres-upgrade-status summary: '[Beta] Gets the latest status of the project''s upgrade' parameters: - name: ref @@ -1231,6 +1474,11 @@ paths: minLength: 20 maxLength: 20 type: string + - name: tracking_id + required: false + in: query + schema: + type: string responses: '200': description: '' @@ -1397,12 +1645,71 @@ paths: type: array items: $ref: '#/components/schemas/V1ServiceHealthResponse' + '403': + description: '' '500': description: Failed to retrieve project's service health status tags: - Projects security: - bearer: [] + /v1/projects/{ref}/config/storage: + get: + operationId: v1-get-storage-config + summary: Gets project's storage config + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + responses: + '200': + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/StorageConfigResponse' + '403': + description: '' + '500': + description: Failed to retrieve project's storage config + tags: + - Storage + security: + - bearer: [] + patch: + operationId: v1-update-storage-config + summary: Updates project's storage config + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateStorageConfigBody' + responses: + '200': + description: '' + '403': + description: '' + '500': + description: Failed to update project's storage config + tags: + - Storage 
+ security: + - bearer: [] /v1/projects/{ref}/config/database/postgres: get: operationId: v1-get-postgres-config @@ -1454,6 +1761,8 @@ paths: application/json: schema: $ref: '#/components/schemas/PostgresConfigResponse' + '403': + description: '' '500': description: Failed to update project's Postgres config tags: @@ -1726,10 +2035,137 @@ paths: - Auth security: - bearer: [] - /v1/projects/{ref}/database/query: + /v1/projects/{ref}/pause: post: - operationId: v1-run-a-query - summary: '[Beta] Run sql query' + operationId: v1-pause-a-project + summary: Pauses the given project + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + responses: + '200': + description: '' + '403': + description: '' + tags: + - Projects + security: + - bearer: [] + /v1/projects/{ref}/restore: + get: + operationId: v1-list-available-restore-versions + summary: Lists available restore versions for the given project + parameters: + - name: ref + required: true + in: path + schema: + type: string + responses: + '200': + description: '' + content: + application/json: + schema: + $ref: >- + #/components/schemas/GetProjectAvailableRestoreVersionsResponse + '403': + description: '' + tags: + - Projects + security: + - bearer: [] + post: + operationId: v1-restore-a-project + summary: Restores the given project + parameters: + - name: ref + required: true + in: path + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/RestoreProjectBodyDto' + responses: + '200': + description: '' + '403': + description: '' + tags: + - Projects + security: + - bearer: [] + /v1/projects/{ref}/restore/cancel: + post: + operationId: v1-cancel-a-project-restoration + summary: Cancels the given project restoration + parameters: + - name: ref + required: true + in: path + schema: + type: string + responses: + '200': + description: '' + '403': + description: '' + 
tags: + - Projects + security: + - bearer: [] + /v1/projects/{ref}/analytics/endpoints/logs.all: + get: + operationId: getLogs + summary: Gets project's logs + parameters: + - name: iso_timestamp_end + required: false + in: query + schema: + type: string + - name: iso_timestamp_start + required: false + in: query + schema: + type: string + - name: sql + required: false + in: query + schema: + type: string + - name: ref + required: true + in: path + schema: + type: string + responses: + '200': + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/V1AnalyticsResponse' + '403': + description: '' + tags: + - Analytics + security: + - bearer: [] + /v1/projects/{ref}/database/query: + post: + operationId: v1-run-a-query + summary: '[Beta] Run sql query' parameters: - name: ref required: true @@ -1785,8 +2221,38 @@ paths: security: - bearer: [] /v1/projects/{ref}/functions: + get: + operationId: v1-list-all-functions + summary: List all functions + description: Returns all functions you've previously added to the specified project. + parameters: + - name: ref + required: true + in: path + description: Project ref + schema: + minLength: 20 + maxLength: 20 + type: string + responses: + '200': + description: '' + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/FunctionResponse' + '403': + description: '' + '500': + description: Failed to retrieve project's functions + tags: + - Edge Functions + security: + - bearer: [] post: - operationId: createFunction + operationId: v1-create-a-function summary: Create a function description: Creates a function and adds it to the specified project. 
parameters: @@ -1829,6 +2295,13 @@ paths: in: query schema: type: string + - name: compute_multiplier + required: false + in: query + schema: + minimum: 1 + maximum: 4 + type: number requestBody: required: true content: @@ -1853,36 +2326,6 @@ paths: - Edge Functions security: - bearer: [] - get: - operationId: v1-list-all-functions - summary: List all functions - description: Returns all functions you've previously added to the specified project. - parameters: - - name: ref - required: true - in: path - description: Project ref - schema: - minLength: 20 - maxLength: 20 - type: string - responses: - '200': - description: '' - content: - application/json: - schema: - type: array - items: - $ref: '#/components/schemas/FunctionResponse' - '403': - description: '' - '500': - description: Failed to retrieve project's functions - tags: - - Edge Functions - security: - - bearer: [] /v1/projects/{ref}/functions/{function_slug}: get: operationId: v1-get-a-function @@ -1970,6 +2413,13 @@ paths: in: query schema: type: string + - name: compute_multiplier + required: false + in: query + schema: + minimum: 1 + maximum: 4 + type: number requestBody: required: true content: @@ -2395,28 +2845,33 @@ components: BranchDetailResponse: type: object properties: - db_port: - type: integer - ref: - type: string - postgres_version: - type: string status: + type: string enum: + - INACTIVE - ACTIVE_HEALTHY - ACTIVE_UNHEALTHY - COMING_UP + - UNKNOWN - GOING_DOWN - - INACTIVE - INIT_FAILED - REMOVED - - RESTARTING - - UNKNOWN + - RESTORING - UPGRADING - PAUSING - - RESTORING - RESTORE_FAILED + - RESTARTING - PAUSE_FAILED + - RESIZING + db_port: + type: integer + ref: + type: string + postgres_version: + type: string + postgres_engine: + type: string + release_channel: type: string db_host: type: string @@ -2427,23 +2882,30 @@ components: jwt_secret: type: string required: + - status - db_port - ref - postgres_version - - status + - postgres_engine + - release_channel - db_host 
UpdateBranchBody: type: object properties: + reset_on_push: + type: boolean + deprecated: true + description: >- + This field is deprecated and will be ignored. Use v1-reset-a-branch + endpoint directly instead. branch_name: type: string git_branch: type: string - reset_on_push: - type: boolean persistent: type: boolean status: + type: string enum: - CREATING_PROJECT - RUNNING_MIGRATIONS @@ -2451,10 +2913,25 @@ components: - MIGRATIONS_FAILED - FUNCTIONS_DEPLOYED - FUNCTIONS_FAILED - type: string BranchResponse: type: object properties: + pr_number: + type: integer + format: int32 + latest_check_run_id: + type: number + deprecated: true + description: This field is deprecated and will not be populated. + status: + type: string + enum: + - CREATING_PROJECT + - RUNNING_MIGRATIONS + - MIGRATIONS_PASSED + - MIGRATIONS_FAILED + - FUNCTIONS_DEPLOYED + - FUNCTIONS_FAILED id: type: string name: @@ -2467,36 +2944,20 @@ components: type: boolean git_branch: type: string - pr_number: - type: number - latest_check_run_id: - type: number - reset_on_push: - type: boolean persistent: type: boolean - status: - enum: - - CREATING_PROJECT - - RUNNING_MIGRATIONS - - MIGRATIONS_PASSED - - MIGRATIONS_FAILED - - FUNCTIONS_DEPLOYED - - FUNCTIONS_FAILED - type: string created_at: type: string updated_at: type: string required: + - status - id - name - project_ref - parent_project_ref - is_default - - reset_on_push - persistent - - status - created_at - updated_at BranchDeleteResponse: @@ -2506,12 +2967,15 @@ components: type: string required: - message - BranchResetResponse: + BranchUpdateResponse: type: object properties: + workflow_run_id: + type: string message: type: string required: + - workflow_run_id - message V1DatabaseResponse: type: object @@ -2522,10 +2986,18 @@ components: version: type: string description: Database version + postgres_engine: + type: string + description: Database engine + release_channel: + type: string + description: Release channel required: - host - 
version - V1ProjectResponse: + - postgres_engine + - release_channel + V1ProjectWithDatabaseResponse: type: object properties: id: @@ -2545,25 +3017,26 @@ components: type: string description: Creation timestamp example: '2023-03-29T16:32:59Z' - database: - $ref: '#/components/schemas/V1DatabaseResponse' status: + type: string enum: + - INACTIVE - ACTIVE_HEALTHY - ACTIVE_UNHEALTHY - COMING_UP + - UNKNOWN - GOING_DOWN - - INACTIVE - INIT_FAILED - REMOVED - - RESTARTING - - UNKNOWN + - RESTORING - UPGRADING - PAUSING - - RESTORING - RESTORE_FAILED + - RESTARTING - PAUSE_FAILED - type: string + - RESIZING + database: + $ref: '#/components/schemas/V1DatabaseResponse' required: - id - organization_id @@ -2571,20 +3044,8 @@ components: - region - created_at - status - DesiredInstanceSize: - type: string - enum: - - micro - - small - - medium - - large - - xlarge - - 2xlarge - - 4xlarge - - 8xlarge - - 12xlarge - - 16xlarge - V1CreateProjectBody: + - database + V1CreateProjectBodyDto: type: object properties: db_pass: @@ -2592,7 +3053,7 @@ components: description: Database password name: type: string - description: Name of your project, should not contain dots + description: Name of your project organization_id: type: string description: Slug of your organization @@ -2601,15 +3062,16 @@ components: enum: - free - pro + deprecated: true description: >- Subscription Plan is now set on organization level and is ignored in this request - example: free - deprecated: true region: type: string + description: Region you want your server to reside in enum: - us-east-1 + - us-east-2 - us-west-1 - us-west-2 - ap-east-1 @@ -2620,28 +3082,104 @@ components: - eu-west-1 - eu-west-2 - eu-west-3 + - eu-north-1 - eu-central-1 + - eu-central-2 - ca-central-1 - ap-south-1 - sa-east-1 - description: Region you want your server to reside in - example: us-east-1 kps_enabled: type: boolean deprecated: true description: This field is deprecated and is ignored in this request 
desired_instance_size: - $ref: '#/components/schemas/DesiredInstanceSize' + type: string + enum: + - micro + - small + - medium + - large + - xlarge + - 2xlarge + - 4xlarge + - 8xlarge + - 12xlarge + - 16xlarge template_url: type: string + format: uri description: Template URL used to create the project from the CLI. example: >- https://github.com/supabase/supabase/tree/master/examples/slack-clone/nextjs-slack-clone + release_channel: + type: string + enum: + - internal + - alpha + - beta + - ga + - withdrawn + - preview + description: Release channel. If not provided, GA will be used. + postgres_engine: + type: string + enum: + - '15' + - 17-oriole + description: >- + Postgres engine version. If not provided, the latest version will be + used. required: - db_pass - name - organization_id - region + additionalProperties: false + V1ProjectResponse: + type: object + properties: + id: + type: string + description: Id of your project + organization_id: + type: string + description: Slug of your organization + name: + type: string + description: Name of your project + region: + type: string + description: Region of your project + example: us-east-1 + created_at: + type: string + description: Creation timestamp + example: '2023-03-29T16:32:59Z' + status: + type: string + enum: + - INACTIVE + - ACTIVE_HEALTHY + - ACTIVE_UNHEALTHY + - COMING_UP + - UNKNOWN + - GOING_DOWN + - INIT_FAILED + - REMOVED + - RESTORING + - UPGRADING + - PAUSING + - RESTORE_FAILED + - RESTARTING + - PAUSE_FAILED + - RESIZING + required: + - id + - organization_id + - name + - region + - created_at + - status OrganizationResponseV1: type: object properties: @@ -2652,13 +3190,14 @@ components: required: - id - name - CreateOrganizationBodyV1: + CreateOrganizationV1Dto: type: object properties: name: type: string required: - name + additionalProperties: false OAuthTokenBody: type: object properties: @@ -2686,6 +3225,9 @@ components: OAuthTokenResponse: type: object properties: + expires_in: + type: 
integer + format: int64 token_type: type: string enum: @@ -2694,18 +3236,32 @@ components: type: string refresh_token: type: string - expires_in: - type: number required: + - expires_in - token_type - access_token - refresh_token - - expires_in + OAuthRevokeTokenBodyDto: + type: object + properties: + client_id: + type: string + format: uuid + client_secret: + type: string + refresh_token: + type: string + required: + - client_id + - client_secret + - refresh_token + additionalProperties: false SnippetProject: type: object properties: id: - type: number + type: integer + format: int64 name: type: string required: @@ -2715,7 +3271,8 @@ components: type: object properties: id: - type: number + type: integer + format: int64 username: type: string required: @@ -2745,6 +3302,7 @@ components: type: string description: type: string + nullable: true project: $ref: '#/components/schemas/SnippetProject' owner: @@ -2758,6 +3316,7 @@ components: - type - visibility - name + - description - project - owner - updated_by @@ -2768,6 +3327,8 @@ components: type: array items: $ref: '#/components/schemas/SnippetMeta' + cursor: + type: string required: - data SnippetContent: @@ -2807,6 +3368,7 @@ components: type: string description: type: string + nullable: true project: $ref: '#/components/schemas/SnippetProject' owner: @@ -2822,25 +3384,123 @@ components: - type - visibility - name + - description - project - owner - updated_by - content + ApiKeySecretJWTTemplate: + type: object + properties: + role: + type: string + required: + - role ApiKeyResponse: type: object properties: + type: + nullable: true + type: string + enum: + - publishable + - secret + - legacy name: type: string api_key: type: string + id: + type: string + nullable: true + prefix: + type: string + nullable: true + description: + type: string + nullable: true + hash: + type: string + nullable: true + secret_jwt_template: + nullable: true + allOf: + - $ref: '#/components/schemas/ApiKeySecretJWTTemplate' + 
inserted_at: + type: string + nullable: true + updated_at: + type: string + nullable: true required: - name - api_key + CreateApiKeyBody: + type: object + properties: + type: + enum: + - publishable + - secret + type: string + description: + type: string + nullable: true + secret_jwt_template: + nullable: true + allOf: + - $ref: '#/components/schemas/ApiKeySecretJWTTemplate' + required: + - type + UpdateApiKeyBody: + type: object + properties: + description: + type: string + nullable: true + secret_jwt_template: + nullable: true + allOf: + - $ref: '#/components/schemas/ApiKeySecretJWTTemplate' + DesiredInstanceSize: + type: string + enum: + - micro + - small + - medium + - large + - xlarge + - 2xlarge + - 4xlarge + - 8xlarge + - 12xlarge + - 16xlarge + ReleaseChannel: + type: string + enum: + - internal + - alpha + - beta + - ga + - withdrawn + - preview + PostgresEngine: + type: string + description: >- + Postgres engine version. If not provided, the latest version will be + used. + enum: + - '15' + - 17-oriole CreateBranchBody: type: object properties: desired_instance_size: $ref: '#/components/schemas/DesiredInstanceSize' + release_channel: + $ref: '#/components/schemas/ReleaseChannel' + postgres_engine: + $ref: '#/components/schemas/PostgresEngine' branch_name: type: string git_branch: @@ -3094,7 +3754,8 @@ components: type: object properties: id: - type: number + type: integer + format: int64 ref: type: string name: @@ -3196,9 +3857,12 @@ components: UpgradeDatabaseBody: type: object properties: + release_channel: + $ref: '#/components/schemas/ReleaseChannel' target_version: - type: number + type: string required: + - release_channel - target_version ProjectUpgradeInitiateResponse: type: object @@ -3211,15 +3875,22 @@ components: type: object properties: postgres_version: - type: number + $ref: '#/components/schemas/PostgresEngine' + release_channel: + $ref: '#/components/schemas/ReleaseChannel' app_version: type: string required: - postgres_version + - 
release_channel - app_version ProjectUpgradeEligibilityResponse: type: object properties: + current_app_version_release_channel: + $ref: '#/components/schemas/ReleaseChannel' + duration_estimate_hours: + type: integer eligible: type: boolean current_app_version: @@ -3234,8 +3905,6 @@ components: type: array items: type: string - duration_estimate_hours: - type: number legacy_auth_custom_roles: type: array items: @@ -3245,23 +3914,30 @@ components: items: type: string required: + - current_app_version_release_channel + - duration_estimate_hours - eligible - current_app_version - latest_app_version - target_upgrade_versions - potential_breaking_changes - - duration_estimate_hours - legacy_auth_custom_roles - extension_dependent_objects DatabaseUpgradeStatus: type: object properties: + target_version: + type: integer + status: + enum: + - 0 + - 1 + - 2 + type: integer initiated_at: type: string latest_status_at: type: string - target_version: - type: number error: type: string enum: @@ -3288,17 +3964,11 @@ components: - 8_attached_volume_to_upgraded_instance - 9_completed_upgrade - 10_completed_post_physical_backup - status: - type: number - enum: - - 0 - - 1 - - 2 required: - - initiated_at - - latest_status_at - target_version - status + - initiated_at + - latest_status_at DatabaseUpgradeStatusResponse: type: object properties: @@ -3328,6 +3998,7 @@ components: type: string enum: - us-east-1 + - us-east-2 - us-west-1 - us-west-2 - ap-east-1 @@ -3338,7 +4009,9 @@ components: - eu-west-1 - eu-west-2 - eu-west-3 + - eu-north-1 - eu-central-1 + - eu-central-2 - ca-central-1 - ap-south-1 - sa-east-1 @@ -3358,26 +4031,16 @@ components: properties: name: type: string - version: - type: string - description: - type: string + enum: + - GoTrue required: - name - - version - - description RealtimeHealthResponse: type: object properties: - healthy: - type: boolean - db_connected: - type: boolean connected_cluster: - type: number + type: integer required: - - healthy - - 
db_connected - connected_cluster V1ServiceHealthResponse: type: object @@ -3409,15 +4072,62 @@ components: - name - healthy - status + StorageFeatureImageTransformation: + type: object + properties: + enabled: + type: boolean + required: + - enabled + StorageFeatureS3Protocol: + type: object + properties: + enabled: + type: boolean + required: + - enabled + StorageFeatures: + type: object + properties: + imageTransformation: + $ref: '#/components/schemas/StorageFeatureImageTransformation' + s3Protocol: + $ref: '#/components/schemas/StorageFeatureS3Protocol' + required: + - imageTransformation + - s3Protocol + StorageConfigResponse: + type: object + properties: + fileSizeLimit: + type: integer + format: int64 + features: + $ref: '#/components/schemas/StorageFeatures' + required: + - fileSizeLimit + - features + UpdateStorageConfigBody: + type: object + properties: + fileSizeLimit: + type: integer + minimum: 0 + maximum: 53687091200 + format: int64 + features: + $ref: '#/components/schemas/StorageFeatures' PostgresConfigResponse: type: object properties: - statement_timeout: - type: string effective_cache_size: type: string + logical_decoding_work_mem: + type: string maintenance_work_mem: type: string + track_activity_query_size: + type: string max_connections: type: integer minimum: 1 @@ -3438,16 +4148,32 @@ components: type: integer minimum: 0 maximum: 1024 + max_replication_slots: + type: integer + max_slot_wal_keep_size: + type: string max_standby_archive_delay: type: string max_standby_streaming_delay: type: string + max_wal_size: + type: string + max_wal_senders: + type: integer max_worker_processes: type: integer minimum: 0 maximum: 262143 shared_buffers: type: string + statement_timeout: + type: string + track_commit_timestamp: + type: boolean + wal_keep_size: + type: string + wal_sender_timeout: + type: string work_mem: type: string session_replication_role: @@ -3459,12 +4185,14 @@ components: UpdatePostgresConfigBody: type: object properties: - 
statement_timeout: - type: string effective_cache_size: type: string + logical_decoding_work_mem: + type: string maintenance_work_mem: type: string + track_activity_query_size: + type: string max_connections: type: integer minimum: 1 @@ -3485,18 +4213,36 @@ components: type: integer minimum: 0 maximum: 1024 + max_replication_slots: + type: integer + max_slot_wal_keep_size: + type: string max_standby_archive_delay: type: string max_standby_streaming_delay: type: string + max_wal_size: + type: string + max_wal_senders: + type: integer max_worker_processes: type: integer minimum: 0 maximum: 262143 shared_buffers: type: string + statement_timeout: + type: string + track_commit_timestamp: + type: boolean + wal_keep_size: + type: string + wal_sender_timeout: + type: string work_mem: type: string + restart_database: + type: boolean session_replication_role: enum: - origin @@ -3523,12 +4269,20 @@ components: SupavisorConfigResponse: type: object properties: - identifier: - type: string database_type: + type: string enum: - PRIMARY - READ_REPLICA + db_port: + type: integer + default_pool_size: + type: integer + nullable: true + max_client_conn: + type: integer + nullable: true + identifier: type: string is_using_scram_auth: type: boolean @@ -3536,34 +4290,26 @@ components: type: string db_host: type: string - db_port: - type: number db_name: type: string connectionString: type: string - default_pool_size: - type: number - nullable: true - max_client_conn: - type: number - nullable: true pool_mode: enum: - transaction - session type: string required: - - identifier - database_type + - db_port + - default_pool_size + - max_client_conn + - identifier - is_using_scram_auth - db_user - db_host - - db_port - db_name - connectionString - - default_pool_size - - max_client_conn - pool_mode UpdateSupavisorConfigBody: type: object @@ -3584,7 +4330,7 @@ components: type: object properties: default_pool_size: - type: number + type: integer nullable: true pool_mode: enum: @@ -3598,10 
+4344,67 @@ components: type: object properties: api_max_request_duration: - type: number + type: integer nullable: true db_max_pool_size: - type: number + type: integer + nullable: true + jwt_exp: + type: integer + nullable: true + mailer_otp_exp: + type: integer + mailer_otp_length: + type: integer + nullable: true + mfa_max_enrolled_factors: + type: integer + nullable: true + mfa_phone_otp_length: + type: integer + mfa_phone_max_frequency: + type: integer + nullable: true + password_min_length: + type: integer + nullable: true + rate_limit_anonymous_users: + type: integer + nullable: true + rate_limit_email_sent: + type: integer + nullable: true + rate_limit_sms_sent: + type: integer + nullable: true + rate_limit_token_refresh: + type: integer + nullable: true + rate_limit_verify: + type: integer + nullable: true + rate_limit_otp: + type: integer + nullable: true + security_refresh_token_reuse_interval: + type: integer + nullable: true + sessions_inactivity_timeout: + type: integer + nullable: true + sessions_timebox: + type: integer + nullable: true + sms_max_frequency: + type: integer + nullable: true + sms_otp_exp: + type: integer + nullable: true + sms_otp_length: + type: integer + smtp_max_frequency: + type: integer nullable: true disable_signup: type: boolean @@ -3861,20 +4664,12 @@ components: hook_send_email_secrets: type: string nullable: true - jwt_exp: - type: number - nullable: true mailer_allow_unverified_email_sign_ins: type: boolean nullable: true mailer_autoconfirm: type: boolean nullable: true - mailer_otp_exp: - type: number - mailer_otp_length: - type: number - nullable: true mailer_secure_email_change_enabled: type: boolean nullable: true @@ -3914,9 +4709,6 @@ components: mailer_templates_recovery_content: type: string nullable: true - mfa_max_enrolled_factors: - type: number - nullable: true mfa_totp_enroll_enabled: type: boolean nullable: true @@ -3929,41 +4721,21 @@ components: mfa_phone_verify_enabled: type: boolean nullable: true - 
mfa_phone_otp_length: - type: number + mfa_web_authn_enroll_enabled: + type: boolean + nullable: true + mfa_web_authn_verify_enabled: + type: boolean + nullable: true mfa_phone_template: type: string nullable: true - mfa_phone_max_frequency: - type: number - nullable: true password_hibp_enabled: type: boolean nullable: true - password_min_length: - type: number - nullable: true password_required_characters: type: string nullable: true - rate_limit_anonymous_users: - type: number - nullable: true - rate_limit_email_sent: - type: number - nullable: true - rate_limit_sms_sent: - type: number - nullable: true - rate_limit_token_refresh: - type: number - nullable: true - rate_limit_verify: - type: number - nullable: true - rate_limit_otp: - type: number - nullable: true refresh_token_rotation_enabled: type: boolean nullable: true @@ -3973,6 +4745,9 @@ components: saml_external_url: type: string nullable: true + saml_allow_encrypted_assertions: + type: boolean + nullable: true security_captcha_enabled: type: boolean nullable: true @@ -3985,44 +4760,27 @@ components: security_manual_linking_enabled: type: boolean nullable: true - security_refresh_token_reuse_interval: - type: number - nullable: true security_update_password_require_reauthentication: type: boolean nullable: true - sessions_inactivity_timeout: - type: number - nullable: true sessions_single_per_user: type: boolean nullable: true sessions_tags: type: string nullable: true - sessions_timebox: - type: number - nullable: true site_url: type: string nullable: true sms_autoconfirm: type: boolean nullable: true - sms_max_frequency: - type: number - nullable: true sms_messagebird_access_key: type: string nullable: true sms_messagebird_originator: type: string nullable: true - sms_otp_exp: - type: number - nullable: true - sms_otp_length: - type: number sms_provider: type: string nullable: true @@ -4077,9 +4835,6 @@ components: smtp_host: type: string nullable: true - smtp_max_frequency: - type: number - nullable: 
true smtp_pass: type: string nullable: true @@ -4098,6 +4853,26 @@ components: required: - api_max_request_duration - db_max_pool_size + - jwt_exp + - mailer_otp_exp + - mailer_otp_length + - mfa_max_enrolled_factors + - mfa_phone_otp_length + - mfa_phone_max_frequency + - password_min_length + - rate_limit_anonymous_users + - rate_limit_email_sent + - rate_limit_sms_sent + - rate_limit_token_refresh + - rate_limit_verify + - rate_limit_otp + - security_refresh_token_reuse_interval + - sessions_inactivity_timeout + - sessions_timebox + - sms_max_frequency + - sms_otp_exp + - sms_otp_length + - smtp_max_frequency - disable_signup - external_anonymous_users_enabled - external_apple_additional_client_ids @@ -4184,11 +4959,8 @@ components: - hook_send_email_enabled - hook_send_email_uri - hook_send_email_secrets - - jwt_exp - mailer_allow_unverified_email_sign_ins - mailer_autoconfirm - - mailer_otp_exp - - mailer_otp_length - mailer_secure_email_change_enabled - mailer_subjects_confirmation - mailer_subjects_email_change @@ -4202,43 +4974,30 @@ components: - mailer_templates_magic_link_content - mailer_templates_reauthentication_content - mailer_templates_recovery_content - - mfa_max_enrolled_factors - mfa_totp_enroll_enabled - mfa_totp_verify_enabled - mfa_phone_enroll_enabled - mfa_phone_verify_enabled - - mfa_phone_otp_length + - mfa_web_authn_enroll_enabled + - mfa_web_authn_verify_enabled - mfa_phone_template - - mfa_phone_max_frequency - password_hibp_enabled - - password_min_length - password_required_characters - - rate_limit_anonymous_users - - rate_limit_email_sent - - rate_limit_sms_sent - - rate_limit_token_refresh - - rate_limit_verify - - rate_limit_otp - refresh_token_rotation_enabled - saml_enabled - saml_external_url + - saml_allow_encrypted_assertions - security_captcha_enabled - security_captcha_provider - security_captcha_secret - security_manual_linking_enabled - - security_refresh_token_reuse_interval - 
security_update_password_require_reauthentication - - sessions_inactivity_timeout - sessions_single_per_user - sessions_tags - - sessions_timebox - site_url - sms_autoconfirm - - sms_max_frequency - sms_messagebird_access_key - sms_messagebird_originator - - sms_otp_exp - - sms_otp_length - sms_provider - sms_template - sms_test_otp @@ -4257,7 +5016,6 @@ components: - sms_vonage_from - smtp_admin_email - smtp_host - - smtp_max_frequency - smtp_pass - smtp_port - smtp_sender_name @@ -4266,15 +5024,93 @@ components: UpdateAuthConfigBody: type: object properties: + jwt_exp: + type: integer + minimum: 0 + maximum: 604800 + smtp_max_frequency: + type: integer + minimum: 0 + maximum: 32767 + mfa_max_enrolled_factors: + type: integer + minimum: 0 + maximum: 2147483647 + sessions_timebox: + type: integer + minimum: 0 + sessions_inactivity_timeout: + type: integer + minimum: 0 + rate_limit_anonymous_users: + type: integer + minimum: 1 + maximum: 2147483647 + rate_limit_email_sent: + type: integer + minimum: 1 + maximum: 2147483647 + rate_limit_sms_sent: + type: integer + minimum: 1 + maximum: 2147483647 + rate_limit_verify: + type: integer + minimum: 1 + maximum: 2147483647 + rate_limit_token_refresh: + type: integer + minimum: 1 + maximum: 2147483647 + rate_limit_otp: + type: integer + minimum: 1 + maximum: 2147483647 + password_min_length: + type: integer + minimum: 6 + maximum: 32767 + security_refresh_token_reuse_interval: + type: integer + minimum: 0 + maximum: 2147483647 + mailer_otp_exp: + type: integer + minimum: 0 + maximum: 2147483647 + mailer_otp_length: + type: integer + minimum: 6 + maximum: 10 + sms_max_frequency: + type: integer + minimum: 0 + maximum: 32767 + sms_otp_exp: + type: integer + minimum: 0 + maximum: 2147483647 + sms_otp_length: + type: integer + minimum: 0 + maximum: 32767 + db_max_pool_size: + type: integer + api_max_request_duration: + type: integer + mfa_phone_max_frequency: + type: integer + minimum: 0 + maximum: 32767 + mfa_phone_otp_length: 
+ type: integer + minimum: 0 + maximum: 32767 site_url: type: string pattern: /^[^,]+$/ disable_signup: type: boolean - jwt_exp: - type: number - minimum: 0 - maximum: 604800 smtp_admin_email: type: string smtp_host: @@ -4285,10 +5121,6 @@ components: type: string smtp_pass: type: string - smtp_max_frequency: - type: number - minimum: 0 - maximum: 32767 smtp_sender_name: type: string mailer_allow_unverified_email_sign_ins: @@ -4319,10 +5151,6 @@ components: type: string mailer_templates_reauthentication_content: type: string - mfa_max_enrolled_factors: - type: number - minimum: 0 - maximum: 2147483647 uri_allow_list: type: string external_anonymous_users_enabled: @@ -4342,51 +5170,17 @@ components: type: string security_captcha_secret: type: string - sessions_timebox: - type: number - minimum: 0 - sessions_inactivity_timeout: - type: number - minimum: 0 sessions_single_per_user: type: boolean sessions_tags: type: string pattern: /^\s*([a-z0-9_-]+(\s*,+\s*)?)*\s*$/i - rate_limit_anonymous_users: - type: number - minimum: 1 - maximum: 2147483647 - rate_limit_email_sent: - type: number - minimum: 1 - maximum: 2147483647 - rate_limit_sms_sent: - type: number - minimum: 1 - maximum: 2147483647 - rate_limit_verify: - type: number - minimum: 1 - maximum: 2147483647 - rate_limit_token_refresh: - type: number - minimum: 1 - maximum: 2147483647 - rate_limit_otp: - type: number - minimum: 1 - maximum: 2147483647 mailer_secure_email_change_enabled: type: boolean refresh_token_rotation_enabled: type: boolean password_hibp_enabled: type: boolean - password_min_length: - type: number - minimum: 6 - maximum: 32767 password_required_characters: type: string enum: @@ -4398,32 +5192,8 @@ components: type: boolean security_update_password_require_reauthentication: type: boolean - security_refresh_token_reuse_interval: - type: number - minimum: 0 - maximum: 2147483647 - mailer_otp_exp: - type: number - minimum: 0 - maximum: 2147483647 - mailer_otp_length: - type: number - minimum: 6 - 
maximum: 10 sms_autoconfirm: type: boolean - sms_max_frequency: - type: number - minimum: 0 - maximum: 32767 - sms_otp_exp: - type: number - minimum: 0 - maximum: 2147483647 - sms_otp_length: - type: number - minimum: 0 - maximum: 32767 sms_provider: type: string sms_messagebird_access_key: @@ -4625,26 +5395,18 @@ components: type: string external_zoom_secret: type: string - db_max_pool_size: - type: number - api_max_request_duration: - type: number mfa_totp_enroll_enabled: type: boolean mfa_totp_verify_enabled: type: boolean + mfa_web_authn_enroll_enabled: + type: boolean + mfa_web_authn_verify_enabled: + type: boolean mfa_phone_enroll_enabled: type: boolean mfa_phone_verify_enabled: type: boolean - mfa_phone_max_frequency: - type: number - minimum: 0 - maximum: 32767 - mfa_phone_otp_length: - type: number - minimum: 0 - maximum: 32767 mfa_phone_template: type: string CreateThirdPartyAuthBody: @@ -4687,32 +5449,112 @@ components: - type - inserted_at - updated_at - V1RunQueryBody: + ProjectAvailableRestoreVersion: type: object properties: - query: + version: type: string + release_channel: + type: string + enum: + - internal + - alpha + - beta + - ga + - withdrawn + - preview + postgres_engine: + type: string + enum: + - '13' + - '14' + - '15' + - 17-oriole required: - - query - V1CreateFunctionBody: + - version + - release_channel + - postgres_engine + GetProjectAvailableRestoreVersionsResponse: type: object properties: - slug: + available_versions: + type: array + items: + $ref: '#/components/schemas/ProjectAvailableRestoreVersion' + required: + - available_versions + RestoreProjectBodyDto: + type: object + properties: + release_channel: type: string - pattern: /^[A-Za-z0-9_-]+$/ - name: + enum: + - internal + - alpha + - beta + - ga + - withdrawn + - preview + description: >- + Release channel version. If not provided, GeneralAvailability will + be used. 
+ postgres_engine: type: string - body: + enum: + - '15' + - 17-oriole + description: >- + Postgres engine version. If not provided, the latest version from + the given release channel will be used. + V1AnalyticsResponse: + type: object + properties: + error: + oneOf: + - properties: + code: + type: number + errors: + type: array + items: + properties: + domain: + type: string + location: + type: string + locationType: + type: string + message: + type: string + reason: + type: string + message: + type: string + status: + type: string + - type: string + result: + type: array + items: + type: object + V1RunQueryBody: + type: object + properties: + query: type: string - verify_jwt: - type: boolean required: - - slug - - name - - body + - query FunctionResponse: type: object properties: + version: + type: integer + created_at: + type: integer + format: int64 + updated_at: + type: integer + format: int64 id: type: string slug: @@ -4725,12 +5567,6 @@ components: - REMOVED - THROTTLED type: string - version: - type: number - created_at: - type: number - updated_at: - type: number verify_jwt: type: boolean import_map: @@ -4739,17 +5575,47 @@ components: type: string import_map_path: type: string + compute_multiplier: + type: number required: + - version + - created_at + - updated_at - id - slug - name - status - - version - - created_at - - updated_at + V1CreateFunctionBody: + type: object + properties: + slug: + type: string + pattern: /^[A-Za-z0-9_-]+$/ + name: + type: string + body: + type: string + verify_jwt: + type: boolean + compute_multiplier: + type: number + minimum: 1 + maximum: 4 + required: + - slug + - name + - body FunctionSlugResponse: type: object properties: + version: + type: integer + created_at: + type: integer + format: int64 + updated_at: + type: integer + format: int64 id: type: string slug: @@ -4762,12 +5628,6 @@ components: - REMOVED - THROTTLED type: string - version: - type: number - created_at: - type: number - updated_at: - type: number 
verify_jwt: type: boolean import_map: @@ -4776,14 +5636,16 @@ components: type: string import_map_path: type: string + compute_multiplier: + type: number required: + - version + - created_at + - updated_at - id - slug - name - status - - version - - created_at - - updated_at V1UpdateFunctionBody: type: object properties: @@ -4793,6 +5655,10 @@ components: type: string verify_jwt: type: boolean + compute_multiplier: + type: number + minimum: 1 + maximum: 4 V1StorageBucketResponse: type: object properties: @@ -5021,9 +5887,11 @@ components: type: object properties: earliest_physical_backup_date_unix: - type: number + type: integer + format: int64 latest_physical_backup_date_unix: - type: number + type: integer + format: int64 V1BackupsResponse: type: object properties: @@ -5049,7 +5917,9 @@ components: type: object properties: recovery_time_target_unix: - type: number + type: integer + minimum: 0 + format: int64 required: - recovery_time_target_unix V1OrganizationMemberResponse: @@ -5088,11 +5958,16 @@ components: type: string enum: - AI_SQL_GENERATOR_OPT_IN + allowed_release_channels: + type: array + items: + $ref: '#/components/schemas/ReleaseChannel' id: type: string name: type: string required: - opt_in_tags + - allowed_release_channels - id - name diff --git a/cmd/bootstrap.go b/cmd/bootstrap.go index 1d8faa263..fd9f74912 100644 --- a/cmd/bootstrap.go +++ b/cmd/bootstrap.go @@ -43,15 +43,15 @@ var ( return err } if len(args) > 0 { - name := strings.ToLower(args[0]) + name := args[0] for _, t := range templates { - if t.Name == name { + if strings.EqualFold(t.Name, name) { starter = t break } } - if name != starter.Name { - return errors.New("Invalid template: " + args[0]) + if !strings.EqualFold(starter.Name, name) { + return errors.New("Invalid template: " + name) } } else { if err := promptStarterTemplate(ctx, templates); err != nil { diff --git a/cmd/branches.go b/cmd/branches.go index df2b48a8f..b541704bf 100644 --- a/cmd/branches.go +++ b/cmd/branches.go @@ 
-4,7 +4,6 @@ import ( "context" "fmt" "os" - "sort" "github.com/go-errors/errors" "github.com/spf13/afero" @@ -29,7 +28,7 @@ var ( } branchRegion = utils.EnumFlag{ - Allowed: flyRegions(), + Allowed: awsRegions(), } persistent bool @@ -83,7 +82,7 @@ var ( } else { branchId = args[0] } - return get.Run(ctx, branchId) + return get.Run(ctx, branchId, afero.NewOsFs()) }, } @@ -96,9 +95,8 @@ var ( string(api.BranchResponseStatusFUNCTIONSFAILED), }, } - branchName string - gitBranch string - resetOnPush bool + branchName string + gitBranch string branchUpdateCmd = &cobra.Command{ Use: "update [branch-id]", @@ -114,9 +112,6 @@ var ( if cmdFlags.Changed("git-branch") { body.GitBranch = &gitBranch } - if cmdFlags.Changed("reset-on-push") { - body.ResetOnPush = &resetOnPush - } if cmdFlags.Changed("persistent") { body.Persistent = &persistent } @@ -170,13 +165,14 @@ func init() { createFlags.Var(&branchRegion, "region", "Select a region to deploy the branch database.") createFlags.Var(&size, "size", "Select a desired instance size for the branch database.") createFlags.BoolVar(&persistent, "persistent", false, "Whether to create a persistent branch.") + getFlags := branchGetCmd.Flags() + getFlags.VarP(&utils.OutputFormat, "output", "o", "Output format of branch details.") branchesCmd.AddCommand(branchCreateCmd) branchesCmd.AddCommand(branchListCmd) branchesCmd.AddCommand(branchGetCmd) updateFlags := branchUpdateCmd.Flags() updateFlags.StringVar(&branchName, "name", "", "Rename the preview branch.") updateFlags.StringVar(&gitBranch, "git-branch", "", "Change the associated git branch.") - updateFlags.BoolVar(&resetOnPush, "reset-on-push", false, "Reset the preview branch on git push.") updateFlags.BoolVar(&persistent, "persistent", false, "Switch between ephemeral and persistent branch.") updateFlags.Var(&branchStatus, "status", "Override the current branch status.") branchesCmd.AddCommand(branchUpdateCmd) @@ -185,17 +181,6 @@ func init() { rootCmd.AddCommand(branchesCmd) } 
-func flyRegions() []string { - result := make([]string, len(utils.FlyRegions)) - i := 0 - for k := range utils.FlyRegions { - result[i] = k - i++ - } - sort.Strings(result) - return result -} - func promptBranchId(ctx context.Context, ref string) error { resp, err := utils.GetSupabase().V1ListAllBranchesWithResponse(ctx, ref) if err != nil { diff --git a/cmd/config.go b/cmd/config.go new file mode 100644 index 000000000..1d0f60733 --- /dev/null +++ b/cmd/config.go @@ -0,0 +1,30 @@ +package cmd + +import ( + "github.com/spf13/afero" + "github.com/spf13/cobra" + "github.com/supabase/cli/internal/config/push" + "github.com/supabase/cli/internal/utils/flags" +) + +var ( + configCmd = &cobra.Command{ + GroupID: groupManagementAPI, + Use: "config", + Short: "Manage Supabase project configurations", + } + + configPushCmd = &cobra.Command{ + Use: "push", + Short: "Pushes local config.toml to the linked project", + RunE: func(cmd *cobra.Command, args []string) error { + return push.Run(cmd.Context(), flags.ProjectRef, afero.NewOsFs()) + }, + } +) + +func init() { + configCmd.PersistentFlags().StringVar(&flags.ProjectRef, "project-ref", "", "Project ref of the Supabase project.") + configCmd.AddCommand(configPushCmd) + rootCmd.AddCommand(configCmd) +} diff --git a/cmd/db.go b/cmd/db.go index e2b109a21..bcc7ac5e7 100644 --- a/cmd/db.go +++ b/cmd/db.go @@ -4,6 +4,7 @@ import ( "fmt" "os" "os/signal" + "path/filepath" "github.com/spf13/afero" "github.com/spf13/cobra" @@ -125,7 +126,11 @@ var ( }, PostRun: func(cmd *cobra.Command, args []string) { if len(file) > 0 { - fmt.Fprintln(os.Stderr, "Dumped schema to "+utils.Bold(file)+".") + if absPath, err := filepath.Abs(file); err != nil { + fmt.Fprintln(os.Stderr, "Dumped schema to "+utils.Bold(file)+".") + } else { + fmt.Fprintln(os.Stderr, "Dumped schema to "+utils.Bold(absPath)+".") + } } }, } @@ -183,10 +188,15 @@ var ( }, } + noSeed bool + dbResetCmd = &cobra.Command{ Use: "reset", Short: "Resets the local database to current 
migrations", RunE: func(cmd *cobra.Command, args []string) error { + if noSeed { + utils.Config.Db.Seed.Enabled = false + } return reset.Run(cmd.Context(), migrationVersion, flags.DbConfig, afero.NewOsFs()) }, } @@ -196,19 +206,26 @@ var ( Value: lint.AllowedLevels[0], } + lintFailOn = utils.EnumFlag{ + Allowed: append([]string{"none"}, lint.AllowedLevels...), + Value: "none", + } + dbLintCmd = &cobra.Command{ Use: "lint", Short: "Checks local database for typing error", RunE: func(cmd *cobra.Command, args []string) error { - return lint.Run(cmd.Context(), schema, level.Value, flags.DbConfig, afero.NewOsFs()) + return lint.Run(cmd.Context(), schema, level.Value, lintFailOn.Value, flags.DbConfig, afero.NewOsFs()) }, } + fromBackup string + dbStartCmd = &cobra.Command{ Use: "start", Short: "Starts local Postgres database", RunE: func(cmd *cobra.Command, args []string) error { - return start.Run(cmd.Context(), afero.NewOsFs()) + return start.Run(cmd.Context(), fromBackup, afero.NewOsFs()) }, } @@ -266,7 +283,7 @@ func init() { pushFlags := dbPushCmd.Flags() pushFlags.BoolVar(&includeAll, "include-all", false, "Include all migrations not found on remote history table.") pushFlags.BoolVar(&includeRoles, "include-roles", false, "Include custom roles from "+utils.CustomRolesPath+".") - pushFlags.BoolVar(&includeSeed, "include-seed", false, "Include seed data from "+utils.SeedDataPath+".") + pushFlags.BoolVar(&includeSeed, "include-seed", false, "Include seed data from your config.") pushFlags.BoolVar(&dryRun, "dry-run", false, "Print the migrations that would be applied, but don't actually apply them.") pushFlags.String("db-url", "", "Pushes to the database specified by the connection string (must be percent-encoded).") pushFlags.Bool("linked", true, "Pushes to the linked project.") @@ -299,6 +316,7 @@ func init() { resetFlags.String("db-url", "", "Resets the database specified by the connection string (must be percent-encoded).") resetFlags.Bool("linked", false, "Resets 
the linked project with local migrations.") resetFlags.Bool("local", true, "Resets the local database with local migrations.") + resetFlags.BoolVar(&noSeed, "no-seed", false, "Skip running the seed script after reset.") dbResetCmd.MarkFlagsMutuallyExclusive("db-url", "linked", "local") resetFlags.StringVar(&migrationVersion, "version", "", "Reset up to the specified version.") dbCmd.AddCommand(dbResetCmd) @@ -310,8 +328,11 @@ func init() { dbLintCmd.MarkFlagsMutuallyExclusive("db-url", "linked", "local") lintFlags.StringSliceVarP(&schema, "schema", "s", []string{}, "Comma separated list of schema to include.") lintFlags.Var(&level, "level", "Error level to emit.") + lintFlags.Var(&lintFailOn, "fail-on", "Error level to exit with non-zero status.") dbCmd.AddCommand(dbLintCmd) // Build start command + startFlags := dbStartCmd.Flags() + startFlags.StringVar(&fromBackup, "from-backup", "", "Path to a logical backup file.") dbCmd.AddCommand(dbStartCmd) // Build test command dbCmd.AddCommand(dbTestCmd) diff --git a/cmd/functions.go b/cmd/functions.go index 6555bae9b..36dc47f94 100644 --- a/cmd/functions.go +++ b/cmd/functions.go @@ -13,6 +13,7 @@ import ( "github.com/supabase/cli/internal/functions/serve" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" + "github.com/supabase/cli/pkg/cast" ) var ( @@ -106,9 +107,9 @@ var ( } if len(inspectMode.Value) > 0 { - runtimeOption.InspectMode = utils.Ptr(serve.InspectMode(inspectMode.Value)) + runtimeOption.InspectMode = cast.Ptr(serve.InspectMode(inspectMode.Value)) } else if inspectBrk { - runtimeOption.InspectMode = utils.Ptr(serve.InspectModeBrk) + runtimeOption.InspectMode = cast.Ptr(serve.InspectModeBrk) } if runtimeOption.InspectMode == nil && runtimeOption.InspectMain { return fmt.Errorf("--inspect-main must be used together with one of these flags: [inspect inspect-mode]") diff --git a/cmd/inspect.go b/cmd/inspect.go index 299524fbe..2b55c3876 100644 --- a/cmd/inspect.go +++ 
b/cmd/inspect.go @@ -22,6 +22,7 @@ import ( "github.com/supabase/cli/internal/inspect/long_running_queries" "github.com/supabase/cli/internal/inspect/outliers" "github.com/supabase/cli/internal/inspect/replication_slots" + "github.com/supabase/cli/internal/inspect/role_configs" "github.com/supabase/cli/internal/inspect/role_connections" "github.com/supabase/cli/internal/inspect/seq_scans" "github.com/supabase/cli/internal/inspect/table_index_sizes" @@ -194,6 +195,14 @@ var ( }, } + inspectRoleConfigsCmd = &cobra.Command{ + Use: "role-configs", + Short: "Show configuration settings for database roles when they have been modified", + RunE: func(cmd *cobra.Command, args []string) error { + return role_configs.Run(cmd.Context(), flags.DbConfig, afero.NewOsFs()) + }, + } + inspectRoleConnectionsCmd = &cobra.Command{ Use: "role-connections", Short: "Show number of active connections for all database roles", @@ -247,6 +256,7 @@ func init() { inspectDBCmd.AddCommand(inspectTableRecordCountsCmd) inspectDBCmd.AddCommand(inspectBloatCmd) inspectDBCmd.AddCommand(inspectVacuumStatsCmd) + inspectDBCmd.AddCommand(inspectRoleConfigsCmd) inspectDBCmd.AddCommand(inspectRoleConnectionsCmd) inspectCmd.AddCommand(inspectDBCmd) reportCmd.Flags().StringVar(&outputDir, "output-dir", "", "Path to save CSV files in") diff --git a/cmd/link.go b/cmd/link.go index b1aec1d7a..65b8b5f08 100644 --- a/cmd/link.go +++ b/cmd/link.go @@ -8,7 +8,6 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" "github.com/supabase/cli/internal/link" - "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" "golang.org/x/term" ) @@ -31,7 +30,7 @@ var ( return err } fsys := afero.NewOsFs() - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } return link.Run(ctx, flags.ProjectRef, fsys) diff --git a/cmd/postgres.go b/cmd/postgres.go index 784b0aca4..a1f9c376b 100644 --- a/cmd/postgres.go +++ b/cmd/postgres.go @@ 
-3,6 +3,7 @@ package cmd import ( "github.com/spf13/afero" "github.com/spf13/cobra" + "github.com/supabase/cli/internal/postgresConfig/delete" "github.com/supabase/cli/internal/postgresConfig/get" "github.com/supabase/cli/internal/postgresConfig/update" "github.com/supabase/cli/internal/utils/flags" @@ -29,21 +30,39 @@ var ( Long: `Overriding the default Postgres config could result in unstable database behavior. Custom configuration also overrides the optimizations generated based on the compute add-ons in use.`, RunE: func(cmd *cobra.Command, args []string) error { - return update.Run(cmd.Context(), flags.ProjectRef, postgresConfigValues, postgresConfigUpdateReplaceMode, afero.NewOsFs()) + return update.Run(cmd.Context(), flags.ProjectRef, postgresConfigValues, postgresConfigUpdateReplaceMode, noRestart, afero.NewOsFs()) + }, + } + + postgresConfigDeleteCmd = &cobra.Command{ + Use: "delete", + Short: "Delete specific Postgres database config overrides", + Long: "Delete specific config overrides, reverting them to their default values.", + RunE: func(cmd *cobra.Command, args []string) error { + return delete.Run(cmd.Context(), flags.ProjectRef, postgresConfigKeysToDelete, noRestart, afero.NewOsFs()) }, } postgresConfigValues []string postgresConfigUpdateReplaceMode bool + postgresConfigKeysToDelete []string + noRestart bool ) func init() { postgresCmd.PersistentFlags().StringVar(&flags.ProjectRef, "project-ref", "", "Project ref of the Supabase project.") postgresCmd.AddCommand(postgresConfigGetCmd) postgresCmd.AddCommand(postgresConfigUpdateCmd) + postgresCmd.AddCommand(postgresConfigDeleteCmd) + + updateFlags := postgresConfigUpdateCmd.Flags() + updateFlags.StringSliceVar(&postgresConfigValues, "config", []string{}, "Config overrides specified as a 'key=value' pair") + updateFlags.BoolVar(&postgresConfigUpdateReplaceMode, "replace-existing-overrides", false, "If true, replaces all existing overrides with the ones provided. 
If false (default), merges existing overrides with the ones provided.") + updateFlags.BoolVar(&noRestart, "no-restart", false, "Do not restart the database after updating config.") - postgresConfigUpdateCmd.Flags().StringSliceVar(&postgresConfigValues, "config", []string{}, "Config overrides specified as a 'key=value' pair") - postgresConfigUpdateCmd.Flags().BoolVar(&postgresConfigUpdateReplaceMode, "replace-existing-overrides", false, "If true, replaces all existing overrides with the ones provided. If false (default), merges existing overrides with the ones provided.") + deleteFlags := postgresConfigDeleteCmd.Flags() + deleteFlags.StringSliceVar(&postgresConfigKeysToDelete, "config", []string{}, "Config keys to delete (comma-separated)") + deleteFlags.BoolVar(&noRestart, "no-restart", false, "Do not restart the database after deleting config.") rootCmd.AddCommand(postgresCmd) } diff --git a/cmd/projects.go b/cmd/projects.go index 08ec10dd8..c119e7171 100644 --- a/cmd/projects.go +++ b/cmd/projects.go @@ -33,21 +33,21 @@ var ( Allowed: awsRegions(), } plan = utils.EnumFlag{ - Allowed: []string{string(api.V1CreateProjectBodyPlanFree), string(api.V1CreateProjectBodyPlanPro)}, - Value: string(api.V1CreateProjectBodyPlanFree), + Allowed: []string{string(api.V1CreateProjectBodyDtoPlanFree), string(api.V1CreateProjectBodyDtoPlanPro)}, + Value: string(api.V1CreateProjectBodyDtoPlanFree), } size = utils.EnumFlag{ Allowed: []string{ - string(api.Micro), - string(api.Small), - string(api.Medium), - string(api.Large), - string(api.Xlarge), - string(api.N2xlarge), - string(api.N4xlarge), - string(api.N8xlarge), - string(api.N12xlarge), - string(api.N16xlarge), + string(api.DesiredInstanceSizeMicro), + string(api.DesiredInstanceSizeSmall), + string(api.DesiredInstanceSizeMedium), + string(api.DesiredInstanceSizeLarge), + string(api.DesiredInstanceSizeXlarge), + string(api.DesiredInstanceSizeN2xlarge), + string(api.DesiredInstanceSizeN4xlarge), + 
string(api.DesiredInstanceSizeN8xlarge), + string(api.DesiredInstanceSizeN12xlarge), + string(api.DesiredInstanceSizeN16xlarge), }, } @@ -69,14 +69,14 @@ var ( if len(args) > 0 { projectName = args[0] } - body := api.V1CreateProjectBody{ + body := api.V1CreateProjectBodyDto{ Name: projectName, OrganizationId: orgId, DbPass: dbPassword, - Region: api.V1CreateProjectBodyRegion(region.Value), + Region: api.V1CreateProjectBodyDtoRegion(region.Value), } if cmd.Flags().Changed("size") { - body.DesiredInstanceSize = (*api.DesiredInstanceSize)(&size.Value) + body.DesiredInstanceSize = (*api.V1CreateProjectBodyDtoDesiredInstanceSize)(&size.Value) } return create.Run(cmd.Context(), body, afero.NewOsFs()) }, diff --git a/cmd/root.go b/cmd/root.go index de3b9375d..35540a897 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -12,11 +12,9 @@ import ( "github.com/getsentry/sentry-go" "github.com/go-errors/errors" - "github.com/mitchellh/mapstructure" "github.com/spf13/afero" "github.com/spf13/cobra" "github.com/spf13/viper" - "github.com/supabase/cli/internal/services" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" "golang.org/x/mod/semver" @@ -223,18 +221,8 @@ func recoverAndExit() { func init() { cobra.OnInitialize(func() { - // Allow overriding config object with automatic env - // Ref: https://github.com/spf13/viper/issues/761 - envKeysMap := map[string]interface{}{} - dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &envKeysMap, - IgnoreUntaggedFields: true, - }) - cobra.CheckErr(err) - cobra.CheckErr(dec.Decode(utils.Config)) - cobra.CheckErr(viper.MergeConfigMap(envKeysMap)) viper.SetEnvPrefix("SUPABASE") - viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_", ".", "_")) + viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) viper.AutomaticEnv() }) @@ -243,6 +231,7 @@ func init() { flags.String("workdir", "", "path to a Supabase project directory") flags.Bool("experimental", false, "enable experimental 
features") flags.String("network-id", "", "use the specified docker network instead of a generated one") + flags.Var(&utils.OutputFormat, "output", "output format of status variables") flags.Var(&utils.DNSResolver, "dns-resolver", "lookup domain names using the specified resolver") flags.BoolVar(&createTicket, "create-ticket", false, "create a support ticket for any CLI error") cobra.CheckErr(viper.BindPFlags(flags)) @@ -260,7 +249,7 @@ func GetRootCmd() *cobra.Command { } func addSentryScope(scope *sentry.Scope) { - serviceImages := services.GetServiceImages() + serviceImages := utils.Config.GetServiceImages() imageToVersion := make(map[string]interface{}, len(serviceImages)) for _, image := range serviceImages { parts := strings.Split(image, ":") diff --git a/cmd/snippets.go b/cmd/snippets.go index 86f439c67..f69b232af 100644 --- a/cmd/snippets.go +++ b/cmd/snippets.go @@ -5,6 +5,7 @@ import ( "github.com/spf13/cobra" "github.com/supabase/cli/internal/snippets/download" "github.com/supabase/cli/internal/snippets/list" + "github.com/supabase/cli/internal/utils/flags" ) var ( @@ -35,6 +36,7 @@ var ( ) func init() { + snippetsCmd.PersistentFlags().StringVar(&flags.ProjectRef, "project-ref", "", "Project ref of the Supabase project.") snippetsCmd.AddCommand(snippetsListCmd) snippetsCmd.AddCommand(snippetsDownloadCmd) rootCmd.AddCommand(snippetsCmd) diff --git a/cmd/sso.go b/cmd/sso.go index 766c749fe..a9b4db7d3 100644 --- a/cmd/sso.go +++ b/cmd/sso.go @@ -33,10 +33,6 @@ var ( ssoDomains []string ssoAddDomains []string ssoRemoveDomains []string - ssoOutput = utils.EnumFlag{ - Allowed: utils.OutputDefaultAllowed, - Value: utils.OutputPretty, - } ssoAddCmd = &cobra.Command{ Use: "add", @@ -47,7 +43,7 @@ var ( return create.Run(cmd.Context(), create.RunParams{ ProjectRef: flags.ProjectRef, Type: ssoProviderType.String(), - Format: ssoOutput.Value, + Format: utils.OutputFormat.Value, MetadataFile: ssoMetadataFile, MetadataURL: ssoMetadataURL, SkipURLValidation: 
ssoSkipURLValidation, @@ -68,7 +64,7 @@ var ( return errors.Errorf("identity provider ID %q is not a UUID", args[0]) } - return remove.Run(cmd.Context(), flags.ProjectRef, args[0], ssoOutput.Value) + return remove.Run(cmd.Context(), flags.ProjectRef, args[0], utils.OutputFormat.Value) }, } @@ -86,7 +82,7 @@ var ( return update.Run(cmd.Context(), update.RunParams{ ProjectRef: flags.ProjectRef, ProviderID: args[0], - Format: ssoOutput.Value, + Format: utils.OutputFormat.Value, MetadataFile: ssoMetadataFile, MetadataURL: ssoMetadataURL, @@ -110,7 +106,7 @@ var ( return errors.Errorf("identity provider ID %q is not a UUID", args[0]) } - format := ssoOutput.Value + format := utils.OutputFormat.Value if ssoMetadata { format = utils.OutputMetadata } @@ -125,7 +121,7 @@ var ( Long: "List all connections to a SSO identity provider to your Supabase project.", Example: ` supabase sso list --project-ref mwjylndxudmiehsxhmmz`, RunE: func(cmd *cobra.Command, args []string) error { - return list.Run(cmd.Context(), flags.ProjectRef, ssoOutput.Value) + return list.Run(cmd.Context(), flags.ProjectRef, utils.OutputFormat.Value) }, } @@ -135,7 +131,7 @@ var ( Long: "Returns all of the important SSO information necessary for your project to be registered with a SAML 2.0 compatible identity provider.", Example: ` supabase sso info --project-ref mwjylndxudmiehsxhmmz`, RunE: func(cmd *cobra.Command, args []string) error { - return info.Run(cmd.Context(), flags.ProjectRef, ssoOutput.Value) + return info.Run(cmd.Context(), flags.ProjectRef, utils.OutputFormat.Value) }, } ) @@ -143,7 +139,6 @@ var ( func init() { persistentFlags := ssoCmd.PersistentFlags() persistentFlags.StringVar(&flags.ProjectRef, "project-ref", "", "Project ref of the Supabase project.") - persistentFlags.VarP(&ssoOutput, "output", "o", "Output format") ssoAddFlags := ssoAddCmd.Flags() ssoAddFlags.VarP(&ssoProviderType, "type", "t", "Type of identity provider (according to supported protocol).") 
ssoAddFlags.StringSliceVar(&ssoDomains, "domains", nil, "Comma separated list of email domains to associate with the added identity provider.") diff --git a/cmd/start.go b/cmd/start.go index b3cba1a4c..a7af80e0c 100644 --- a/cmd/start.go +++ b/cmd/start.go @@ -1,13 +1,39 @@ package cmd import ( + "fmt" + "os" + "sort" "strings" "github.com/spf13/afero" "github.com/spf13/cobra" "github.com/supabase/cli/internal/start" + "github.com/supabase/cli/internal/utils" ) +func validateExcludedContainers(excludedContainers []string) { + // Validate excluded containers + validContainers := start.ExcludableContainers() + var invalidContainers []string + + for _, e := range excludedContainers { + if !utils.SliceContains(validContainers, e) { + invalidContainers = append(invalidContainers, e) + } + } + + if len(invalidContainers) > 0 { + // Sort the names list so it's easier to visually spot the one you looking for + sort.Strings(validContainers) + warning := fmt.Sprintf("%s The following container names are not valid to exclude: %s\nValid containers to exclude are: %s\n", + utils.Yellow("WARNING:"), + utils.Aqua(strings.Join(invalidContainers, ", ")), + utils.Aqua(strings.Join(validContainers, ", "))) + fmt.Fprint(os.Stderr, warning) + } +} + var ( allowedContainers = start.ExcludableContainers() excludedContainers []string @@ -19,6 +45,7 @@ var ( Use: "start", Short: "Start containers for Supabase local development", RunE: func(cmd *cobra.Command, args []string) error { + validateExcludedContainers(excludedContainers) return start.Run(cmd.Context(), afero.NewOsFs(), excludedContainers, ignoreHealthCheck) }, } diff --git a/cmd/status.go b/cmd/status.go index 5f131c90d..13540bfb1 100644 --- a/cmd/status.go +++ b/cmd/status.go @@ -18,6 +18,7 @@ var ( Allowed: append([]string{utils.OutputEnv}, utils.OutputDefaultAllowed...), Value: utils.OutputPretty, } + statusCmd = &cobra.Command{ GroupID: groupLocalDev, Use: "status", diff --git a/cmd/stop.go b/cmd/stop.go index 
19401326f..6a6f4aa55 100644 --- a/cmd/stop.go +++ b/cmd/stop.go @@ -12,6 +12,7 @@ import ( var ( noBackup bool projectId string + all bool stopCmd = &cobra.Command{ GroupID: groupLocalDev, @@ -19,7 +20,7 @@ var ( Short: "Stop all local Supabase containers", RunE: func(cmd *cobra.Command, args []string) error { ctx, _ := signal.NotifyContext(cmd.Context(), os.Interrupt) - return stop.Run(ctx, !noBackup, projectId, afero.NewOsFs()) + return stop.Run(ctx, !noBackup, projectId, all, afero.NewOsFs()) }, } ) @@ -30,5 +31,7 @@ func init() { flags.StringVar(&projectId, "project-id", "", "Local project ID to stop.") cobra.CheckErr(flags.MarkHidden("backup")) flags.BoolVar(&noBackup, "no-backup", false, "Deletes all data volumes after stopping.") + flags.BoolVar(&all, "all", false, "Stop all local Supabase instances from all projects across the machine.") + stopCmd.MarkFlagsMutuallyExclusive("project-id", "all") rootCmd.AddCommand(stopCmd) } diff --git a/docs/supabase/db/lint.md b/docs/supabase/db/lint.md index 33415dc21..3718d2d31 100644 --- a/docs/supabase/db/lint.md +++ b/docs/supabase/db/lint.md @@ -7,3 +7,11 @@ Requires the local development stack to be running when linting against the loca Runs `plpgsql_check` extension in the local Postgres container to check for errors in all schemas. The default lint level is `warning` and can be raised to error via the `--level` flag. To lint against specific schemas only, pass in the `--schema` flag. + +The `--fail-on` flag can be used to control when the command should exit with a non-zero status code. The possible values are: + +- `none` (default): Always exit with a zero status code, regardless of lint results. +- `warning`: Exit with a non-zero status code if any warnings or errors are found. +- `error`: Exit with a non-zero status code only if errors are found. + +This flag is particularly useful in CI/CD pipelines where you want to fail the build based on certain lint conditions. 
\ No newline at end of file diff --git a/docs/supabase/db/reset.md b/docs/supabase/db/reset.md index 98a8003ca..acb9b9832 100644 --- a/docs/supabase/db/reset.md +++ b/docs/supabase/db/reset.md @@ -6,4 +6,4 @@ Requires the local development stack to be started by running `supabase start`. Recreates the local Postgres container and applies all local migrations found in `supabase/migrations` directory. If test data is defined in `supabase/seed.sql`, it will be seeded after the migrations are run. Any other data or schema changes made during local development will be discarded. -Note that since Postgres roles are cluster level entities, those changes will persist between resets. In order to reset custom roles, you need to restart the local development stack. +When running db reset with `--linked` or `--db-url` flag, a SQL script is executed to identify and drop all user created entities in the remote database. Since Postgres roles are cluster level entities, any custom roles created through the dashboard or `supabase/roles.sql` will not be deleted by remote reset. diff --git a/docs/supabase/stop.md b/docs/supabase/stop.md index e18261ab3..870fa8603 100644 --- a/docs/supabase/stop.md +++ b/docs/supabase/stop.md @@ -5,3 +5,5 @@ Stops the Supabase local development stack. Requires `supabase/config.toml` to be created in your current working directory by running `supabase init`. All Docker resources are maintained across restarts. Use `--no-backup` flag to reset your local development data between restarts. + +Use the `--all` flag to stop all local Supabase projects instances on the machine. Use with caution with `--no-backup` as it will delete all supabase local projects data. 
\ No newline at end of file diff --git a/go.mod b/go.mod index fe4ede743..68a057c2b 100644 --- a/go.mod +++ b/go.mod @@ -1,29 +1,29 @@ module github.com/supabase/cli -go 1.22.4 +go 1.23.2 require ( - github.com/BurntSushi/toml v1.4.0 - github.com/Netflix/go-env v0.0.0-20220526054621-78278af1949d - github.com/andybalholm/brotli v1.1.0 + github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c + github.com/Netflix/go-env v0.1.2 + github.com/andybalholm/brotli v1.1.1 github.com/cenkalti/backoff/v4 v4.3.0 github.com/charmbracelet/bubbles v0.18.0 github.com/charmbracelet/bubbletea v0.25.0 github.com/charmbracelet/glamour v0.7.0 github.com/charmbracelet/lipgloss v0.12.1 - github.com/containers/common v0.59.2 + github.com/containers/common v0.61.1 github.com/deepmap/oapi-codegen/v2 v2.2.0 - github.com/docker/cli v26.1.5+incompatible - github.com/docker/docker v26.1.5+incompatible + github.com/docker/cli v27.5.1+incompatible + github.com/docker/docker v27.5.1+incompatible github.com/docker/go-connections v0.5.0 github.com/docker/go-units v0.5.0 - github.com/getsentry/sentry-go v0.28.1 - github.com/gin-gonic/gin v1.10.0 + github.com/ecies/go/v2 v2.0.10 + github.com/getsentry/sentry-go v0.31.1 github.com/go-errors/errors v1.5.1 - github.com/go-git/go-git/v5 v5.12.0 - github.com/go-xmlfmt/xmlfmt v1.1.2 + github.com/go-git/go-git/v5 v5.13.2 + github.com/go-xmlfmt/xmlfmt v1.1.3 github.com/golang-jwt/jwt/v5 v5.2.1 - github.com/golangci/golangci-lint v1.59.1 + github.com/golangci/golangci-lint v1.63.4 github.com/google/go-github/v62 v62.0.0 github.com/google/go-querystring v1.1.0 github.com/google/uuid v1.6.0 @@ -32,27 +32,27 @@ require ( github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 github.com/jackc/pgproto3/v2 v2.3.3 - github.com/jackc/pgtype v1.14.3 + github.com/jackc/pgtype v1.14.4 github.com/jackc/pgx/v4 v4.18.3 github.com/joho/godotenv v1.5.1 - github.com/matoous/go-nanoid/v2 v2.1.0 
github.com/mitchellh/mapstructure v1.5.0 github.com/muesli/reflow v0.3.0 github.com/oapi-codegen/runtime v1.1.1 - github.com/slack-go/slack v0.13.1 - github.com/spf13/afero v1.11.0 + github.com/slack-go/slack v0.15.0 + github.com/spf13/afero v1.12.0 github.com/spf13/cobra v1.8.1 - github.com/spf13/pflag v1.0.5 + github.com/spf13/pflag v1.0.6 github.com/spf13/viper v1.19.0 - github.com/stretchr/testify v1.9.0 - github.com/stripe/pg-schema-diff v0.7.0 + github.com/stretchr/testify v1.10.0 + github.com/stripe/pg-schema-diff v0.8.0 + github.com/tidwall/jsonc v0.3.2 github.com/withfig/autocomplete-tools/packages/cobra v1.2.0 - github.com/zalando/go-keyring v0.2.5 - go.opentelemetry.io/otel v1.28.0 - golang.org/x/mod v0.20.0 - golang.org/x/oauth2 v0.21.0 - golang.org/x/term v0.22.0 - google.golang.org/grpc v1.65.0 + github.com/zalando/go-keyring v0.2.6 + go.opentelemetry.io/otel v1.34.0 + golang.org/x/mod v0.22.0 + golang.org/x/oauth2 v0.25.0 + golang.org/x/term v0.28.0 + google.golang.org/grpc v1.70.0 gopkg.in/yaml.v3 v3.0.1 gotest.tools/gotestsum v1.12.0 ) @@ -60,43 +60,42 @@ require ( require ( 4d63.com/gocheckcompilerdirectives v1.2.1 // indirect 4d63.com/gochecknoglobals v0.2.1 // indirect - dario.cat/mergo v1.0.0 // indirect - github.com/4meepo/tagalign v1.3.4 // indirect - github.com/Abirdcfly/dupword v0.0.14 // indirect - github.com/Antonboom/errname v0.1.13 // indirect - github.com/Antonboom/nilnil v0.1.9 // indirect - github.com/Antonboom/testifylint v1.3.1 // indirect + al.essio.dev/pkg/shellescape v1.5.1 // indirect + dario.cat/mergo v1.0.1 // indirect + github.com/4meepo/tagalign v1.4.1 // indirect + github.com/Abirdcfly/dupword v0.1.3 // indirect + github.com/Antonboom/errname v1.0.0 // indirect + github.com/Antonboom/nilnil v1.0.1 // indirect + github.com/Antonboom/testifylint v1.5.2 // indirect github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect - github.com/Crocmagnon/fatcontext v0.2.2 // indirect + 
github.com/Crocmagnon/fatcontext v0.5.3 // indirect github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect - github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0 // indirect - github.com/Masterminds/semver/v3 v3.2.1 // indirect + github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0 // indirect + github.com/Masterminds/semver/v3 v3.3.0 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/OpenPeeDeeP/depguard/v2 v2.2.0 // indirect - github.com/ProtonMail/go-crypto v1.0.0 // indirect + github.com/ProtonMail/go-crypto v1.1.5 // indirect github.com/alecthomas/chroma/v2 v2.8.0 // indirect - github.com/alecthomas/go-check-sumtype v0.1.4 // indirect - github.com/alessio/shellescape v1.4.1 // indirect - github.com/alexkohler/nakedret/v2 v2.0.4 // indirect + github.com/alecthomas/go-check-sumtype v0.3.1 // indirect + github.com/alexkohler/nakedret/v2 v2.0.5 // indirect github.com/alexkohler/prealloc v1.0.0 // indirect github.com/alingse/asasalint v0.0.11 // indirect + github.com/alingse/nilnesserr v0.1.1 // indirect github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect github.com/ashanbrown/forbidigo v1.6.0 // indirect - github.com/ashanbrown/makezero v1.1.1 // indirect + github.com/ashanbrown/makezero v1.2.0 // indirect github.com/atotto/clipboard v0.1.4 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bitfield/gotestdox v0.2.2 // indirect - github.com/bkielbasa/cyclop v1.2.1 // indirect + github.com/bkielbasa/cyclop v1.2.3 // indirect github.com/blizzy78/varnamelen v0.8.0 // indirect - github.com/bombsimon/wsl/v4 v4.2.1 // indirect - github.com/breml/bidichk v0.2.7 // indirect - github.com/breml/errchkjson v0.3.6 // indirect - github.com/butuzov/ireturn v0.3.0 // indirect - github.com/butuzov/mirror v1.2.0 // indirect - github.com/bytedance/sonic v1.11.6 // indirect - github.com/bytedance/sonic/loader v0.1.1 // 
indirect + github.com/bombsimon/wsl/v4 v4.5.0 // indirect + github.com/breml/bidichk v0.3.2 // indirect + github.com/breml/errchkjson v0.4.0 // indirect + github.com/butuzov/ireturn v0.3.1 // indirect + github.com/butuzov/mirror v1.3.0 // indirect github.com/catenacyber/perfsprint v0.7.1 // indirect github.com/ccojocar/zxcvbn-go v1.0.2 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect @@ -104,49 +103,44 @@ require ( github.com/charmbracelet/harmonica v0.2.0 // indirect github.com/charmbracelet/x/ansi v0.1.4 // indirect github.com/chavacava/garif v0.1.0 // indirect - github.com/ckaznocha/intrange v0.1.2 // indirect + github.com/ckaznocha/intrange v0.3.0 // indirect github.com/cloudflare/circl v1.3.7 // indirect - github.com/cloudwego/base64x v0.1.4 // indirect - github.com/cloudwego/iasm v0.2.0 // indirect github.com/containerd/console v1.0.4-0.20230313162750-1ae8d489ac81 // indirect github.com/containerd/log v0.1.0 // indirect - github.com/containers/storage v1.54.0 // indirect - github.com/curioswitch/go-reassign v0.2.0 // indirect - github.com/cyphar/filepath-securejoin v0.2.5 // indirect - github.com/daixiang0/gci v0.13.4 // indirect - github.com/danieljoos/wincred v1.2.1 // indirect + github.com/containers/storage v1.56.1 // indirect + github.com/curioswitch/go-reassign v0.3.0 // indirect + github.com/cyphar/filepath-securejoin v0.3.6 // indirect + github.com/daixiang0/gci v0.13.5 // indirect + github.com/danieljoos/wincred v1.2.2 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 // indirect github.com/denis-tingaikin/go-header v0.5.0 // indirect github.com/distribution/reference v0.6.0 // indirect - github.com/dlclark/regexp2 v1.4.0 // indirect + github.com/dlclark/regexp2 v1.11.0 // indirect github.com/dnephin/pflag v1.0.7 // indirect github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker-credential-helpers v0.8.1 // indirect + 
github.com/docker/docker-credential-helpers v0.8.2 // indirect github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c // indirect github.com/docker/go-metrics v0.0.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect + github.com/ethereum/go-ethereum v1.14.13 // indirect github.com/ettle/strcase v0.2.0 // indirect - github.com/fatih/color v1.17.0 // indirect + github.com/fatih/color v1.18.0 // indirect github.com/fatih/structtag v1.2.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/firefart/nonamedreturns v1.0.5 // indirect - github.com/fsnotify/fsnotify v1.7.0 // indirect + github.com/fsnotify/fsnotify v1.8.0 // indirect github.com/fvbommel/sortorder v1.1.0 // indirect github.com/fzipp/gocyclo v0.6.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.3 // indirect github.com/getkin/kin-openapi v0.124.0 // indirect - github.com/ghostiam/protogetter v0.3.6 // indirect - github.com/gin-contrib/sse v0.1.0 // indirect - github.com/go-critic/go-critic v0.11.4 // indirect + github.com/ghostiam/protogetter v0.3.8 // indirect + github.com/go-critic/go-critic v0.11.5 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect - github.com/go-git/go-billy/v5 v5.5.0 // indirect + github.com/go-git/go-billy/v5 v5.6.2 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.21.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect - github.com/go-playground/locales v0.14.1 // indirect - github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/go-playground/validator/v10 v10.20.0 // indirect github.com/go-toolsmith/astcast v1.1.0 // indirect github.com/go-toolsmith/astcopy v1.1.0 // indirect github.com/go-toolsmith/astequal v1.2.0 // indirect @@ -154,18 +148,17 @@ require ( github.com/go-toolsmith/astp v1.1.0 // indirect github.com/go-toolsmith/strparse v1.1.0 // indirect github.com/go-toolsmith/typep v1.1.0 // 
indirect - github.com/go-viper/mapstructure/v2 v2.0.0 // indirect + github.com/go-viper/mapstructure/v2 v2.2.1 // indirect github.com/gobwas/glob v0.2.3 // indirect - github.com/goccy/go-json v0.10.2 // indirect github.com/godbus/dbus/v5 v5.1.0 // indirect - github.com/gofrs/flock v0.8.1 // indirect + github.com/gofrs/flock v0.12.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect - github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e // indirect + github.com/golangci/go-printf-func-name v0.1.0 // indirect + github.com/golangci/gofmt v0.0.0-20241223200906-057b0627d9b9 // indirect github.com/golangci/misspell v0.6.0 // indirect - github.com/golangci/modinfo v0.3.4 // indirect github.com/golangci/plugin-module-register v0.1.1 // indirect github.com/golangci/revgrep v0.5.3 // indirect github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect @@ -181,7 +174,9 @@ require ( github.com/gostaticanalysis/nilerr v0.1.1 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect + github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect github.com/hashicorp/go-version v1.7.0 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/hexops/gotextdiff v1.0.3 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -193,29 +188,27 @@ require ( github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/jgautheron/goconst v1.7.1 // indirect github.com/jingyugao/rowserrcheck v1.1.1 // indirect - github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect - github.com/jjti/go-spancheck v0.6.1 // indirect + github.com/jjti/go-spancheck v0.6.4 
// indirect github.com/josharian/intern v1.0.0 // indirect - github.com/json-iterator/go v1.1.12 // indirect - github.com/julz/importas v0.1.0 // indirect + github.com/julz/importas v0.2.0 // indirect github.com/karamaru-alpha/copyloopvar v1.1.0 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect - github.com/kisielk/errcheck v1.7.0 // indirect + github.com/kisielk/errcheck v1.8.0 // indirect github.com/kkHAIKE/contextcheck v1.1.5 // indirect - github.com/klauspost/cpuid/v2 v2.2.7 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/kulti/thelper v0.6.3 // indirect github.com/kunwardeep/paralleltest v1.0.10 // indirect github.com/kyoh86/exportloopref v0.1.11 // indirect - github.com/lasiar/canonicalheader v1.1.1 // indirect - github.com/ldez/gomoddirectives v0.2.4 // indirect - github.com/ldez/tagliatelle v0.5.0 // indirect - github.com/leodido/go-urn v1.4.0 // indirect + github.com/lasiar/canonicalheader v1.1.2 // indirect + github.com/ldez/exptostd v0.3.1 // indirect + github.com/ldez/gomoddirectives v0.6.0 // indirect + github.com/ldez/grignotin v0.7.0 // indirect + github.com/ldez/tagliatelle v0.7.1 // indirect + github.com/ldez/usetesting v0.4.2 // indirect github.com/leonklingele/grouper v1.1.2 // indirect github.com/lib/pq v1.10.9 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect - github.com/lufeee/execinquery v1.2.1 // indirect github.com/macabu/inamedparam v0.1.3 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect @@ -225,9 +218,9 @@ require ( github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-localereader v0.0.1 // indirect - github.com/mattn/go-runewidth v0.0.15 // indirect - github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect - github.com/mgechev/revive v1.3.7 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + 
github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect + github.com/mgechev/revive v1.5.1 // indirect github.com/microcosm-cc/bluemonday v1.0.25 // indirect github.com/miekg/pkcs11 v1.1.1 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect @@ -235,10 +228,8 @@ require ( github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/sys/sequential v0.6.0 // indirect github.com/moby/term v0.5.0 // indirect - github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect - github.com/moricho/tparallel v0.3.1 // indirect + github.com/moricho/tparallel v0.3.2 // indirect github.com/morikuni/aec v1.0.0 // indirect github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b // indirect github.com/muesli/cancelreader v0.2.2 // indirect @@ -246,64 +237,64 @@ require ( github.com/nakabonne/nestif v0.3.1 // indirect github.com/nishanths/exhaustive v0.12.0 // indirect github.com/nishanths/predeclared v0.2.2 // indirect - github.com/nunnatsa/ginkgolinter v0.16.2 // indirect + github.com/nunnatsa/ginkgolinter v0.18.4 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0 // indirect - github.com/pelletier/go-toml/v2 v2.2.2 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/perimeterx/marshmallow v1.1.5 // indirect - github.com/pjbgf/sha1cd v0.3.0 // indirect + github.com/pjbgf/sha1cd v0.3.2 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/polyfloyd/go-errorlint v1.5.2 // indirect + github.com/polyfloyd/go-errorlint v1.7.0 // indirect github.com/prometheus/client_golang v1.12.1 // indirect - github.com/prometheus/client_model v0.2.0 // indirect + 
github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a // indirect github.com/prometheus/common v0.32.1 // indirect github.com/prometheus/procfs v0.7.3 // indirect - github.com/quasilyte/go-ruleguard v0.4.2 // indirect + github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 // indirect github.com/quasilyte/go-ruleguard/dsl v0.3.22 // indirect github.com/quasilyte/gogrep v0.5.0 // indirect github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect + github.com/raeperd/recvcheck v0.2.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect - github.com/rogpeppe/go-internal v1.12.0 // indirect - github.com/ryancurrah/gomodguard v1.3.2 // indirect + github.com/rogpeppe/go-internal v1.13.1 // indirect + github.com/ryancurrah/gomodguard v1.3.5 // indirect github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect github.com/sagikazarmark/locafero v0.4.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect github.com/sahilm/fuzzy v0.1.1-0.20230530133925-c48e322e2a8f // indirect - github.com/sanposhiho/wastedassign/v2 v2.0.7 // indirect - github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 // indirect + github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect + github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 // indirect github.com/sashamelentyev/interfacebloat v1.1.0 // indirect - github.com/sashamelentyev/usestdlibvars v1.26.0 // indirect - github.com/securego/gosec/v2 v2.20.1-0.20240525090044-5f0084eb01a9 // indirect + github.com/sashamelentyev/usestdlibvars v1.28.0 // indirect + github.com/securego/gosec/v2 v2.21.4 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/sivchari/containedctx v1.0.3 // indirect - github.com/sivchari/tenv v1.7.1 // indirect - 
github.com/skeema/knownhosts v1.2.2 // indirect - github.com/sonatard/noctx v0.0.2 // indirect + github.com/sivchari/tenv v1.12.1 // indirect + github.com/skeema/knownhosts v1.3.0 // indirect + github.com/sonatard/noctx v0.1.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/sourcegraph/go-diff v0.7.0 // indirect github.com/spf13/cast v1.6.0 // indirect github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect - github.com/stbenjam/no-sprintf-host-port v0.1.1 // indirect + github.com/stbenjam/no-sprintf-host-port v0.2.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect - github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c // indirect - github.com/tdakkota/asciicheck v0.2.0 // indirect - github.com/tetafro/godot v1.4.16 // indirect + github.com/tdakkota/asciicheck v0.3.0 // indirect + github.com/tetafro/godot v1.4.20 // indirect github.com/theupdateframework/notary v0.7.0 // indirect - github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 // indirect - github.com/timonwong/loggercheck v0.9.4 // indirect - github.com/tomarrell/wrapcheck/v2 v2.8.3 // indirect + github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3 // indirect + github.com/timonwong/loggercheck v0.10.1 // indirect + github.com/tomarrell/wrapcheck/v2 v2.10.0 // indirect github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect - github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.2.12 // indirect - github.com/ultraware/funlen v0.1.0 // indirect - github.com/ultraware/whitespace v0.1.1 // indirect - github.com/uudashr/gocognit v1.1.2 // indirect + github.com/ultraware/funlen v0.2.0 // indirect + github.com/ultraware/whitespace v0.2.0 // indirect + github.com/uudashr/gocognit v1.2.0 // indirect + github.com/uudashr/iface v1.3.0 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect 
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect @@ -315,38 +306,38 @@ require ( github.com/yuin/goldmark v1.5.4 // indirect github.com/yuin/goldmark-emoji v1.0.2 // indirect gitlab.com/bosi/decorder v0.4.2 // indirect - go-simpler.org/musttag v0.12.2 // indirect - go-simpler.org/sloglint v0.7.1 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect + go-simpler.org/musttag v0.13.0 // indirect + go-simpler.org/sloglint v0.7.2 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0 // indirect - go.opentelemetry.io/otel/metric v1.28.0 // indirect - go.opentelemetry.io/otel/sdk v1.28.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.28.0 // indirect - go.opentelemetry.io/otel/trace v1.28.0 // indirect + go.opentelemetry.io/otel/metric v1.34.0 // indirect + go.opentelemetry.io/otel/sdk v1.32.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.32.0 // indirect + go.opentelemetry.io/otel/trace v1.34.0 // indirect go.opentelemetry.io/proto/otlp v1.3.1 // indirect go.uber.org/atomic v1.9.0 // indirect - go.uber.org/automaxprocs v1.5.3 // indirect + go.uber.org/automaxprocs v1.6.0 // indirect go.uber.org/multierr v1.9.0 // indirect go.uber.org/zap v1.24.0 // indirect - golang.org/x/arch v0.8.0 // indirect - golang.org/x/crypto v0.24.0 // indirect - golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect - golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f // indirect - golang.org/x/net v0.26.0 // indirect - golang.org/x/sync v0.7.0 // indirect - golang.org/x/sys v0.22.0 // 
indirect - golang.org/x/text v0.16.0 // indirect - golang.org/x/tools v0.22.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094 // indirect - google.golang.org/protobuf v1.34.2 // indirect + golang.org/x/crypto v0.32.0 // indirect + golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect + golang.org/x/exp/typeparams v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/net v0.34.0 // indirect + golang.org/x/sync v0.10.0 // indirect + golang.org/x/sys v0.29.0 // indirect + golang.org/x/text v0.21.0 // indirect + golang.org/x/tools v0.28.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8 // indirect + google.golang.org/protobuf v1.36.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - honnef.co/go/tools v0.4.7 // indirect - mvdan.cc/gofumpt v0.6.0 // indirect + honnef.co/go/tools v0.5.1 // indirect + mvdan.cc/gofumpt v0.7.0 // indirect mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f // indirect ) diff --git a/go.sum b/go.sum index 9dcf2a2a7..7af9ae762 100644 --- a/go.sum +++ b/go.sum @@ -2,6 +2,8 @@ 4d63.com/gocheckcompilerdirectives v1.2.1/go.mod h1:yjDJSxmDTtIHHCqX0ufRYZDL6vQtMG7tJdKVeWwsqvs= 4d63.com/gochecknoglobals v0.2.1 h1:1eiorGsgHOFOuoOiJDy2psSrQbRdIHrlge0IJIkUgDc= 4d63.com/gochecknoglobals v0.2.1/go.mod h1:KRE8wtJB3CXCsb1xy421JfTHIIbmT3U5ruxw2Qu8fSU= +al.essio.dev/pkg/shellescape v1.5.1 h1:86HrALUujYS/h+GtqoB26SBEdkWfmMI6FubjXlsXyho= +al.essio.dev/pkg/shellescape v1.5.1/go.mod h1:6sIqp7X2P6mThCQ7twERpZTuigpr6KbZWtls1U8I890= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= 
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -34,69 +36,69 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= -dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= +dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/4meepo/tagalign v1.3.4 h1:P51VcvBnf04YkHzjfclN6BbsopfJR5rxs1n+5zHt+w8= -github.com/4meepo/tagalign v1.3.4/go.mod h1:M+pnkHH2vG8+qhE5bVc/zeP7HS/j910Fwa9TUSyZVI0= -github.com/Abirdcfly/dupword v0.0.14 h1:3U4ulkc8EUo+CaT105/GJ1BQwtgyj6+VaBVbAX11Ba8= -github.com/Abirdcfly/dupword v0.0.14/go.mod h1:VKDAbxdY8YbKUByLGg8EETzYSuC4crm9WwI6Y3S0cLI= -github.com/Antonboom/errname v0.1.13 h1:JHICqsewj/fNckzrfVSe+T33svwQxmjC+1ntDsHOVvM= -github.com/Antonboom/errname v0.1.13/go.mod h1:uWyefRYRN54lBg6HseYCFhs6Qjcy41Y3Jl/dVhA87Ns= -github.com/Antonboom/nilnil v0.1.9 h1:eKFMejSxPSA9eLSensFmjW2XTgTwJMjZ8hUHtV4s/SQ= -github.com/Antonboom/nilnil v0.1.9/go.mod h1:iGe2rYwCq5/Me1khrysB4nwI7swQvjclR8/YRPl5ihQ= -github.com/Antonboom/testifylint v1.3.1 h1:Uam4q1Q+2b6H7gvk9RQFw6jyVDdpzIirFOOrbs14eG4= -github.com/Antonboom/testifylint v1.3.1/go.mod h1:NV0hTlteCkViPW9mSR4wEMfwp+Hs1T3dY60bkvSfhpM= +github.com/4meepo/tagalign v1.4.1 h1:GYTu2FaPGOGb/xJalcqHeD4il5BiCywyEYZOA55P6J4= +github.com/4meepo/tagalign v1.4.1/go.mod h1:2H9Yu6sZ67hmuraFgfZkNcg5Py9Ch/Om9l2K/2W1qS4= +github.com/Abirdcfly/dupword v0.1.3 
h1:9Pa1NuAsZvpFPi9Pqkd93I7LIYRURj+A//dFd5tgBeE= +github.com/Abirdcfly/dupword v0.1.3/go.mod h1:8VbB2t7e10KRNdwTVoxdBaxla6avbhGzb8sCTygUMhw= +github.com/Antonboom/errname v1.0.0 h1:oJOOWR07vS1kRusl6YRSlat7HFnb3mSfMl6sDMRoTBA= +github.com/Antonboom/errname v1.0.0/go.mod h1:gMOBFzK/vrTiXN9Oh+HFs+e6Ndl0eTFbtsRTSRdXyGI= +github.com/Antonboom/nilnil v1.0.1 h1:C3Tkm0KUxgfO4Duk3PM+ztPncTFlOf0b2qadmS0s4xs= +github.com/Antonboom/nilnil v1.0.1/go.mod h1:CH7pW2JsRNFgEh8B2UaPZTEPhCMuFowP/e8Udp9Nnb0= +github.com/Antonboom/testifylint v1.5.2 h1:4s3Xhuv5AvdIgbd8wOOEeo0uZG7PbDKQyKY5lGoQazk= +github.com/Antonboom/testifylint v1.5.2/go.mod h1:vxy8VJ0bc6NavlYqjZfmp6EfqXMtBgQ4+mhCojwC1P8= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= -github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs= +github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/Crocmagnon/fatcontext v0.2.2 h1:OrFlsDdOj9hW/oBEJBNSuH7QWf+E9WPVHw+x52bXVbk= -github.com/Crocmagnon/fatcontext v0.2.2/go.mod h1:WSn/c/+MMNiD8Pri0ahRj0o9jVpeowzavOQplBJw6u0= +github.com/Crocmagnon/fatcontext v0.5.3 h1:zCh/wjc9oyeF+Gmp+V60wetm8ph2tlsxocgg/J0hOps= +github.com/Crocmagnon/fatcontext v0.5.3/go.mod h1:XoCQYY1J+XTfyv74qLXvNw4xFunr3L1wkopIIKG7wGM= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 
h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= -github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0 h1:sATXp1x6/axKxz2Gjxv8MALP0bXaNRfQinEwyfMcx8c= -github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0/go.mod h1:Nl76DrGNJTA1KJ0LePKBw/vznBX1EHbAZX8mwjR82nI= +github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0 h1:/fTUt5vmbkAcMBt4YQiuC23cV0kEsN1MVMNqeOW43cU= +github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0/go.mod h1:ONJg5sxcbsdQQ4pOW8TGdTidT2TMAUy/2Xhr8mrYaao= github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= -github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= +github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0= +github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= -github.com/Netflix/go-env v0.0.0-20220526054621-78278af1949d h1:wvStE9wLpws31NiWUx+38wny1msZ/tm+eL5xmm4Y7So= -github.com/Netflix/go-env v0.0.0-20220526054621-78278af1949d/go.mod h1:9XMFaCeRyW7fC9XJOWQ+NdAv8VLG7ys7l3x4ozEGLUQ= +github.com/Netflix/go-env v0.1.2 h1:0DRoLR9lECQ9Zqvkswuebm3jJ/2enaDX6Ei8/Z+EnK0= +github.com/Netflix/go-env v0.1.2/go.mod h1:WlIhYi++8FlKNJtrop1mjXYAJMzv1f43K4MqCoh0yGE= github.com/OpenPeeDeeP/depguard/v2 v2.2.0 h1:vDfG60vDtIuf0MEOhmLlLLSzqaRM8EMcgJPdp74zmpA= github.com/OpenPeeDeeP/depguard/v2 v2.2.0/go.mod h1:CIzddKRvLBC4Au5aYP/i3nyaWQ+ClszLIuVocRiCYFQ= -github.com/ProtonMail/go-crypto v1.0.0 
h1:LRuvITjQWX+WIfr930YHG2HNfjR1uOfyf5vE0kC2U78= -github.com/ProtonMail/go-crypto v1.0.0/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/ProtonMail/go-crypto v1.1.5 h1:eoAQfK2dwL+tFSFpr7TbOaPNUbPiJj4fLYwwGE1FQO4= +github.com/ProtonMail/go-crypto v1.1.5/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk= github.com/Shopify/logrus-bugsnag v0.0.0-20170309145241-6dbc35f2c30d h1:hi6J4K6DKrR4/ljxn6SF6nURyu785wKMuQcjt7H3VCQ= github.com/Shopify/logrus-bugsnag v0.0.0-20170309145241-6dbc35f2c30d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= -github.com/alecthomas/assert/v2 v2.2.2 h1:Z/iVC0xZfWTaFNE6bA3z07T86hd45Xe2eLt6WVy2bbk= -github.com/alecthomas/assert/v2 v2.2.2/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ= +github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0= +github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= github.com/alecthomas/chroma/v2 v2.8.0 h1:w9WJUjFFmHHB2e8mRpL9jjy3alYDlU0QLDezj1xE264= github.com/alecthomas/chroma/v2 v2.8.0/go.mod h1:yrkMI9807G1ROx13fhe1v6PN2DDeaR73L3d+1nmYQtw= -github.com/alecthomas/go-check-sumtype v0.1.4 h1:WCvlB3l5Vq5dZQTFmodqL2g68uHiSwwlWcT5a2FGK0c= -github.com/alecthomas/go-check-sumtype v0.1.4/go.mod h1:WyYPfhfkdhyrdaligV6svFopZV8Lqdzn5pyVBaV6jhQ= -github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= -github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= +github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU= +github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E= +github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= +github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/alecthomas/template 
v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0= -github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30= -github.com/alexkohler/nakedret/v2 v2.0.4 h1:yZuKmjqGi0pSmjGpOC016LtPJysIL0WEUiaXW5SUnNg= -github.com/alexkohler/nakedret/v2 v2.0.4/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU= +github.com/alexkohler/nakedret/v2 v2.0.5 h1:fP5qLgtwbx9EJE8dGEERT02YwS8En4r9nnZ71RK+EVU= +github.com/alexkohler/nakedret/v2 v2.0.5/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU= github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw= github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= -github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M= -github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= +github.com/alingse/nilnesserr v0.1.1 h1:7cYuJewpy9jFNMEA72Q1+3Nm3zKHzg+Q28D5f2bBFUA= +github.com/alingse/nilnesserr v0.1.1/go.mod h1:1xJPrXonEtX7wyTq8Dytns5P2hNzoWymVUIaKm4HNFg= +github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= +github.com/andybalholm/brotli v1.1.1/go.mod 
h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= @@ -105,8 +107,8 @@ github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPd github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/ashanbrown/forbidigo v1.6.0 h1:D3aewfM37Yb3pxHujIPSpTf6oQk9sc9WZi8gerOIVIY= github.com/ashanbrown/forbidigo v1.6.0/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU= -github.com/ashanbrown/makezero v1.1.1 h1:iCQ87C0V0vSyO+M9E/FZYbu65auqH0lnsOkf5FcB28s= -github.com/ashanbrown/makezero v1.1.1/go.mod h1:i1bJLCRSCHOcOa9Y6MyF2FTfMZMFdHvxKHxgO5Z1axI= +github.com/ashanbrown/makezero v1.2.0 h1:/2Lp1bypdmK9wDIq7uWBlDF1iMUpIIS4A+pF6C9IEUU= +github.com/ashanbrown/makezero v1.2.0/go.mod h1:dxlPhHbDMC6N6xICzFBSK+4njQDdK8euNO0qjQMtGY4= github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4= github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI= github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= @@ -124,33 +126,28 @@ github.com/bitfield/gotestdox v0.2.2 h1:x6RcPAbBbErKLnapz1QeAlf3ospg8efBsedU93CD github.com/bitfield/gotestdox v0.2.2/go.mod h1:D+gwtS0urjBrzguAkTM2wodsTQYFHdpx8eqRJ3N+9pY= github.com/bitly/go-hostpool v0.1.0/go.mod h1:4gOCgp6+NZnVqlKyZ/iBZFTAJKembaVENUpMkpg42fw= github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= -github.com/bkielbasa/cyclop v1.2.1 h1:AeF71HZDob1P2/pRm1so9cd1alZnrpyc4q2uP2l0gJY= -github.com/bkielbasa/cyclop v1.2.1/go.mod h1:K/dT/M0FPAiYjBgQGau7tz+3TMh4FWAEqlMhzFWCrgM= +github.com/bkielbasa/cyclop v1.2.3 
h1:faIVMIGDIANuGPWH031CZJTi2ymOQBULs9H21HSMa5w= +github.com/bkielbasa/cyclop v1.2.3/go.mod h1:kHTwA9Q0uZqOADdupvcFJQtp/ksSnytRMe8ztxG8Fuo= github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= -github.com/bombsimon/wsl/v4 v4.2.1 h1:Cxg6u+XDWff75SIFFmNsqnIOgob+Q9hG6y/ioKbRFiM= -github.com/bombsimon/wsl/v4 v4.2.1/go.mod h1:Xu/kDxGZTofQcDGCtQe9KCzhHphIe0fDuyWTxER9Feo= -github.com/breml/bidichk v0.2.7 h1:dAkKQPLl/Qrk7hnP6P+E0xOodrq8Us7+U0o4UBOAlQY= -github.com/breml/bidichk v0.2.7/go.mod h1:YodjipAGI9fGcYM7II6wFvGhdMYsC5pHDlGzqvEW3tQ= -github.com/breml/errchkjson v0.3.6 h1:VLhVkqSBH96AvXEyclMR37rZslRrY2kcyq+31HCsVrA= -github.com/breml/errchkjson v0.3.6/go.mod h1:jhSDoFheAF2RSDOlCfhHO9KqhZgAYLyvHe7bRCX8f/U= +github.com/bombsimon/wsl/v4 v4.5.0 h1:iZRsEvDdyhd2La0FVi5k6tYehpOR/R7qIUjmKk7N74A= +github.com/bombsimon/wsl/v4 v4.5.0/go.mod h1:NOQ3aLF4nD7N5YPXMruR6ZXDOAqLoM0GEpLwTdvmOSc= +github.com/breml/bidichk v0.3.2 h1:xV4flJ9V5xWTqxL+/PMFF6dtJPvZLPsyixAoPe8BGJs= +github.com/breml/bidichk v0.3.2/go.mod h1:VzFLBxuYtT23z5+iVkamXO386OB+/sVwZOpIj6zXGos= +github.com/breml/errchkjson v0.4.0 h1:gftf6uWZMtIa/Is3XJgibewBm2ksAQSY/kABDNFTAdk= +github.com/breml/errchkjson v0.4.0/go.mod h1:AuBOSTHyLSaaAFlWsRSuRBIroCh3eh7ZHh5YeelDIk8= github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0 h1:s7+5BfS4WFJoVF9pnB8kBk03S7pZXRdKamnV0FOl5Sc= github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b h1:otBG+dV+YK+Soembjv71DPz3uX/V/6MMlSyD9JBQ6kQ= github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod 
h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0 h1:nvj0OLI3YqYXer/kZD8Ri1aaunCxIEsOst1BVJswV0o= github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= -github.com/butuzov/ireturn v0.3.0 h1:hTjMqWw3y5JC3kpnC5vXmFJAWI/m31jaCYQqzkS6PL0= -github.com/butuzov/ireturn v0.3.0/go.mod h1:A09nIiwiqzN/IoVo9ogpa0Hzi9fex1kd9PSD6edP5ZA= -github.com/butuzov/mirror v1.2.0 h1:9YVK1qIjNspaqWutSv8gsge2e/Xpq1eqEkslEUHy5cs= -github.com/butuzov/mirror v1.2.0/go.mod h1:DqZZDtzm42wIAIyHXeN8W/qb1EPlb9Qn/if9icBOpdQ= -github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= -github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0= -github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= -github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM= -github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/butuzov/ireturn v0.3.1 h1:mFgbEI6m+9W8oP/oDdfA34dLisRFCj2G6o/yiI1yZrY= +github.com/butuzov/ireturn v0.3.1/go.mod h1:ZfRp+E7eJLC0NQmk1Nrm1LOrn/gQlOykv+cVPdiXH5M= +github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc= +github.com/butuzov/mirror v1.3.0/go.mod h1:AEij0Z8YMALaq4yQj9CPPVYOyJQyiexpQEQgihajRfI= github.com/catenacyber/perfsprint v0.7.1 h1:PGW5G/Kxn+YrN04cRAZKC+ZuvlVwolYMrIyyTJ/rMmc= github.com/catenacyber/perfsprint v0.7.1/go.mod h1:/wclWYompEyjUD2FuIIDVKNkqz7IgBIWXIH3V0Zol50= github.com/ccojocar/zxcvbn-go v1.0.2 h1:na/czXU8RrhXO4EZme6eQJLR4PzcGsahsBOAwU6I3Vg= @@ -181,18 +178,13 @@ github.com/chavacava/garif v0.1.0/go.mod h1:XMyYCkEL58DF0oyW4qDjjnPWONs2HBqYKI+U github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod 
h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/ckaznocha/intrange v0.1.2 h1:3Y4JAxcMntgb/wABQ6e8Q8leMd26JbX2790lIss9MTI= -github.com/ckaznocha/intrange v0.1.2/go.mod h1:RWffCw/vKBwHeOEwWdCikAtY0q4gGt8VhJZEEA5n+RE= +github.com/ckaznocha/intrange v0.3.0 h1:VqnxtK32pxgkhJgYQEeOArVidIPg+ahLP7WBOXZd5ZY= +github.com/ckaznocha/intrange v0.3.0/go.mod h1:+I/o2d2A1FBHgGELbGxzIcyd3/9l9DuwjM8FsbSS3Lo= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004 h1:lkAMpLVBDaj17e85keuznYcH5rqI438v41pKcBl4ZxQ= github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004/go.mod h1:yMWuSON2oQp+43nFtAV/uvKQIFpSPerB57DCt9t8sSA= -github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= -github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= -github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= -github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= -github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= @@ -200,10 +192,10 @@ github.com/containerd/console v1.0.4-0.20230313162750-1ae8d489ac81 h1:q2hJAaP1k2 github.com/containerd/console v1.0.4-0.20230313162750-1ae8d489ac81/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk= github.com/containerd/log v0.1.0 
h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= -github.com/containers/common v0.59.2 h1:FcURZzlMYMVZXqjMEop6C0A3yWilrfmWUPUw09APHvI= -github.com/containers/common v0.59.2/go.mod h1:/PHpbapKSHQU29Jmjn3Ld3jekoHvX0zx7qQxxyPqSTM= -github.com/containers/storage v1.54.0 h1:xwYAlf6n9OnIlURQLLg3FYHbO74fQ/2W2N6EtQEUM4I= -github.com/containers/storage v1.54.0/go.mod h1:PlMOoinRrBSnhYODLxt4EXl0nmJt+X0kjG0Xdt9fMTw= +github.com/containers/common v0.61.1 h1:jpk385ZFEx3MAX+sjwOoTZElvpgsGi0YJHuRmrhF/j8= +github.com/containers/common v0.61.1/go.mod h1:C+TfkhTV+ADp1Hu+BMIAYPvSFix21swYo9PZuCKoSUM= +github.com/containers/storage v1.56.1 h1:gDZj/S6Zxus4Xx42X6iNB3ODXuh0qoOdH/BABfrvcKo= +github.com/containers/storage v1.56.1/go.mod h1:c6WKowcAlED/DkWGNuL9bvGYqIWCVy7isRMdCSKWNjk= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= @@ -211,18 +203,20 @@ github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7Do github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= -github.com/curioswitch/go-reassign v0.2.0 h1:G9UZyOcpk/d7Gd6mqYgd8XYWFMw/znxwGDUstnC9DIo= -github.com/curioswitch/go-reassign v0.2.0/go.mod h1:x6OpXuWvgfQaMGks2BZybTngWjT84hqJfKoO8Tt/Roc= -github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo= -github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= -github.com/daixiang0/gci v0.13.4 
h1:61UGkmpoAcxHM2hhNkZEf5SzwQtWJXTSws7jaPyqwlw= -github.com/daixiang0/gci v0.13.4/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk= -github.com/danieljoos/wincred v1.2.1 h1:dl9cBrupW8+r5250DYkYxocLeZ1Y4vB1kxgtjxw8GQs= -github.com/danieljoos/wincred v1.2.1/go.mod h1:uGaFL9fDn3OLTvzCGulzE+SzjEe5NGlh5FdCcyfPwps= +github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs= +github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88= +github.com/cyphar/filepath-securejoin v0.3.6 h1:4d9N5ykBnSp5Xn2JkhocYDkOpURL/18CYMpo6xB9uWM= +github.com/cyphar/filepath-securejoin v0.3.6/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= +github.com/daixiang0/gci v0.13.5 h1:kThgmH1yBmZSBCh1EJVxQ7JsHpm5Oms0AMed/0LaH4c= +github.com/daixiang0/gci v0.13.5/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk= +github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0= +github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 h1:rpfIENRNNilwHwZeG5+P150SMrnNEcHYvcCuK6dPZSg= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0= github.com/deepmap/oapi-codegen/v2 v2.2.0 h1:FW4f7C0Xb6EaezBSB3GYw2QGwHD5ChDflG+3xSZBdvY= github.com/deepmap/oapi-codegen/v2 v2.2.0/go.mod h1:L4zUv7ULYDtYSb/aYk/xO3OYcQU6BoU/0viULkbi2DE= github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8= @@ -230,19 +224,19 @@ github.com/denis-tingaikin/go-header 
v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okeg github.com/denisenkom/go-mssqldb v0.0.0-20191128021309-1d7a30a10f73/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= -github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= -github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dnephin/pflag v1.0.7 h1:oxONGlWxhmUct0YzKTgrpQv9AUA1wtPBn7zuSjJqptk= github.com/dnephin/pflag v1.0.7/go.mod h1:uxE91IoWURlOiTUIA8Mq5ZZkAv3dPUfZNaT80Zm7OQE= -github.com/docker/cli v26.1.5+incompatible h1:NxXGSdz2N+Ibdaw330TDO3d/6/f7MvHuiMbuFaIQDTk= -github.com/docker/cli v26.1.5+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli v27.5.1+incompatible h1:JB9cieUT9YNiMITtIsguaN55PLOHhBSz3LKVc6cqWaY= +github.com/docker/cli v27.5.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v26.1.5+incompatible h1:NEAxTwEjxV6VbBMBoGG3zPqbiJosIApZjxlbrG9q3/g= -github.com/docker/docker v26.1.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker-credential-helpers v0.8.1 h1:j/eKUktUltBtMzKqmfLB0PAgqYyMHOp5vfsD1807oKo= -github.com/docker/docker-credential-helpers v0.8.1/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= +github.com/docker/docker 
v27.5.1+incompatible h1:4PYU5dnBYqRQi0294d1FBECqT9ECWeQAIfE8q4YnPY8= +github.com/docker/docker v27.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo= +github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0= github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= @@ -256,8 +250,10 @@ github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDD github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 h1:UhxFibDNY/bfvqU5CAUmr9zpesgbU6SWc8/B4mflAE4= github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= github.com/dvsekhvalnov/jose2go v0.0.0-20170216131308-f21a8cedbbae/go.mod h1:7BvyPhdbLxMXIYTFPLsyJRFMsKmOZnQmzh6Gb+uquuM= -github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a h1:mATvB/9r/3gvcejNsXKSkQ6lcIaNec2nyfOdlTBR2lU= -github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM= +github.com/ecies/go/v2 v2.0.10 h1:AaLxGio0MLLbvWur4rKnLzw+K9zI+wMScIDAtqCqOtU= +github.com/ecies/go/v2 v2.0.10/go.mod h1:N73OyuR6tuKznit2LhXjrZ0XAQ234uKbzYz8pEPYzlI= +github.com/elazarl/goproxy v1.4.0 h1:4GyuSbFa+s26+3rmYNSuUVsx+HgPrV1bk1jXI0l9wjM= +github.com/elazarl/goproxy v1.4.0/go.mod h1:X/5W/t+gzDyLfHW4DrMdpjqYjpXsURlBt9lpBDxZZZQ= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -265,12 
+261,14 @@ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.m github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0= +github.com/ethereum/go-ethereum v1.14.13 h1:L81Wmv0OUP6cf4CW6wtXsr23RUrDhKs2+Y9Qto+OgHU= +github.com/ethereum/go-ethereum v1.14.13/go.mod h1:RAC2gVMWJ6FkxSPESfbshrcKpIokgQKsVKmAuqdekDY= github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q= github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A= github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= -github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= -github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= @@ -280,38 +278,33 @@ github.com/firefart/nonamedreturns v1.0.5/go.mod h1:gHJjDqhGM4WyPt639SOZs+G89Ko7 github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= 
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= +github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= +github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/fvbommel/sortorder v1.1.0 h1:fUmoe+HLsBTctBDoaBwpQo5N+nrCp8g/BjKb/6ZQmYw= github.com/fvbommel/sortorder v1.1.0/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= -github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= -github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/getkin/kin-openapi v0.124.0 h1:VSFNMB9C9rTKBnQ/fpyDU8ytMTr4dWI9QovSKj9kz/M= github.com/getkin/kin-openapi v0.124.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM= -github.com/getsentry/sentry-go v0.28.1 h1:zzaSm/vHmGllRM6Tpx1492r0YDzauArdBfkJRtY6P5k= -github.com/getsentry/sentry-go v0.28.1/go.mod h1:1fQZ+7l7eeJ3wYi82q5Hg8GqAPgefRq+FP/QhafYVgg= -github.com/ghostiam/protogetter v0.3.6 h1:R7qEWaSgFCsy20yYHNIJsU9ZOb8TziSRRxuAOTVKeOk= -github.com/ghostiam/protogetter v0.3.6/go.mod h1:7lpeDnEJ1ZjL/YtyoN99ljO4z0pd3H0d18/t2dPBxHw= -github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= -github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= -github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= -github.com/gliderlabs/ssh v0.3.7 h1:iV3Bqi942d9huXnzEF2Mt+CY9gLu8DNM4Obd+8bODRE= -github.com/gliderlabs/ssh v0.3.7/go.mod h1:zpHEXBstFnQYtGnB8k8kQLol82umzn/2/snG7alWVD8= -github.com/go-critic/go-critic v0.11.4 h1:O7kGOCx0NDIni4czrkRIXTnit0mkyKOCePh3My6OyEU= -github.com/go-critic/go-critic v0.11.4/go.mod 
h1:2QAdo4iuLik5S9YG0rT4wcZ8QxwHYkrr6/2MWAiv/vc= +github.com/getsentry/sentry-go v0.31.1 h1:ELVc0h7gwyhnXHDouXkhqTFSO5oslsRDk0++eyE0KJ4= +github.com/getsentry/sentry-go v0.31.1/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY= +github.com/ghostiam/protogetter v0.3.8 h1:LYcXbYvybUyTIxN2Mj9h6rHrDZBDwZloPoKctWrFyJY= +github.com/ghostiam/protogetter v0.3.8/go.mod h1:WZ0nw9pfzsgxuRsPOFQomgDVSWtDLJRfQJEhsGbmQMA= +github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= +github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= +github.com/go-critic/go-critic v0.11.5 h1:TkDTOn5v7EEngMxu8KbuFqFR43USaaH8XRJLz1jhVYA= +github.com/go-critic/go-critic v0.11.5/go.mod h1:wu6U7ny9PiaHaZHcvMDmdysMqvDem162Rh3zWTrqk8M= github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= -github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= -github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= +github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= +github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= -github.com/go-git/go-git/v5 v5.12.0 h1:7Md+ndsjrzZxbddRDZjF14qK+NN56sy6wkqaVrjZtys= -github.com/go-git/go-git/v5 v5.12.0/go.mod h1:FTM9VKtnI2m65hNI/TenDDDnUf2Q9FHnXYjuz9i5OEY= +github.com/go-git/go-git/v5 v5.13.2 
h1:7O7xvsK7K+rZPKW6AQR1YyNhfywkv7B8/FsP3ki6Zv0= +github.com/go-git/go-git/v5 v5.13.2/go.mod h1:hWdW5P4YZRjmpGHwRH2v3zkWcNl6HeXaXQEMGb3NJ9A= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -330,14 +323,8 @@ github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1 github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= -github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= -github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= -github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= -github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= -github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8= -github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= +github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= github.com/go-sql-driver/mysql v1.3.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.7.1 
h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI= github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= @@ -366,18 +353,16 @@ github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQi github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus= github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig= -github.com/go-viper/mapstructure/v2 v2.0.0 h1:dhn8MZ1gZ0mzeodTG3jt5Vj/o87xZKuNAprG2mQfMfc= -github.com/go-viper/mapstructure/v2 v2.0.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= -github.com/go-xmlfmt/xmlfmt v1.1.2 h1:Nea7b4icn8s57fTx1M5AI4qQT5HEM3rVUO8MuE6g80U= -github.com/go-xmlfmt/xmlfmt v1.1.2/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= +github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUWY= +github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= -github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= -github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= -github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= 
+github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= @@ -420,14 +405,14 @@ github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= -github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e h1:ULcKCDV1LOZPFxGZaA6TlQbiM3J2GCPnkx/bGF6sX/g= -github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e/go.mod h1:Pm5KhLPA8gSnQwrQ6ukebRcapGb/BG9iUkdaiCcGHJM= -github.com/golangci/golangci-lint v1.59.1 h1:CRRLu1JbhK5avLABFJ/OHVSQ0Ie5c4ulsOId1h3TTks= -github.com/golangci/golangci-lint v1.59.1/go.mod h1:jX5Oif4C7P0j9++YB2MMJmoNrb01NJ8ITqKWNLewThg= +github.com/golangci/go-printf-func-name v0.1.0 h1:dVokQP+NMTO7jwO4bwsRwLWeudOVUPPyAKJuzv8pEJU= +github.com/golangci/go-printf-func-name v0.1.0/go.mod h1:wqhWFH5mUdJQhweRnldEywnR5021wTdZSNgwYceV14s= +github.com/golangci/gofmt v0.0.0-20241223200906-057b0627d9b9 h1:t5wybL6RtO83VwoMOb7U/Peqe3gGKQlPIC66wXmnkvM= +github.com/golangci/gofmt v0.0.0-20241223200906-057b0627d9b9/go.mod h1:Ag3L7sh7E28qAp/5xnpMMTuGYqxLZoSaEHZDkZB1RgU= +github.com/golangci/golangci-lint v1.63.4 h1:bJQFQ3hSfUto597dkL7ipDzOxsGEpiWdLiZ359OWOBI= +github.com/golangci/golangci-lint v1.63.4/go.mod h1:Hx0B7Lg5/NXbaOHem8+KU+ZUIzMI6zNj/7tFwdnn10I= github.com/golangci/misspell v0.6.0 h1:JCle2HUTNWirNlDIAUO44hUsKhOFqGPoC4LZxlaSXDs= github.com/golangci/misspell v0.6.0/go.mod h1:keMNyY6R9isGaSAu+4Q8NMBwMPkh15Gtc8UCVoDtAWo= 
-github.com/golangci/modinfo v0.3.4 h1:oU5huX3fbxqQXdfspamej74DFX0kyGLkw1ppvXoJ8GA= -github.com/golangci/modinfo v0.3.4/go.mod h1:wytF1M5xl9u0ij8YSvhkEVPP3M5Mc7XLl1pxH3B2aUM= github.com/golangci/plugin-module-register v0.1.1 h1:TCmesur25LnyJkpsVrupv1Cdzo+2f7zX0H6Jkw1Ol6c= github.com/golangci/plugin-module-register v0.1.1/go.mod h1:TTpqoB6KkwOJMV8u7+NyXMrkwwESJLOkfl9TxR1DGFc= github.com/golangci/revgrep v0.5.3 h1:3tL7c1XBMtWHHqVpS5ChmiAAoe4PF/d5+ULzV9sLAzs= @@ -468,8 +453,8 @@ github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 h1:k7nVchz72niMH6YLQNvHSdIE7iqsQxK1P41mySCvssg= -github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw= +github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db h1:097atOisP2aRj7vFgYQBbFN4U4JNXUNYpxael3UzMyo= +github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= @@ -497,8 +482,8 @@ github.com/gostaticanalysis/forcetypeassert v0.1.0/go.mod h1:qZEedyP/sY1lTGV1uJ3 github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3Uqrmrcpk= github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= github.com/gostaticanalysis/testutil 
v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= -github.com/gostaticanalysis/testutil v0.4.0 h1:nhdCmubdmDF6VEatUNjgUZBJKWRqugoISdUv3PPQgHY= -github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU= +github.com/gostaticanalysis/testutil v0.5.0 h1:Dq4wT1DdTwTGCQQv3rl3IvD5Ld0E6HiY+3Zh0sUGqw8= +github.com/gostaticanalysis/testutil v0.5.0/go.mod h1:OLQSbuM6zw2EvCcXTz1lVq5unyoNft372msDY0nY5Hs= github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0= github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k= github.com/h2non/gock v1.2.0 h1:K6ol8rfrRkUOefooBC8elXoaNGYkpp7y2qcxGG6BzUE= @@ -507,11 +492,17 @@ github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslC github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= +github.com/hashicorp/go-immutable-radix/v2 v2.1.0 h1:CUW5RYIcysz+D3B+l1mDeXrQ7fUvGGCwJfdASSzbrfo= +github.com/hashicorp/go-immutable-radix/v2 v2.1.0/go.mod h1:hgdqLXA4f6NIjRVisM1TJ9aOJVNRqKZj+xDGF6m7PBw= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru 
v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= @@ -562,8 +553,8 @@ github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCM github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= -github.com/jackc/pgtype v1.14.3 h1:h6W9cPuHsRWQFTWUZMAKMgG5jSwQI0Zurzdvlx3Plus= -github.com/jackc/pgtype v1.14.3/go.mod h1:aKeozOde08iifGosdJpz9MBZonJOUJxqNpPBcMJTlVA= +github.com/jackc/pgtype v1.14.4 h1:fKuNiCumbKTAIxQwXfB/nsrnkEI6bPJrrSiMKgbJ2j8= +github.com/jackc/pgtype v1.14.4/go.mod h1:aKeozOde08iifGosdJpz9MBZonJOUJxqNpPBcMJTlVA= github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= @@ -586,10 +577,8 @@ github.com/jinzhu/gorm v0.0.0-20170222002820-5409931a1bb8/go.mod h1:Vla75njaFJ8c github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d h1:jRQLvyVGL+iVtDElaEIDdKwpPqUIZJfzkNLV34htpEc= github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.1/go.mod 
h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= -github.com/jjti/go-spancheck v0.6.1 h1:ZK/wE5Kyi1VX3PJpUO2oEgeoI4FWOUm7Shb2Gbv5obI= -github.com/jjti/go-spancheck v0.6.1/go.mod h1:vF1QkOO159prdo6mHRxak2CpzDpHAfKiPUDP/NeRnX8= +github.com/jjti/go-spancheck v0.6.4 h1:Tl7gQpYf4/TMU7AT84MN83/6PutY21Nb9fuQjFTpRRc= +github.com/jjti/go-spancheck v0.6.4/go.mod h1:yAEYdKJ2lRkDA8g7X+oKUHXOWVAXSBJRv04OhF+QUjk= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= @@ -599,7 +588,6 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= @@ -607,22 +595,18 @@ github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPci github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= 
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY= -github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= +github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ= +github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY= github.com/karamaru-alpha/copyloopvar v1.1.0 h1:x7gNyKcC2vRBO1H2Mks5u1VxQtYvFiym7fCjIP8RPos= github.com/karamaru-alpha/copyloopvar v1.1.0/go.mod h1:u7CIfztblY0jZLOQZgH3oYsJzpC2A7S6u/lfgSXHy0k= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/errcheck v1.7.0 h1:+SbscKmWJ5mOK/bO1zS60F5I9WwZDWOfRsC4RwfwRV0= -github.com/kisielk/errcheck v1.7.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ= +github.com/kisielk/errcheck v1.8.0 h1:ZX/URYa7ilESY19ik/vBmCn6zdGQLxACwjAcWbHlYlg= +github.com/kisielk/errcheck v1.8.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkHAIKE/contextcheck v1.1.5 h1:CdnJh63tcDe53vG+RebdpdXJTc9atMgGqdx8LXxiilg= github.com/kkHAIKE/contextcheck v1.1.5/go.mod h1:O930cpht4xb1YQpK+1+AgoM3mFsvxr7uyFptcnWTYUA= -github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= -github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= -github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= 
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -643,14 +627,18 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0 github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/kyoh86/exportloopref v0.1.11 h1:1Z0bcmTypkL3Q4k+IDHMWTcnCliEZcaPiIe0/ymEyhQ= github.com/kyoh86/exportloopref v0.1.11/go.mod h1:qkV4UF1zGl6EkF1ox8L5t9SwyeBAZ3qLMd6up458uqA= -github.com/lasiar/canonicalheader v1.1.1 h1:wC+dY9ZfiqiPwAexUApFush/csSPXeIi4QqyxXmng8I= -github.com/lasiar/canonicalheader v1.1.1/go.mod h1:cXkb3Dlk6XXy+8MVQnF23CYKWlyA7kfQhSw2CcZtZb0= -github.com/ldez/gomoddirectives v0.2.4 h1:j3YjBIjEBbqZ0NKtBNzr8rtMHTOrLPeiwTkfUJZ3alg= -github.com/ldez/gomoddirectives v0.2.4/go.mod h1:oWu9i62VcQDYp9EQ0ONTfqLNh+mDLWWDO+SO0qSQw5g= -github.com/ldez/tagliatelle v0.5.0 h1:epgfuYt9v0CG3fms0pEgIMNPuFf/LpPIfjk4kyqSioo= -github.com/ldez/tagliatelle v0.5.0/go.mod h1:rj1HmWiL1MiKQuOONhd09iySTEkUuE/8+5jtPYz9xa4= -github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= -github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4= +github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI= +github.com/ldez/exptostd v0.3.1 h1:90yWWoAKMFHeovTK8uzBms9Ppp8Du/xQ20DRO26Ymrw= +github.com/ldez/exptostd v0.3.1/go.mod h1:iZBRYaUmcW5jwCR3KROEZ1KivQQp6PHXbDPk9hqJKCQ= +github.com/ldez/gomoddirectives v0.6.0 h1:Jyf1ZdTeiIB4dd+2n4qw+g4aI9IJ6JyfOZ8BityWvnA= +github.com/ldez/gomoddirectives v0.6.0/go.mod h1:TuwOGYoPAoENDWQpe8DMqEm5nIfjrxZXmxX/CExWyZ4= +github.com/ldez/grignotin v0.7.0 h1:vh0dI32WhHaq6LLPZ38g7WxXuZ1+RzyrJ7iPG9JMa8c= +github.com/ldez/grignotin v0.7.0/go.mod 
h1:uaVTr0SoZ1KBii33c47O1M8Jp3OP3YDwhZCmzT9GHEk= +github.com/ldez/tagliatelle v0.7.1 h1:bTgKjjc2sQcsgPiT902+aadvMjCeMHrY7ly2XKFORIk= +github.com/ldez/tagliatelle v0.7.1/go.mod h1:3zjxUpsNB2aEZScWiZTHrAXOl1x25t3cRmzfK1mlo2I= +github.com/ldez/usetesting v0.4.2 h1:J2WwbrFGk3wx4cZwSMiCQQ00kjGR0+tuuyW0Lqm4lwA= +github.com/ldez/usetesting v0.4.2/go.mod h1:eEs46T3PpQ+9RgN9VjpY6qWdiw2/QmfiDeWmdZdrjIQ= github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY= github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA= github.com/lib/pq v0.0.0-20150723085316-0dad96c0b94f/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= @@ -662,8 +650,6 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= -github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM= -github.com/lufeee/execinquery v1.2.1/go.mod h1:EC7DrEKView09ocscGHC+apXMIaorh4xqSxS/dy8SbM= github.com/macabu/inamedparam v0.1.3 h1:2tk/phHkMlEL/1GNe/Yf6kkR/hkcUdAEY3L0hjYV1Mk= github.com/macabu/inamedparam v0.1.3/go.mod h1:93FLICAIk/quk7eaPPQvbzihUdn/QkGDwIZEoLtpH6I= github.com/magiconair/properties v1.5.3/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= @@ -675,8 +661,6 @@ github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE= github.com/maratori/testpackage v1.1.1 h1:S58XVV5AD7HADMmD0fNnziNHqKvSdDuEKdPD1rNTU04= github.com/maratori/testpackage v1.1.1/go.mod h1:s4gRK/ym6AMrqpOa/kEbQTV4Q4jb7WeLZzVhVVVOQMc= -github.com/matoous/go-nanoid/v2 v2.1.0 h1:P64+dmq21hhWdtvZfEAofnvJULaRR1Yib0+PnU669bE= -github.com/matoous/go-nanoid/v2 
v2.1.0/go.mod h1:KlbGNQ+FhrUNIHUxZdL63t7tl4LaPkZNpUULS8H4uVM= github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26 h1:gWg6ZQ4JhDfJPqlo2srm/LN17lpybq15AryXIRcWYLE= github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= @@ -697,13 +681,14 @@ github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2J github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= -github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= -github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-sqlite3 v1.6.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= -github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mgechev/revive v1.3.7 h1:502QY0vQGe9KtYJ9FpxMz9rL+Fc/P13CI5POL4uHCcE= -github.com/mgechev/revive v1.3.7/go.mod h1:RJ16jUbF0OWC3co/+XTxmFNgEpUPwnnA0BRllX2aDNA= +github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= +github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/mgechev/revive v1.5.1 h1:hE+QPeq0/wIzJwOphdVyUJ82njdd8Khp4fUIHGZHW3M= +github.com/mgechev/revive v1.5.1/go.mod 
h1:lC9AhkJIBs5zwx8wkudyHrU+IJkrEKmpCmGMnIJPk4o= github.com/microcosm-cc/bluemonday v1.0.25 h1:4NEwSfiJ+Wva0VxN5B8OwMicaJvD8r9tlJWm9rtloEg= github.com/microcosm-cc/bluemonday v1.0.25/go.mod h1:ZIOjCQp1OrzBBPIJmfX4qDYFuhU02nx4bn030ixfHLE= github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= @@ -723,16 +708,14 @@ github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiT github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/moricho/tparallel v0.3.1 h1:fQKD4U1wRMAYNngDonW5XupoB/ZGJHdpzrWqgyg9krA= -github.com/moricho/tparallel v0.3.1/go.mod h1:leENX2cUv7Sv2qDgdi0D0fCftN8fRC67Bcn8pqzeYNI= +github.com/moricho/tparallel v0.3.2 h1:odr8aZVFA3NZrNybggMkYO3rgPRcqjeQUlBBFVxKHTI= +github.com/moricho/tparallel v0.3.2/go.mod h1:OQ+K3b4Ln3l2TZveGCywybl68glfLEwFGqvnjok8b+U= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= 
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b h1:1XF24mVaiu7u+CFywTdcDo2ie1pzzhwjt6RHqzpMU34= @@ -754,8 +737,8 @@ github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhK github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs= github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= -github.com/nunnatsa/ginkgolinter v0.16.2 h1:8iLqHIZvN4fTLDC0Ke9tbSZVcyVHoBs0HIbnVSxfHJk= -github.com/nunnatsa/ginkgolinter v0.16.2/go.mod h1:4tWRinDN1FeJgU+iJANW/kz7xKN5nYRAOfJDQUS9dOQ= +github.com/nunnatsa/ginkgolinter v0.18.4 h1:zmX4KUR+6fk/vhUFt8DOP6KwznekhkmVSzzVJve2vyM= +github.com/nunnatsa/ginkgolinter v0.18.4/go.mod h1:AMEane4QQ6JwFz5GgjI5xLUM9S/CylO+UyM97fN2iBI= github.com/oapi-codegen/runtime v1.1.1 h1:EXLHh0DXIJnWhdRPN2w4MXAzFyE4CskzhNLUmtpMYro= github.com/oapi-codegen/runtime v1.1.1/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= @@ -763,12 +746,12 @@ github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6 github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.0 h1:Iw5WCbBcaAAd0fpRb1c9r5YCylv4XDoCSigm1zLevwU= github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= -github.com/onsi/ginkgo/v2 v2.18.0 h1:W9Y7IWXxPUpAit9ieMOLI7PJZGaW22DTKgiVAuhDTLc= -github.com/onsi/ginkgo/v2 v2.18.0/go.mod h1:rlwLi9PilAFJ8jCg9UE1QP6VBpd6/xj3SRC0d6TU0To= +github.com/onsi/ginkgo/v2 v2.21.0 h1:7rg/4f3rB88pb5obDgNZrNHrQ4e6WpjonchcpuBRnZM= +github.com/onsi/ginkgo/v2 v2.21.0/go.mod h1:7Du3c42kxCUegi0IImZ1wUQzMBVecgIHjR1C+NkhLQo= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= 
github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= -github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk= -github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0= +github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= +github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= @@ -784,14 +767,14 @@ github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJ github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= -github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= -github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= -github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= -github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= 
+github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= +github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -800,8 +783,8 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v1.5.2 h1:SJhVik3Umsjh7mte1vE0fVZ5T1gznasQG3PV7U5xFdA= -github.com/polyfloyd/go-errorlint v1.5.2/go.mod h1:sH1QC1pxxi0fFecsVIzBmxtrgd9IF/SkJpA6wqyKAJs= +github.com/polyfloyd/go-errorlint v1.7.0 h1:Zp6lzCK4hpBDj8y8a237YK4EPrMXQWvOe3nGoH4pFrU= +github.com/polyfloyd/go-errorlint v1.7.0/go.mod h1:dGWKu85mGHnegQ2SWpEybFityCg3j7ZbwsVUxAOk9gY= github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= github.com/prometheus/client_golang v0.9.0-pre1.0.20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= @@ -816,8 +799,9 @@ github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1: github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod 
h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a h1:CmF68hwI0XsOQ5UwlBopMi2Ow4Pbg32akc4KIVCOm+Y= +github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= @@ -833,8 +817,8 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/quasilyte/go-ruleguard v0.4.2 h1:htXcXDK6/rO12kiTHKfHuqR4kr3Y4M0J0rOL6CH/BYs= -github.com/quasilyte/go-ruleguard v0.4.2/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI= +github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 h1:+Wl/0aFp0hpuHM3H//KMft64WQ1yX9LdJY64Qm/gFCo= +github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI= github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= @@ -843,6 +827,8 @@ github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl github.com/quasilyte/regex/syntax 
v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= +github.com/raeperd/recvcheck v0.2.0 h1:GnU+NsbiCqdC2XX5+vMZzP+jAJC5fht7rcVTAhX74UI= +github.com/raeperd/recvcheck v0.2.0/go.mod h1:n04eYkwIR0JbgD73wT8wL4JjPC3wm0nFtzBnWNocnYU= github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= @@ -850,14 +836,14 @@ github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUc github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryancurrah/gomodguard v1.3.2 h1:CuG27ulzEB1Gu5Dk5gP8PFxSOZ3ptSdP5iI/3IXxM18= -github.com/ryancurrah/gomodguard v1.3.2/go.mod 
h1:LqdemiFomEjcxOqirbQCb3JFvSxH2JUYMerTFd3sF2o= +github.com/ryancurrah/gomodguard v1.3.5 h1:cShyguSwUEeC0jS7ylOiG/idnd1TpJ1LfHGpV3oJmPU= +github.com/ryancurrah/gomodguard v1.3.5/go.mod h1:MXlEPQRxgfPQa62O8wzK3Ozbkv9Rkqr+wKjSxTdsNJE= github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU= github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ= github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= @@ -866,17 +852,17 @@ github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6g github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/sahilm/fuzzy v0.1.1-0.20230530133925-c48e322e2a8f h1:MvTmaQdww/z0Q4wrYjDSCcZ78NoftLQyHBSLW/Cx79Y= github.com/sahilm/fuzzy v0.1.1-0.20230530133925-c48e322e2a8f/go.mod h1:VFvziUEIMCrT6A6tw2RFIXPXXmzXbOsSHF0DOI8ZK9Y= -github.com/sanposhiho/wastedassign/v2 v2.0.7 h1:J+6nrY4VW+gC9xFzUc+XjPD3g3wF3je/NsJFwFK7Uxc= -github.com/sanposhiho/wastedassign/v2 v2.0.7/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= -github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= -github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= +github.com/sanposhiho/wastedassign/v2 v2.1.0 h1:crurBF7fJKIORrV85u9UUpePDYGWnwvv3+A96WvwXT0= +github.com/sanposhiho/wastedassign/v2 v2.1.0/go.mod h1:+oSmSC+9bQ+VUAxA66nBb0Z7N8CK7mscKTDYC6aIek4= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw= github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= -github.com/sashamelentyev/usestdlibvars v1.26.0 
h1:LONR2hNVKxRmzIrZR0PhSF3mhCAzvnr+DcUiHgREfXE= -github.com/sashamelentyev/usestdlibvars v1.26.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8= +github.com/sashamelentyev/usestdlibvars v1.28.0 h1:jZnudE2zKCtYlGzLVreNp5pmCdOxXUzwsMDBkR21cyQ= +github.com/sashamelentyev/usestdlibvars v1.28.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/securego/gosec/v2 v2.20.1-0.20240525090044-5f0084eb01a9 h1:rnO6Zp1YMQwv8AyxzuwsVohljJgp4L0ZqiCgtACsPsc= -github.com/securego/gosec/v2 v2.20.1-0.20240525090044-5f0084eb01a9/go.mod h1:dg7lPlu/xK/Ut9SedURCoZbVCR4yC7fM65DtH9/CDHs= +github.com/securego/gosec/v2 v2.21.4 h1:Le8MSj0PDmOnHJgUATjD96PaXRvCpKC+DGJvwyy0Mlk= +github.com/securego/gosec/v2 v2.21.4/go.mod h1:Jtb/MwRQfRxCXyCm1rfM1BEiiiTfUOdyzzAhlr6lUTA= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= @@ -896,20 +882,20 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= -github.com/sivchari/tenv v1.7.1 h1:PSpuD4bu6fSmtWMxSGWcvqUUgIn7k3yOJhOIzVWn8Ak= -github.com/sivchari/tenv v1.7.1/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg= -github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L3A= -github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= -github.com/slack-go/slack v0.13.1 h1:6UkM3U1OnbhPsYeb1IMkQ6HSNOSikWluwOncJt4Tz/o= 
-github.com/slack-go/slack v0.13.1/go.mod h1:hlGi5oXA+Gt+yWTPP0plCdRKmjsDxecdHxYQdlMQKOw= -github.com/sonatard/noctx v0.0.2 h1:L7Dz4De2zDQhW8S0t+KUjY0MAQJd6SgVwhzNIc4ok00= -github.com/sonatard/noctx v0.0.2/go.mod h1:kzFz+CzWSjQ2OzIm46uJZoXuBpa2+0y3T36U18dWqIo= +github.com/sivchari/tenv v1.12.1 h1:+E0QzjktdnExv/wwsnnyk4oqZBUfuh89YMQT1cyuvSY= +github.com/sivchari/tenv v1.12.1/go.mod h1:1LjSOUCc25snIr5n3DtGGrENhX3LuWefcplwVGC24mw= +github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY= +github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M= +github.com/slack-go/slack v0.15.0 h1:LE2lj2y9vqqiOf+qIIy0GvEoxgF1N5yLGZffmEZykt0= +github.com/slack-go/slack v0.15.0/go.mod h1:hlGi5oXA+Gt+yWTPP0plCdRKmjsDxecdHxYQdlMQKOw= +github.com/sonatard/noctx v0.1.0 h1:JjqOc2WN16ISWAjAk8M5ej0RfExEXtkEyExl2hLW+OM= +github.com/sonatard/noctx v0.1.0/go.mod h1:0RvBxqY8D4j9cTTTWE8ylt2vqj2EPI8fHmrxHdsaZ2c= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= -github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= -github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= +github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs= +github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4= github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg= github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= @@ -918,16 +904,17 @@ github.com/spf13/cobra v1.8.1 
h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v1.0.0/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM= github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg= github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0= github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0= github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= -github.com/stbenjam/no-sprintf-host-port v0.1.1 h1:tYugd/yrm1O0dV+ThCbaKZh195Dfm07ysF0U6JQXczc= -github.com/stbenjam/no-sprintf-host-port v0.1.1/go.mod h1:TLhvtIvONRzdmkFiio4O8LHsN9N74I+PhRquPsxpL0I= +github.com/stbenjam/no-sprintf-host-port v0.2.0 h1:i8pxvGrt1+4G0czLr/WnmyH7zbZ8Bg8etvARQ1rpyl4= +github.com/stbenjam/no-sprintf-host-port v0.2.0/go.mod h1:eL0bQ9PasS0hsyTyfTjjG+E80QIyPnBVQbYZyv20Jfk= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= @@ -944,42 +931,42 @@ github.com/stretchr/testify v1.7.1/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/stripe/pg-schema-diff v0.7.0 h1:00Z+LGGe9GhMsN5gLtx/ZwF/+xPOMgod/g8x8H1JmV4= -github.com/stripe/pg-schema-diff v0.7.0/go.mod h1:HuTBuWLuvnY9g9nptbSD58xugN19zSJNkF4w/sYRtdU= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stripe/pg-schema-diff v0.8.0 h1:Ggm4yDbPtaflYQLV3auEMTLxQPaentV/wmDEoCF5jxQ= +github.com/stripe/pg-schema-diff v0.8.0/go.mod h1:HuTBuWLuvnY9g9nptbSD58xugN19zSJNkF4w/sYRtdU= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= -github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c h1:+aPplBwWcHBo6q9xrfWdMrT9o4kltkmmvpemgIjep/8= -github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c/go.mod h1:SbErYREK7xXdsRiigaQiQkI9McGRzYMvlKYaP3Nimdk= -github.com/tdakkota/asciicheck v0.2.0 h1:o8jvnUANo0qXtnslk2d3nMKTFNlOnJjRrNcj0j9qkHM= -github.com/tdakkota/asciicheck v0.2.0/go.mod h1:Qb7Y9EgjCLJGup51gDHFzbI08/gbGhL/UVhYIPWG2rg= +github.com/tdakkota/asciicheck v0.3.0 h1:LqDGgZdholxZMaJgpM6b0U9CFIjDCbFdUF00bDnBKOQ= +github.com/tdakkota/asciicheck v0.3.0/go.mod h1:KoJKXuX/Z/lt6XzLo8WMBfQGzak0SrAKZlvRr4tg8Ac= github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= 
github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= -github.com/tetafro/godot v1.4.16 h1:4ChfhveiNLk4NveAZ9Pu2AN8QZ2nkUGFuadM9lrr5D0= -github.com/tetafro/godot v1.4.16/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= +github.com/tetafro/godot v1.4.20 h1:z/p8Ek55UdNvzt4TFn2zx2KscpW4rWqcnUrdmvWJj7E= +github.com/tetafro/godot v1.4.20/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c= github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= -github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 h1:quvGphlmUVU+nhpFa4gg4yJyTRJ13reZMDHrKwYw53M= -github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ= -github.com/timonwong/loggercheck v0.9.4 h1:HKKhqrjcVj8sxL7K77beXh0adEm6DLjV/QOGeMXEVi4= -github.com/timonwong/loggercheck v0.9.4/go.mod h1:caz4zlPcgvpEkXgVnAJGowHAMW2NwHaNlpS8xDbVhTg= -github.com/tomarrell/wrapcheck/v2 v2.8.3 h1:5ov+Cbhlgi7s/a42BprYoxsr73CbdMUTzE3bRDFASUs= -github.com/tomarrell/wrapcheck/v2 v2.8.3/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo= +github.com/tidwall/jsonc v0.3.2 h1:ZTKrmejRlAJYdn0kcaFqRAKlxxFIC21pYq8vLa4p2Wc= +github.com/tidwall/jsonc v0.3.2/go.mod h1:dw+3CIxqHi+t8eFSpzzMlcVYxKp08UP5CD8/uSFCyJE= +github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3 h1:y4mJRFlM6fUyPhoXuFg/Yu02fg/nIPFMOY8tOqppoFg= +github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3/go.mod h1:mkjARE7Yr8qU23YcGMSALbIxTQ9r9QBVahQOBRfU460= +github.com/timonwong/loggercheck v0.10.1 h1:uVZYClxQFpw55eh+PIoqM7uAOHMrhVcDoWDery9R8Lg= +github.com/timonwong/loggercheck v0.10.1/go.mod h1:HEAWU8djynujaAVX7QI65Myb8qgfcZ1uKbdpg3ZzKl8= 
+github.com/tomarrell/wrapcheck/v2 v2.10.0 h1:SzRCryzy4IrAH7bVGG4cK40tNUhmVmMDuJujy4XwYDg= +github.com/tomarrell/wrapcheck/v2 v2.10.0/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo= github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw= github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= -github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= -github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= -github.com/ultraware/funlen v0.1.0 h1:BuqclbkY6pO+cvxoq7OsktIXZpgBSkYTQtmwhAK81vI= -github.com/ultraware/funlen v0.1.0/go.mod h1:XJqmOQja6DpxarLj6Jj1U7JuoS8PvL4nEqDaQhy22p4= -github.com/ultraware/whitespace v0.1.1 h1:bTPOGejYFulW3PkcrqkeQwOd6NKOOXvmGD9bo/Gk8VQ= -github.com/ultraware/whitespace v0.1.1/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8= -github.com/uudashr/gocognit v1.1.2 h1:l6BAEKJqQH2UpKAPKdMfZf5kE4W/2xk8pfU1OVLvniI= -github.com/uudashr/gocognit v1.1.2/go.mod h1:aAVdLURqcanke8h3vg35BC++eseDm66Z7KmchI5et4k= +github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLkI= +github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA= +github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g= +github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8= +github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA= +github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU= +github.com/uudashr/iface v1.3.0 h1:zwPch0fs9tdh9BmL5kcgSpvnObV+yHjO4JjVBl8IA10= +github.com/uudashr/iface v1.3.0/go.mod h1:4QvspiRd3JLPAEXBQ9AiZpLbJlrWWgRChOKDJEuQTdg= 
github.com/withfig/autocomplete-tools/packages/cobra v1.2.0 h1:MzD3XeOOSO3mAjOPpF07jFteSKZxsRHvlIcAR9RQzKM= github.com/withfig/autocomplete-tools/packages/cobra v1.2.0/go.mod h1:RoXh7+7qknOXL65uTzdzE1mPxqcPwS7FLCE9K5GfmKo= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= @@ -992,6 +979,8 @@ github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17 github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xen0n/gosmopolitan v1.2.2 h1:/p2KTnMzwRexIW8GlKawsTWOxn7UHA+jCMF/V8HHtvU= github.com/xen0n/gosmopolitan v1.2.2/go.mod h1:7XX7Mj61uLYrj0qmeN0zi7XDon9JRAEhYQqAPLVNTeg= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= github.com/yeya24/promlinter v0.3.0 h1:JVDbMp08lVCP7Y6NP3qHroGAO6z2yGKQtS5JsjqtoFs= @@ -1010,26 +999,28 @@ github.com/yuin/goldmark v1.5.4 h1:2uY/xC0roWy8IBEGLgB1ywIoEJFGmRrX21YQcvGZzjU= github.com/yuin/goldmark v1.5.4/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/goldmark-emoji v1.0.2 h1:c/RgTShNgHTtc6xdz2KKI74jJr6rWi7FPgnP9GAsO5s= github.com/yuin/goldmark-emoji v1.0.2/go.mod h1:RhP/RWpexdp+KHs7ghKnifRoIs/Bq4nDS7tRbCkOwKY= -github.com/zalando/go-keyring v0.2.5 h1:Bc2HHpjALryKD62ppdEzaFG6VxL6Bc+5v0LYpN8Lba8= -github.com/zalando/go-keyring v0.2.5/go.mod h1:HL4k+OXQfJUWaMnqyuSOc0drfGPX2b51Du6K+MRgZMk= +github.com/zalando/go-keyring v0.2.6 h1:r7Yc3+H+Ux0+M72zacZoItR3UDxeWfKTcabvkI8ua9s= +github.com/zalando/go-keyring v0.2.6/go.mod h1:2TCrxYrbUNYfNS/Kgy/LSrkSQzZ5UPVH85RwfczwvcI= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= gitlab.com/bosi/decorder v0.4.2 
h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo= gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8= go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ= go-simpler.org/assert v0.9.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= -go-simpler.org/musttag v0.12.2 h1:J7lRc2ysXOq7eM8rwaTYnNrHd5JwjppzB6mScysB2Cs= -go-simpler.org/musttag v0.12.2/go.mod h1:uN1DVIasMTQKk6XSik7yrJoEysGtR2GRqvWnI9S7TYM= -go-simpler.org/sloglint v0.7.1 h1:qlGLiqHbN5islOxjeLXoPtUdZXb669RW+BDQ+xOSNoU= -go-simpler.org/sloglint v0.7.1/go.mod h1:OlaVDRh/FKKd4X4sIMbsz8st97vomydceL146Fthh/c= +go-simpler.org/musttag v0.13.0 h1:Q/YAW0AHvaoaIbsPj3bvEI5/QFP7w696IMUpnKXQfCE= +go-simpler.org/musttag v0.13.0/go.mod h1:FTzIGeK6OkKlUDVpj0iQUXZLUO1Js9+mvykDQy9C5yM= +go-simpler.org/sloglint v0.7.2 h1:Wc9Em/Zeuu7JYpl+oKoYOsQSy2X560aVueCW/m6IijY= +go-simpler.org/sloglint v0.7.2/go.mod h1:US+9C80ppl7VsThQclkM7BkCHQAzuz8kHLsW3ppuluo= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= -go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= -go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= 
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8= +go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY= +go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0 h1:U2guen0GhqH8o/G2un8f/aG/y++OuW6MyCo6hT9prXk= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0/go.mod h1:yeGZANgEcpdx/WK0IvvRFC+2oLiMS2u4L/0Rj2M2Qr0= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 h1:3Q/xZUyC1BBkualc9ROb4G8qkH90LXEIICcs5zv1OYY= @@ -1038,14 +1029,14 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 h1:R3X6Z go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0/go.mod h1:QWFXnDavXWwMx2EEcZsf3yxgEKAqsxQ+Syjp+seyInw= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0 h1:j9+03ymgYhPKmeXGk5Zu+cIZOlVzd9Zv7QIiyItjFBU= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0/go.mod h1:Y5+XiUG4Emn1hTfciPzGPJaSI+RpDts6BnCIir0SLqk= -go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q= -go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s= -go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE= -go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg= -go.opentelemetry.io/otel/sdk/metric v1.28.0 h1:OkuaKgKrgAbYrrY0t92c+cC+2F6hsFNnCQArXCKlg08= -go.opentelemetry.io/otel/sdk/metric v1.28.0/go.mod h1:cWPjykihLAPvXKi4iZc1dpER3Jdq2Z0YLse3moQUCpg= -go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g= -go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI= 
+go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ= +go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE= +go.opentelemetry.io/otel/sdk v1.32.0 h1:RNxepc9vK59A8XsgZQouW8ue8Gkb4jpWtJm9ge5lEG4= +go.opentelemetry.io/otel/sdk v1.32.0/go.mod h1:LqgegDBjKMmb2GC6/PrTnteJG39I8/vJCAP9LlJXEjU= +go.opentelemetry.io/otel/sdk/metric v1.32.0 h1:rZvFnvmvawYb0alrYkjraqJq0Z4ZUJAiyYCU9snn1CU= +go.opentelemetry.io/otel/sdk/metric v1.32.0/go.mod h1:PWeZlq0zt9YkYAp3gjKZ0eicRYvOh1Gd+X99x6GHpCQ= +go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k= +go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE= go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= @@ -1054,8 +1045,8 @@ go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8= -go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= +go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs= +go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= @@ -1069,9 +1060,6 @@ go.uber.org/zap v1.10.0/go.mod 
h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= -golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= -golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc= -golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -1089,15 +1077,14 @@ golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= -golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= -golang.org/x/crypto 
v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= -golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= +golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1108,12 +1095,12 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM= -golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc= +golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= +golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f h1:phY1HzDcf18Aq9A8KkmRtY9WvOFIxN8wgfvy6Zm1DV8= -golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20241108190413-2d47ceb2692f 
h1:WTyX8eCCyfdqiPYkRGm0MqElSfYFH3yR1+rl/mct9sA= +golang.org/x/exp/typeparams v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1136,7 +1123,6 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= @@ -1144,10 +1130,11 @@ golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0= -golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.22.0 
h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1193,18 +1180,19 @@ golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= -golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= -golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= +golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs= -golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70= +golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1219,9 +1207,10 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= -golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1263,7 +1252,6 @@ golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys 
v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1275,24 +1263,23 @@ golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220702020025-31831981b65f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= -golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= +golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -1305,10 +1292,11 @@ golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= -golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk= -golang.org/x/term v0.22.0/go.mod 
h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4= +golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= +golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1325,13 +1313,13 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= -golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= -golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= +golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -1353,7 +1341,6 @@ golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -1387,24 +1374,23 @@ golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.9/go.mod 
h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= -golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.11.0/go.mod h1:anzJrxPjNtfgiYQYirP2CPGzGLxrH2u2QBhn6Bf3qY8= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= golang.org/x/tools v0.19.0/go.mod h1:qoJWxmGSIBmAeriMx19ogtrEPrGtDbPK634QFIcLAhc= -golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= -golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= +golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8= +golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1462,10 +1448,10 @@ google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7Fc google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 h1:0+ozOGcrp+Y8Aq8TLNN2Aliibms5LEzsq99ZZmAGYm0= -google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094/go.mod h1:fJ/e3If/Q67Mj99hin0hMhiNyCRmt6BQ2aWIJshUSJw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094 h1:BwIjyKYGsK9dMCBOorzRri8MQwmi7mT9rGHsCEinZkA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY= +google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 h1:CkkIfIt50+lT6NHAVoRYEyAvQGFM7xEwXUUywFvEb3Q= +google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576/go.mod h1:1R3kvZ1dtP3+4p4d3G8uJ8rFk/fWlScl38vanWACI08= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8 h1:TqExAhdPaB60Ux47Cn0oLV07rGnxZzIsaRhQaqS666A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8/go.mod h1:lcTa1sDdWEIHMWlITnIczmw5w60CF9ffkb8Z+DVmmjA= google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -1479,8 +1465,8 @@ google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKa google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc= -google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ= 
+google.golang.org/grpc v1.70.0 h1:pWFv03aZoHzlRKHWicjsZytKAiYCtNS0dHbXnIdq7jQ= +google.golang.org/grpc v1.70.0/go.mod h1:ofIJqVKDXx/JiXrwr2IG4/zwdH9txy3IlF40RmcJSQw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -1493,8 +1479,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= -google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk= +google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/cenkalti/backoff.v2 v2.2.1 h1:eJ9UAg01/HIHG987TwxvnzK2MgxXq97YY6rYDpY9aII= @@ -1540,14 +1526,12 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.4.7 h1:9MDAWxMoSnB6QoSqiVr7P5mtkT9pOc1kSxchzPCnqJs= -honnef.co/go/tools v0.4.7/go.mod 
h1:+rnGS1THNh8zMwnd2oVOTL9QF6vmfyG6ZXBULae2uc0= -mvdan.cc/gofumpt v0.6.0 h1:G3QvahNDmpD+Aek/bNOLrFR2XC6ZAdo62dZu65gmwGo= -mvdan.cc/gofumpt v0.6.0/go.mod h1:4L0wf+kgIPZtcCWXynNS2e6bhmj73umwnuXSZarixzA= +honnef.co/go/tools v0.5.1 h1:4bH5o3b5ZULQ4UrBmP+63W9r7qIkqJClEA9ko5YKx+I= +honnef.co/go/tools v0.5.1/go.mod h1:e9irvo83WDG9/irijV44wr3tbhcFeRnfpVlRqVwpzMs= +mvdan.cc/gofumpt v0.7.0 h1:bg91ttqXmi9y2xawvkuMXyvAA/1ZGJqYAEGjXuP0JXU= +mvdan.cc/gofumpt v0.7.0/go.mod h1:txVFJy/Sc/mvaycET54pV8SW8gWxTlUuGHVEcncmNUo= mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f h1:lMpcwN6GxNbWtbpI1+xzFLSW8XzX0u72NttUGVFjO3U= mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f/go.mod h1:RSLa7mKKCNeTTMHBw5Hsy2rfJmd6O2ivt9Dw9ZqCQpQ= -nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/internal/bootstrap/bootstrap.go b/internal/bootstrap/bootstrap.go index 4769a81d1..8310a82fb 100644 --- a/internal/bootstrap/bootstrap.go +++ b/internal/bootstrap/bootstrap.go @@ -77,7 +77,7 @@ func Run(ctx context.Context, starter StarterTemplate, fsys afero.Fs, options .. return err } // 2. Create project - params := api.V1CreateProjectBody{ + params := api.V1CreateProjectBodyDto{ Name: filepath.Base(workdir), TemplateUrl: &starter.Url, } @@ -95,7 +95,7 @@ func Run(ctx context.Context, starter StarterTemplate, fsys afero.Fs, options .. return err } // 4. 
Link project - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } link.LinkServices(ctx, flags.ProjectRef, tenant.NewApiKey(keys).Anon, fsys) diff --git a/internal/branches/create/create_test.go b/internal/branches/create/create_test.go index 6f56b586f..e07e10387 100644 --- a/internal/branches/create/create_test.go +++ b/internal/branches/create/create_test.go @@ -14,6 +14,7 @@ import ( "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" ) func TestCreateCommand(t *testing.T) { @@ -36,7 +37,7 @@ func TestCreateCommand(t *testing.T) { }) // Run test err := Run(context.Background(), api.CreateBranchBody{ - Region: utils.Ptr("sin"), + Region: cast.Ptr("sin"), }, fsys) // Check error assert.NoError(t, err) @@ -53,7 +54,7 @@ func TestCreateCommand(t *testing.T) { ReplyError(net.ErrClosed) // Run test err := Run(context.Background(), api.CreateBranchBody{ - Region: utils.Ptr("sin"), + Region: cast.Ptr("sin"), }, fsys) // Check error assert.ErrorIs(t, err, net.ErrClosed) @@ -70,7 +71,7 @@ func TestCreateCommand(t *testing.T) { Reply(http.StatusServiceUnavailable) // Run test err := Run(context.Background(), api.CreateBranchBody{ - Region: utils.Ptr("sin"), + Region: cast.Ptr("sin"), }, fsys) // Check error assert.ErrorContains(t, err, "Unexpected error creating preview branch:") diff --git a/internal/branches/get/get.go b/internal/branches/get/get.go index ee5bc332c..387e4ab79 100644 --- a/internal/branches/get/get.go +++ b/internal/branches/get/get.go @@ -3,13 +3,17 @@ package get import ( "context" "fmt" + "os" "github.com/go-errors/errors" + "github.com/jackc/pgconn" + "github.com/spf13/afero" "github.com/supabase/cli/internal/migration/list" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/cast" ) -func Run(ctx context.Context, branchId string) error { +func Run(ctx 
context.Context, branchId string, fsys afero.Fs) error { resp, err := utils.GetSupabase().V1GetABranchConfigWithResponse(ctx, branchId) if err != nil { return errors.Errorf("failed to retrieve preview branch: %w", err) @@ -29,10 +33,25 @@ func Run(ctx context.Context, branchId string) error { resp.JSON200.JwtSecret = &masked } - table := `|HOST|PORT|USER|PASSWORD|JWT SECRET|POSTGRES VERSION|STATUS| -|-|-|-|-|-|-|-| + config := pgconn.Config{ + Host: resp.JSON200.DbHost, + Port: cast.UIntToUInt16(cast.IntToUint(resp.JSON200.DbPort)), + User: *resp.JSON200.DbUser, + Password: *resp.JSON200.DbPass, + } + + postgresConnectionString := utils.ToPostgresURL(config) + if utils.OutputFormat.Value != utils.OutputPretty { + envs := map[string]string{ + "POSTGRES_URL": postgresConnectionString, + } + return utils.EncodeOutput(utils.OutputFormat.Value, os.Stdout, envs) + } + + table := `|HOST|PORT|USER|PASSWORD|JWT SECRET|POSTGRES VERSION|STATUS|POSTGRES URL| +|-|-|-|-|-|-|-|-| ` + fmt.Sprintf( - "|`%s`|`%d`|`%s`|`%s`|`%s`|`%s`|`%s`|\n", + "|`%s`|`%d`|`%s`|`%s`|`%s`|`%s`|`%s`|`%s`|\n", resp.JSON200.DbHost, resp.JSON200.DbPort, *resp.JSON200.DbUser, @@ -40,6 +59,8 @@ func Run(ctx context.Context, branchId string) error { *resp.JSON200.JwtSecret, resp.JSON200.PostgresVersion, resp.JSON200.Status, + postgresConnectionString, ) + return list.RenderTable(table) } diff --git a/internal/branches/list/list.go b/internal/branches/list/list.go index 7a153138f..79eb75e2a 100644 --- a/internal/branches/list/list.go +++ b/internal/branches/list/list.go @@ -22,8 +22,8 @@ func Run(ctx context.Context, fsys afero.Fs) error { return errors.New("Unexpected error listing preview branches: " + string(resp.Body)) } - table := `|ID|NAME|DEFAULT|GIT BRANCH|RESET ON PUSH|STATUS|CREATED AT (UTC)|UPDATED AT (UTC)| -|-|-|-|-|-|-|-|-| + table := `|ID|NAME|DEFAULT|GIT BRANCH|STATUS|CREATED AT (UTC)|UPDATED AT (UTC)| +|-|-|-|-|-|-|-| ` for _, branch := range *resp.JSON200 { gitBranch := " " @@ -31,12 +31,11 
@@ func Run(ctx context.Context, fsys afero.Fs) error { gitBranch = *branch.GitBranch } table += fmt.Sprintf( - "|`%s`|`%s`|`%t`|`%s`|`%t`|`%s`|`%s`|`%s`|\n", + "|`%s`|`%s`|`%t`|`%s`|`%s`|`%s`|`%s`|\n", branch.Id, strings.ReplaceAll(branch.Name, "|", "\\|"), branch.IsDefault, strings.ReplaceAll(gitBranch, "|", "\\|"), - branch.ResetOnPush, branch.Status, utils.FormatTimestamp(branch.CreatedAt), utils.FormatTimestamp(branch.UpdatedAt), diff --git a/internal/config/push/push.go b/internal/config/push/push.go new file mode 100644 index 000000000..d2dadddc3 --- /dev/null +++ b/internal/config/push/push.go @@ -0,0 +1,35 @@ +package push + +import ( + "context" + "fmt" + "os" + + "github.com/spf13/afero" + "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" + "github.com/supabase/cli/pkg/config" +) + +func Run(ctx context.Context, ref string, fsys afero.Fs) error { + if err := flags.LoadConfig(fsys); err != nil { + return err + } + client := config.NewConfigUpdater(*utils.GetSupabase()) + remote, err := utils.Config.GetRemoteByProjectRef(ref) + if err != nil { + // Use base config when no remote is declared + remote.ProjectId = ref + } + fmt.Fprintln(os.Stderr, "Pushing config to project:", remote.ProjectId) + console := utils.NewConsole() + keep := func(name string) bool { + title := fmt.Sprintf("Do you want to push %s config to remote?", name) + shouldPush, err := console.PromptYesNo(ctx, title, true) + if err != nil { + fmt.Fprintln(os.Stderr, err) + } + return shouldPush + } + return client.UpdateRemoteConfig(ctx, remote, keep) +} diff --git a/internal/db/branch/create/create.go b/internal/db/branch/create/create.go index 98996e8c9..550128376 100644 --- a/internal/db/branch/create/create.go +++ b/internal/db/branch/create/create.go @@ -9,11 +9,12 @@ import ( "os" "path/filepath" - "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/container" "github.com/docker/docker/pkg/stdcopy" 
"github.com/go-errors/errors" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" ) var ( @@ -22,7 +23,7 @@ var ( ) func Run(branch string, fsys afero.Fs) error { - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } if err := utils.AssertSupabaseDbIsRunning(); err != nil { @@ -70,7 +71,7 @@ func assertNewBranchIsValid(branchPath string, fsys afero.Fs) error { } func createBranch(ctx context.Context, branch string) error { - exec, err := utils.Docker.ContainerExecCreate(ctx, utils.DbId, types.ExecConfig{ + exec, err := utils.Docker.ContainerExecCreate(ctx, utils.DbId, container.ExecOptions{ Cmd: []string{"/bin/bash", "-c", cloneScript}, Env: []string{"DB_NAME=" + branch}, AttachStderr: true, @@ -80,7 +81,7 @@ func createBranch(ctx context.Context, branch string) error { return err } // Read exec output - resp, err := utils.Docker.ContainerExecAttach(ctx, exec.ID, types.ExecStartCheck{}) + resp, err := utils.Docker.ContainerExecAttach(ctx, exec.ID, container.ExecStartOptions{}) if err != nil { return err } diff --git a/internal/db/branch/delete/delete.go b/internal/db/branch/delete/delete.go index a3bfde6f9..ea14cd34d 100644 --- a/internal/db/branch/delete/delete.go +++ b/internal/db/branch/delete/delete.go @@ -7,15 +7,16 @@ import ( "io" "path/filepath" - "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/container" "github.com/docker/docker/pkg/stdcopy" "github.com/go-errors/errors" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" ) func Run(branch string, fsys afero.Fs) error { - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } if err := utils.AssertSupabaseDbIsRunning(); err != nil { @@ -58,7 +59,7 @@ func deleteBranchDir(branch string, fsys afero.Fs) error { } func deleteBranchPG(ctx 
context.Context, branch string) error { - exec, err := utils.Docker.ContainerExecCreate(ctx, utils.DbId, types.ExecConfig{ + exec, err := utils.Docker.ContainerExecCreate(ctx, utils.DbId, container.ExecOptions{ Cmd: []string{"dropdb", "--username", "postgres", "--host", "127.0.0.1", branch}, AttachStderr: true, AttachStdout: true, @@ -67,7 +68,7 @@ func deleteBranchPG(ctx context.Context, branch string) error { return err } // Read exec output - resp, err := utils.Docker.ContainerExecAttach(ctx, exec.ID, types.ExecStartCheck{}) + resp, err := utils.Docker.ContainerExecAttach(ctx, exec.ID, container.ExecStartOptions{}) if err != nil { return err } diff --git a/internal/db/branch/switch_/switch_.go b/internal/db/branch/switch_/switch_.go index 462b26c69..d11803e88 100644 --- a/internal/db/branch/switch_/switch_.go +++ b/internal/db/branch/switch_/switch_.go @@ -12,12 +12,13 @@ import ( "github.com/spf13/afero" "github.com/supabase/cli/internal/db/reset" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" ) func Run(ctx context.Context, target string, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { // 1. 
Sanity checks { - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } if err := utils.AssertSupabaseDbIsRunning(); err != nil { diff --git a/internal/db/branch/switch_/switch__test.go b/internal/db/branch/switch_/switch__test.go index 429606627..722229b79 100644 --- a/internal/db/branch/switch_/switch__test.go +++ b/internal/db/branch/switch_/switch__test.go @@ -2,7 +2,6 @@ package switch_ import ( "context" - "fmt" "net/http" "os" "path/filepath" @@ -14,6 +13,7 @@ import ( "github.com/spf13/afero" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/supabase/cli/internal/db/reset" "github.com/supabase/cli/internal/testing/apitest" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/pkg/pgtest" @@ -42,10 +42,14 @@ func TestSwitchCommand(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false;"). + conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false"). Reply("ALTER DATABASE"). - Query(fmt.Sprintf(utils.TerminateDbSqlFmt, "postgres")). - Reply("DO"). + Query("ALTER DATABASE _supabase ALLOW_CONNECTIONS false"). + Reply("ALTER DATABASE"). + Query(reset.TERMINATE_BACKENDS). + Reply("SELECT 1"). + Query(reset.COUNT_REPLICATION_SLOTS). + Reply("SELECT 1", []interface{}{0}). Query("ALTER DATABASE postgres RENAME TO main;"). Reply("ALTER DATABASE"). Query("ALTER DATABASE " + branch + " RENAME TO postgres;"). 
@@ -75,7 +79,7 @@ func TestSwitchCommand(t *testing.T) { // Run test err := Run(context.Background(), "target", fsys) // Check error - assert.ErrorContains(t, err, "toml: line 0: unexpected EOF; expected key separator '='") + assert.ErrorContains(t, err, "toml: expected = after a key, but the document ends there") }) t.Run("throws error on missing database", func(t *testing.T) { @@ -218,8 +222,10 @@ func TestSwitchDatabase(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false;"). - ReplyError(pgerrcode.InvalidParameterValue, `cannot disallow connections for current database`) + conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false"). + ReplyError(pgerrcode.InvalidParameterValue, `cannot disallow connections for current database`). + Query("ALTER DATABASE _supabase ALLOW_CONNECTIONS false"). + Query(reset.TERMINATE_BACKENDS) // Run test err := switchDatabase(context.Background(), "main", "target", conn.Intercept) // Check error @@ -234,10 +240,14 @@ func TestSwitchDatabase(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false;"). + conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false"). + Reply("ALTER DATABASE"). + Query("ALTER DATABASE _supabase ALLOW_CONNECTIONS false"). Reply("ALTER DATABASE"). - Query(fmt.Sprintf(utils.TerminateDbSqlFmt, "postgres")). - Reply("DO"). + Query(reset.TERMINATE_BACKENDS). + Reply("SELECT 1"). + Query(reset.COUNT_REPLICATION_SLOTS). + Reply("SELECT 1", []interface{}{0}). Query("ALTER DATABASE postgres RENAME TO main;"). ReplyError(pgerrcode.DuplicateDatabase, `database "main" already exists`) // Setup mock docker @@ -260,10 +270,14 @@ func TestSwitchDatabase(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false;"). 
+ conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false"). + Reply("ALTER DATABASE"). + Query("ALTER DATABASE _supabase ALLOW_CONNECTIONS false"). Reply("ALTER DATABASE"). - Query(fmt.Sprintf(utils.TerminateDbSqlFmt, "postgres")). - Reply("DO"). + Query(reset.TERMINATE_BACKENDS). + Reply("SELECT 1"). + Query(reset.COUNT_REPLICATION_SLOTS). + Reply("SELECT 1", []interface{}{0}). Query("ALTER DATABASE postgres RENAME TO main;"). Reply("ALTER DATABASE"). Query("ALTER DATABASE target RENAME TO postgres;"). diff --git a/internal/db/diff/diff.go b/internal/db/diff/diff.go index 38a3bed58..d54f5e534 100644 --- a/internal/db/diff/diff.go +++ b/internal/db/diff/diff.go @@ -24,6 +24,7 @@ import ( "github.com/supabase/cli/internal/db/start" "github.com/supabase/cli/internal/gen/keys" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/migration" "github.com/supabase/cli/pkg/parser" ) @@ -32,7 +33,7 @@ type DiffFunc func(context.Context, string, string, []string) (string, error) func Run(ctx context.Context, schema []string, file string, config pgconn.Config, differ DiffFunc, fsys afero.Fs, options ...func(*pgx.ConnConfig)) (err error) { // Sanity checks. - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } if utils.IsLocalDatabase(config) { @@ -146,12 +147,12 @@ func CreateShadowDatabase(ctx context.Context, port uint16) (string, error) { func ConnectShadowDatabase(ctx context.Context, timeout time.Duration, options ...func(*pgx.ConnConfig)) (conn *pgx.Conn, err error) { // Retry until connected, cancelled, or timeout - policy := backoff.WithMaxRetries(backoff.NewConstantBackOff(time.Second), uint64(timeout.Seconds())) + policy := start.NewBackoffPolicy(ctx, timeout) config := pgconn.Config{Port: utils.Config.Db.ShadowPort} connect := func() (*pgx.Conn, error) { return utils.ConnectLocalPostgres(ctx, config, options...) 
} - return backoff.RetryWithData(connect, backoff.WithContext(policy, ctx)) + return backoff.RetryWithData(connect, policy) } func MigrateShadowDatabase(ctx context.Context, container string, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { diff --git a/internal/db/diff/diff_test.go b/internal/db/diff/diff_test.go index d5e8c6a46..6ada103c7 100644 --- a/internal/db/diff/diff_test.go +++ b/internal/db/diff/diff_test.go @@ -55,7 +55,7 @@ func TestRun(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Realtime.Image), "test-shadow-realtime") @@ -267,7 +267,7 @@ func TestDiffDatabase(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) gock.New(utils.Docker.DaemonHost()). @@ -303,7 +303,7 @@ At statement 0: create schema public`) JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) gock.New(utils.Docker.DaemonHost()). 
diff --git a/internal/db/diff/pgadmin.go b/internal/db/diff/pgadmin.go index 298023580..55046bf24 100644 --- a/internal/db/diff/pgadmin.go +++ b/internal/db/diff/pgadmin.go @@ -11,6 +11,7 @@ import ( "github.com/supabase/cli/internal/db/start" "github.com/supabase/cli/internal/migration/new" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/config" ) @@ -35,7 +36,7 @@ func SaveDiff(out, file string, fsys afero.Fs) error { func RunPgAdmin(ctx context.Context, schema []string, file string, config pgconn.Config, fsys afero.Fs) error { // Sanity checks. { - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } if err := utils.AssertSupabaseDbIsRunning(); err != nil { diff --git a/internal/db/dump/dump.go b/internal/db/dump/dump.go index a52b1dc41..352942921 100644 --- a/internal/db/dump/dump.go +++ b/internal/db/dump/dump.go @@ -82,6 +82,7 @@ func dumpData(ctx context.Context, config pgconn.Config, schema, excludeTable [] "graphql", "graphql_public", // "net", + // "pgmq", // "pgsodium", // "pgsodium_masks", "pgtle", @@ -100,6 +101,7 @@ func dumpData(ctx context.Context, config pgconn.Config, schema, excludeTable [] // "storage", // "supabase_functions", "supabase_migrations", + // TODO: Remove in a few version in favor of _supabase internal db "_analytics", "_realtime", "_supavisor", diff --git a/internal/db/lint/lint.go b/internal/db/lint/lint.go index dc3026006..5701d89be 100644 --- a/internal/db/lint/lint.go +++ b/internal/db/lint/lint.go @@ -39,7 +39,7 @@ func toEnum(level string) LintLevel { return -1 } -func Run(ctx context.Context, schema []string, level string, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { +func Run(ctx context.Context, schema []string, level string, failOn string, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { // Sanity checks. 
conn, err := utils.ConnectByConfig(ctx, config, options...) if err != nil { @@ -55,7 +55,26 @@ func Run(ctx context.Context, schema []string, level string, config pgconn.Confi fmt.Fprintln(os.Stderr, "\nNo schema errors found") return nil } - return printResultJSON(result, toEnum(level), os.Stdout) + + // Apply filtering based on the minimum level + minLevel := toEnum(level) + filtered := filterResult(result, minLevel) + err = printResultJSON(filtered, os.Stdout) + if err != nil { + return err + } + // Check for fail-on condition + failOnLevel := toEnum(failOn) + if failOnLevel != -1 { + for _, r := range filtered { + for _, issue := range r.Issues { + if toEnum(issue.Level) >= failOnLevel { + return fmt.Errorf("fail-on is set to %s, non-zero exit", AllowedLevels[failOnLevel]) + } + } + } + } + return nil } func filterResult(result []Result, minLevel LintLevel) (filtered []Result) { @@ -73,15 +92,14 @@ func filterResult(result []Result, minLevel LintLevel) (filtered []Result) { return filtered } -func printResultJSON(result []Result, minLevel LintLevel, stdout io.Writer) error { - filtered := filterResult(result, minLevel) - if len(filtered) == 0 { +func printResultJSON(result []Result, stdout io.Writer) error { + if len(result) == 0 { return nil } // Pretty print output enc := json.NewEncoder(stdout) enc.SetIndent("", " ") - if err := enc.Encode(filtered); err != nil { + if err := enc.Encode(result); err != nil { return errors.Errorf("failed to print result json: %w", err) } return nil diff --git a/internal/db/lint/lint_test.go b/internal/db/lint/lint_test.go index 40ce05033..8d7abef8f 100644 --- a/internal/db/lint/lint_test.go +++ b/internal/db/lint/lint_test.go @@ -65,7 +65,7 @@ func TestLintCommand(t *testing.T) { Reply("SELECT 1", []interface{}{"f1", string(data)}). 
Query("rollback").Reply("ROLLBACK") // Run test - err = Run(context.Background(), []string{"public"}, "warning", dbConfig, fsys, conn.Intercept) + err = Run(context.Background(), []string{"public"}, "warning", "none", dbConfig, fsys, conn.Intercept) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -221,7 +221,8 @@ func TestPrintResult(t *testing.T) { t.Run("filters warning level", func(t *testing.T) { // Run test var out bytes.Buffer - assert.NoError(t, printResultJSON(result, toEnum("warning"), &out)) + filtered := filterResult(result, toEnum("warning")) + assert.NoError(t, printResultJSON(filtered, &out)) // Validate output var actual []Result assert.NoError(t, json.Unmarshal(out.Bytes(), &actual)) @@ -231,7 +232,8 @@ func TestPrintResult(t *testing.T) { t.Run("filters error level", func(t *testing.T) { // Run test var out bytes.Buffer - assert.NoError(t, printResultJSON(result, toEnum("error"), &out)) + filtered := filterResult(result, toEnum("error")) + assert.NoError(t, printResultJSON(filtered, &out)) // Validate output var actual []Result assert.NoError(t, json.Unmarshal(out.Bytes(), &actual)) @@ -240,4 +242,58 @@ func TestPrintResult(t *testing.T) { Issues: []Issue{result[0].Issues[1]}, }}, actual) }) + + t.Run("exits with non-zero status on warning", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query("begin").Reply("BEGIN"). + Query(ENABLE_PGSQL_CHECK). + Reply("CREATE EXTENSION"). + Query(checkSchemaScript, "public"). + Reply("SELECT 1", []interface{}{"f1", `{"function":"22751","issues":[{"level":"warning","message":"test warning"}]}`}). 
+ Query("rollback").Reply("ROLLBACK") + // Run test + err := Run(context.Background(), []string{"public"}, "warning", "warning", dbConfig, fsys, conn.Intercept) + // Check error + assert.ErrorContains(t, err, "fail-on is set to warning, non-zero exit") + }) + + t.Run("exits with non-zero status on error", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query("begin").Reply("BEGIN"). + Query(ENABLE_PGSQL_CHECK). + Reply("CREATE EXTENSION"). + Query(checkSchemaScript, "public"). + Reply("SELECT 1", []interface{}{"f1", `{"function":"22751","issues":[{"level":"error","message":"test error"}]}`}). + Query("rollback").Reply("ROLLBACK") + // Run test + err := Run(context.Background(), []string{"public"}, "warning", "error", dbConfig, fsys, conn.Intercept) + // Check error + assert.ErrorContains(t, err, "fail-on is set to error, non-zero exit") + }) + + t.Run("does not exit with non-zero status when fail-on is none", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query("begin").Reply("BEGIN"). + Query(ENABLE_PGSQL_CHECK). + Reply("CREATE EXTENSION"). + Query(checkSchemaScript, "public"). + Reply("SELECT 1", []interface{}{"f1", `{"function":"22751","issues":[{"level":"error","message":"test error"}]}`}). 
+ Query("rollback").Reply("ROLLBACK") + // Run test + err := Run(context.Background(), []string{"public"}, "warning", "none", dbConfig, fsys, conn.Intercept) + // Check error + assert.NoError(t, err) + }) } diff --git a/internal/db/pull/pull.go b/internal/db/pull/pull.go index f6a235e11..6cc8dccab 100644 --- a/internal/db/pull/pull.go +++ b/internal/db/pull/pull.go @@ -18,6 +18,7 @@ import ( "github.com/supabase/cli/internal/migration/new" "github.com/supabase/cli/internal/migration/repair" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/migration" ) @@ -35,7 +36,7 @@ var ( func Run(ctx context.Context, schema []string, config pgconn.Config, name string, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { // 1. Sanity checks. - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } // 2. Check postgres connection diff --git a/internal/db/push/push.go b/internal/db/push/push.go index 68e3be9ac..4a1fdf6df 100644 --- a/internal/db/push/push.go +++ b/internal/db/push/push.go @@ -10,10 +10,11 @@ import ( "github.com/jackc/pgconn" "github.com/jackc/pgx/v4" "github.com/spf13/afero" - "github.com/supabase/cli/internal/migration/apply" "github.com/supabase/cli/internal/migration/up" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/migration" + "github.com/supabase/cli/pkg/vault" ) func Run(ctx context.Context, dryRun, ignoreVersionMismatch bool, includeRoles, includeSeed bool, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { @@ -29,39 +30,80 @@ func Run(ctx context.Context, dryRun, ignoreVersionMismatch bool, includeRoles, if err != nil { return err } - if len(pending) == 0 { + var seeds []migration.SeedFile + if includeSeed { + // TODO: flag should override config but we don't resolve glob paths when seed is disabled. 
+ if !utils.Config.Db.Seed.Enabled { + fmt.Fprintln(os.Stderr, "Skipping seed because it is disabled in config.toml for project:", flags.ProjectRef) + } else if seeds, err = migration.GetPendingSeeds(ctx, utils.Config.Db.Seed.SqlPaths, conn, afero.NewIOFS(fsys)); err != nil { + return err + } + } + var globals []string + if includeRoles { + if exists, err := afero.Exists(fsys, utils.CustomRolesPath); err != nil { + return errors.Errorf("failed to find custom roles: %w", err) + } else if exists { + globals = append(globals, utils.CustomRolesPath) + } + } + if len(pending) == 0 && len(seeds) == 0 && len(globals) == 0 { fmt.Println("Remote database is up to date.") return nil } // Push pending migrations if dryRun { - if includeRoles { - fmt.Fprintln(os.Stderr, "Would create custom roles "+utils.Bold(utils.CustomRolesPath)+"...") + if len(globals) > 0 { + fmt.Fprintln(os.Stderr, "Would create custom roles "+utils.Bold(globals[0])+"...") } - fmt.Fprintln(os.Stderr, "Would push these migrations:") - fmt.Fprint(os.Stderr, utils.Bold(confirmPushAll(pending))) - if includeSeed { - fmt.Fprintln(os.Stderr, "Would seed data "+utils.Bold(utils.SeedDataPath)+"...") + if len(pending) > 0 { + fmt.Fprintln(os.Stderr, "Would push these migrations:") + fmt.Fprint(os.Stderr, confirmPushAll(pending)) } - } else { - msg := fmt.Sprintf("Do you want to push these migrations to the remote database?\n%s\n", confirmPushAll(pending)) - if shouldPush, err := utils.NewConsole().PromptYesNo(ctx, msg, true); err != nil { - return err - } else if !shouldPush { - return errors.New(context.Canceled) + if len(seeds) > 0 { + fmt.Fprintln(os.Stderr, "Would seed these files:") + fmt.Fprint(os.Stderr, confirmSeedAll(seeds)) } - if includeRoles { - if err := apply.CreateCustomRoles(ctx, conn, fsys); err != nil { + } else { + if len(globals) > 0 { + msg := "Do you want to create custom roles in the database cluster?" 
+ if shouldPush, err := utils.NewConsole().PromptYesNo(ctx, msg, true); err != nil { + return err + } else if !shouldPush { + return errors.New(context.Canceled) + } + if err := migration.SeedGlobals(ctx, globals, conn, afero.NewIOFS(fsys)); err != nil { return err } } - if err := migration.ApplyMigrations(ctx, pending, conn, afero.NewIOFS(fsys)); err != nil { - return err + if len(pending) > 0 { + msg := fmt.Sprintf("Do you want to push these migrations to the remote database?\n%s\n", confirmPushAll(pending)) + if shouldPush, err := utils.NewConsole().PromptYesNo(ctx, msg, true); err != nil { + return err + } else if !shouldPush { + return errors.New(context.Canceled) + } + if err := vault.UpsertVaultSecrets(ctx, utils.Config.Db.Vault, conn); err != nil { + return err + } + if err := migration.ApplyMigrations(ctx, pending, conn, afero.NewIOFS(fsys)); err != nil { + return err + } + } else { + fmt.Fprintln(os.Stderr, "Schema migrations are up to date.") } - if includeSeed { - if err := apply.SeedDatabase(ctx, conn, fsys); err != nil { + if len(seeds) > 0 { + msg := fmt.Sprintf("Do you want to seed the remote database with these files?\n%s\n", confirmSeedAll(seeds)) + if shouldPush, err := utils.NewConsole().PromptYesNo(ctx, msg, true); err != nil { + return err + } else if !shouldPush { + return errors.New(context.Canceled) + } + if err := migration.SeedData(ctx, seeds, conn, afero.NewIOFS(fsys)); err != nil { return err } + } else if includeSeed { + fmt.Fprintln(os.Stderr, "Seed files are up to date.") } } fmt.Println("Finished " + utils.Aqua("supabase db push") + ".") @@ -71,7 +113,18 @@ func Run(ctx context.Context, dryRun, ignoreVersionMismatch bool, includeRoles, func confirmPushAll(pending []string) (msg string) { for _, path := range pending { filename := filepath.Base(path) - msg += fmt.Sprintf(" • %s\n", filename) + msg += fmt.Sprintf(" • %s\n", utils.Bold(filename)) + } + return msg +} + +func confirmSeedAll(pending []migration.SeedFile) (msg string) { + 
for _, seed := range pending { + notice := seed.Path + if seed.Dirty { + notice += " (hash update)" + } + msg += fmt.Sprintf(" • %s\n", utils.Bold(notice)) } return msg } diff --git a/internal/db/push/push_test.go b/internal/db/push/push_test.go index e4f6353ec..3a3ff3cda 100644 --- a/internal/db/push/push_test.go +++ b/internal/db/push/push_test.go @@ -2,6 +2,8 @@ package push import ( "context" + "crypto/sha256" + "encoding/hex" "os" "path/filepath" "testing" @@ -146,7 +148,7 @@ func TestPushAll(t *testing.T) { t.Run("throws error on roles failure", func(t *testing.T) { // Setup in-memory fs - fsys := &fstest.OpenErrorFs{DenyPath: utils.CustomRolesPath} + fsys := &fstest.StatErrorFs{DenyPath: utils.CustomRolesPath} path := filepath.Join(utils.MigrationsDir, "0_test.sql") require.NoError(t, afero.WriteFile(fsys, path, []byte{}, 0644)) // Setup mock postgres @@ -161,21 +163,30 @@ func TestPushAll(t *testing.T) { }) t.Run("throws error on seed failure", func(t *testing.T) { + digest := hex.EncodeToString(sha256.New().Sum(nil)) + seedPath := filepath.Join(utils.SupabaseDirPath, "seed.sql") + utils.Config.Db.Seed.SqlPaths = []string{seedPath} // Setup in-memory fs - fsys := &fstest.OpenErrorFs{DenyPath: utils.SeedDataPath} + fsys := afero.NewMemMapFs() + require.NoError(t, afero.WriteFile(fsys, seedPath, []byte{}, 0644)) path := filepath.Join(utils.MigrationsDir, "0_test.sql") require.NoError(t, afero.WriteFile(fsys, path, []byte{}, 0644)) // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) conn.Query(migration.LIST_MIGRATION_VERSION). + Reply("SELECT 0"). + Query(migration.SELECT_SEED_TABLE). Reply("SELECT 0") helper.MockMigrationHistory(conn). Query(migration.INSERT_MIGRATION_VERSION, "0", "test", nil). Reply("INSERT 0 1") + helper.MockSeedHistory(conn). + Query(migration.UPSERT_SEED_FILE, seedPath, digest). 
+ ReplyError(pgerrcode.NotNullViolation, `null value in column "hash" of relation "seed_files"`) // Run test err := Run(context.Background(), false, false, false, true, dbConfig, fsys, conn.Intercept) // Check error - assert.ErrorIs(t, err, os.ErrPermission) + assert.ErrorContains(t, err, `ERROR: null value in column "hash" of relation "seed_files" (SQLSTATE 23502)`) }) } diff --git a/internal/db/remote/changes/changes.go b/internal/db/remote/changes/changes.go index c735b5984..9aa4a47b0 100644 --- a/internal/db/remote/changes/changes.go +++ b/internal/db/remote/changes/changes.go @@ -8,6 +8,7 @@ import ( "github.com/spf13/afero" "github.com/supabase/cli/internal/db/diff" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/migration" ) @@ -15,7 +16,7 @@ var output string func Run(ctx context.Context, schema []string, config pgconn.Config, fsys afero.Fs) error { // Sanity checks. - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } diff --git a/internal/db/remote/commit/commit.go b/internal/db/remote/commit/commit.go index f9062d492..6e00a0295 100644 --- a/internal/db/remote/commit/commit.go +++ b/internal/db/remote/commit/commit.go @@ -15,12 +15,13 @@ import ( "github.com/supabase/cli/internal/migration/list" "github.com/supabase/cli/internal/migration/repair" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/migration" ) func Run(ctx context.Context, schema []string, config pgconn.Config, fsys afero.Fs) error { // Sanity checks. 
- if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } diff --git a/internal/db/reset/reset.go b/internal/db/reset/reset.go index 62c3d3350..d3e33e582 100644 --- a/internal/db/reset/reset.go +++ b/internal/db/reset/reset.go @@ -10,6 +10,8 @@ import ( "strings" "time" + "github.com/cenkalti/backoff/v4" + "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/network" "github.com/docker/docker/errdefs" @@ -25,6 +27,7 @@ import ( "github.com/supabase/cli/internal/seed/buckets" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/pkg/migration" + "github.com/supabase/cli/pkg/vault" ) func Run(ctx context.Context, version string, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { @@ -54,9 +57,11 @@ func Run(ctx context.Context, version string, config pgconn.Config, fsys afero.F return err } // Seed objects from supabase/buckets directory - if utils.Config.Storage.Enabled { - if err := start.WaitForHealthyService(ctx, 30*time.Second, utils.StorageId); err != nil { - return err + if resp, err := utils.Docker.ContainerInspect(ctx, utils.StorageId); err == nil { + if resp.State.Health == nil || resp.State.Health.Status != types.Healthy { + if err := start.WaitForHealthyService(ctx, 30*time.Second, utils.StorageId); err != nil { + return err + } } if err := buckets.Run(ctx, "", false, fsys); err != nil { return err @@ -97,11 +102,6 @@ func resetDatabase14(ctx context.Context, version string, fsys afero.Fs, options return err } defer conn.Close(context.Background()) - if utils.Config.Db.MajorVersion > 14 { - if err := start.SetupDatabase(ctx, conn, utils.DbId, os.Stderr, fsys); err != nil { - return err - } - } return apply.MigrateAndSeed(ctx, version, conn, fsys) } @@ -112,10 +112,6 @@ func resetDatabase15(ctx context.Context, version string, fsys afero.Fs, options if err := utils.Docker.VolumeRemove(ctx, 
utils.DbId, true); err != nil { return errors.Errorf("failed to remove volume: %w", err) } - // Skip syslog if vector container is not started - if _, err := utils.Docker.ContainerInspect(ctx, utils.VectorId); err != nil { - utils.Config.Analytics.Enabled = false - } config := start.NewContainerConfig() hostConfig := start.NewHostConfig() networkingConfig := network.NetworkingConfig{ @@ -132,15 +128,7 @@ func resetDatabase15(ctx context.Context, version string, fsys afero.Fs, options if err := start.WaitForHealthyService(ctx, start.HealthTimeout, utils.DbId); err != nil { return err } - conn, err := utils.ConnectLocalPostgres(ctx, pgconn.Config{}, options...) - if err != nil { - return err - } - defer conn.Close(context.Background()) - if err := start.SetupDatabase(ctx, conn, utils.DbId, os.Stderr, fsys); err != nil { - return err - } - if err := apply.MigrateAndSeed(ctx, version, conn, fsys); err != nil { + if err := start.SetupLocalDatabase(ctx, version, fsys, os.Stderr, options...); err != nil { return err } fmt.Fprintln(os.Stderr, "Restarting containers...") @@ -171,25 +159,47 @@ func recreateDatabase(ctx context.Context, options ...func(*pgx.ConnConfig)) err Statements: []string{ "DROP DATABASE IF EXISTS postgres WITH (FORCE)", "CREATE DATABASE postgres WITH OWNER postgres", + "DROP DATABASE IF EXISTS _supabase WITH (FORCE)", + "CREATE DATABASE _supabase WITH OWNER postgres", }, } return sql.ExecBatch(ctx, conn) } +const ( + TERMINATE_BACKENDS = "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname IN ('postgres', '_supabase')" + COUNT_REPLICATION_SLOTS = "SELECT COUNT(*) FROM pg_replication_slots WHERE database IN ('postgres', '_supabase')" +) + func DisconnectClients(ctx context.Context, conn *pgx.Conn) error { - // Must be executed separately because running in transaction is unsupported - disconn := "ALTER DATABASE postgres ALLOW_CONNECTIONS false;" - if _, err := conn.Exec(ctx, disconn); err != nil { + // Must be executed separately 
because looping in transaction is unsupported + // https://dba.stackexchange.com/a/11895 + disconn := migration.MigrationFile{ + Statements: []string{ + "ALTER DATABASE postgres ALLOW_CONNECTIONS false", + "ALTER DATABASE _supabase ALLOW_CONNECTIONS false", + TERMINATE_BACKENDS, + }, + } + if err := disconn.ExecBatch(ctx, conn); err != nil { var pgErr *pgconn.PgError if errors.As(err, &pgErr) && pgErr.Code != pgerrcode.InvalidCatalogName { return errors.Errorf("failed to disconnect clients: %w", err) } } - term := fmt.Sprintf(utils.TerminateDbSqlFmt, "postgres") - if _, err := conn.Exec(ctx, term); err != nil { - return errors.Errorf("failed to terminate backend: %w", err) + // Wait for WAL senders to drop their replication slots + policy := start.NewBackoffPolicy(ctx, 10*time.Second) + waitForDrop := func() error { + var count int + if err := conn.QueryRow(ctx, COUNT_REPLICATION_SLOTS).Scan(&count); err != nil { + err = errors.Errorf("failed to count replication slots: %w", err) + return &backoff.PermanentError{Err: err} + } else if count > 0 { + return errors.Errorf("replication slots still active: %d", count) + } + return nil } - return nil + return backoff.Retry(waitForDrop, policy) } func RestartDatabase(ctx context.Context, w io.Writer) error { @@ -210,7 +220,7 @@ func restartServices(ctx context.Context) error { services := listServicesToRestart() result := utils.WaitAll(services, func(id string) error { if err := utils.Docker.ContainerRestart(ctx, id, container.StopOptions{}); err != nil && !errdefs.IsNotFound(err) { - return errors.Errorf("Failed to restart %s: %w", id, err) + return errors.Errorf("failed to restart %s: %w", id, err) } return nil }) @@ -232,6 +242,9 @@ func resetRemote(ctx context.Context, version string, config pgconn.Config, fsys if err := migration.DropUserSchemas(ctx, conn); err != nil { return err } + if err := vault.UpsertVaultSecrets(ctx, utils.Config.Db.Vault, conn); err != nil { + return err + } return apply.MigrateAndSeed(ctx, 
version, conn, fsys) } diff --git a/internal/db/reset/reset_test.go b/internal/db/reset/reset_test.go index 38fef4b96..4e3558be3 100644 --- a/internal/db/reset/reset_test.go +++ b/internal/db/reset/reset_test.go @@ -3,7 +3,6 @@ package reset import ( "context" "errors" - "fmt" "io" "net/http" "path/filepath" @@ -24,6 +23,7 @@ import ( "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/pkg/migration" "github.com/supabase/cli/pkg/pgtest" + "github.com/supabase/cli/pkg/storage" ) func TestResetCommand(t *testing.T) { @@ -38,6 +38,69 @@ func TestResetCommand(t *testing.T) { Database: "postgres", } + t.Run("seeds storage after reset", func(t *testing.T) { + utils.DbId = "test-reset" + utils.ConfigId = "test-config" + utils.Config.Db.MajorVersion = 15 + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Setup mock docker + require.NoError(t, apitest.MockDocker(utils.Docker)) + defer gock.OffAll() + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/containers/" + utils.DbId). + Reply(http.StatusOK). + JSON(types.ContainerJSON{}) + gock.New(utils.Docker.DaemonHost()). + Delete("/v" + utils.Docker.ClientVersion() + "/containers/" + utils.DbId). + Reply(http.StatusOK) + gock.New(utils.Docker.DaemonHost()). + Delete("/v" + utils.Docker.ClientVersion() + "/volumes/" + utils.DbId). + Reply(http.StatusOK) + apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Db.Image), utils.DbId) + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/containers/" + utils.DbId + "/json"). + Reply(http.StatusOK). 
+ JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ + State: &types.ContainerState{ + Running: true, + Health: &types.Health{Status: types.Healthy}, + }, + }}) + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + // Restarts services + utils.StorageId = "test-storage" + utils.GotrueId = "test-auth" + utils.RealtimeId = "test-realtime" + utils.PoolerId = "test-pooler" + for _, container := range listServicesToRestart() { + gock.New(utils.Docker.DaemonHost()). + Post("/v" + utils.Docker.ClientVersion() + "/containers/" + container + "/restart"). + Reply(http.StatusOK) + } + // Seeds storage + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/containers/" + utils.StorageId + "/json"). + Reply(http.StatusOK). + JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ + State: &types.ContainerState{ + Running: true, + Health: &types.Health{Status: types.Healthy}, + }, + }}) + gock.New(utils.Config.Api.ExternalUrl). + Get("/storage/v1/bucket"). + Reply(http.StatusOK). + JSON([]storage.BucketResponse{}) + // Run test + err := Run(context.Background(), "", dbConfig, fsys, conn.Intercept) + // Check error + assert.NoError(t, err) + assert.Empty(t, apitest.ListUnmatchedRequests()) + }) + t.Run("throws error on context canceled", func(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() @@ -138,13 +201,21 @@ func TestRecreateDatabase(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false;"). + conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false"). + Reply("ALTER DATABASE"). + Query("ALTER DATABASE _supabase ALLOW_CONNECTIONS false"). Reply("ALTER DATABASE"). - Query(fmt.Sprintf(utils.TerminateDbSqlFmt, "postgres")). - Reply("DO"). + Query(TERMINATE_BACKENDS). + Reply("SELECT 1"). + Query(COUNT_REPLICATION_SLOTS). + Reply("SELECT 1", []interface{}{0}). 
Query("DROP DATABASE IF EXISTS postgres WITH (FORCE)"). Reply("DROP DATABASE"). Query("CREATE DATABASE postgres WITH OWNER postgres"). + Reply("CREATE DATABASE"). + Query("DROP DATABASE IF EXISTS _supabase WITH (FORCE)"). + Reply("DROP DATABASE"). + Query("CREATE DATABASE _supabase WITH OWNER postgres"). Reply("CREATE DATABASE") // Run test assert.NoError(t, recreateDatabase(context.Background(), conn.Intercept)) @@ -160,14 +231,17 @@ func TestRecreateDatabase(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false;"). - ReplyError(pgerrcode.InvalidCatalogName, `database "postgres" does not exist`). - Query(fmt.Sprintf(utils.TerminateDbSqlFmt, "postgres")). - ReplyError(pgerrcode.UndefinedTable, `relation "pg_stat_activity" does not exist`) + conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false"). + Reply("ALTER DATABASE"). + Query("ALTER DATABASE _supabase ALLOW_CONNECTIONS false"). + ReplyError(pgerrcode.InvalidCatalogName, `database "_supabase" does not exist`). + Query(TERMINATE_BACKENDS). + Query(COUNT_REPLICATION_SLOTS). + ReplyError(pgerrcode.UndefinedTable, `relation "pg_replication_slots" does not exist`) // Run test err := recreateDatabase(context.Background(), conn.Intercept) // Check error - assert.ErrorContains(t, err, `ERROR: relation "pg_stat_activity" does not exist (SQLSTATE 42P01)`) + assert.ErrorContains(t, err, `ERROR: relation "pg_replication_slots" does not exist (SQLSTATE 42P01)`) }) t.Run("throws error on failure to disconnect", func(t *testing.T) { @@ -175,8 +249,10 @@ func TestRecreateDatabase(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false;"). - ReplyError(pgerrcode.InvalidParameterValue, `cannot disallow connections for current database`) + conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false"). 
+ ReplyError(pgerrcode.InvalidParameterValue, `cannot disallow connections for current database`). + Query("ALTER DATABASE _supabase ALLOW_CONNECTIONS false"). + Query(TERMINATE_BACKENDS) // Run test err := recreateDatabase(context.Background(), conn.Intercept) // Check error @@ -188,14 +264,21 @@ func TestRecreateDatabase(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false;"). + conn.Query("ALTER DATABASE postgres ALLOW_CONNECTIONS false"). + Reply("ALTER DATABASE"). + Query("ALTER DATABASE _supabase ALLOW_CONNECTIONS false"). Reply("ALTER DATABASE"). - Query(fmt.Sprintf(utils.TerminateDbSqlFmt, "postgres")). - Reply("DO"). + Query(TERMINATE_BACKENDS). + Reply("SELECT 1"). + Query(COUNT_REPLICATION_SLOTS). + Reply("SELECT 1", []interface{}{0}). Query("DROP DATABASE IF EXISTS postgres WITH (FORCE)"). ReplyError(pgerrcode.ObjectInUse, `database "postgres" is used by an active logical replication slot`). - Query("CREATE DATABASE postgres WITH OWNER postgres") - // Run test + Query("CREATE DATABASE postgres WITH OWNER postgres"). + Query("DROP DATABASE IF EXISTS _supabase WITH (FORCE)"). + Reply("DROP DATABASE"). + Query("CREATE DATABASE _supabase WITH OWNER postgres"). 
+ Reply("CREATE DATABASE") err := recreateDatabase(context.Background(), conn.Intercept) // Check error assert.ErrorContains(t, err, `ERROR: database "postgres" is used by an active logical replication slot (SQLSTATE 55006)`) @@ -218,7 +301,7 @@ func TestRestartDatabase(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) // Restarts services @@ -253,7 +336,7 @@ func TestRestartDatabase(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) // Restarts services @@ -272,9 +355,9 @@ func TestRestartDatabase(t *testing.T) { // Run test err := RestartDatabase(context.Background(), io.Discard) // Check error - assert.ErrorContains(t, err, "Failed to restart "+utils.StorageId) - assert.ErrorContains(t, err, "Failed to restart "+utils.GotrueId) - assert.ErrorContains(t, err, "Failed to restart "+utils.RealtimeId) + assert.ErrorContains(t, err, "failed to restart "+utils.StorageId) + assert.ErrorContains(t, err, "failed to restart "+utils.GotrueId) + assert.ErrorContains(t, err, "failed to restart "+utils.RealtimeId) assert.Empty(t, apitest.ListUnmatchedRequests()) }) @@ -357,6 +440,33 @@ func TestResetRemote(t *testing.T) { assert.NoError(t, err) }) + t.Run("resets remote database with seed config disabled", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + path := filepath.Join(utils.MigrationsDir, "0_schema.sql") + require.NoError(t, afero.WriteFile(fsys, path, nil, 0644)) + seedPath := filepath.Join(utils.SupabaseDirPath, "seed.sql") + // Will raise an error when seeding + require.NoError(t, afero.WriteFile(fsys, seedPath, []byte("INSERT INTO test_table;"), 0644)) + // Setup mock postgres + conn 
:= pgtest.NewConn() + defer conn.Close(t) + conn.Query(migration.ListSchemas, escapedSchemas). + Reply("SELECT 1", []interface{}{"private"}). + Query("DROP SCHEMA IF EXISTS private CASCADE"). + Reply("DROP SCHEMA"). + Query(migration.DropObjects). + Reply("INSERT 0") + helper.MockMigrationHistory(conn). + Query(migration.INSERT_MIGRATION_VERSION, "0", "schema", nil). + Reply("INSERT 0 1") + utils.Config.Db.Seed.Enabled = false + // Run test + err := resetRemote(context.Background(), "", dbConfig, fsys, conn.Intercept) + // No error should be raised since we're skipping the seed + assert.NoError(t, err) + }) + t.Run("throws error on connect failure", func(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() diff --git a/internal/db/start/start.go b/internal/db/start/start.go index 14c2e9a12..85b5c6e35 100644 --- a/internal/db/start/start.go +++ b/internal/db/start/start.go @@ -6,6 +6,7 @@ import ( "fmt" "io" "os" + "path/filepath" "strconv" "strings" "time" @@ -22,17 +23,25 @@ import ( "github.com/supabase/cli/internal/migration/apply" "github.com/supabase/cli/internal/status" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/migration" + "github.com/supabase/cli/pkg/vault" ) var ( HealthTimeout = 120 * time.Second //go:embed templates/schema.sql initialSchema string + //go:embed templates/webhook.sql + webhookSchema string + //go:embed templates/_supabase.sql + _supabaseSchema string + //go:embed templates/restore.sh + restoreScript string ) -func Run(ctx context.Context, fsys afero.Fs) error { - if err := utils.LoadConfigFS(fsys); err != nil { +func Run(ctx context.Context, fromBackup string, fsys afero.Fs) error { + if err := flags.LoadConfig(fsys); err != nil { return err } if err := utils.AssertSupabaseDbIsRunning(); err == nil { @@ -41,11 +50,9 @@ func Run(ctx context.Context, fsys afero.Fs) error { } else if !errors.Is(err, utils.ErrNotRunning) { return err } - // Skip logflare 
container in db start - utils.Config.Analytics.Enabled = false - err := StartDatabase(ctx, fsys, os.Stderr) + err := StartDatabase(ctx, fromBackup, fsys, os.Stderr) if err != nil { - if err := utils.DockerRemoveAll(context.Background(), os.Stderr); err != nil { + if err := utils.DockerRemoveAll(context.Background(), os.Stderr, utils.Config.ProjectId); err != nil { fmt.Fprintln(os.Stderr, err) } } @@ -56,7 +63,6 @@ func NewContainerConfig() container.Config { env := []string{ "POSTGRES_PASSWORD=" + utils.Config.Db.Password, "POSTGRES_HOST=/var/run/postgresql", - "POSTGRES_INITDB_ARGS=--lc-ctype=C.UTF-8", "JWT_SECRET=" + utils.Config.Auth.JwtSecret, fmt.Sprintf("JWT_EXP=%d", utils.Config.Auth.JwtExpiry), } @@ -81,12 +87,19 @@ func NewContainerConfig() container.Config { Timeout: 2 * time.Second, Retries: 3, }, - Entrypoint: []string{"sh", "-c", `cat <<'EOF' > /etc/postgresql.schema.sql && cat <<'EOF' > /etc/postgresql-custom/pgsodium_root.key && docker-entrypoint.sh postgres -D /etc/postgresql + Entrypoint: []string{"sh", "-c", ` +cat <<'EOF' > /etc/postgresql.schema.sql && \ +cat <<'EOF' > /etc/postgresql-custom/pgsodium_root.key && \ +cat <<'EOF' >> /etc/postgresql/postgresql.conf && \ +docker-entrypoint.sh postgres -D /etc/postgresql ` + initialSchema + ` +` + webhookSchema + ` +` + _supabaseSchema + ` EOF ` + utils.Config.Db.RootKey + ` EOF -`}, +` + utils.Config.Db.Settings.ToPostgresConfig() + ` +EOF`}, } if utils.Config.Db.MajorVersion >= 14 { config.Cmd = []string{"postgres", @@ -111,7 +124,7 @@ func NewHostConfig() container.HostConfig { return hostConfig } -func StartDatabase(ctx context.Context, fsys afero.Fs, w io.Writer, options ...func(*pgx.ConnConfig)) error { +func StartDatabase(ctx context.Context, fromBackup string, fsys afero.Fs, w io.Writer, options ...func(*pgx.ConnConfig)) error { config := NewContainerConfig() hostConfig := NewHostConfig() networkingConfig := network.NetworkingConfig{ @@ -122,14 +135,45 @@ func StartDatabase(ctx 
context.Context, fsys afero.Fs, w io.Writer, options ...f }, } if utils.Config.Db.MajorVersion <= 14 { - config.Entrypoint = nil + config.Entrypoint = []string{"sh", "-c", ` +cat <<'EOF' > /docker-entrypoint-initdb.d/supabase_schema.sql && \ +cat <<'EOF' >> /etc/postgresql/postgresql.conf && \ +docker-entrypoint.sh postgres -D /etc/postgresql +` + _supabaseSchema + ` +EOF +` + utils.Config.Db.Settings.ToPostgresConfig() + ` +EOF`} hostConfig.Tmpfs = map[string]string{"/docker-entrypoint-initdb.d": ""} } + if len(fromBackup) > 0 { + config.Entrypoint = []string{"sh", "-c", ` +cat <<'EOF' > /etc/postgresql.schema.sql && \ +cat <<'EOF' > /docker-entrypoint-initdb.d/migrate.sh && \ +cat <<'EOF' > /etc/postgresql-custom/pgsodium_root.key && \ +cat <<'EOF' >> /etc/postgresql/postgresql.conf && \ +docker-entrypoint.sh postgres -D /etc/postgresql +` + initialSchema + ` +` + _supabaseSchema + ` +EOF +` + restoreScript + ` +EOF +` + utils.Config.Db.RootKey + ` +EOF +` + utils.Config.Db.Settings.ToPostgresConfig() + ` +EOF`} + if !filepath.IsAbs(fromBackup) { + fromBackup = filepath.Join(utils.CurrentDirAbs, fromBackup) + } + hostConfig.Binds = append(hostConfig.Binds, utils.ToDockerPath(fromBackup)+":/etc/backup.sql:ro") + } // Creating volume will not override existing volume, so we must inspect explicitly _, err := utils.Docker.VolumeInspect(ctx, utils.DbId) utils.NoBackupVolume = client.IsErrNotFound(err) if utils.NoBackupVolume { fmt.Fprintln(w, "Starting database...") + } else if len(fromBackup) > 0 { + utils.CmdSuggestion = fmt.Sprintf("Run %s to remove existing docker volumes.", utils.Aqua("supabase stop --no-backup")) + return errors.Errorf("backup volume already exists") } else { fmt.Fprintln(w, "Starting database from backup...") } @@ -140,14 +184,26 @@ func StartDatabase(ctx context.Context, fsys afero.Fs, w io.Writer, options ...f return err } // Initialize if we are on PG14 and there's no existing db volume - if utils.NoBackupVolume { - if err := 
setupDatabase(ctx, fsys, w, options...); err != nil { + if len(fromBackup) > 0 { + if err := initSchema15(ctx, utils.DbId); err != nil { + return err + } + } else if utils.NoBackupVolume { + if err := SetupLocalDatabase(ctx, "", fsys, w, options...); err != nil { return err } } return initCurrentBranch(fsys) } +func NewBackoffPolicy(ctx context.Context, timeout time.Duration) backoff.BackOff { + policy := backoff.WithMaxRetries( + backoff.NewConstantBackOff(time.Second), + uint64(timeout.Seconds()), + ) + return backoff.WithContext(policy, ctx) +} + func WaitForHealthyService(ctx context.Context, timeout time.Duration, started ...string) error { probe := func() error { var errHealth []error @@ -161,10 +217,7 @@ func WaitForHealthyService(ctx context.Context, timeout time.Duration, started . started = unhealthy return errors.Join(errHealth...) } - policy := backoff.WithContext(backoff.WithMaxRetries( - backoff.NewConstantBackOff(time.Second), - uint64(timeout.Seconds()), - ), ctx) + policy := NewBackoffPolicy(ctx, timeout) err := backoff.Retry(probe, policy) if err != nil && !errors.Is(err, context.Canceled) { // Print container logs for easier debugging @@ -237,8 +290,9 @@ func initRealtimeJob(host string) utils.DockerJob { "SECRET_KEY_BASE=" + utils.Config.Realtime.SecretKeyBase, "ERL_AFLAGS=" + utils.ToRealtimeEnv(utils.Config.Realtime.IpVersion), "DNS_NODES=''", - "RLIMIT_NOFILE=10000", + "RLIMIT_NOFILE=", "SEED_SELF_HOST=true", + "RUN_JANITOR=true", fmt.Sprintf("MAX_HEADER_LENGTH=%d", utils.Config.Realtime.MaxHeaderLength), }, Cmd: []string{"/app/bin/realtime", "eval", fmt.Sprintf(`{:ok, _} = Application.ensure_all_started(:realtime) @@ -257,6 +311,7 @@ func initStorageJob(host string) utils.DockerJob { fmt.Sprintf("DATABASE_URL=postgresql://supabase_storage_admin:%s@%s:5432/postgres", utils.Config.Db.Password, host), fmt.Sprintf("FILE_SIZE_LIMIT=%v", utils.Config.Storage.FileSizeLimit), "STORAGE_BACKEND=file", + "STORAGE_FILE_BACKEND_PATH=/mnt", 
"TENANT_ID=stub", // TODO: https://github.com/supabase/storage-api/issues/55 "REGION=stub", @@ -302,7 +357,7 @@ func initSchema15(ctx context.Context, host string) error { return nil } -func setupDatabase(ctx context.Context, fsys afero.Fs, w io.Writer, options ...func(*pgx.ConnConfig)) error { +func SetupLocalDatabase(ctx context.Context, version string, fsys afero.Fs, w io.Writer, options ...func(*pgx.ConnConfig)) error { conn, err := utils.ConnectLocalPostgres(ctx, pgconn.Config{}, options...) if err != nil { return err @@ -311,12 +366,20 @@ func setupDatabase(ctx context.Context, fsys afero.Fs, w io.Writer, options ...f if err := SetupDatabase(ctx, conn, utils.DbId, w, fsys); err != nil { return err } - return apply.MigrateAndSeed(ctx, "", conn, fsys) + return apply.MigrateAndSeed(ctx, version, conn, fsys) } func SetupDatabase(ctx context.Context, conn *pgx.Conn, host string, w io.Writer, fsys afero.Fs) error { if err := initSchema(ctx, conn, host, w); err != nil { return err } - return apply.CreateCustomRoles(ctx, conn, fsys) + // Create vault secrets first so roles.sql can reference them + if err := vault.UpsertVaultSecrets(ctx, utils.Config.Db.Vault, conn); err != nil { + return err + } + err := migration.SeedGlobals(ctx, []string{utils.CustomRolesPath}, conn, afero.NewIOFS(fsys)) + if errors.Is(err, os.ErrNotExist) { + return nil + } + return err } diff --git a/internal/db/start/start_test.go b/internal/db/start/start_test.go index dcc93b4c8..afdb8e28b 100644 --- a/internal/db/start/start_test.go +++ b/internal/db/start/start_test.go @@ -17,6 +17,7 @@ import ( "github.com/supabase/cli/internal/testing/apitest" "github.com/supabase/cli/internal/testing/fstest" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/cast" "github.com/supabase/cli/pkg/pgtest" ) @@ -59,8 +60,6 @@ func TestStartDatabase(t *testing.T) { fsys := afero.NewMemMapFs() roles := "create role test" require.NoError(t, afero.WriteFile(fsys, utils.CustomRolesPath, 
[]byte(roles), 0644)) - seed := "INSERT INTO employees(name) VALUES ('Alice')" - require.NoError(t, afero.WriteFile(fsys, utils.SeedDataPath, []byte(seed), 0644)) // Setup mock docker require.NoError(t, apitest.MockDocker(utils.Docker)) defer gock.OffAll() @@ -75,7 +74,7 @@ func TestStartDatabase(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Realtime.Image), "test-realtime") @@ -88,11 +87,9 @@ func TestStartDatabase(t *testing.T) { conn := pgtest.NewConn() defer conn.Close(t) conn.Query(roles). - Reply("CREATE ROLE"). - Query(seed). - Reply("INSERT 0 1") + Reply("CREATE ROLE") // Run test - err := StartDatabase(context.Background(), fsys, io.Discard, conn.Intercept) + err := StartDatabase(context.Background(), "", fsys, io.Discard, conn.Intercept) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -123,11 +120,11 @@ func TestStartDatabase(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) // Run test - err := StartDatabase(context.Background(), fsys, io.Discard) + err := StartDatabase(context.Background(), "", fsys, io.Discard) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -152,7 +149,7 @@ func TestStartDatabase(t *testing.T) { Get("/v" + utils.Docker.ClientVersion() + "/images/" + utils.GetRegistryImageUrl(utils.Config.Db.Image) + "/json"). 
Reply(http.StatusInternalServerError) // Run test - err := StartDatabase(context.Background(), fsys, io.Discard) + err := StartDatabase(context.Background(), "", fsys, io.Discard) // Check error assert.ErrorContains(t, err, "request returned Internal Server Error for API route and version") assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -164,7 +161,7 @@ func TestStartCommand(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() // Run test - err := Run(context.Background(), fsys) + err := Run(context.Background(), "", fsys) // Check error assert.ErrorIs(t, err, os.ErrNotExist) }) @@ -180,7 +177,7 @@ func TestStartCommand(t *testing.T) { Get("/v" + utils.Docker.ClientVersion() + "/containers"). ReplyError(errors.New("network error")) // Run test - err := Run(context.Background(), fsys) + err := Run(context.Background(), "", fsys) // Check error assert.ErrorContains(t, err, "network error") assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -198,7 +195,7 @@ func TestStartCommand(t *testing.T) { Reply(http.StatusOK). JSON(types.ContainerJSON{}) // Run test - err := Run(context.Background(), fsys) + err := Run(context.Background(), "", fsys) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -224,7 +221,7 @@ func TestStartCommand(t *testing.T) { // Cleanup resources apitest.MockDockerStop(utils.Docker) // Run test - err := Run(context.Background(), fsys) + err := Run(context.Background(), "", fsys) // Check error assert.ErrorContains(t, err, "network error") assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -256,7 +253,7 @@ func TestSetupDatabase(t *testing.T) { Query(roles). 
Reply("CREATE ROLE") // Run test - err := setupDatabase(context.Background(), fsys, io.Discard, conn.Intercept) + err := SetupLocalDatabase(context.Background(), "", fsys, io.Discard, conn.Intercept) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -265,12 +262,13 @@ func TestSetupDatabase(t *testing.T) { t.Run("throws error on connect failure", func(t *testing.T) { utils.Config.Db.Port = 0 // Run test - err := setupDatabase(context.Background(), nil, io.Discard) + err := SetupLocalDatabase(context.Background(), "", nil, io.Discard) // Check error assert.ErrorContains(t, err, "invalid port (outside range)") }) t.Run("throws error on init failure", func(t *testing.T) { + utils.Config.Realtime.Enabled = true utils.Config.Db.Port = 5432 // Setup mock docker require.NoError(t, apitest.MockDocker(utils.Docker)) @@ -282,7 +280,7 @@ func TestSetupDatabase(t *testing.T) { conn := pgtest.NewConn() defer conn.Close(t) // Run test - err := setupDatabase(context.Background(), nil, io.Discard, conn.Intercept) + err := SetupLocalDatabase(context.Background(), "", nil, io.Discard, conn.Intercept) // Check error assert.ErrorContains(t, err, "network error") assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -305,9 +303,61 @@ func TestSetupDatabase(t *testing.T) { conn := pgtest.NewConn() defer conn.Close(t) // Run test - err := setupDatabase(context.Background(), fsys, io.Discard, conn.Intercept) + err := SetupLocalDatabase(context.Background(), "", fsys, io.Discard, conn.Intercept) // Check error assert.ErrorIs(t, err, os.ErrPermission) assert.Empty(t, apitest.ListUnmatchedRequests()) }) } +func TestStartDatabaseWithCustomSettings(t *testing.T) { + t.Run("starts database with custom MaxConnections", func(t *testing.T) { + // Setup + utils.Config.Db.MajorVersion = 15 + utils.DbId = "supabase_db_test" + utils.ConfigId = "supabase_config_test" + utils.Config.Db.Port = 5432 + utils.Config.Db.Settings.MaxConnections = cast.Ptr(uint(50)) + + // 
Setup in-memory fs + fsys := afero.NewMemMapFs() + + // Setup mock docker + require.NoError(t, apitest.MockDocker(utils.Docker)) + defer gock.OffAll() + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/volumes/" + utils.DbId). + Reply(http.StatusNotFound). + JSON(volume.Volume{}) + apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Db.Image), utils.DbId) + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/containers/" + utils.DbId + "/json"). + Reply(http.StatusOK). + JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ + State: &types.ContainerState{ + Running: true, + Health: &types.Health{Status: types.Healthy}, + }, + }}) + + apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Realtime.Image), "test-realtime") + require.NoError(t, apitest.MockDockerLogs(utils.Docker, "test-realtime", "")) + apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Storage.Image), "test-storage") + require.NoError(t, apitest.MockDockerLogs(utils.Docker, "test-storage", "")) + apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Auth.Image), "test-auth") + require.NoError(t, apitest.MockDockerLogs(utils.Docker, "test-auth", "")) + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + + // Run test + err := StartDatabase(context.Background(), "", fsys, io.Discard, conn.Intercept) + + // Check error + assert.NoError(t, err) + assert.Empty(t, apitest.ListUnmatchedRequests()) + + // Check if the custom MaxConnections setting was applied + config := NewContainerConfig() + assert.Contains(t, config.Entrypoint[2], "max_connections = 50") + }) +} diff --git a/internal/db/start/templates/_supabase.sql b/internal/db/start/templates/_supabase.sql new file mode 100644 index 000000000..6e5d8487b --- /dev/null +++ b/internal/db/start/templates/_supabase.sql @@ -0,0 +1,13 @@ +CREATE DATABASE 
_supabase WITH OWNER postgres; + +-- Switch to the newly created _supabase database +\c _supabase +-- Create schemas in _supabase database for +-- internal tools and reports so as not to overload the user database +-- with non-user activity +CREATE SCHEMA IF NOT EXISTS _analytics; +ALTER SCHEMA _analytics OWNER TO postgres; + +CREATE SCHEMA IF NOT EXISTS _supavisor; +ALTER SCHEMA _supavisor OWNER TO postgres; +\c postgres diff --git a/internal/db/start/templates/restore.sh b/internal/db/start/templates/restore.sh new file mode 100755 index 000000000..26f6418e3 --- /dev/null +++ b/internal/db/start/templates/restore.sh @@ -0,0 +1,41 @@ +#!/bin/sh +set -eu + +####################################### +# Used by both AMI and Docker builds to initialise database schema. +# Env vars: +# POSTGRES_DB defaults to postgres +# POSTGRES_HOST defaults to localhost +# POSTGRES_PORT defaults to 5432 +# POSTGRES_PASSWORD defaults to "" +# USE_DBMATE defaults to "" +# Exit code: +# 0 if migration succeeds, non-zero on error.
+####################################### + +export PGDATABASE="${POSTGRES_DB:-postgres}" +export PGHOST="${POSTGRES_HOST:-localhost}" +export PGPORT="${POSTGRES_PORT:-5432}" +export PGPASSWORD="${POSTGRES_PASSWORD:-}" + +echo "$0: restoring roles" +cat "/etc/backup.sql" \ +| grep 'CREATE ROLE' \ +| grep -v 'supabase_admin' \ +| psql -v ON_ERROR_STOP=1 --no-password --no-psqlrc -U supabase_admin + +echo "$0: restoring schema" +cat "/etc/backup.sql" \ +| sed -E 's/^CREATE VIEW /CREATE OR REPLACE VIEW /' \ +| sed -E 's/^CREATE FUNCTION /CREATE OR REPLACE FUNCTION /' \ +| sed -E 's/^CREATE TRIGGER /CREATE OR REPLACE TRIGGER /' \ +| sed -E 's/^GRANT ALL ON FUNCTION graphql_public\./-- &/' \ +| sed -E 's/^CREATE ROLE /-- &/' \ +| psql -v ON_ERROR_STOP=1 --no-password --no-psqlrc -U supabase_admin + +# run any post migration script to update role passwords +postinit="/etc/postgresql.schema.sql" +if [ -e "$postinit" ]; then + echo "$0: running $postinit" + psql -v ON_ERROR_STOP=1 --no-password --no-psqlrc -U supabase_admin -f "$postinit" +fi diff --git a/internal/db/start/templates/schema.sql b/internal/db/start/templates/schema.sql index 0d0010231..810d9506e 100644 --- a/internal/db/start/templates/schema.sql +++ b/internal/db/start/templates/schema.sql @@ -14,242 +14,3 @@ ALTER USER supabase_read_only_user WITH PASSWORD :'pgpass'; create schema if not exists _realtime; alter schema _realtime owner to postgres; - -create schema if not exists _analytics; -alter schema _analytics owner to postgres; - -create schema if not exists _supavisor; -alter schema _supavisor owner to postgres; - -BEGIN; - --- Create pg_net extension -CREATE EXTENSION IF NOT EXISTS pg_net SCHEMA extensions; - --- Create supabase_functions schema -CREATE SCHEMA supabase_functions AUTHORIZATION supabase_admin; - -GRANT USAGE ON SCHEMA supabase_functions TO postgres, anon, authenticated, service_role; -ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON TABLES TO postgres, anon, 
authenticated, service_role; -ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON FUNCTIONS TO postgres, anon, authenticated, service_role; -ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON SEQUENCES TO postgres, anon, authenticated, service_role; - --- supabase_functions.migrations definition -CREATE TABLE supabase_functions.migrations ( - version text PRIMARY KEY, - inserted_at timestamptz NOT NULL DEFAULT NOW() -); - --- Initial supabase_functions migration -INSERT INTO supabase_functions.migrations (version) VALUES ('initial'); - --- supabase_functions.hooks definition -CREATE TABLE supabase_functions.hooks ( - id bigserial PRIMARY KEY, - hook_table_id integer NOT NULL, - hook_name text NOT NULL, - created_at timestamptz NOT NULL DEFAULT NOW(), - request_id bigint -); -CREATE INDEX supabase_functions_hooks_request_id_idx ON supabase_functions.hooks USING btree (request_id); -CREATE INDEX supabase_functions_hooks_h_table_id_h_name_idx ON supabase_functions.hooks USING btree (hook_table_id, hook_name); -COMMENT ON TABLE supabase_functions.hooks IS 'Supabase Functions Hooks: Audit trail for triggered hooks.'; - -CREATE FUNCTION supabase_functions.http_request() - RETURNS trigger - LANGUAGE plpgsql - AS $function$ - DECLARE - request_id bigint; - payload jsonb; - url text := TG_ARGV[0]::text; - method text := TG_ARGV[1]::text; - headers jsonb DEFAULT '{}'::jsonb; - params jsonb DEFAULT '{}'::jsonb; - timeout_ms integer DEFAULT 1000; - BEGIN - IF url IS NULL OR url = 'null' THEN - RAISE EXCEPTION 'url argument is missing'; - END IF; - - IF method IS NULL OR method = 'null' THEN - RAISE EXCEPTION 'method argument is missing'; - END IF; - - IF TG_ARGV[2] IS NULL OR TG_ARGV[2] = 'null' THEN - headers = '{"Content-Type": "application/json"}'::jsonb; - ELSE - headers = TG_ARGV[2]::jsonb; - END IF; - - IF TG_ARGV[3] IS NULL OR TG_ARGV[3] = 'null' THEN - params = '{}'::jsonb; - ELSE - params = TG_ARGV[3]::jsonb; - END IF; - - IF TG_ARGV[4] 
IS NULL OR TG_ARGV[4] = 'null' THEN - timeout_ms = 1000; - ELSE - timeout_ms = TG_ARGV[4]::integer; - END IF; - - CASE - WHEN method = 'GET' THEN - SELECT http_get INTO request_id FROM net.http_get( - url, - params, - headers, - timeout_ms - ); - WHEN method = 'POST' THEN - payload = jsonb_build_object( - 'old_record', OLD, - 'record', NEW, - 'type', TG_OP, - 'table', TG_TABLE_NAME, - 'schema', TG_TABLE_SCHEMA - ); - - SELECT http_post INTO request_id FROM net.http_post( - url, - payload, - params, - headers, - timeout_ms - ); - ELSE - RAISE EXCEPTION 'method argument % is invalid', method; - END CASE; - - INSERT INTO supabase_functions.hooks - (hook_table_id, hook_name, request_id) - VALUES - (TG_RELID, TG_NAME, request_id); - - RETURN NEW; - END -$function$; - --- Supabase super admin -DO -$$ -BEGIN - IF NOT EXISTS ( - SELECT 1 - FROM pg_roles - WHERE rolname = 'supabase_functions_admin' - ) - THEN - CREATE USER supabase_functions_admin NOINHERIT CREATEROLE LOGIN NOREPLICATION; - END IF; -END -$$; - -GRANT ALL PRIVILEGES ON SCHEMA supabase_functions TO supabase_functions_admin; -GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA supabase_functions TO supabase_functions_admin; -GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA supabase_functions TO supabase_functions_admin; -ALTER USER supabase_functions_admin SET search_path = "supabase_functions"; -ALTER table "supabase_functions".migrations OWNER TO supabase_functions_admin; -ALTER table "supabase_functions".hooks OWNER TO supabase_functions_admin; -ALTER function "supabase_functions".http_request() OWNER TO supabase_functions_admin; -GRANT supabase_functions_admin TO postgres; - --- Remove unused supabase_pg_net_admin role -DO -$$ -BEGIN - IF EXISTS ( - SELECT 1 - FROM pg_roles - WHERE rolname = 'supabase_pg_net_admin' - ) - THEN - REASSIGN OWNED BY supabase_pg_net_admin TO supabase_admin; - DROP OWNED BY supabase_pg_net_admin; - DROP ROLE supabase_pg_net_admin; - END IF; -END -$$; - --- pg_net grants when extension 
is already enabled -DO -$$ -BEGIN - IF EXISTS ( - SELECT 1 - FROM pg_extension - WHERE extname = 'pg_net' - ) - THEN - GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role; - - ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER; - ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER; - - ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net; - ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net; - - REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC; - REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC; - - GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role; - GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role; - END IF; -END -$$; - --- Event trigger for pg_net -CREATE OR REPLACE FUNCTION extensions.grant_pg_net_access() -RETURNS event_trigger -LANGUAGE plpgsql -AS $$ -BEGIN - IF EXISTS ( - SELECT 1 - FROM pg_event_trigger_ddl_commands() AS ev - JOIN pg_extension AS ext - ON ev.objid = ext.oid - WHERE ext.extname = 'pg_net' - ) - THEN - GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role; - - ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER; - ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, 
timeout_milliseconds integer) SECURITY DEFINER; - - ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net; - ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net; - - REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC; - REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC; - - GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role; - GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role; - END IF; -END; -$$; -COMMENT ON FUNCTION extensions.grant_pg_net_access IS 'Grants access to pg_net'; - -DO -$$ -BEGIN - IF NOT EXISTS ( - SELECT 1 - FROM pg_event_trigger - WHERE evtname = 'issue_pg_net_access' - ) THEN - CREATE EVENT TRIGGER issue_pg_net_access ON ddl_command_end WHEN TAG IN ('CREATE EXTENSION') - EXECUTE PROCEDURE extensions.grant_pg_net_access(); - END IF; -END -$$; - -INSERT INTO supabase_functions.migrations (version) VALUES ('20210809183423_update_grants'); - -ALTER function supabase_functions.http_request() SECURITY DEFINER; -ALTER function supabase_functions.http_request() SET search_path = supabase_functions; -REVOKE ALL ON FUNCTION supabase_functions.http_request() FROM PUBLIC; -GRANT EXECUTE ON FUNCTION supabase_functions.http_request() TO postgres, anon, authenticated, service_role; - -COMMIT; diff --git a/internal/db/start/templates/webhook.sql b/internal/db/start/templates/webhook.sql new file mode 100644 index 000000000..52cd09747 --- /dev/null +++ b/internal/db/start/templates/webhook.sql @@ -0,0 +1,232 
@@ +BEGIN; + +-- Create pg_net extension +CREATE EXTENSION IF NOT EXISTS pg_net SCHEMA extensions; + +-- Create supabase_functions schema +CREATE SCHEMA supabase_functions AUTHORIZATION supabase_admin; + +GRANT USAGE ON SCHEMA supabase_functions TO postgres, anon, authenticated, service_role; +ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON TABLES TO postgres, anon, authenticated, service_role; +ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON FUNCTIONS TO postgres, anon, authenticated, service_role; +ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON SEQUENCES TO postgres, anon, authenticated, service_role; + +-- supabase_functions.migrations definition +CREATE TABLE supabase_functions.migrations ( + version text PRIMARY KEY, + inserted_at timestamptz NOT NULL DEFAULT NOW() +); + +-- Initial supabase_functions migration +INSERT INTO supabase_functions.migrations (version) VALUES ('initial'); + +-- supabase_functions.hooks definition +CREATE TABLE supabase_functions.hooks ( + id bigserial PRIMARY KEY, + hook_table_id integer NOT NULL, + hook_name text NOT NULL, + created_at timestamptz NOT NULL DEFAULT NOW(), + request_id bigint +); +CREATE INDEX supabase_functions_hooks_request_id_idx ON supabase_functions.hooks USING btree (request_id); +CREATE INDEX supabase_functions_hooks_h_table_id_h_name_idx ON supabase_functions.hooks USING btree (hook_table_id, hook_name); +COMMENT ON TABLE supabase_functions.hooks IS 'Supabase Functions Hooks: Audit trail for triggered hooks.'; + +CREATE FUNCTION supabase_functions.http_request() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + DECLARE + request_id bigint; + payload jsonb; + url text := TG_ARGV[0]::text; + method text := TG_ARGV[1]::text; + headers jsonb DEFAULT '{}'::jsonb; + params jsonb DEFAULT '{}'::jsonb; + timeout_ms integer DEFAULT 1000; + BEGIN + IF url IS NULL OR url = 'null' THEN + RAISE EXCEPTION 'url argument is missing'; + END IF; + + IF method IS 
NULL OR method = 'null' THEN + RAISE EXCEPTION 'method argument is missing'; + END IF; + + IF TG_ARGV[2] IS NULL OR TG_ARGV[2] = 'null' THEN + headers = '{"Content-Type": "application/json"}'::jsonb; + ELSE + headers = TG_ARGV[2]::jsonb; + END IF; + + IF TG_ARGV[3] IS NULL OR TG_ARGV[3] = 'null' THEN + params = '{}'::jsonb; + ELSE + params = TG_ARGV[3]::jsonb; + END IF; + + IF TG_ARGV[4] IS NULL OR TG_ARGV[4] = 'null' THEN + timeout_ms = 1000; + ELSE + timeout_ms = TG_ARGV[4]::integer; + END IF; + + CASE + WHEN method = 'GET' THEN + SELECT http_get INTO request_id FROM net.http_get( + url, + params, + headers, + timeout_ms + ); + WHEN method = 'POST' THEN + payload = jsonb_build_object( + 'old_record', OLD, + 'record', NEW, + 'type', TG_OP, + 'table', TG_TABLE_NAME, + 'schema', TG_TABLE_SCHEMA + ); + + SELECT http_post INTO request_id FROM net.http_post( + url, + payload, + params, + headers, + timeout_ms + ); + ELSE + RAISE EXCEPTION 'method argument % is invalid', method; + END CASE; + + INSERT INTO supabase_functions.hooks + (hook_table_id, hook_name, request_id) + VALUES + (TG_RELID, TG_NAME, request_id); + + RETURN NEW; + END +$function$; + +-- Supabase super admin +DO +$$ +BEGIN + IF NOT EXISTS ( + SELECT 1 + FROM pg_roles + WHERE rolname = 'supabase_functions_admin' + ) + THEN + CREATE USER supabase_functions_admin NOINHERIT CREATEROLE LOGIN NOREPLICATION; + END IF; +END +$$; + +GRANT ALL PRIVILEGES ON SCHEMA supabase_functions TO supabase_functions_admin; +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA supabase_functions TO supabase_functions_admin; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA supabase_functions TO supabase_functions_admin; +ALTER USER supabase_functions_admin SET search_path = "supabase_functions"; +ALTER table "supabase_functions".migrations OWNER TO supabase_functions_admin; +ALTER table "supabase_functions".hooks OWNER TO supabase_functions_admin; +ALTER function "supabase_functions".http_request() OWNER TO supabase_functions_admin; 
+GRANT supabase_functions_admin TO postgres; + +-- Remove unused supabase_pg_net_admin role +DO +$$ +BEGIN + IF EXISTS ( + SELECT 1 + FROM pg_roles + WHERE rolname = 'supabase_pg_net_admin' + ) + THEN + REASSIGN OWNED BY supabase_pg_net_admin TO supabase_admin; + DROP OWNED BY supabase_pg_net_admin; + DROP ROLE supabase_pg_net_admin; + END IF; +END +$$; + +-- pg_net grants when extension is already enabled +DO +$$ +BEGIN + IF EXISTS ( + SELECT 1 + FROM pg_extension + WHERE extname = 'pg_net' + ) + THEN + GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role; + + ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER; + ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER; + + ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net; + ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net; + + REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC; + REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC; + + GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role; + GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role; + END IF; +END +$$; + +-- Event trigger for pg_net +CREATE OR REPLACE FUNCTION extensions.grant_pg_net_access() +RETURNS event_trigger +LANGUAGE plpgsql +AS $$ +BEGIN + IF EXISTS ( + SELECT 1 + FROM pg_event_trigger_ddl_commands() AS ev + JOIN 
pg_extension AS ext + ON ev.objid = ext.oid + WHERE ext.extname = 'pg_net' + ) + THEN + GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role; + + ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER; + ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER; + + ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net; + ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net; + + REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC; + REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC; + + GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role; + GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role; + END IF; +END; +$$; +COMMENT ON FUNCTION extensions.grant_pg_net_access IS 'Grants access to pg_net'; + +DO +$$ +BEGIN + IF NOT EXISTS ( + SELECT 1 + FROM pg_event_trigger + WHERE evtname = 'issue_pg_net_access' + ) THEN + CREATE EVENT TRIGGER issue_pg_net_access ON ddl_command_end WHEN TAG IN ('CREATE EXTENSION') + EXECUTE PROCEDURE extensions.grant_pg_net_access(); + END IF; +END +$$; + +INSERT INTO supabase_functions.migrations (version) VALUES ('20210809183423_update_grants'); + +ALTER function supabase_functions.http_request() SECURITY DEFINER; +ALTER function supabase_functions.http_request() SET search_path = supabase_functions; +REVOKE ALL ON 
FUNCTION supabase_functions.http_request() FROM PUBLIC; +GRANT EXECUTE ON FUNCTION supabase_functions.http_request() TO postgres, anon, authenticated, service_role; + +COMMIT; diff --git a/internal/db/test/test.go b/internal/db/test/test.go index 03aa3a548..263305736 100644 --- a/internal/db/test/test.go +++ b/internal/db/test/test.go @@ -67,7 +67,8 @@ func Run(ctx context.Context, testFiles []string, config pgconn.Config, fsys afe }() } // Use custom network when connecting to local database - hostConfig := container.HostConfig{Binds: binds} + // disable selinux via security-opt to allow pg-tap to work properly + hostConfig := container.HostConfig{Binds: binds, SecurityOpt: []string{"label:disable"}} if utils.IsLocalDatabase(config) { config.Host = utils.DbAliases[0] config.Port = 5432 diff --git a/internal/functions/deploy/bundle.go b/internal/functions/deploy/bundle.go index 0b178b0f7..c82826bec 100644 --- a/internal/functions/deploy/bundle.go +++ b/internal/functions/deploy/bundle.go @@ -25,7 +25,7 @@ func NewDockerBundler(fsys afero.Fs) function.EszipBundler { return &dockerBundler{fsys: fsys} } -func (b *dockerBundler) Bundle(ctx context.Context, entrypoint string, importMap string, output io.Writer) error { +func (b *dockerBundler) Bundle(ctx context.Context, entrypoint string, importMap string, staticFiles []string, output io.Writer) error { // Create temp directory to store generated eszip slug := filepath.Base(filepath.Dir(entrypoint)) fmt.Fprintln(os.Stderr, "Bundling Function:", utils.Bold(slug)) @@ -44,8 +44,7 @@ func (b *dockerBundler) Bundle(ctx context.Context, entrypoint string, importMap } }() // Create bind mounts - hostEntrypointDir := filepath.Dir(entrypoint) - binds, err := GetBindMounts(cwd, utils.FunctionsDir, hostOutputDir, hostEntrypointDir, importMap, b.fsys) + binds, err := GetBindMounts(cwd, utils.FunctionsDir, hostOutputDir, entrypoint, importMap, b.fsys) if err != nil { return err } @@ -55,15 +54,23 @@ func (b *dockerBundler) 
Bundle(ctx context.Context, entrypoint string, importMap if len(importMap) > 0 { cmd = append(cmd, "--import-map", utils.ToDockerPath(importMap)) } + for _, staticFile := range staticFiles { + cmd = append(cmd, "--static", utils.ToDockerPath(staticFile)) + } if viper.GetBool("DEBUG") { cmd = append(cmd, "--verbose") } + + env := []string{} + if custom_registry := os.Getenv("NPM_CONFIG_REGISTRY"); custom_registry != "" { + env = append(env, "NPM_CONFIG_REGISTRY="+custom_registry) + } // Run bundle if err := utils.DockerRunOnceWithConfig( ctx, container.Config{ Image: utils.Config.EdgeRuntime.Image, - Env: []string{}, + Env: env, Cmd: cmd, WorkingDir: utils.ToDockerPath(cwd), }, @@ -86,7 +93,7 @@ func (b *dockerBundler) Bundle(ctx context.Context, entrypoint string, importMap return function.Compress(eszipBytes, output) } -func GetBindMounts(cwd, hostFuncDir, hostOutputDir, hostEntrypointDir, hostImportMapPath string, fsys afero.Fs) ([]string, error) { +func GetBindMounts(cwd, hostFuncDir, hostOutputDir, hostEntrypointPath, hostImportMapPath string, fsys afero.Fs) ([]string, error) { sep := string(filepath.Separator) // Docker requires all host paths to be absolute if !filepath.IsAbs(hostFuncDir) { @@ -116,6 +123,7 @@ func GetBindMounts(cwd, hostFuncDir, hostOutputDir, hostEntrypointDir, hostImpor } } // Allow entrypoints outside the functions directory + hostEntrypointDir := filepath.Dir(hostEntrypointPath) if len(hostEntrypointDir) > 0 { if !filepath.IsAbs(hostEntrypointDir) { hostEntrypointDir = filepath.Join(cwd, hostEntrypointDir) diff --git a/internal/functions/deploy/bundle_test.go b/internal/functions/deploy/bundle_test.go index f8a68f439..45b5aedd3 100644 --- a/internal/functions/deploy/bundle_test.go +++ b/internal/functions/deploy/bundle_test.go @@ -43,7 +43,7 @@ func TestDockerBundle(t *testing.T) { apitest.MockDockerStart(utils.Docker, imageUrl, containerId) require.NoError(t, apitest.MockDockerLogsExitCode(utils.Docker, containerId, 1)) // Run test - 
err = NewDockerBundler(fsys).Bundle(context.Background(), "", "", &body) + err = NewDockerBundler(fsys).Bundle(context.Background(), "", "", []string{}, &body) // Check error assert.ErrorContains(t, err, "error running container: exit 1") assert.Empty(t, apitest.ListUnmatchedRequests()) diff --git a/internal/functions/deploy/deploy.go b/internal/functions/deploy/deploy.go index 8cbfbd0e7..196d2e8a7 100644 --- a/internal/functions/deploy/deploy.go +++ b/internal/functions/deploy/deploy.go @@ -10,13 +10,15 @@ import ( "github.com/go-errors/errors" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" + "github.com/supabase/cli/pkg/cast" "github.com/supabase/cli/pkg/config" "github.com/supabase/cli/pkg/function" ) func Run(ctx context.Context, slugs []string, projectRef string, noVerifyJWT *bool, importMapPath string, fsys afero.Fs) error { // Load function config and project id - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } else if len(slugs) > 0 { for _, s := range slugs { @@ -45,19 +47,22 @@ func Run(ctx context.Context, slugs []string, projectRef string, noVerifyJWT *bo return nil } -func GetFunctionSlugs(fsys afero.Fs) ([]string, error) { +func GetFunctionSlugs(fsys afero.Fs) (slugs []string, err error) { pattern := filepath.Join(utils.FunctionsDir, "*", "index.ts") paths, err := afero.Glob(fsys, pattern) if err != nil { return nil, errors.Errorf("failed to glob function slugs: %w", err) } - var slugs []string for _, path := range paths { slug := filepath.Base(filepath.Dir(path)) if utils.FuncSlugPattern.MatchString(slug) { slugs = append(slugs, slug) } } + // Add all function slugs declared in config file + for slug := range utils.Config.Functions { + slugs = append(slugs, slug) + } return slugs, nil } @@ -65,6 +70,8 @@ func GetFunctionConfig(slugs []string, importMapPath string, noVerifyJWT *bool, // Although some functions do not 
require import map, it's more convenient to setup // vscode deno extension with a single import map for all functions. fallbackExists := true + functionsUsingDeprecatedGlobalFallback := []string{} + functionsUsingDeprecatedImportMap := []string{} if _, err := fsys.Stat(utils.FallbackImportMapPath); errors.Is(err, os.ErrNotExist) { fallbackExists = false } else if err != nil { @@ -78,18 +85,50 @@ func GetFunctionConfig(slugs []string, importMapPath string, noVerifyJWT *bool, for _, name := range slugs { function := utils.Config.Functions[name] // Precedence order: flag > config > fallback + functionDir := filepath.Join(utils.FunctionsDir, name) if len(function.Entrypoint) == 0 { - function.Entrypoint = filepath.Join(utils.FunctionsDir, name, "index.ts") + function.Entrypoint = filepath.Join(functionDir, "index.ts") } if len(importMapPath) > 0 { function.ImportMap = importMapPath - } else if len(function.ImportMap) == 0 && fallbackExists { - function.ImportMap = utils.FallbackImportMapPath + } else if len(function.ImportMap) == 0 { + denoJsonPath := filepath.Join(functionDir, "deno.json") + denoJsoncPath := filepath.Join(functionDir, "deno.jsonc") + importMapPath := filepath.Join(functionDir, "import_map.json") + if _, err := fsys.Stat(denoJsonPath); err == nil { + function.ImportMap = denoJsonPath + } else if _, err := fsys.Stat(denoJsoncPath); err == nil { + function.ImportMap = denoJsoncPath + } else if _, err := fsys.Stat(importMapPath); err == nil { + function.ImportMap = importMapPath + functionsUsingDeprecatedImportMap = append(functionsUsingDeprecatedImportMap, name) + } else if fallbackExists { + function.ImportMap = utils.FallbackImportMapPath + functionsUsingDeprecatedGlobalFallback = append(functionsUsingDeprecatedGlobalFallback, name) + } } if noVerifyJWT != nil { - function.VerifyJWT = utils.Ptr(!*noVerifyJWT) + function.VerifyJWT = cast.Ptr(!*noVerifyJWT) } functionConfig[name] = function } + if len(functionsUsingDeprecatedImportMap) > 0 { + 
fmt.Fprintln(os.Stderr, + utils.Yellow("WARNING:"), + "Functions using deprecated import_map.json (please migrate to deno.json):", + utils.Aqua(strings.Join(functionsUsingDeprecatedImportMap, ", ")), + ) + } + if len(functionsUsingDeprecatedGlobalFallback) > 0 { + fmt.Fprintln(os.Stderr, + utils.Yellow("WARNING:"), + "Functions using fallback import map:", + utils.Aqua(strings.Join(functionsUsingDeprecatedGlobalFallback, ", ")), + ) + fmt.Fprintln(os.Stderr, + "Please use recommended per function dependency declaration ", + utils.Aqua("https://supabase.com/docs/guides/functions/import-maps"), + ) + } return functionConfig, nil } diff --git a/internal/functions/deploy/deploy_test.go b/internal/functions/deploy/deploy_test.go index faea4a710..558a33f32 100644 --- a/internal/functions/deploy/deploy_test.go +++ b/internal/functions/deploy/deploy_test.go @@ -15,6 +15,7 @@ import ( "github.com/supabase/cli/internal/testing/apitest" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" "github.com/supabase/cli/pkg/config" ) @@ -121,6 +122,58 @@ import_map = "./import_map.json" assert.Empty(t, apitest.ListUnmatchedRequests()) }) + t.Run("skip disabled functions from config", func(t *testing.T) { + t.Cleanup(func() { clear(utils.Config.Functions) }) + // Setup in-memory fs + fsys := afero.NewMemMapFs() + require.NoError(t, utils.WriteConfig(fsys, false)) + f, err := fsys.OpenFile(utils.ConfigPath, os.O_APPEND|os.O_WRONLY, 0600) + require.NoError(t, err) + _, err = f.WriteString(` +[functions.disabled-func] +enabled = false +import_map = "./import_map.json" +`) + require.NoError(t, err) + require.NoError(t, f.Close()) + importMapPath, err := filepath.Abs(filepath.Join(utils.SupabaseDirPath, "import_map.json")) + require.NoError(t, err) + require.NoError(t, afero.WriteFile(fsys, importMapPath, []byte("{}"), 0644)) + // Setup function entrypoints + require.NoError(t, afero.WriteFile(fsys, 
filepath.Join(utils.FunctionsDir, "enabled-func", "index.ts"), []byte{}, 0644)) + require.NoError(t, afero.WriteFile(fsys, filepath.Join(utils.FunctionsDir, "disabled-func", "index.ts"), []byte{}, 0644)) + // Setup valid project ref + project := apitest.RandomProjectRef() + // Setup valid access token + token := apitest.RandomAccessToken(t) + t.Setenv("SUPABASE_ACCESS_TOKEN", string(token)) + // Setup valid deno path + _, err = fsys.Create(utils.DenoPathOverride) + require.NoError(t, err) + // Setup mock api + defer gock.OffAll() + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/functions"). + Reply(http.StatusOK). + JSON([]api.FunctionResponse{}) + gock.New(utils.DefaultApiHost). + Post("/v1/projects/"+project+"/functions"). + MatchParam("slug", "enabled-func"). + Reply(http.StatusCreated). + JSON(api.FunctionResponse{Id: "1"}) + require.NoError(t, apitest.MockDocker(utils.Docker)) + apitest.MockDockerStart(utils.Docker, imageUrl, containerId) + require.NoError(t, apitest.MockDockerLogs(utils.Docker, containerId, "bundled")) + // Setup output file + outputDir := filepath.Join(utils.TempDir, ".output_enabled-func") + require.NoError(t, afero.WriteFile(fsys, filepath.Join(outputDir, "output.eszip"), []byte(""), 0644)) + // Run test + err = Run(context.Background(), nil, project, nil, "", fsys) + // Check error + assert.NoError(t, err) + assert.Empty(t, apitest.ListUnmatchedRequests()) + }) + t.Run("throws error on malformed slug", func(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() @@ -269,12 +322,16 @@ func TestImportMapPath(t *testing.T) { } // Setup in-memory fs fsys := afero.NewMemMapFs() + // Custom global import map loaded via cli flag + customImportMapPath := filepath.Join(utils.FunctionsDir, "custom_import_map.json") + require.NoError(t, afero.WriteFile(fsys, customImportMapPath, []byte("{}"), 0644)) + // Create fallback import map to test precedence order require.NoError(t, afero.WriteFile(fsys, 
utils.FallbackImportMapPath, []byte("{}"), 0644)) // Run test - fc, err := GetFunctionConfig([]string{slug}, utils.FallbackImportMapPath, utils.Ptr(false), fsys) + fc, err := GetFunctionConfig([]string{slug}, customImportMapPath, cast.Ptr(false), fsys) // Check error assert.NoError(t, err) - assert.Equal(t, utils.FallbackImportMapPath, fc[slug].ImportMap) + assert.Equal(t, customImportMapPath, fc[slug].ImportMap) }) t.Run("returns empty string if no fallback", func(t *testing.T) { diff --git a/internal/functions/download/download.go b/internal/functions/download/download.go index e3b68cae9..ca99c1bcf 100644 --- a/internal/functions/download/download.go +++ b/internal/functions/download/download.go @@ -16,6 +16,7 @@ import ( "github.com/go-errors/errors" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/api" ) @@ -112,7 +113,7 @@ func Run(ctx context.Context, slug string, projectRef string, useLegacyBundle bo return RunLegacy(ctx, slug, projectRef, fsys) } // 1. Sanity check - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } // 2. 
Download eszip to temp file diff --git a/internal/functions/list/list.go b/internal/functions/list/list.go index aeb17f2cd..d0d2d9ecc 100644 --- a/internal/functions/list/list.go +++ b/internal/functions/list/list.go @@ -25,14 +25,14 @@ func Run(ctx context.Context, projectRef string, fsys afero.Fs) error { |-|-|-|-|-|-| ` for _, function := range *resp.JSON200 { - t := time.UnixMilli(int64(function.UpdatedAt)) + t := time.UnixMilli(function.UpdatedAt) table += fmt.Sprintf( "|`%s`|`%s`|`%s`|`%s`|`%d`|`%s`|\n", function.Id, function.Name, function.Slug, function.Status, - uint64(function.Version), + function.Version, t.UTC().Format("2006-01-02 15:04:05"), ) } diff --git a/internal/functions/new/new.go b/internal/functions/new/new.go index 3656e9b5f..67a9893e2 100644 --- a/internal/functions/new/new.go +++ b/internal/functions/new/new.go @@ -11,12 +11,18 @@ import ( "github.com/go-errors/errors" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" ) var ( //go:embed templates/index.ts - indexEmbed string - indexTemplate = template.Must(template.New("indexl").Parse(indexEmbed)) + indexEmbed string + //go:embed templates/deno.json + denoEmbed string + //go:embed templates/.npmrc + npmrcEmbed string + + indexTemplate = template.Must(template.New("index").Parse(indexEmbed)) ) type indexConfig struct { @@ -38,25 +44,37 @@ func Run(ctx context.Context, slug string, fsys afero.Fs) error { if err := utils.MkdirIfNotExistFS(fsys, funcDir); err != nil { return err } - path := filepath.Join(funcDir, "index.ts") - f, err := fsys.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0644) - if err != nil { - return errors.Errorf("failed to create function entrypoint: %w", err) - } - defer f.Close() - // Templatize index.ts by config.toml if available - if err := utils.LoadConfigFS(fsys); err != nil { + + // Load config if available + if err := flags.LoadConfig(fsys); err != nil { utils.CmdSuggestion = "" } - config := 
indexConfig{ + + if err := createTemplateFile(fsys, filepath.Join(funcDir, "index.ts"), indexTemplate, indexConfig{ URL: utils.GetApiUrl("/functions/v1/" + slug), Token: utils.Config.Auth.AnonKey, + }); err != nil { + return errors.Errorf("failed to create function entrypoint: %w", err) } - if err := indexTemplate.Option("missingkey=error").Execute(f, config); err != nil { - return errors.Errorf("failed to initialise function entrypoint: %w", err) + + if err := afero.WriteFile(fsys, filepath.Join(funcDir, "deno.json"), []byte(denoEmbed), 0644); err != nil { + return errors.Errorf("failed to create deno.json config: %w", err) + } + + if err := afero.WriteFile(fsys, filepath.Join(funcDir, ".npmrc"), []byte(npmrcEmbed), 0644); err != nil { + return errors.Errorf("failed to create .npmrc config: %w", err) } } fmt.Println("Created new Function at " + utils.Bold(funcDir)) return nil } + +func createTemplateFile(fsys afero.Fs, path string, tmpl *template.Template, data interface{}) error { + f, err := fsys.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0644) + if err != nil { + return err + } + defer f.Close() + return tmpl.Option("missingkey=error").Execute(f, data) +} diff --git a/internal/functions/new/new_test.go b/internal/functions/new/new_test.go index d5e9c2fc8..8e00fa656 100644 --- a/internal/functions/new/new_test.go +++ b/internal/functions/new/new_test.go @@ -24,6 +24,16 @@ func TestNewCommand(t *testing.T) { assert.Contains(t, string(content), "curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/test-func'", ) + + // Verify deno.json exists + denoPath := filepath.Join(utils.FunctionsDir, "test-func", "deno.json") + _, err = afero.ReadFile(fsys, denoPath) + assert.NoError(t, err, "deno.json should be created") + + // Verify .npmrc exists + npmrcPath := filepath.Join(utils.FunctionsDir, "test-func", ".npmrc") + _, err = afero.ReadFile(fsys, npmrcPath) + assert.NoError(t, err, ".npmrc should be created") }) t.Run("throws error on malformed 
slug", func(t *testing.T) { diff --git a/internal/functions/new/templates/.npmrc b/internal/functions/new/templates/.npmrc new file mode 100644 index 000000000..48c638863 --- /dev/null +++ b/internal/functions/new/templates/.npmrc @@ -0,0 +1,3 @@ +# Configuration for private npm package dependencies +# For more information on using private registries with Edge Functions, see: +# https://supabase.com/docs/guides/functions/import-maps#importing-from-private-registries diff --git a/internal/functions/new/templates/deno.json b/internal/functions/new/templates/deno.json new file mode 100644 index 000000000..f6ca8454c --- /dev/null +++ b/internal/functions/new/templates/deno.json @@ -0,0 +1,3 @@ +{ + "imports": {} +} diff --git a/internal/functions/serve/serve.go b/internal/functions/serve/serve.go index 3b6f85119..2f09fed16 100644 --- a/internal/functions/serve/serve.go +++ b/internal/functions/serve/serve.go @@ -19,6 +19,7 @@ import ( "github.com/supabase/cli/internal/functions/deploy" "github.com/supabase/cli/internal/secrets/set" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" ) type InspectMode string @@ -70,7 +71,7 @@ var ( func Run(ctx context.Context, envFilePath string, noVerifyJWT *bool, importMapPath string, runtimeOption RuntimeOption, fsys afero.Fs) error { // 1. Sanity checks. 
- if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } if err := utils.AssertSupabaseDbIsRunning(); err != nil { @@ -109,11 +110,6 @@ func ServeFunctions(ctx context.Context, envFilePath string, noVerifyJWT *bool, if err != nil { return err } - cwd, err := os.Getwd() - if err != nil { - return errors.Errorf("failed to get working directory: %w", err) - } - dockerFuncDir := utils.ToDockerPath(filepath.Join(cwd, utils.FunctionsDir)) env = append(env, fmt.Sprintf("SUPABASE_URL=http://%s:8000", utils.KongAliases[0]), "SUPABASE_ANON_KEY="+utils.Config.Auth.AnonKey, @@ -121,7 +117,6 @@ func ServeFunctions(ctx context.Context, envFilePath string, noVerifyJWT *bool, "SUPABASE_DB_URL="+dbUrl, "SUPABASE_INTERNAL_JWT_SECRET="+utils.Config.Auth.JwtSecret, fmt.Sprintf("SUPABASE_INTERNAL_HOST_PORT=%d", utils.Config.Api.Port), - "SUPABASE_INTERNAL_FUNCTIONS_PATH="+dockerFuncDir, ) if viper.GetBool("DEBUG") { env = append(env, "SUPABASE_INTERNAL_DEBUG=true") @@ -130,6 +125,10 @@ func ServeFunctions(ctx context.Context, envFilePath string, noVerifyJWT *bool, env = append(env, "SUPABASE_INTERNAL_WALLCLOCK_LIMIT_SEC=0") } // 3. 
Parse custom import map + cwd, err := os.Getwd() + if err != nil { + return errors.Errorf("failed to get working directory: %w", err) + } binds, functionsConfigString, err := populatePerFunctionConfigs(cwd, importMapPath, noVerifyJWT, fsys) if err != nil { return err @@ -219,6 +218,10 @@ func populatePerFunctionConfigs(cwd, importMapPath string, noVerifyJWT *bool, fs } binds := []string{} for slug, fc := range functionsConfig { + if !fc.IsEnabled() { + fmt.Fprintln(os.Stderr, "Skipped serving Function:", slug) + continue + } modules, err := deploy.GetBindMounts(cwd, utils.FunctionsDir, "", fc.Entrypoint, fc.ImportMap, fsys) if err != nil { return nil, "", err @@ -227,6 +230,9 @@ func populatePerFunctionConfigs(cwd, importMapPath string, noVerifyJWT *bool, fs fc.ImportMap = utils.ToDockerPath(fc.ImportMap) fc.Entrypoint = utils.ToDockerPath(fc.Entrypoint) functionsConfig[slug] = fc + for i, val := range fc.StaticFiles { + fc.StaticFiles[i] = utils.ToDockerPath(val) + } } functionsConfigBytes, err := json.Marshal(functionsConfig) if err != nil { diff --git a/internal/functions/serve/serve_test.go b/internal/functions/serve/serve_test.go index 5b98ece87..570c4b927 100644 --- a/internal/functions/serve/serve_test.go +++ b/internal/functions/serve/serve_test.go @@ -14,6 +14,7 @@ import ( "github.com/stretchr/testify/require" "github.com/supabase/cli/internal/testing/apitest" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/cast" ) func TestServeCommand(t *testing.T) { @@ -100,7 +101,7 @@ func TestServeCommand(t *testing.T) { Reply(http.StatusOK). 
JSON(types.ContainerJSON{}) // Run test - err := Run(context.Background(), ".env", utils.Ptr(true), "import_map.json", RuntimeOption{}, fsys) + err := Run(context.Background(), ".env", cast.Ptr(true), "import_map.json", RuntimeOption{}, fsys) // Check error assert.ErrorIs(t, err, os.ErrNotExist) }) diff --git a/internal/functions/serve/templates/main.ts b/internal/functions/serve/templates/main.ts index c96847e3e..534409a98 100644 --- a/internal/functions/serve/templates/main.ts +++ b/internal/functions/serve/templates/main.ts @@ -1,48 +1,56 @@ -import { - STATUS_CODE, - STATUS_TEXT, -} from "https://deno.land/std/http/status.ts"; +import { STATUS_CODE, STATUS_TEXT } from "https://deno.land/std/http/status.ts"; +import * as posix from "https://deno.land/std/path/posix/mod.ts"; import * as jose from "https://deno.land/x/jose@v4.13.1/index.ts"; const SB_SPECIFIC_ERROR_CODE = { - BootError: STATUS_CODE.ServiceUnavailable, /** Service Unavailable (RFC 7231, 6.6.4) */ - WorkerRequestCancelled: STATUS_CODE.BadGateway, /** Bad Gateway (RFC 7231, 6.6.3) */ + BootError: + STATUS_CODE.ServiceUnavailable, /** Service Unavailable (RFC 7231, 6.6.4) */ + InvalidWorkerResponse: + STATUS_CODE.InternalServerError, /** Internal Server Error (RFC 7231, 6.6.1) */ WorkerLimit: 546, /** Extended */ }; const SB_SPECIFIC_ERROR_TEXT = { [SB_SPECIFIC_ERROR_CODE.BootError]: "BOOT_ERROR", - [SB_SPECIFIC_ERROR_CODE.WorkerRequestCancelled]: "WORKER_REQUEST_CANCELLED", + [SB_SPECIFIC_ERROR_CODE.InvalidWorkerResponse]: "WORKER_ERROR", [SB_SPECIFIC_ERROR_CODE.WorkerLimit]: "WORKER_LIMIT", }; const SB_SPECIFIC_ERROR_REASON = { - [SB_SPECIFIC_ERROR_CODE.BootError]: "Worker failed to boot (please check logs)", - [SB_SPECIFIC_ERROR_CODE.WorkerRequestCancelled]: "Request cancelled by the proxy due to an error or resource limit of worker (please check logs)", - [SB_SPECIFIC_ERROR_CODE.WorkerLimit]: "Worker failed to respond due to an error or resource limit (please check logs)", -} + 
[SB_SPECIFIC_ERROR_CODE.BootError]: + "Worker failed to boot (please check logs)", + [SB_SPECIFIC_ERROR_CODE.InvalidWorkerResponse]: + "Function exited due to an error (please check logs)", + [SB_SPECIFIC_ERROR_CODE.WorkerLimit]: + "Worker failed to respond due to a resource limit (please check logs)", +}; // OS stuff - we don't want to expose these to the functions. const EXCLUDED_ENVS = ["HOME", "HOSTNAME", "PATH", "PWD"]; const JWT_SECRET = Deno.env.get("SUPABASE_INTERNAL_JWT_SECRET")!; const HOST_PORT = Deno.env.get("SUPABASE_INTERNAL_HOST_PORT")!; -const FUNCTIONS_PATH = Deno.env.get("SUPABASE_INTERNAL_FUNCTIONS_PATH")!; const DEBUG = Deno.env.get("SUPABASE_INTERNAL_DEBUG") === "true"; const FUNCTIONS_CONFIG_STRING = Deno.env.get( "SUPABASE_INTERNAL_FUNCTIONS_CONFIG", )!; -const WALLCLOCK_LIMIT_SEC = parseInt(Deno.env.get("SUPABASE_INTERNAL_WALLCLOCK_LIMIT_SEC")); +const WALLCLOCK_LIMIT_SEC = parseInt( + Deno.env.get("SUPABASE_INTERNAL_WALLCLOCK_LIMIT_SEC"), +); const DENO_SB_ERROR_MAP = new Map([ [Deno.errors.InvalidWorkerCreation, SB_SPECIFIC_ERROR_CODE.BootError], - [Deno.errors.InvalidWorkerResponse, SB_SPECIFIC_ERROR_CODE.WorkerLimit], - [Deno.errors.WorkerRequestCancelled, SB_SPECIFIC_ERROR_CODE.WorkerRequestCancelled], + [Deno.errors.InvalidWorkerResponse, SB_SPECIFIC_ERROR_CODE.InvalidWorkerResponse], + [ + Deno.errors.WorkerRequestCancelled, + SB_SPECIFIC_ERROR_CODE.WorkerLimit, + ], ]); interface FunctionConfig { + entrypointPath: string; importMapPath: string; verifyJWT: boolean; } @@ -144,7 +152,7 @@ Deno.serve({ } } - const servicePath = `${FUNCTIONS_PATH}/${functionName}`; + const servicePath = posix.dirname(functionsConfig[functionName].entrypointPath); console.error(`serving the request with ${servicePath}`); // Ref: https://supabase.com/docs/guides/functions/limits @@ -167,6 +175,11 @@ Deno.serve({ // point, as their migration process will not be easy. 
const decoratorType = "tc39"; + const absEntrypoint = posix.join(Deno.cwd(), functionsConfig[functionName].entrypointPath); + const maybeEntrypoint = posix.toFileUrl(absEntrypoint).href; + + const staticPatterns = functionsConfig[functionName].staticFiles; + try { const worker = await EdgeRuntime.userWorkers.create({ servicePath, @@ -179,17 +192,15 @@ Deno.serve({ customModuleRoot, cpuTimeSoftLimitMs, cpuTimeHardLimitMs, - decoratorType + decoratorType, + maybeEntrypoint, + context: { + useReadSyncFileAPI: true, + }, + staticPatterns, }); - const controller = new AbortController(); - const { signal } = controller; - - // Note: Requests are aborted after 200s (same config as in production) - // TODO: make this configuarable - setTimeout(() => controller.abort(), 200 * 1000); - - return await worker.fetch(req, { signal }); + return await worker.fetch(req); } catch (e) { console.error(e); diff --git a/internal/hostnames/common.go b/internal/hostnames/common.go index 4ec371da7..c01e6a0da 100644 --- a/internal/hostnames/common.go +++ b/internal/hostnames/common.go @@ -104,7 +104,6 @@ Please ensure that your custom domain is set up as a CNAME record to your Supaba if err != nil { return "", errors.Errorf("failed to deserialize body: %w", err) } - owner := res.Result.OwnershipVerification ssl := res.Result.Ssl.ValidationRecords if res.Result.Ssl.Status == "initializing" { return appendRawOutputIfNeeded("Custom hostname setup is being initialized; please request re-verification in a few seconds.\n", response, includeRawOutput), nil @@ -124,11 +123,8 @@ Please ensure that your custom domain is set up as a CNAME record to your Supaba return "", errors.Errorf("expected a single SSL verification record, received: %+v", ssl) } records := "" - if owner.Name != "" { - records = fmt.Sprintf("\n\t%s TXT -> %s", owner.Name, owner.Value) - } if ssl[0].TxtName != "" { - records = fmt.Sprintf("%s\n\t%s TXT -> %s (replace any existing CNAME records)", records, ssl[0].TxtName, 
ssl[0].TxtValue) + records = fmt.Sprintf("%s\n\t%s TXT -> %s", records, ssl[0].TxtName, ssl[0].TxtValue) } status := fmt.Sprintf("Custom hostname verification in-progress; please configure the appropriate DNS entries and request re-verification.\n"+ "Required outstanding validation records: %s\n", diff --git a/internal/init/init.go b/internal/init/init.go index 033638dd9..72241a0e0 100644 --- a/internal/init/init.go +++ b/internal/init/init.go @@ -1,6 +1,7 @@ package init import ( + "bytes" "context" _ "embed" "encoding/json" @@ -12,6 +13,7 @@ import ( "github.com/go-errors/errors" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/tidwall/jsonc" ) var ( @@ -40,19 +42,14 @@ func Run(ctx context.Context, fsys afero.Fs, createVscodeSettings, createIntelli return err } - // 2. Create `seed.sql`. - if err := initSeed(fsys); err != nil { - return err - } - - // 3. Append to `.gitignore`. + // 2. Append to `.gitignore`. if utils.IsGitRepo() { if err := updateGitIgnore(utils.GitIgnorePath, fsys); err != nil { return err } } - // 4. Generate VS Code settings. + // 3. Generate VS Code settings. 
if createVscodeSettings != nil { if *createVscodeSettings { return writeVscodeConfig(fsys) @@ -77,15 +74,6 @@ func Run(ctx context.Context, fsys afero.Fs, createVscodeSettings, createIntelli return nil } -func initSeed(fsys afero.Fs) error { - f, err := fsys.OpenFile(utils.SeedDataPath, os.O_WRONLY|os.O_CREATE, 0644) - if err != nil { - return errors.Errorf("failed to create seed file: %w", err) - } - defer f.Close() - return nil -} - func updateGitIgnore(ignorePath string, fsys afero.Fs) error { var contents []byte @@ -114,15 +102,14 @@ func updateGitIgnore(ignorePath string, fsys afero.Fs) error { type VSCodeSettings map[string]interface{} func loadUserSettings(path string, fsys afero.Fs) (VSCodeSettings, error) { - // Open our jsonFile - jsonFile, err := fsys.Open(path) + data, err := afero.ReadFile(fsys, path) if err != nil { return nil, errors.Errorf("failed to load settings file: %w", err) } - defer jsonFile.Close() + data = jsonc.ToJSONInPlace(data) // Parse and unmarshal JSON file. 
var userSettings VSCodeSettings - dec := json.NewDecoder(jsonFile) + dec := json.NewDecoder(bytes.NewReader(data)) if err := dec.Decode(&userSettings); err != nil { return nil, errors.Errorf("failed to parse settings: %w", err) } diff --git a/internal/init/init_test.go b/internal/init/init_test.go index 09bf6ab0f..99a96dce5 100644 --- a/internal/init/init_test.go +++ b/internal/init/init_test.go @@ -11,6 +11,7 @@ import ( "github.com/stretchr/testify/require" "github.com/supabase/cli/internal/testing/fstest" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/cast" ) func TestInitCommand(t *testing.T) { @@ -28,10 +29,6 @@ func TestInitCommand(t *testing.T) { exists, err = afero.Exists(fsys, utils.GitIgnorePath) assert.NoError(t, err) assert.True(t, exists) - // Validate generated seed.sql - exists, err = afero.Exists(fsys, utils.SeedDataPath) - assert.NoError(t, err) - assert.True(t, exists) // Validate vscode settings file isn't generated exists, err = afero.Exists(fsys, settingsPath) assert.NoError(t, err) @@ -70,20 +67,11 @@ func TestInitCommand(t *testing.T) { assert.Error(t, Run(context.Background(), fsys, nil, nil, utils.InitParams{})) }) - t.Run("throws error on seed failure", func(t *testing.T) { - // Setup in-memory fs - fsys := &fstest.OpenErrorFs{DenyPath: utils.SeedDataPath} - // Run test - err := Run(context.Background(), fsys, nil, nil, utils.InitParams{}) - // Check error - assert.ErrorIs(t, err, os.ErrPermission) - }) - t.Run("creates vscode settings file", func(t *testing.T) { // Setup in-memory fs fsys := &afero.MemMapFs{} // Run test - assert.NoError(t, Run(context.Background(), fsys, utils.Ptr(true), nil, utils.InitParams{})) + assert.NoError(t, Run(context.Background(), fsys, cast.Ptr(true), nil, utils.InitParams{})) // Validate generated vscode settings exists, err := afero.Exists(fsys, settingsPath) assert.NoError(t, err) @@ -97,7 +85,7 @@ func TestInitCommand(t *testing.T) { // Setup in-memory fs fsys := &afero.MemMapFs{} 
// Run test - assert.NoError(t, Run(context.Background(), fsys, utils.Ptr(false), nil, utils.InitParams{})) + assert.NoError(t, Run(context.Background(), fsys, cast.Ptr(false), nil, utils.InitParams{})) // Validate vscode settings file isn't generated exists, err := afero.Exists(fsys, settingsPath) assert.NoError(t, err) @@ -111,7 +99,7 @@ func TestInitCommand(t *testing.T) { // Setup in-memory fs fsys := &afero.MemMapFs{} // Run test - assert.NoError(t, Run(context.Background(), fsys, nil, utils.Ptr(true), utils.InitParams{})) + assert.NoError(t, Run(context.Background(), fsys, nil, cast.Ptr(true), utils.InitParams{})) // Validate generated intellij deno config exists, err := afero.Exists(fsys, denoPath) assert.NoError(t, err) @@ -122,7 +110,7 @@ func TestInitCommand(t *testing.T) { // Setup in-memory fs fsys := &afero.MemMapFs{} // Run test - assert.NoError(t, Run(context.Background(), fsys, nil, utils.Ptr(false), utils.InitParams{})) + assert.NoError(t, Run(context.Background(), fsys, nil, cast.Ptr(false), utils.InitParams{})) // Validate intellij deno config file isn't generated exists, err := afero.Exists(fsys, denoPath) assert.NoError(t, err) diff --git a/internal/init/templates/.gitignore b/internal/init/templates/.gitignore index a3ad88055..ad9264f0b 100644 --- a/internal/init/templates/.gitignore +++ b/internal/init/templates/.gitignore @@ -1,4 +1,8 @@ # Supabase .branches .temp -.env + +# dotenvx +.env.keys +.env.local +.env.*.local diff --git a/internal/init/templates/.vscode/settings.json b/internal/init/templates/.vscode/settings.json index 5d0c9ed1c..af62c23f8 100644 --- a/internal/init/templates/.vscode/settings.json +++ b/internal/init/templates/.vscode/settings.json @@ -3,7 +3,21 @@ "supabase/functions" ], "deno.lint": true, - "deno.unstable": true, + "deno.unstable": [ + "bare-node-builtins", + "byonm", + "sloppy-imports", + "unsafe-proto", + "webgpu", + "broadcast-channel", + "worker-options", + "cron", + "kv", + "ffi", + "fs", + "http", + "net" 
+ ], "[typescript]": { "editor.defaultFormatter": "denoland.vscode-deno" } diff --git a/internal/inspect/report_test.go b/internal/inspect/report_test.go index 4de1ab602..6b4220451 100644 --- a/internal/inspect/report_test.go +++ b/internal/inspect/report_test.go @@ -18,6 +18,7 @@ import ( "github.com/supabase/cli/internal/inspect/long_running_queries" "github.com/supabase/cli/internal/inspect/outliers" "github.com/supabase/cli/internal/inspect/replication_slots" + "github.com/supabase/cli/internal/inspect/role_configs" "github.com/supabase/cli/internal/inspect/role_connections" "github.com/supabase/cli/internal/inspect/seq_scans" "github.com/supabase/cli/internal/inspect/table_index_sizes" @@ -65,6 +66,8 @@ func TestReportCommand(t *testing.T) { Reply("COPY 0"). Query(wrapQuery(replication_slots.ReplicationSlotsQuery)). Reply("COPY 0"). + Query(wrapQuery(role_configs.RoleConfigsQuery)). + Reply("COPY 0"). Query(wrapQuery(role_connections.RoleConnectionsQuery)). Reply("COPY 0"). Query(wrapQuery(seq_scans.SeqScansQuery)). 
@@ -89,7 +92,7 @@ func TestReportCommand(t *testing.T) { assert.NoError(t, err) matches, err := afero.Glob(fsys, "*.csv") assert.NoError(t, err) - assert.Len(t, matches, 19) + assert.Len(t, matches, 20) }) } diff --git a/internal/inspect/role_configs/role_configs.go b/internal/inspect/role_configs/role_configs.go new file mode 100644 index 000000000..f4fb79382 --- /dev/null +++ b/internal/inspect/role_configs/role_configs.go @@ -0,0 +1,46 @@ +package role_configs + +import ( + "context" + _ "embed" + "fmt" + + "github.com/go-errors/errors" + "github.com/jackc/pgconn" + "github.com/jackc/pgx/v4" + "github.com/spf13/afero" + "github.com/supabase/cli/internal/migration/list" + "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/pgxv5" +) + +//go:embed role_configs.sql +var RoleConfigsQuery string + +type Result struct { + Role_name string + Custom_config string +} + +func Run(ctx context.Context, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { + conn, err := utils.ConnectByConfig(ctx, config, options...) 
+ if err != nil { + return err + } + defer conn.Close(context.Background()) + rows, err := conn.Query(ctx, RoleConfigsQuery) + if err != nil { + return errors.Errorf("failed to query rows: %w", err) + } + result, err := pgxv5.CollectRows[Result](rows) + if err != nil { + return err + } + + table := "|Role name|Custom config|\n|-|-|\n" + for _, r := range result { + table += fmt.Sprintf("|`%s`|`%s`|\n", r.Role_name, r.Custom_config) + } + + return list.RenderTable(table) +} diff --git a/internal/inspect/role_configs/role_configs.sql b/internal/inspect/role_configs/role_configs.sql new file mode 100644 index 000000000..fd7f964d0 --- /dev/null +++ b/internal/inspect/role_configs/role_configs.sql @@ -0,0 +1,5 @@ +select + rolname as role_name, + array_to_string(rolconfig, ',', '*') as custom_config +from + pg_roles where rolconfig is not null diff --git a/internal/inspect/role_configs/role_configs_test.go b/internal/inspect/role_configs/role_configs_test.go new file mode 100644 index 000000000..554a12526 --- /dev/null +++ b/internal/inspect/role_configs/role_configs_test.go @@ -0,0 +1,38 @@ +package role_configs + +import ( + "context" + "testing" + + "github.com/jackc/pgconn" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" + "github.com/supabase/cli/pkg/pgtest" +) + +var dbConfig = pgconn.Config{ + Host: "127.0.0.1", + Port: 5432, + User: "admin", + Password: "password", + Database: "postgres", +} + +func TestRoleCommand(t *testing.T) { + t.Run("inspects role connections", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(RoleConfigsQuery). 
+ Reply("SELECT 1", Result{ + Role_name: "postgres", + Custom_config: "statement_timeout=3s", + }) + // Run test + err := Run(context.Background(), dbConfig, fsys, conn.Intercept) + // Check error + assert.NoError(t, err) + }) +} diff --git a/internal/link/link.go b/internal/link/link.go index 9113f5434..d35321e2e 100644 --- a/internal/link/link.go +++ b/internal/link/link.go @@ -1,15 +1,13 @@ package link import ( - "bytes" "context" "fmt" + "net/http" "os" "strconv" - "strings" "sync" - "github.com/BurntSushi/toml" "github.com/go-errors/errors" "github.com/jackc/pgconn" "github.com/jackc/pgx/v4" @@ -20,15 +18,23 @@ import ( "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/internal/utils/tenant" "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" cliConfig "github.com/supabase/cli/pkg/config" + "github.com/supabase/cli/pkg/diff" "github.com/supabase/cli/pkg/migration" ) func Run(ctx context.Context, projectRef string, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { - original := toTomlBytes(map[string]interface{}{ - "api": utils.Config.Api, - "db": utils.Config.Db, - }) + copy := utils.Config.Clone() + original, err := cliConfig.ToTomlBytes(copy) + if err != nil { + fmt.Fprintln(utils.GetDebugLogger(), err) + } + + if err := checkRemoteProjectStatus(ctx, projectRef, fsys); err != nil { + return err + } + // 1. 
Check service config keys, err := tenant.GetApiKeys(ctx, projectRef) if err != nil { @@ -43,7 +49,7 @@ func Run(ctx context.Context, projectRef string, fsys afero.Fs, options ...func( return err } // Save database password - if err := credentials.Set(projectRef, config.Password); err != nil { + if err := credentials.StoreProvider.Set(projectRef, config.Password); err != nil { fmt.Fprintln(os.Stderr, "Failed to save database password:", err) } } @@ -55,35 +61,25 @@ func Run(ctx context.Context, projectRef string, fsys afero.Fs, options ...func( fmt.Fprintln(os.Stdout, "Finished "+utils.Aqua("supabase link")+".") // 4. Suggest config update - updated := toTomlBytes(map[string]interface{}{ - "api": utils.Config.Api, - "db": utils.Config.Db, - }) - // if lineDiff := cmp.Diff(original, updated); len(lineDiff) > 0 { - if lineDiff := Diff(utils.ConfigPath, original, projectRef, updated); len(lineDiff) > 0 { + updated, err := cliConfig.ToTomlBytes(utils.Config.Clone()) + if err != nil { + fmt.Fprintln(utils.GetDebugLogger(), err) + } + + if lineDiff := diff.Diff(utils.ConfigPath, original, projectRef, updated); len(lineDiff) > 0 { fmt.Fprintln(os.Stderr, utils.Yellow("WARNING:"), "Local config differs from linked project. 
Try updating", utils.Bold(utils.ConfigPath)) fmt.Println(string(lineDiff)) } return nil } -func toTomlBytes(config any) []byte { - var buf bytes.Buffer - enc := toml.NewEncoder(&buf) - enc.Indent = "" - if err := enc.Encode(config); err != nil { - fmt.Fprintln(utils.GetDebugLogger(), "failed to marshal toml config:", err) - } - return buf.Bytes() -} - func LinkServices(ctx context.Context, projectRef, anonKey string, fsys afero.Fs) { // Ignore non-fatal errors linking services var wg sync.WaitGroup - wg.Add(6) + wg.Add(8) go func() { defer wg.Done() - if err := linkDatabaseVersion(ctx, projectRef, fsys); err != nil && viper.GetBool("DEBUG") { + if err := linkDatabaseSettings(ctx, projectRef); err != nil && viper.GetBool("DEBUG") { fmt.Fprintln(os.Stderr, err) } }() @@ -93,6 +89,18 @@ func LinkServices(ctx context.Context, projectRef, anonKey string, fsys afero.Fs fmt.Fprintln(os.Stderr, err) } }() + go func() { + defer wg.Done() + if err := linkGotrue(ctx, projectRef); err != nil && viper.GetBool("DEBUG") { + fmt.Fprintln(os.Stderr, err) + } + }() + go func() { + defer wg.Done() + if err := linkStorage(ctx, projectRef); err != nil && viper.GetBool("DEBUG") { + fmt.Fprintln(os.Stderr, err) + } + }() go func() { defer wg.Done() if err := linkPooler(ctx, projectRef, fsys); err != nil && viper.GetBool("DEBUG") { @@ -124,12 +132,11 @@ func LinkServices(ctx context.Context, projectRef, anonKey string, fsys afero.Fs func linkPostgrest(ctx context.Context, projectRef string) error { resp, err := utils.GetSupabase().V1GetPostgrestServiceConfigWithResponse(ctx, projectRef) if err != nil { - return errors.Errorf("failed to get postgrest config: %w", err) - } - if resp.JSON200 == nil { - return errors.Errorf("%w: %s", tenant.ErrAuthToken, string(resp.Body)) + return errors.Errorf("failed to read API config: %w", err) + } else if resp.JSON200 == nil { + return errors.Errorf("unexpected API config status %d: %s", resp.StatusCode(), string(resp.Body)) } - 
updateApiConfig(*resp.JSON200) + utils.Config.Api.FromRemoteApiConfig(*resp.JSON200) return nil } @@ -141,22 +148,15 @@ func linkPostgrestVersion(ctx context.Context, api tenant.TenantAPI, fsys afero. return utils.WriteFile(utils.RestVersionPath, []byte(version), fsys) } -func updateApiConfig(config api.PostgrestConfigWithJWTSecretResponse) { - utils.Config.Api.MaxRows = uint(config.MaxRows) - utils.Config.Api.ExtraSearchPath = readCsv(config.DbExtraSearchPath) - utils.Config.Api.Schemas = readCsv(config.DbSchema) -} - -func readCsv(line string) []string { - var result []string - tokens := strings.Split(line, ",") - for _, t := range tokens { - trimmed := strings.TrimSpace(t) - if len(trimmed) > 0 { - result = append(result, trimmed) - } +func linkGotrue(ctx context.Context, projectRef string) error { + resp, err := utils.GetSupabase().V1GetAuthServiceConfigWithResponse(ctx, projectRef) + if err != nil { + return errors.Errorf("failed to read Auth config: %w", err) + } else if resp.JSON200 == nil { + return errors.Errorf("unexpected Auth config status %d: %s", resp.StatusCode(), string(resp.Body)) } - return result + utils.Config.Auth.FromRemoteAuthConfig(*resp.JSON200) + return nil } func linkGotrueVersion(ctx context.Context, api tenant.TenantAPI, fsys afero.Fs) error { @@ -167,6 +167,17 @@ func linkGotrueVersion(ctx context.Context, api tenant.TenantAPI, fsys afero.Fs) return utils.WriteFile(utils.GotrueVersionPath, []byte(version), fsys) } +func linkStorage(ctx context.Context, projectRef string) error { + resp, err := utils.GetSupabase().V1GetStorageConfigWithResponse(ctx, projectRef) + if err != nil { + return errors.Errorf("failed to read Storage config: %w", err) + } else if resp.JSON200 == nil { + return errors.Errorf("unexpected Storage config status %d: %s", resp.StatusCode(), string(resp.Body)) + } + utils.Config.Storage.FromRemoteStorageConfig(*resp.JSON200) + return nil +} + func linkStorageVersion(ctx context.Context, api tenant.TenantAPI, fsys 
afero.Fs) error { version, err := api.GetStorageVersion(ctx) if err != nil { @@ -175,6 +186,17 @@ func linkStorageVersion(ctx context.Context, api tenant.TenantAPI, fsys afero.Fs return utils.WriteFile(utils.StorageVersionPath, []byte(version), fsys) } +func linkDatabaseSettings(ctx context.Context, projectRef string) error { + resp, err := utils.GetSupabase().V1GetPostgresConfigWithResponse(ctx, projectRef) + if err != nil { + return errors.Errorf("failed to read DB config: %w", err) + } else if resp.JSON200 == nil { + return errors.Errorf("unexpected DB config status %d: %s", resp.StatusCode(), string(resp.Body)) + } + utils.Config.Db.Settings.FromRemotePostgresConfig(*resp.JSON200) + return nil +} + func linkDatabase(ctx context.Context, config pgconn.Config, options ...func(*pgx.ConnConfig)) error { conn, err := utils.ConnectByConfig(ctx, config, options...) if err != nil { @@ -183,15 +205,10 @@ func linkDatabase(ctx context.Context, config pgconn.Config, options ...func(*pg defer conn.Close(context.Background()) updatePostgresConfig(conn) // If `schema_migrations` doesn't exist on the remote database, create it. 
- return migration.CreateMigrationTable(ctx, conn) -} - -func linkDatabaseVersion(ctx context.Context, projectRef string, fsys afero.Fs) error { - version, err := tenant.GetDatabaseVersion(ctx, projectRef) - if err != nil { + if err := migration.CreateMigrationTable(ctx, conn); err != nil { return err } - return utils.WriteFile(utils.PostgresVersionPath, []byte(version), fsys) + return migration.CreateSeedTable(ctx, conn) } func updatePostgresConfig(conn *pgx.Conn) { @@ -227,9 +244,43 @@ func updatePoolerConfig(config api.SupavisorConfigResponse) { utils.Config.Db.Pooler.ConnectionString = config.ConnectionString utils.Config.Db.Pooler.PoolMode = cliConfig.PoolMode(config.PoolMode) if config.DefaultPoolSize != nil { - utils.Config.Db.Pooler.DefaultPoolSize = uint(*config.DefaultPoolSize) + utils.Config.Db.Pooler.DefaultPoolSize = cast.IntToUint(*config.DefaultPoolSize) } if config.MaxClientConn != nil { - utils.Config.Db.Pooler.MaxClientConn = uint(*config.MaxClientConn) + utils.Config.Db.Pooler.MaxClientConn = cast.IntToUint(*config.MaxClientConn) + } +} + +var errProjectPaused = errors.New("project is paused") + +func checkRemoteProjectStatus(ctx context.Context, projectRef string, fsys afero.Fs) error { + resp, err := utils.GetSupabase().V1GetProjectWithResponse(ctx, projectRef) + if err != nil { + return errors.Errorf("failed to retrieve remote project status: %w", err) } + switch resp.StatusCode() { + case http.StatusNotFound: + // Ignore not found error to support linking branch projects + return nil + case http.StatusOK: + // resp.JSON200 is not nil, proceed + default: + return errors.New("Unexpected error retrieving remote project status: " + string(resp.Body)) + } + + switch resp.JSON200.Status { + case api.V1ProjectWithDatabaseResponseStatusINACTIVE: + utils.CmdSuggestion = fmt.Sprintf("An admin must unpause it from the Supabase dashboard at %s", utils.Aqua(fmt.Sprintf("%s/project/%s", utils.GetSupabaseDashboardURL(), projectRef))) + return 
errors.New(errProjectPaused) + case api.V1ProjectWithDatabaseResponseStatusACTIVEHEALTHY: + // Project is in the desired state, do nothing + default: + fmt.Fprintf(os.Stderr, "%s: Project status is %s instead of Active Healthy. Some operations might fail.\n", utils.Yellow("WARNING"), resp.JSON200.Status) + } + + // Update postgres image version to match the remote project + if version := resp.JSON200.Database.Version; len(version) > 0 { + return utils.WriteFile(utils.PostgresVersionPath, []byte(version), fsys) + } + return nil } diff --git a/internal/link/link_test.go b/internal/link/link_test.go index f33961293..18c090ad3 100644 --- a/internal/link/link_test.go +++ b/internal/link/link_test.go @@ -3,6 +3,7 @@ package link import ( "context" "errors" + "net/http" "testing" "github.com/h2non/gock" @@ -46,17 +47,42 @@ func TestLinkCommand(t *testing.T) { conn := pgtest.NewConn() defer conn.Close(t) helper.MockMigrationHistory(conn) + helper.MockSeedHistory(conn) // Flush pending mocks after test execution defer gock.OffAll() + // Mock project status + postgres := api.V1DatabaseResponse{ + Host: utils.GetSupabaseDbHost(project), + Version: "15.1.0.117", + } + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project). + Reply(200). + JSON(api.V1ProjectWithDatabaseResponse{ + Status: api.V1ProjectWithDatabaseResponseStatusACTIVEHEALTHY, + Database: postgres, + }) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/api-keys"). Reply(200). JSON([]api.ApiKeyResponse{{Name: "anon", ApiKey: "anon-key"}}) // Link configs + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/database/postgres"). + Reply(200). + JSON(api.PostgresConfigResponse{}) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/postgrest"). Reply(200). JSON(api.V1PostgrestConfigResponse{}) + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/auth"). + Reply(200). 
+ JSON(api.AuthConfigResponse{}) + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/storage"). + Reply(200). + JSON(api.StorageConfigResponse{}) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/config/database/pooler"). Reply(200). @@ -76,23 +102,6 @@ func TestLinkCommand(t *testing.T) { Get("/storage/v1/version"). Reply(200). BodyString("0.40.4") - postgres := api.V1DatabaseResponse{ - Host: utils.GetSupabaseDbHost(project), - Version: "15.1.0.117", - } - gock.New(utils.DefaultApiHost). - Get("/v1/projects"). - Reply(200). - JSON([]api.V1ProjectResponse{ - { - Id: project, - Database: &postgres, - OrganizationId: "combined-fuchsia-lion", - Name: "Test Project", - Region: "us-west-1", - CreatedAt: "2022-04-25T02:14:55.906498Z", - }, - }) // Run test err := Run(context.Background(), project, fsys, conn.Intercept) // Check error @@ -119,14 +128,31 @@ func TestLinkCommand(t *testing.T) { fsys := afero.NewMemMapFs() // Flush pending mocks after test execution defer gock.OffAll() + // Mock project status + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project). + Reply(200). + JSON(api.V1ProjectWithDatabaseResponse{ + Status: api.V1ProjectWithDatabaseResponseStatusACTIVEHEALTHY, + Database: api.V1DatabaseResponse{}, + }) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/api-keys"). Reply(200). JSON([]api.ApiKeyResponse{{Name: "anon", ApiKey: "anon-key"}}) // Link configs + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/database/postgres"). + ReplyError(errors.New("network error")) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/postgrest"). ReplyError(errors.New("network error")) + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/auth"). + ReplyError(errors.New("network error")) + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/storage"). 
+ ReplyError(errors.New("network error")) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/config/database/pooler"). ReplyError(errors.New("network error")) @@ -140,9 +166,6 @@ func TestLinkCommand(t *testing.T) { gock.New("https://" + utils.GetSupabaseHost(project)). Get("/storage/v1/version"). ReplyError(errors.New("network error")) - gock.New(utils.DefaultApiHost). - Get("/v1/projects"). - ReplyError(errors.New("network error")) // Run test err := Run(context.Background(), project, fsys, func(cc *pgx.ConnConfig) { cc.LookupFunc = func(ctx context.Context, host string) (addrs []string, err error) { @@ -159,14 +182,31 @@ func TestLinkCommand(t *testing.T) { fsys := afero.NewReadOnlyFs(afero.NewMemMapFs()) // Flush pending mocks after test execution defer gock.OffAll() + // Mock project status + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project). + Reply(200). + JSON(api.V1ProjectWithDatabaseResponse{ + Status: api.V1ProjectWithDatabaseResponseStatusACTIVEHEALTHY, + Database: api.V1DatabaseResponse{}, + }) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/api-keys"). Reply(200). JSON([]api.ApiKeyResponse{{Name: "anon", ApiKey: "anon-key"}}) // Link configs + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/database/postgres"). + ReplyError(errors.New("network error")) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/postgrest"). ReplyError(errors.New("network error")) + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/auth"). + ReplyError(errors.New("network error")) + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project + "/config/storage"). + ReplyError(errors.New("network error")) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + project + "/config/database/pooler"). 
ReplyError(errors.New("network error")) @@ -195,6 +235,72 @@ func TestLinkCommand(t *testing.T) { }) } +func TestStatusCheck(t *testing.T) { + project := "test-project" + + t.Run("updates postgres version when healthy", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Flush pending mocks after test execution + defer gock.OffAll() + // Mock project status + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project). + Reply(http.StatusOK). + JSON(api.V1ProjectWithDatabaseResponse{ + Status: api.V1ProjectWithDatabaseResponseStatusACTIVEHEALTHY, + Database: api.V1DatabaseResponse{Version: "15.6.1.139"}, + }) + // Run test + err := checkRemoteProjectStatus(context.Background(), project, fsys) + // Check error + assert.NoError(t, err) + version, err := afero.ReadFile(fsys, utils.PostgresVersionPath) + assert.NoError(t, err) + assert.Equal(t, "15.6.1.139", string(version)) + assert.Empty(t, apitest.ListUnmatchedRequests()) + }) + + t.Run("ignores project not found", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Flush pending mocks after test execution + defer gock.OffAll() + // Mock project status + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project). + Reply(http.StatusNotFound) + // Run test + err := checkRemoteProjectStatus(context.Background(), project, fsys) + // Check error + assert.NoError(t, err) + exists, err := afero.Exists(fsys, utils.PostgresVersionPath) + assert.NoError(t, err) + assert.False(t, exists) + assert.Empty(t, apitest.ListUnmatchedRequests()) + }) + + t.Run("throws error on project inactive", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Flush pending mocks after test execution + defer gock.OffAll() + // Mock project status + gock.New(utils.DefaultApiHost). + Get("/v1/projects/" + project). + Reply(http.StatusOK). 
+ JSON(api.V1ProjectWithDatabaseResponse{Status: api.V1ProjectWithDatabaseResponseStatusINACTIVE}) + // Run test + err := checkRemoteProjectStatus(context.Background(), project, fsys) + // Check error + assert.ErrorIs(t, err, errProjectPaused) + exists, err := afero.Exists(fsys, utils.PostgresVersionPath) + assert.NoError(t, err) + assert.False(t, exists) + assert.Empty(t, apitest.ListUnmatchedRequests()) + }) +} + func TestLinkPostgrest(t *testing.T) { project := "test-project" // Setup valid access token @@ -259,7 +365,7 @@ func TestLinkPostgrest(t *testing.T) { // Run test err := linkPostgrest(context.Background(), project) // Validate api - assert.ErrorIs(t, err, tenant.ErrAuthToken) + assert.ErrorContains(t, err, `unexpected API config status 500: {"message":"unavailable"}`) assert.Empty(t, apitest.ListUnmatchedRequests()) }) } @@ -279,6 +385,7 @@ func TestLinkDatabase(t *testing.T) { }) defer conn.Close(t) helper.MockMigrationHistory(conn) + helper.MockSeedHistory(conn) // Run test err := linkDatabase(context.Background(), dbConfig, conn.Intercept) // Check error @@ -294,6 +401,7 @@ func TestLinkDatabase(t *testing.T) { }) defer conn.Close(t) helper.MockMigrationHistory(conn) + helper.MockSeedHistory(conn) // Run test err := linkDatabase(context.Background(), dbConfig, conn.Intercept) // Check error diff --git a/internal/login/login.go b/internal/login/login.go index 1cb74afdc..6239b500a 100644 --- a/internal/login/login.go +++ b/internal/login/login.go @@ -13,7 +13,6 @@ import ( "net/http" "os" "os/user" - "strconv" "strings" "time" @@ -43,8 +42,6 @@ type AccessTokenResponse struct { Nonce string `json:"nonce"` } -const defaultRetryAfterSeconds = 2 -const defaultMaxRetries = 90 const decryptionErrorMsg = "cannot decrypt access token" var loggedInMsg = "You are now logged in. 
" + utils.Aqua("Happy coding!") @@ -128,6 +125,8 @@ func (enc LoginEncryption) decryptAccessToken(accessToken string, publicKey stri return string(decryptedAccessToken), nil } +const maxRetries = 2 + func pollForAccessToken(ctx context.Context, url string) (AccessTokenResponse, error) { // TODO: Move to OpenAPI-generated http client once we reach v1 on API schema. client := fetcher.NewFetcher( @@ -137,20 +136,31 @@ func pollForAccessToken(ctx context.Context, url string) (AccessTokenResponse, e }), fetcher.WithExpectedStatus(http.StatusOK), ) - timeout := backoff.NewConstantBackOff(defaultRetryAfterSeconds) + console := utils.NewConsole() probe := func() (AccessTokenResponse, error) { - resp, err := client.Send(ctx, http.MethodGet, url, nil) - if err == nil { - return fetcher.ParseJSON[AccessTokenResponse](resp.Body) - } else if resp != nil { - if retryAfterSeconds, err := strconv.Atoi(resp.Header.Get("Retry-After")); err == nil { - timeout.Interval = time.Duration(retryAfterSeconds) * time.Second - } + // TODO: support automatic login flow + deviceCode, err := console.PromptText(ctx, "Enter your verification code: ") + if err != nil { + return AccessTokenResponse{}, err + } + urlWithQuery := fmt.Sprintf("%s?device_code=%s", url, deviceCode) + resp, err := client.Send(ctx, http.MethodGet, urlWithQuery, nil) + if err != nil { + return AccessTokenResponse{}, err } - return AccessTokenResponse{}, err + return fetcher.ParseJSON[AccessTokenResponse](resp.Body) + } + policy := backoff.WithContext(backoff.WithMaxRetries(&backoff.ZeroBackOff{}, maxRetries), ctx) + return backoff.RetryNotifyWithData(probe, policy, newErrorCallback()) +} + +func newErrorCallback() backoff.Notify { + failureCount := 0 + return func(err error, d time.Duration) { + failureCount += 1 + fmt.Fprintln(os.Stderr, err) + fmt.Fprintf(os.Stderr, "Retry (%d/%d): ", failureCount, maxRetries) } - policy := backoff.WithContext(backoff.WithMaxRetries(timeout, defaultMaxRetries), ctx) - return 
backoff.RetryWithData(probe, policy) } func Run(ctx context.Context, stdout io.Writer, params RunParams) error { @@ -194,22 +204,16 @@ func Run(ctx context.Context, stdout io.Writer, params RunParams) error { fmt.Fprintf(stdout, "Here is your login link, open it in the browser %s\n\n", utils.Bold(createLoginSessionUrl)) } - if err := utils.RunProgram(ctx, func(p utils.Program, ctx context.Context) error { - p.Send(utils.StatusMsg("Your token is now being generated and securely encrypted. Waiting for it to arrive...")) - - sessionPollingUrl := "/platform/cli/login/" + params.SessionId - accessTokenResponse, err := pollForAccessToken(ctx, sessionPollingUrl) - if err != nil { - return err - } - - decryptedAccessToken, err := params.Encryption.decryptAccessToken(accessTokenResponse.AccessToken, accessTokenResponse.PublicKey, accessTokenResponse.Nonce) - if err != nil { - return err - } - - return utils.SaveAccessToken(decryptedAccessToken, params.Fsys) - }); err != nil { + sessionPollingUrl := "/platform/cli/login/" + params.SessionId + accessTokenResponse, err := pollForAccessToken(ctx, sessionPollingUrl) + if err != nil { + return err + } + decryptedAccessToken, err := params.Encryption.decryptAccessToken(accessTokenResponse.AccessToken, accessTokenResponse.PublicKey, accessTokenResponse.Nonce) + if err != nil { + return err + } + if err := utils.SaveAccessToken(decryptedAccessToken, params.Fsys); err != nil { return err } diff --git a/internal/login/login_test.go b/internal/login/login_test.go index faf351e99..758fbc5c4 100644 --- a/internal/login/login_test.go +++ b/internal/login/login_test.go @@ -40,7 +40,7 @@ func TestLoginCommand(t *testing.T) { Token: token, Fsys: afero.NewMemMapFs(), })) - saved, err := credentials.Get(utils.AccessTokenKey) + saved, err := credentials.StoreProvider.Get(utils.AccessTokenKey) assert.NoError(t, err) assert.Equal(t, token, saved) }) @@ -83,7 +83,7 @@ func TestLoginCommand(t *testing.T) { expectedBrowserUrl := 
fmt.Sprintf("%s/cli/login?session_id=%s&token_name=%s&public_key=%s", utils.GetSupabaseDashboardURL(), sessionId, tokenName, publicKey) assert.Contains(t, out.String(), expectedBrowserUrl) - saved, err := credentials.Get(utils.AccessTokenKey) + saved, err := credentials.StoreProvider.Get(utils.AccessTokenKey) assert.NoError(t, err) assert.Equal(t, token, saved) assert.Empty(t, apitest.ListUnmatchedRequests()) diff --git a/internal/logout/logout.go b/internal/logout/logout.go index c117abd94..abbd191b8 100644 --- a/internal/logout/logout.go +++ b/internal/logout/logout.go @@ -8,6 +8,7 @@ import ( "github.com/go-errors/errors" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/credentials" ) func Run(ctx context.Context, stdout *os.File, fsys afero.Fs) error { @@ -24,6 +25,11 @@ func Run(ctx context.Context, stdout *os.File, fsys afero.Fs) error { return err } + // Delete all possible stored project credentials + if err := credentials.StoreProvider.DeleteAll(); err != nil { + fmt.Fprintln(utils.GetDebugLogger(), err) + } + fmt.Fprintln(stdout, "Access token deleted successfully. 
You are now logged out.") return nil } diff --git a/internal/logout/logout_test.go b/internal/logout/logout_test.go index 0bb39be5c..42f9f8ead 100644 --- a/internal/logout/logout_test.go +++ b/internal/logout/logout_test.go @@ -33,16 +33,37 @@ func TestLogoutCommand(t *testing.T) { assert.Empty(t, saved) }) + t.Run("removes all Supabase CLI credentials", func(t *testing.T) { + t.Cleanup(credentials.MockInit()) + require.NoError(t, credentials.StoreProvider.Set(utils.AccessTokenKey, token)) + require.NoError(t, credentials.StoreProvider.Set("project1", "password1")) + require.NoError(t, credentials.StoreProvider.Set("project2", "password2")) + t.Cleanup(fstest.MockStdin(t, "y")) + // Run test + err := Run(context.Background(), os.Stdout, afero.NewMemMapFs()) + // Check error + assert.NoError(t, err) + // Check that access token has been removed + saved, _ := credentials.StoreProvider.Get(utils.AccessTokenKey) + assert.Empty(t, saved) + // check that project 1 has been removed + saved, _ = credentials.StoreProvider.Get("project1") + assert.Empty(t, saved) + // check that project 2 has been removed + saved, _ = credentials.StoreProvider.Get("project2") + assert.Empty(t, saved) + }) + t.Run("skips logout by default", func(t *testing.T) { keyring.MockInit() - require.NoError(t, credentials.Set(utils.AccessTokenKey, token)) + require.NoError(t, credentials.StoreProvider.Set(utils.AccessTokenKey, token)) // Setup in-memory fs fsys := afero.NewMemMapFs() // Run test err := Run(context.Background(), os.Stdout, fsys) // Check error assert.ErrorIs(t, err, context.Canceled) - saved, err := credentials.Get(utils.AccessTokenKey) + saved, err := credentials.StoreProvider.Get(utils.AccessTokenKey) assert.NoError(t, err) assert.Equal(t, token, saved) }) diff --git a/internal/migration/apply/apply.go b/internal/migration/apply/apply.go index c9df3cacd..224b342f7 100644 --- a/internal/migration/apply/apply.go +++ b/internal/migration/apply/apply.go @@ -2,9 +2,7 @@ package apply 
import ( "context" - "os" - "github.com/go-errors/errors" "github.com/jackc/pgx/v4" "github.com/spf13/afero" "github.com/supabase/cli/internal/migration/list" @@ -20,21 +18,16 @@ func MigrateAndSeed(ctx context.Context, version string, conn *pgx.Conn, fsys af if err := migration.ApplyMigrations(ctx, migrations, conn, afero.NewIOFS(fsys)); err != nil { return err } - return SeedDatabase(ctx, conn, fsys) + return applySeedFiles(ctx, conn, fsys) } -func SeedDatabase(ctx context.Context, conn *pgx.Conn, fsys afero.Fs) error { - err := migration.SeedData(ctx, []string{utils.SeedDataPath}, conn, afero.NewIOFS(fsys)) - if errors.Is(err, os.ErrNotExist) { +func applySeedFiles(ctx context.Context, conn *pgx.Conn, fsys afero.Fs) error { + if !utils.Config.Db.Seed.Enabled { return nil } - return err -} - -func CreateCustomRoles(ctx context.Context, conn *pgx.Conn, fsys afero.Fs) error { - err := migration.SeedGlobals(ctx, []string{utils.CustomRolesPath}, conn, afero.NewIOFS(fsys)) - if errors.Is(err, os.ErrNotExist) { - return nil + seeds, err := migration.GetPendingSeeds(ctx, utils.Config.Db.Seed.SqlPaths, conn, afero.NewIOFS(fsys)) + if err != nil { + return err } - return err + return migration.SeedData(ctx, seeds, conn, afero.NewIOFS(fsys)) } diff --git a/internal/migration/apply/apply_test.go b/internal/migration/apply/apply_test.go index 7150f6263..286093743 100644 --- a/internal/migration/apply/apply_test.go +++ b/internal/migration/apply/apply_test.go @@ -6,7 +6,6 @@ import ( "path/filepath" "testing" - "github.com/jackc/pgerrcode" "github.com/spf13/afero" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -38,65 +37,40 @@ func TestMigrateDatabase(t *testing.T) { assert.NoError(t, err) }) - t.Run("ignores empty local directory", func(t *testing.T) { - assert.NoError(t, MigrateAndSeed(context.Background(), "", nil, afero.NewMemMapFs())) - }) - - t.Run("throws error on open failure", func(t *testing.T) { - // Setup in-memory fs - fsys := 
&fstest.OpenErrorFs{DenyPath: utils.MigrationsDir} - // Run test - err := MigrateAndSeed(context.Background(), "", nil, fsys) - // Check error - assert.ErrorIs(t, err, os.ErrPermission) - }) -} - -func TestSeedDatabase(t *testing.T) { - t.Run("seeds from file", func(t *testing.T) { + t.Run("skip seeding when seed config is disabled", func(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() - // Setup seed file - sql := "INSERT INTO employees(name) VALUES ('Alice')" - require.NoError(t, afero.WriteFile(fsys, utils.SeedDataPath, []byte(sql), 0644)) + path := filepath.Join(utils.MigrationsDir, "0_test.sql") + sql := "create schema public" + require.NoError(t, afero.WriteFile(fsys, path, []byte(sql), 0644)) + seedPath := filepath.Join(utils.SupabaseDirPath, "seed.sql") + // This will raise an error when seeding + require.NoError(t, afero.WriteFile(fsys, seedPath, []byte("INSERT INTO test_table;"), 0644)) // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query(sql). + helper.MockMigrationHistory(conn). + Query(sql). + Reply("CREATE SCHEMA"). + Query(migration.INSERT_MIGRATION_VERSION, "0", "test", []string{sql}). 
Reply("INSERT 0 1") + utils.Config.Db.Seed.Enabled = false // Run test - err := SeedDatabase(context.Background(), conn.MockClient(t), fsys) - // Check error + err := MigrateAndSeed(context.Background(), "", conn.MockClient(t), fsys) + // No error should be returned since seeding is skipped assert.NoError(t, err) }) - t.Run("ignores missing seed", func(t *testing.T) { - assert.NoError(t, SeedDatabase(context.Background(), nil, afero.NewMemMapFs())) + t.Run("ignores empty local directory", func(t *testing.T) { + assert.NoError(t, MigrateAndSeed(context.Background(), "", nil, afero.NewMemMapFs())) }) - t.Run("throws error on read failure", func(t *testing.T) { + t.Run("throws error on open failure", func(t *testing.T) { // Setup in-memory fs - fsys := &fstest.OpenErrorFs{DenyPath: utils.SeedDataPath} + fsys := &fstest.OpenErrorFs{DenyPath: utils.MigrationsDir} // Run test - err := SeedDatabase(context.Background(), nil, fsys) + err := MigrateAndSeed(context.Background(), "", nil, fsys) // Check error assert.ErrorIs(t, err, os.ErrPermission) }) - - t.Run("throws error on insert failure", func(t *testing.T) { - // Setup in-memory fs - fsys := afero.NewMemMapFs() - // Setup seed file - sql := "INSERT INTO employees(name) VALUES ('Alice')" - require.NoError(t, afero.WriteFile(fsys, utils.SeedDataPath, []byte(sql), 0644)) - // Setup mock postgres - conn := pgtest.NewConn() - defer conn.Close(t) - conn.Query(sql). 
- ReplyError(pgerrcode.NotNullViolation, `null value in column "age" of relation "employees"`) - // Run test - err := SeedDatabase(context.Background(), conn.MockClient(t), fsys) - // Check error - assert.ErrorContains(t, err, `ERROR: null value in column "age" of relation "employees" (SQLSTATE 23502)`) - }) } diff --git a/internal/migration/squash/squash.go b/internal/migration/squash/squash.go index 2ed9e6252..4aad8b832 100644 --- a/internal/migration/squash/squash.go +++ b/internal/migration/squash/squash.go @@ -20,6 +20,7 @@ import ( "github.com/supabase/cli/internal/migration/list" "github.com/supabase/cli/internal/migration/repair" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/migration" ) @@ -34,7 +35,7 @@ func Run(ctx context.Context, version string, config pgconn.Config, fsys afero.F return err } } - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } // 1. Squash local migrations diff --git a/internal/migration/squash/squash_test.go b/internal/migration/squash/squash_test.go index c85f014c0..cc0461f3b 100644 --- a/internal/migration/squash/squash_test.go +++ b/internal/migration/squash/squash_test.go @@ -61,7 +61,7 @@ func TestSquashCommand(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) gock.New(utils.Docker.DaemonHost()). @@ -251,7 +251,7 @@ func TestSquashMigrations(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) gock.New(utils.Docker.DaemonHost()). 
@@ -286,7 +286,7 @@ func TestSquashMigrations(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) gock.New(utils.Docker.DaemonHost()). diff --git a/internal/migration/up/up.go b/internal/migration/up/up.go index d33117f33..331abce66 100644 --- a/internal/migration/up/up.go +++ b/internal/migration/up/up.go @@ -11,6 +11,7 @@ import ( "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/pkg/migration" + "github.com/supabase/cli/pkg/vault" ) func Run(ctx context.Context, includeAll bool, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { @@ -23,6 +24,9 @@ func Run(ctx context.Context, includeAll bool, config pgconn.Config, fsys afero. if err != nil { return err } + if err := vault.UpsertVaultSecrets(ctx, utils.Config.Db.Vault, conn); err != nil { + return err + } return migration.ApplyMigrations(ctx, pending, conn, afero.NewIOFS(fsys)) } diff --git a/internal/postgresConfig/delete/delete.go b/internal/postgresConfig/delete/delete.go new file mode 100644 index 000000000..20b7b978f --- /dev/null +++ b/internal/postgresConfig/delete/delete.go @@ -0,0 +1,48 @@ +package delete + +import ( + "bytes" + "context" + "encoding/json" + "strings" + + "github.com/go-errors/errors" + "github.com/spf13/afero" + "github.com/supabase/cli/internal/postgresConfig/get" + "github.com/supabase/cli/internal/utils" +) + +func Run(ctx context.Context, projectRef string, configKeys []string, noRestart bool, fsys afero.Fs) error { + // 1. Get current config + currentConfig, err := get.GetCurrentPostgresConfig(ctx, projectRef) + if err != nil { + return err + } + + // 2. Remove specified keys + for _, key := range configKeys { + delete(currentConfig, strings.TrimSpace(key)) + } + + // 3. 
Update config with removed keys + if noRestart { + currentConfig["restart_database"] = false + } + bts, err := json.Marshal(currentConfig) + if err != nil { + return errors.Errorf("failed to serialize config overrides: %w", err) + } + + resp, err := utils.GetSupabase().V1UpdatePostgresConfigWithBodyWithResponse(ctx, projectRef, "application/json", bytes.NewReader(bts)) + if err != nil { + return errors.Errorf("failed to update config overrides: %w", err) + } + if resp.JSON200 == nil { + if resp.StatusCode() == 400 { + return errors.Errorf("failed to update config overrides: %s (%s). This usually indicates that an unsupported or invalid config override was attempted. Please refer to https://supabase.com/docs/guides/platform/custom-postgres-config", resp.Status(), string(resp.Body)) + } + return errors.Errorf("failed to update config overrides: %s (%s)", resp.Status(), string(resp.Body)) + } + + return get.Run(ctx, projectRef, fsys) +} diff --git a/internal/postgresConfig/update/update.go b/internal/postgresConfig/update/update.go index 00a9766f8..94632d486 100644 --- a/internal/postgresConfig/update/update.go +++ b/internal/postgresConfig/update/update.go @@ -13,7 +13,7 @@ import ( "github.com/supabase/cli/internal/utils" ) -func Run(ctx context.Context, projectRef string, values []string, replaceOverrides bool, fsys afero.Fs) error { +func Run(ctx context.Context, projectRef string, values []string, replaceOverrides, noRestart bool, fsys afero.Fs) error { // 1. Prepare config overrides newConfigOverrides := make(map[string]string) for _, config := range values { @@ -39,16 +39,20 @@ func Run(ctx context.Context, projectRef string, values []string, replaceOverrid for k, v := range newConfigOverrides { // this is hacky - if we're able to convert the value to an integer, we do so // if we start supporting config fields with e.g. 
floating pt overrides this'll need to be updated - attemptedConvert, err := strconv.Atoi(v) - if err != nil { - finalOverrides[k] = v + if vInt, err := strconv.Atoi(v); err == nil { + finalOverrides[k] = vInt + } else if vBool, err := strconv.ParseBool(v); err == nil { + finalOverrides[k] = vBool } else { - finalOverrides[k] = attemptedConvert + finalOverrides[k] = v } } } // 4. update config overrides and print out final result { + if noRestart { + finalOverrides["restart_database"] = false + } bts, err := json.Marshal(finalOverrides) if err != nil { return errors.Errorf("failed to serialize config overrides: %w", err) diff --git a/internal/projects/apiKeys/api_keys.go b/internal/projects/apiKeys/api_keys.go index 2561e1d67..7daddce21 100644 --- a/internal/projects/apiKeys/api_keys.go +++ b/internal/projects/apiKeys/api_keys.go @@ -3,6 +3,7 @@ package apiKeys import ( "context" "fmt" + "os" "strings" "github.com/go-errors/errors" @@ -18,18 +19,22 @@ func Run(ctx context.Context, projectRef string, fsys afero.Fs) error { return err } - table := `|NAME|KEY VALUE| + if utils.OutputFormat.Value == utils.OutputPretty { + table := `|NAME|KEY VALUE| |-|-| ` - for _, entry := range keys { - table += fmt.Sprintf("|`%s`|`%s`|\n", strings.ReplaceAll(entry.Name, "|", "\\|"), entry.ApiKey) + for _, entry := range keys { + table += fmt.Sprintf("|`%s`|`%s`|\n", strings.ReplaceAll(entry.Name, "|", "\\|"), entry.ApiKey) + } + + return list.RenderTable(table) } - return list.RenderTable(table) + return utils.EncodeOutput(utils.OutputFormat.Value, os.Stdout, keys) } func RunGetApiKeys(ctx context.Context, projectRef string) ([]api.ApiKeyResponse, error) { - resp, err := utils.GetSupabase().V1GetProjectApiKeysWithResponse(ctx, projectRef) + resp, err := utils.GetSupabase().V1GetProjectApiKeysWithResponse(ctx, projectRef, &api.V1GetProjectApiKeysParams{}) if err != nil { return nil, errors.Errorf("failed to get api keys: %w", err) } diff --git a/internal/projects/create/create.go 
b/internal/projects/create/create.go index 1df4b3878..ddbd46b47 100644 --- a/internal/projects/create/create.go +++ b/internal/projects/create/create.go @@ -15,7 +15,7 @@ import ( "github.com/supabase/cli/pkg/api" ) -func Run(ctx context.Context, params api.V1CreateProjectBody, fsys afero.Fs) error { +func Run(ctx context.Context, params api.V1CreateProjectBodyDto, fsys afero.Fs) error { if err := promptMissingParams(ctx, ¶ms); err != nil { return err } @@ -30,13 +30,17 @@ func Run(ctx context.Context, params api.V1CreateProjectBody, fsys afero.Fs) err flags.ProjectRef = resp.JSON201.Id viper.Set("DB_PASSWORD", params.DbPass) - if err := credentials.Set(flags.ProjectRef, params.DbPass); err != nil { + if err := credentials.StoreProvider.Set(flags.ProjectRef, params.DbPass); err != nil { fmt.Fprintln(os.Stderr, "Failed to save database password:", err) } projectUrl := fmt.Sprintf("%s/project/%s", utils.GetSupabaseDashboardURL(), resp.JSON201.Id) - fmt.Printf("Created a new project %s at %s\n", utils.Aqua(resp.JSON201.Name), utils.Bold(projectUrl)) - return nil + fmt.Fprintf(os.Stderr, "Created a new project %s at %s\n", utils.Aqua(resp.JSON201.Name), utils.Bold(projectUrl)) + if utils.OutputFormat.Value == utils.OutputPretty { + return nil + } + + return utils.EncodeOutput(utils.OutputFormat.Value, os.Stdout, resp.JSON201) } func printKeyValue(key, value string) string { @@ -45,7 +49,7 @@ func printKeyValue(key, value string) string { return key + ":" + spaces + value } -func promptMissingParams(ctx context.Context, body *api.V1CreateProjectBody) error { +func promptMissingParams(ctx context.Context, body *api.V1CreateProjectBodyDto) error { var err error if len(body.Name) == 0 { if body.Name, err = promptProjectName(ctx); err != nil { @@ -102,7 +106,7 @@ func promptOrgId(ctx context.Context) (string, error) { return choice.Details, nil } -func promptProjectRegion(ctx context.Context) (api.V1CreateProjectBodyRegion, error) { +func promptProjectRegion(ctx 
context.Context) (api.V1CreateProjectBodyDtoRegion, error) { title := "Which region do you want to host the project in?" items := make([]utils.PromptItem, len(utils.RegionMap)) i := 0 @@ -114,5 +118,5 @@ func promptProjectRegion(ctx context.Context) (api.V1CreateProjectBodyRegion, er if err != nil { return "", err } - return api.V1CreateProjectBodyRegion(choice.Summary), nil + return api.V1CreateProjectBodyDtoRegion(choice.Summary), nil } diff --git a/internal/projects/create/create_test.go b/internal/projects/create/create_test.go index c67c7a35e..879421d72 100644 --- a/internal/projects/create/create_test.go +++ b/internal/projects/create/create_test.go @@ -14,11 +14,11 @@ import ( ) func TestProjectCreateCommand(t *testing.T) { - var params = api.V1CreateProjectBody{ + var params = api.V1CreateProjectBodyDto{ Name: "Test Project", OrganizationId: "combined-fuchsia-lion", DbPass: "redacted", - Region: api.V1CreateProjectBodyRegionUsWest1, + Region: api.V1CreateProjectBodyDtoRegionUsWest1, } t.Run("creates a new project", func(t *testing.T) { diff --git a/internal/projects/delete/delete.go b/internal/projects/delete/delete.go index c8ce9d535..f042f62e1 100644 --- a/internal/projects/delete/delete.go +++ b/internal/projects/delete/delete.go @@ -43,7 +43,7 @@ func Run(ctx context.Context, ref string, fsys afero.Fs) error { } // Unlink project - if err := credentials.Delete(ref); err != nil && !errors.Is(err, keyring.ErrNotFound) { + if err := credentials.StoreProvider.Delete(ref); err != nil && !errors.Is(err, keyring.ErrNotFound) { fmt.Fprintln(os.Stderr, err) } if match, err := afero.FileContainsBytes(fsys, utils.ProjectRefPath, []byte(ref)); match { diff --git a/internal/projects/list/list.go b/internal/projects/list/list.go index 18798ec00..ef356dfcf 100644 --- a/internal/projects/list/list.go +++ b/internal/projects/list/list.go @@ -5,15 +5,20 @@ import ( "fmt" "os" "strings" - "time" "github.com/go-errors/errors" "github.com/spf13/afero" 
"github.com/supabase/cli/internal/migration/list" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" + "github.com/supabase/cli/pkg/api" ) +type linkedProject struct { + api.V1ProjectWithDatabaseResponse `yaml:",inline"` + Linked bool `json:"linked"` +} + func Run(ctx context.Context, fsys afero.Fs) error { resp, err := utils.GetSupabase().V1ListAllProjectsWithResponse(ctx) if err != nil { @@ -24,35 +29,55 @@ func Run(ctx context.Context, fsys afero.Fs) error { return errors.New("Unexpected error retrieving projects: " + string(resp.Body)) } - projectRef, err := flags.LoadProjectRef(fsys) - if err != nil && err != utils.ErrNotLinked { + if err := flags.LoadProjectRef(fsys); err != nil && err != utils.ErrNotLinked { fmt.Fprintln(os.Stderr, err) } - table := `LINKED|ORG ID|REFERENCE ID|NAME|REGION|CREATED AT (UTC) + var projects []linkedProject + for _, project := range *resp.JSON200 { + projects = append(projects, linkedProject{ + V1ProjectWithDatabaseResponse: project, + Linked: project.Id == flags.ProjectRef, + }) + } + + if utils.OutputFormat.Value == utils.OutputPretty { + table := `LINKED|ORG ID|REFERENCE ID|NAME|REGION|CREATED AT (UTC) |-|-|-|-|-|-| ` - for _, project := range *resp.JSON200 { - if t, err := time.Parse(time.RFC3339, project.CreatedAt); err == nil { - project.CreatedAt = t.UTC().Format("2006-01-02 15:04:05") - } - if region, ok := utils.RegionMap[project.Region]; ok { - project.Region = region + for _, project := range projects { + table += fmt.Sprintf( + "|`%s`|`%s`|`%s`|`%s`|`%s`|`%s`|\n", + formatBullet(project.Linked), + project.OrganizationId, + project.Id, + strings.ReplaceAll(project.Name, "|", "\\|"), + formatRegion(project.Region), + utils.FormatTimestamp(project.CreatedAt), + ) } - linked := " " - if project.Id == projectRef { - linked = " ●" - } - table += fmt.Sprintf( - "|`%s`|`%s`|`%s`|`%s`|`%s`|`%s`|\n", - linked, - project.OrganizationId, - project.Id, - strings.ReplaceAll(project.Name, "|", 
"\\|"), - project.Region, - utils.FormatTimestamp(project.CreatedAt), - ) + return list.RenderTable(table) + } else if utils.OutputFormat.Value == utils.OutputToml { + return utils.EncodeOutput(utils.OutputFormat.Value, os.Stdout, struct { + Projects []linkedProject `toml:"projects"` + }{ + Projects: projects, + }) } - return list.RenderTable(table) + return utils.EncodeOutput(utils.OutputFormat.Value, os.Stdout, projects) +} + +func formatBullet(value bool) string { + if value { + return " ●" + } + return " " +} + +func formatRegion(region string) string { + if readable, ok := utils.RegionMap[region]; ok { + return readable + } + return region } diff --git a/internal/seed/buckets/buckets.go b/internal/seed/buckets/buckets.go index 2af80b43d..365c3a395 100644 --- a/internal/seed/buckets/buckets.go +++ b/internal/seed/buckets/buckets.go @@ -3,12 +3,10 @@ package buckets import ( "context" "fmt" - "path/filepath" "github.com/spf13/afero" "github.com/supabase/cli/internal/storage/client" "github.com/supabase/cli/internal/utils" - "github.com/supabase/cli/pkg/config" ) func Run(ctx context.Context, projectRef string, interactive bool, fsys afero.Fs) error { @@ -31,12 +29,5 @@ func Run(ctx context.Context, projectRef string, interactive bool, fsys afero.Fs if err := api.UpsertBuckets(ctx, utils.Config.Storage.Buckets, filter); err != nil { return err } - resolved := config.BucketConfig{} - for name, bucket := range utils.Config.Storage.Buckets { - if len(bucket.ObjectsPath) > 0 && !filepath.IsAbs(bucket.ObjectsPath) { - bucket.ObjectsPath = filepath.Join(utils.SupabaseDirPath, bucket.ObjectsPath) - } - resolved[name] = bucket - } - return api.UpsertObjects(ctx, resolved, utils.NewRootFS(fsys)) + return api.UpsertObjects(ctx, utils.Config.Storage.Buckets, utils.NewRootFS(fsys)) } diff --git a/internal/services/services.go b/internal/services/services.go index 1e6dc531d..ebc08efca 100644 --- a/internal/services/services.go +++ b/internal/services/services.go @@ -2,7 +2,9 
@@ package services import ( "context" + "errors" "fmt" + "os" "strings" "sync" @@ -13,50 +15,56 @@ import ( "github.com/supabase/cli/internal/utils/tenant" ) -var suggestLinkCommand = fmt.Sprintf("Run %s to sync your local image versions with the linked project.", utils.Aqua("supabase link")) - func Run(ctx context.Context, fsys afero.Fs) error { - _ = utils.LoadConfigFS(fsys) - serviceImages := GetServiceImages() - - var linked map[string]string - if projectRef, err := flags.LoadProjectRef(fsys); err == nil { - linked = GetRemoteImages(ctx, projectRef) + if err := flags.LoadProjectRef(fsys); err != nil && !errors.Is(err, utils.ErrNotLinked) { + fmt.Fprintln(os.Stderr, err) + } + if err := utils.Config.Load("", utils.NewRootFS(fsys)); err != nil && !errors.Is(err, os.ErrNotExist) { + fmt.Fprintln(os.Stderr, err) } + serviceImages := CheckVersions(ctx, fsys) table := `|SERVICE IMAGE|LOCAL|LINKED| |-|-|-| ` for _, image := range serviceImages { - parts := strings.Split(image, ":") - version, ok := linked[image] - if !ok { - version = "-" - } else if parts[1] != version && image != utils.Config.Db.Image { - utils.CmdSuggestion = suggestLinkCommand + remote := image.Remote + if len(remote) == 0 { + remote = "-" } - table += fmt.Sprintf("|`%s`|`%s`|`%s`|\n", parts[0], parts[1], version) + table += fmt.Sprintf("|`%s`|`%s`|`%s`|\n", image.Name, image.Local, remote) } return list.RenderTable(table) } -func GetServiceImages() []string { - return []string{ - utils.Config.Db.Image, - utils.Config.Auth.Image, - utils.Config.Api.Image, - utils.Config.Realtime.Image, - utils.Config.Storage.Image, - utils.Config.EdgeRuntime.Image, - utils.Config.Studio.Image, - utils.Config.Studio.PgmetaImage, - utils.Config.Analytics.Image, - utils.Config.Db.Pooler.Image, +type imageVersion struct { + Name string `json:"name"` + Local string `json:"local"` + Remote string `json:"remote"` +} + +func CheckVersions(ctx context.Context, fsys afero.Fs) []imageVersion { + var remote map[string]string 
+ if _, err := utils.LoadAccessTokenFS(fsys); err == nil && len(flags.ProjectRef) > 0 { + remote = listRemoteImages(ctx, flags.ProjectRef) } + var result []imageVersion + for _, image := range utils.Config.GetServiceImages() { + parts := strings.Split(image, ":") + v := imageVersion{Name: parts[0], Local: parts[1]} + if v.Remote = remote[image]; v.Remote == v.Local { + delete(remote, image) + } + result = append(result, v) + } + if len(remote) > 0 { + fmt.Fprintln(os.Stderr, suggestUpdateCmd(remote)) + } + return result } -func GetRemoteImages(ctx context.Context, projectRef string) map[string]string { +func listRemoteImages(ctx context.Context, projectRef string) map[string]string { linked := make(map[string]string, 4) var wg sync.WaitGroup wg.Add(1) @@ -94,3 +102,12 @@ func GetRemoteImages(ctx context.Context, projectRef string) map[string]string { wg.Wait() return linked } + +func suggestUpdateCmd(serviceImages map[string]string) string { + cmd := fmt.Sprintln(utils.Yellow("WARNING:"), "You are running different service versions locally than your linked project:") + for k, v := range serviceImages { + cmd += fmt.Sprintf("%s => %s\n", k, v) + } + cmd += fmt.Sprintf("Run %s to update them.", utils.Aqua("supabase link")) + return cmd +} diff --git a/internal/snippets/download/download.go b/internal/snippets/download/download.go index 7f1f3a463..22b45dd68 100644 --- a/internal/snippets/download/download.go +++ b/internal/snippets/download/download.go @@ -5,12 +5,18 @@ import ( "fmt" "github.com/go-errors/errors" + "github.com/google/uuid" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" ) func Run(ctx context.Context, snippetId string, fsys afero.Fs) error { - resp, err := utils.GetSupabase().V1GetASnippetWithResponse(ctx, snippetId) + // Convert string to UUID + id, err := uuid.Parse(snippetId) + if err != nil { + return fmt.Errorf("invalid snippet ID: %w", err) + } + resp, err := utils.GetSupabase().V1GetASnippetWithResponse(ctx, id) if err != nil 
{ return errors.Errorf("failed to download snippet: %w", err) } diff --git a/internal/snippets/list/list.go b/internal/snippets/list/list.go index 1bd47060f..f54325eaa 100644 --- a/internal/snippets/list/list.go +++ b/internal/snippets/list/list.go @@ -14,11 +14,8 @@ import ( ) func Run(ctx context.Context, fsys afero.Fs) error { - ref, err := flags.LoadProjectRef(fsys) - if err != nil { - return err - } - resp, err := utils.GetSupabase().V1ListAllSnippetsWithResponse(ctx, &api.V1ListAllSnippetsParams{ProjectRef: &ref}) + opts := api.V1ListAllSnippetsParams{ProjectRef: &flags.ProjectRef} + resp, err := utils.GetSupabase().V1ListAllSnippetsWithResponse(ctx, &opts) if err != nil { return errors.Errorf("failed to list snippets: %w", err) } diff --git a/internal/sso/internal/saml/files.go b/internal/sso/internal/saml/files.go index 3c327e6c0..62e0846e3 100644 --- a/internal/sso/internal/saml/files.go +++ b/internal/sso/internal/saml/files.go @@ -65,7 +65,7 @@ func ValidateMetadataURL(ctx context.Context, metadataURL string) error { return errors.Errorf("failed to parse metadata uri: %w", err) } - if strings.ToLower(parsed.Scheme) != "https" { + if !strings.EqualFold(parsed.Scheme, "https") { return errors.New("only HTTPS Metadata URLs are supported") } diff --git a/internal/start/start.go b/internal/start/start.go index d1a9612a9..86331a3cb 100644 --- a/internal/start/start.go +++ b/internal/start/start.go @@ -34,42 +34,21 @@ import ( "golang.org/x/mod/semver" ) -func suggestUpdateCmd(serviceImages map[string]string) string { - cmd := fmt.Sprintln(utils.Yellow("WARNING:"), "You are running different service versions locally than your linked project:") - for k, v := range serviceImages { - cmd += fmt.Sprintf("%s => %s\n", k, v) - } - cmd += fmt.Sprintf("Run %s to update them.", utils.Aqua("supabase link")) - return cmd -} - func Run(ctx context.Context, fsys afero.Fs, excludedContainers []string, ignoreHealthCheck bool) error { // Sanity checks. 
{ - if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } if err := utils.AssertSupabaseDbIsRunning(); err == nil { fmt.Fprintln(os.Stderr, utils.Aqua("supabase start")+" is already running.") - utils.CmdSuggestion = fmt.Sprintf("Run %s to show status of local Supabase containers.", utils.Aqua("supabase status")) - return nil + names := status.CustomName{} + return status.Run(ctx, names, utils.OutputPretty, fsys) } else if !errors.Is(err, utils.ErrNotRunning) { return err } - if _, err := utils.LoadAccessTokenFS(fsys); err == nil { - if ref, err := flags.LoadProjectRef(fsys); err == nil { - local := services.GetServiceImages() - remote := services.GetRemoteImages(ctx, ref) - for _, image := range local { - parts := strings.Split(image, ":") - if version, ok := remote[image]; ok && version == parts[1] { - delete(remote, image) - } - } - if len(remote) > 0 { - fmt.Fprintln(os.Stderr, suggestUpdateCmd(remote)) - } - } + if err := flags.LoadProjectRef(fsys); err == nil { + _ = services.CheckVersions(ctx, fsys) } } @@ -86,7 +65,7 @@ func Run(ctx context.Context, fsys afero.Fs, excludedContainers []string, ignore if ignoreHealthCheck && start.IsUnhealthyError(err) { fmt.Fprintln(os.Stderr, err) } else { - if err := utils.DockerRemoveAll(context.Background(), os.Stderr); err != nil { + if err := utils.DockerRemoveAll(context.Background(), os.Stderr, utils.Config.ProjectId); err != nil { fmt.Fprintln(os.Stderr, err) } return err @@ -183,18 +162,21 @@ func run(p utils.Program, ctx context.Context, fsys afero.Fs, excludedContainers // Start Postgres. 
w := utils.StatusWriter{Program: p} if dbConfig.Host == utils.DbId { - if err := start.StartDatabase(ctx, fsys, w, options...); err != nil { + if err := start.StartDatabase(ctx, "", fsys, w, options...); err != nil { return err } } var started []string + var isStorageEnabled = utils.Config.Storage.Enabled && !isContainerExcluded(utils.Config.Storage.Image, excluded) + var isImgProxyEnabled = utils.Config.Storage.ImageTransformation != nil && + utils.Config.Storage.ImageTransformation.Enabled && !isContainerExcluded(utils.Config.Storage.ImgProxyImage, excluded) p.Send(utils.StatusMsg("Starting containers...")) // Start Logflare if utils.Config.Analytics.Enabled && !isContainerExcluded(utils.Config.Analytics.Image, excluded) { env := []string{ - "DB_DATABASE=" + dbConfig.Database, + "DB_DATABASE=_supabase", "DB_HOSTNAME=" + dbConfig.Host, fmt.Sprintf("DB_PORT=%d", dbConfig.Port), "DB_SCHEMA=_analytics", @@ -227,7 +209,7 @@ func run(p utils.Program, ctx context.Context, fsys afero.Fs, excludedContainers ) case config.LogflarePostgres: env = append(env, - fmt.Sprintf("POSTGRES_BACKEND_URL=postgresql://%s:%s@%s:%d/%s", dbConfig.User, dbConfig.Password, dbConfig.Host, dbConfig.Port, dbConfig.Database), + fmt.Sprintf("POSTGRES_BACKEND_URL=postgresql://%s:%s@%s:%d/%s", dbConfig.User, dbConfig.Password, dbConfig.Host, dbConfig.Port, "_supabase"), "POSTGRES_BACKEND_SCHEMA=_analytics", ) } @@ -484,15 +466,11 @@ EOF fmt.Sprintf("GOTRUE_EXTERNAL_EMAIL_ENABLED=%v", utils.Config.Auth.Email.EnableSignup), fmt.Sprintf("GOTRUE_MAILER_SECURE_EMAIL_CHANGE_ENABLED=%v", utils.Config.Auth.Email.DoubleConfirmChanges), fmt.Sprintf("GOTRUE_MAILER_AUTOCONFIRM=%v", !utils.Config.Auth.Email.EnableConfirmations), + fmt.Sprintf("GOTRUE_MAILER_OTP_LENGTH=%v", utils.Config.Auth.Email.OtpLength), + fmt.Sprintf("GOTRUE_MAILER_OTP_EXP=%v", utils.Config.Auth.Email.OtpExpiry), fmt.Sprintf("GOTRUE_EXTERNAL_ANONYMOUS_USERS_ENABLED=%v", utils.Config.Auth.EnableAnonymousSignIns), - 
fmt.Sprintf("GOTRUE_SMTP_HOST=%s", utils.Config.Auth.Email.Smtp.Host), - fmt.Sprintf("GOTRUE_SMTP_PORT=%d", utils.Config.Auth.Email.Smtp.Port), - fmt.Sprintf("GOTRUE_SMTP_USER=%s", utils.Config.Auth.Email.Smtp.User), - fmt.Sprintf("GOTRUE_SMTP_PASS=%s", utils.Config.Auth.Email.Smtp.Pass), - fmt.Sprintf("GOTRUE_SMTP_ADMIN_EMAIL=%s", utils.Config.Auth.Email.Smtp.AdminEmail), - fmt.Sprintf("GOTRUE_SMTP_SENDER_NAME=%s", utils.Config.Auth.Email.Smtp.SenderName), fmt.Sprintf("GOTRUE_SMTP_MAX_FREQUENCY=%v", utils.Config.Auth.Email.MaxFrequency), "GOTRUE_MAILER_URLPATHS_INVITE=" + utils.GetApiUrl("/auth/v1/verify"), @@ -509,6 +487,8 @@ EOF fmt.Sprintf("GOTRUE_SMS_TEMPLATE=%v", utils.Config.Auth.Sms.Template), "GOTRUE_SMS_TEST_OTP=" + testOTP.String(), + fmt.Sprintf("GOTRUE_PASSWORD_MIN_LENGTH=%v", utils.Config.Auth.MinimumPasswordLength), + fmt.Sprintf("GOTRUE_PASSWORD_REQUIRED_CHARACTERS=%v", utils.Config.Auth.PasswordRequirements.ToChar()), fmt.Sprintf("GOTRUE_SECURITY_REFRESH_TOKEN_ROTATION_ENABLED=%v", utils.Config.Auth.EnableRefreshTokenRotation), fmt.Sprintf("GOTRUE_SECURITY_REFRESH_TOKEN_REUSE_INTERVAL=%v", utils.Config.Auth.RefreshTokenReuseInterval), fmt.Sprintf("GOTRUE_SECURITY_MANUAL_LINKING_ENABLED=%v", utils.Config.Auth.EnableManualLinking), @@ -517,13 +497,32 @@ EOF fmt.Sprintf("GOTRUE_MFA_PHONE_VERIFY_ENABLED=%v", utils.Config.Auth.MFA.Phone.VerifyEnabled), fmt.Sprintf("GOTRUE_MFA_TOTP_ENROLL_ENABLED=%v", utils.Config.Auth.MFA.TOTP.EnrollEnabled), fmt.Sprintf("GOTRUE_MFA_TOTP_VERIFY_ENABLED=%v", utils.Config.Auth.MFA.TOTP.VerifyEnabled), + fmt.Sprintf("GOTRUE_MFA_WEB_AUTHN_ENROLL_ENABLED=%v", utils.Config.Auth.MFA.WebAuthn.EnrollEnabled), + fmt.Sprintf("GOTRUE_MFA_WEB_AUTHN_VERIFY_ENABLED=%v", utils.Config.Auth.MFA.WebAuthn.VerifyEnabled), fmt.Sprintf("GOTRUE_MFA_MAX_ENROLLED_FACTORS=%v", utils.Config.Auth.MFA.MaxEnrolledFactors), } + if utils.Config.Auth.Email.Smtp != nil && utils.Config.Auth.Email.Smtp.IsEnabled() { + env = append(env, + 
fmt.Sprintf("GOTRUE_SMTP_HOST=%s", utils.Config.Auth.Email.Smtp.Host), + fmt.Sprintf("GOTRUE_SMTP_PORT=%d", utils.Config.Auth.Email.Smtp.Port), + fmt.Sprintf("GOTRUE_SMTP_USER=%s", utils.Config.Auth.Email.Smtp.User), + fmt.Sprintf("GOTRUE_SMTP_PASS=%s", utils.Config.Auth.Email.Smtp.Pass.Value), + fmt.Sprintf("GOTRUE_SMTP_ADMIN_EMAIL=%s", utils.Config.Auth.Email.Smtp.AdminEmail), + fmt.Sprintf("GOTRUE_SMTP_SENDER_NAME=%s", utils.Config.Auth.Email.Smtp.SenderName), + ) + } else if utils.Config.Inbucket.Enabled { + env = append(env, + "GOTRUE_SMTP_HOST="+utils.InbucketId, + "GOTRUE_SMTP_PORT=2500", + fmt.Sprintf("GOTRUE_SMTP_ADMIN_EMAIL=%s", utils.Config.Inbucket.AdminEmail), + fmt.Sprintf("GOTRUE_SMTP_SENDER_NAME=%s", utils.Config.Inbucket.SenderName), + ) + } + if utils.Config.Auth.Sessions.Timebox > 0 { env = append(env, fmt.Sprintf("GOTRUE_SESSIONS_TIMEBOX=%v", utils.Config.Auth.Sessions.Timebox)) } - if utils.Config.Auth.Sessions.InactivityTimeout > 0 { env = append(env, fmt.Sprintf("GOTRUE_SESSIONS_INACTIVITY_TIMEOUT=%v", utils.Config.Auth.Sessions.InactivityTimeout)) } @@ -537,101 +536,96 @@ EOF id+filepath.Ext(tmpl.ContentPath), )) } - if len(tmpl.Subject) > 0 { + if tmpl.Subject != nil { env = append(env, fmt.Sprintf("GOTRUE_MAILER_SUBJECTS_%s=%s", strings.ToUpper(id), - tmpl.Subject, + *tmpl.Subject, )) } } - if utils.Config.Auth.Sms.Twilio.Enabled { + switch { + case utils.Config.Auth.Sms.Twilio.Enabled: env = append( env, "GOTRUE_SMS_PROVIDER=twilio", "GOTRUE_SMS_TWILIO_ACCOUNT_SID="+utils.Config.Auth.Sms.Twilio.AccountSid, - "GOTRUE_SMS_TWILIO_AUTH_TOKEN="+utils.Config.Auth.Sms.Twilio.AuthToken, + "GOTRUE_SMS_TWILIO_AUTH_TOKEN="+utils.Config.Auth.Sms.Twilio.AuthToken.Value, "GOTRUE_SMS_TWILIO_MESSAGE_SERVICE_SID="+utils.Config.Auth.Sms.Twilio.MessageServiceSid, ) - } - if utils.Config.Auth.Sms.TwilioVerify.Enabled { + case utils.Config.Auth.Sms.TwilioVerify.Enabled: env = append( env, "GOTRUE_SMS_PROVIDER=twilio_verify", 
"GOTRUE_SMS_TWILIO_VERIFY_ACCOUNT_SID="+utils.Config.Auth.Sms.TwilioVerify.AccountSid, - "GOTRUE_SMS_TWILIO_VERIFY_AUTH_TOKEN="+utils.Config.Auth.Sms.TwilioVerify.AuthToken, + "GOTRUE_SMS_TWILIO_VERIFY_AUTH_TOKEN="+utils.Config.Auth.Sms.TwilioVerify.AuthToken.Value, "GOTRUE_SMS_TWILIO_VERIFY_MESSAGE_SERVICE_SID="+utils.Config.Auth.Sms.TwilioVerify.MessageServiceSid, ) - } - if utils.Config.Auth.Sms.Messagebird.Enabled { + case utils.Config.Auth.Sms.Messagebird.Enabled: env = append( env, "GOTRUE_SMS_PROVIDER=messagebird", - "GOTRUE_SMS_MESSAGEBIRD_ACCESS_KEY="+utils.Config.Auth.Sms.Messagebird.AccessKey, + "GOTRUE_SMS_MESSAGEBIRD_ACCESS_KEY="+utils.Config.Auth.Sms.Messagebird.AccessKey.Value, "GOTRUE_SMS_MESSAGEBIRD_ORIGINATOR="+utils.Config.Auth.Sms.Messagebird.Originator, ) - } - if utils.Config.Auth.Sms.Textlocal.Enabled { + case utils.Config.Auth.Sms.Textlocal.Enabled: env = append( env, "GOTRUE_SMS_PROVIDER=textlocal", - "GOTRUE_SMS_TEXTLOCAL_API_KEY="+utils.Config.Auth.Sms.Textlocal.ApiKey, + "GOTRUE_SMS_TEXTLOCAL_API_KEY="+utils.Config.Auth.Sms.Textlocal.ApiKey.Value, "GOTRUE_SMS_TEXTLOCAL_SENDER="+utils.Config.Auth.Sms.Textlocal.Sender, ) - } - if utils.Config.Auth.Sms.Vonage.Enabled { + case utils.Config.Auth.Sms.Vonage.Enabled: env = append( env, "GOTRUE_SMS_PROVIDER=vonage", "GOTRUE_SMS_VONAGE_API_KEY="+utils.Config.Auth.Sms.Vonage.ApiKey, - "GOTRUE_SMS_VONAGE_API_SECRET="+utils.Config.Auth.Sms.Vonage.ApiSecret, + "GOTRUE_SMS_VONAGE_API_SECRET="+utils.Config.Auth.Sms.Vonage.ApiSecret.Value, "GOTRUE_SMS_VONAGE_FROM="+utils.Config.Auth.Sms.Vonage.From, ) } - if utils.Config.Auth.Hook.MFAVerificationAttempt.Enabled { + + if hook := utils.Config.Auth.Hook.MFAVerificationAttempt; hook != nil && hook.Enabled { env = append( env, "GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_ENABLED=true", - "GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_URI="+utils.Config.Auth.Hook.MFAVerificationAttempt.URI, - 
"GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_SECRETS="+utils.Config.Auth.Hook.MFAVerificationAttempt.Secrets, + "GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_URI="+hook.URI, + "GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_SECRETS="+hook.Secrets.Value, ) } - - if utils.Config.Auth.Hook.PasswordVerificationAttempt.Enabled { + if hook := utils.Config.Auth.Hook.PasswordVerificationAttempt; hook != nil && hook.Enabled { env = append( env, "GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_ENABLED=true", - "GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_URI="+utils.Config.Auth.Hook.PasswordVerificationAttempt.URI, - "GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_SECRETS="+utils.Config.Auth.Hook.PasswordVerificationAttempt.Secrets, + "GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_URI="+hook.URI, + "GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_SECRETS="+hook.Secrets.Value, ) } - - if utils.Config.Auth.Hook.CustomAccessToken.Enabled { + if hook := utils.Config.Auth.Hook.CustomAccessToken; hook != nil && hook.Enabled { env = append( env, "GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_ENABLED=true", - "GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_URI="+utils.Config.Auth.Hook.CustomAccessToken.URI, - "GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_SECRETS="+utils.Config.Auth.Hook.CustomAccessToken.Secrets, + "GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_URI="+hook.URI, + "GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_SECRETS="+hook.Secrets.Value, ) } - - if utils.Config.Auth.Hook.SendSMS.Enabled { + if hook := utils.Config.Auth.Hook.SendSMS; hook != nil && hook.Enabled { env = append( env, "GOTRUE_HOOK_SEND_SMS_ENABLED=true", - "GOTRUE_HOOK_SEND_SMS_URI="+utils.Config.Auth.Hook.SendSMS.URI, - "GOTRUE_HOOK_SEND_SMS_SECRETS="+utils.Config.Auth.Hook.SendSMS.Secrets, + "GOTRUE_HOOK_SEND_SMS_URI="+hook.URI, + "GOTRUE_HOOK_SEND_SMS_SECRETS="+hook.Secrets.Value, ) } - - if utils.Config.Auth.Hook.SendEmail.Enabled { + if hook := utils.Config.Auth.Hook.SendEmail; hook != nil && hook.Enabled { env = append( env, "GOTRUE_HOOK_SEND_EMAIL_ENABLED=true", - 
"GOTRUE_HOOK_SEND_EMAIL_URI="+utils.Config.Auth.Hook.SendEmail.URI, - "GOTRUE_HOOK_SEND_EMAIL_SECRETS="+utils.Config.Auth.Hook.SendEmail.Secrets, + "GOTRUE_HOOK_SEND_EMAIL_URI="+hook.URI, + "GOTRUE_HOOK_SEND_EMAIL_SECRETS="+hook.Secrets.Value, ) } + if utils.Config.Auth.MFA.Phone.EnrollEnabled || utils.Config.Auth.MFA.Phone.VerifyEnabled { env = append( env, @@ -646,24 +640,18 @@ EOF env, fmt.Sprintf("GOTRUE_EXTERNAL_%s_ENABLED=%v", strings.ToUpper(name), config.Enabled), fmt.Sprintf("GOTRUE_EXTERNAL_%s_CLIENT_ID=%s", strings.ToUpper(name), config.ClientId), - fmt.Sprintf("GOTRUE_EXTERNAL_%s_SECRET=%s", strings.ToUpper(name), config.Secret), + fmt.Sprintf("GOTRUE_EXTERNAL_%s_SECRET=%s", strings.ToUpper(name), config.Secret.Value), fmt.Sprintf("GOTRUE_EXTERNAL_%s_SKIP_NONCE_CHECK=%t", strings.ToUpper(name), config.SkipNonceCheck), ) - if config.RedirectUri != "" { - env = append(env, - fmt.Sprintf("GOTRUE_EXTERNAL_%s_REDIRECT_URI=%s", strings.ToUpper(name), config.RedirectUri), - ) - } else { - env = append(env, - fmt.Sprintf("GOTRUE_EXTERNAL_%s_REDIRECT_URI=%s", strings.ToUpper(name), utils.GetApiUrl("/auth/v1/callback")), - ) + redirectUri := config.RedirectUri + if redirectUri == "" { + redirectUri = utils.GetApiUrl("/auth/v1/callback") } + env = append(env, fmt.Sprintf("GOTRUE_EXTERNAL_%s_REDIRECT_URI=%s", strings.ToUpper(name), redirectUri)) if config.Url != "" { - env = append(env, - fmt.Sprintf("GOTRUE_EXTERNAL_%s_URL=%s", strings.ToUpper(name), config.Url), - ) + env = append(env, fmt.Sprintf("GOTRUE_EXTERNAL_%s_URL=%s", strings.ToUpper(name), config.Url)) } } @@ -759,8 +747,9 @@ EOF "SECRET_KEY_BASE=" + utils.Config.Realtime.SecretKeyBase, "ERL_AFLAGS=" + utils.ToRealtimeEnv(utils.Config.Realtime.IpVersion), "DNS_NODES=''", - "RLIMIT_NOFILE=10000", + "RLIMIT_NOFILE=", "SEED_SELF_HOST=true", + "RUN_JANITOR=true", fmt.Sprintf("MAX_HEADER_LENGTH=%d", utils.Config.Realtime.MaxHeaderLength), }, ExposedPorts: nat.PortSet{"4000/tcp": {}}, @@ -827,7 +816,7 @@ EOF } 
// Start Storage. - if utils.Config.Storage.Enabled && !isContainerExcluded(utils.Config.Storage.Image, excluded) { + if isStorageEnabled { dockerStoragePath := "/mnt" if _, err := utils.DockerStart( ctx, @@ -846,7 +835,7 @@ EOF // TODO: https://github.com/supabase/storage-api/issues/55 "STORAGE_S3_REGION=" + utils.Config.Storage.S3Credentials.Region, "GLOBAL_S3_BUCKET=stub", - fmt.Sprintf("ENABLE_IMAGE_TRANSFORMATION=%t", utils.Config.Storage.ImageTransformation.Enabled), + fmt.Sprintf("ENABLE_IMAGE_TRANSFORMATION=%t", isImgProxyEnabled), fmt.Sprintf("IMGPROXY_URL=http://%s:5001", utils.ImgProxyId), "TUS_URL_PATH=/storage/v1/upload/resumable", "S3_PROTOCOL_ACCESS_KEY_ID=" + utils.Config.Storage.S3Credentials.AccessKeyId, @@ -885,15 +874,19 @@ EOF } // Start Storage ImgProxy. - if utils.Config.Storage.Enabled && utils.Config.Storage.ImageTransformation.Enabled && !isContainerExcluded(utils.Config.Storage.ImageTransformation.Image, excluded) { + if isStorageEnabled && isImgProxyEnabled { if _, err := utils.DockerStart( ctx, container.Config{ - Image: utils.Config.Storage.ImageTransformation.Image, + Image: utils.Config.Storage.ImgProxyImage, Env: []string{ "IMGPROXY_BIND=:5001", "IMGPROXY_LOCAL_FILESYSTEM_ROOT=/", "IMGPROXY_USE_ETAG=/", + "IMGPROXY_MAX_SRC_RESOLUTION=50", + "IMGPROXY_MAX_SRC_FILE_SIZE=25000000", + "IMGPROXY_MAX_ANIMATION_FRAMES=60", + "IMGPROXY_ENABLE_WEBP_DETECTION=true", }, Healthcheck: &container.HealthConfig{ Test: []string{"CMD", "imgproxy", "health"}, @@ -944,7 +937,7 @@ EOF "PG_META_DB_PASSWORD=" + dbConfig.Password, }, Healthcheck: &container.HealthConfig{ - Test: []string{"CMD", "node", `--eval='fetch("http://127.0.0.1:8080/health").then((r) => {if (r.status !== 200) throw new Error(r.status)})'`}, + Test: []string{"CMD-SHELL", `node --eval="fetch('http://127.0.0.1:8080/health').then((r) => {if (!r.ok) throw new Error(r.status)})"`}, Interval: 10 * time.Second, Timeout: 2 * time.Second, Retries: 3, @@ -990,7 +983,7 @@ EOF 
"HOSTNAME=0.0.0.0", }, Healthcheck: &container.HealthConfig{ - Test: []string{"CMD", "node", `--eval='fetch("http://127.0.0.1:3000/api/profile", (r) => {if (r.statusCode !== 200) throw new Error(r.statusCode)})'`}, + Test: []string{"CMD-SHELL", `node --eval="fetch('http://localhost:3000/api/platform/profile').then((r) => {if (!r.ok) throw new Error(r.status)})"`}, Interval: 10 * time.Second, Timeout: 2 * time.Second, Retries: 3, @@ -1044,13 +1037,14 @@ EOF "PORT=4000", fmt.Sprintf("PROXY_PORT_SESSION=%d", portSession), fmt.Sprintf("PROXY_PORT_TRANSACTION=%d", portTransaction), - fmt.Sprintf("DATABASE_URL=ecto://%s:%s@%s:%d/%s", dbConfig.User, dbConfig.Password, dbConfig.Host, dbConfig.Port, dbConfig.Database), + fmt.Sprintf("DATABASE_URL=ecto://%s:%s@%s:%d/%s", dbConfig.User, dbConfig.Password, dbConfig.Host, dbConfig.Port, "_supabase"), "CLUSTER_POSTGRES=true", "SECRET_KEY_BASE=" + utils.Config.Db.Pooler.SecretKeyBase, "VAULT_ENC_KEY=" + utils.Config.Db.Pooler.EncryptionKey, "API_JWT_SECRET=" + utils.Config.Auth.JwtSecret, "METRICS_JWT_SECRET=" + utils.Config.Auth.JwtSecret, "REGION=local", + "RUN_JANITOR=true", "ERL_AFLAGS=-proto_dist inet_tcp", }, Cmd: []string{ diff --git a/internal/start/start_test.go b/internal/start/start_test.go index 645a1cd70..bbd73ab93 100644 --- a/internal/start/start_test.go +++ b/internal/start/start_test.go @@ -10,6 +10,7 @@ import ( "testing" "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/network" "github.com/docker/docker/api/types/volume" "github.com/h2non/gock" "github.com/jackc/pgconn" @@ -36,7 +37,7 @@ func TestStartCommand(t *testing.T) { // Run test err := Run(context.Background(), fsys, []string{}, false) // Check error - assert.ErrorContains(t, err, "toml: line 0: unexpected EOF; expected key separator '='") + assert.ErrorContains(t, err, "toml: expected = after a key, but the document ends there") }) t.Run("throws error on missing docker", func(t *testing.T) { @@ -56,7 +57,13 @@ func 
TestStartCommand(t *testing.T) { assert.Empty(t, apitest.ListUnmatchedRequests()) }) - t.Run("noop if database is already running", func(t *testing.T) { + t.Run("show status if database is already running", func(t *testing.T) { + var running []types.Container + for _, name := range utils.GetDockerIds() { + running = append(running, types.Container{ + Names: []string{name + "_test"}, + }) + } // Setup in-memory fs fsys := afero.NewMemMapFs() require.NoError(t, utils.WriteConfig(fsys, false)) @@ -67,6 +74,17 @@ func TestStartCommand(t *testing.T) { Get("/v" + utils.Docker.ClientVersion() + "/containers"). Reply(http.StatusOK). JSON(types.ContainerJSON{}) + + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/containers/supabase_db_start/json"). + Reply(http.StatusOK). + JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ + State: &types.ContainerState{Running: true}, + }}) + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/containers/json"). + Reply(http.StatusOK). + JSON(running) // Run test err := Run(context.Background(), fsys, []string{}, false) // Check error @@ -85,7 +103,7 @@ func TestDatabaseStart(t *testing.T) { gock.New(utils.Docker.DaemonHost()). Post("/v" + utils.Docker.ClientVersion() + "/networks/create"). Reply(http.StatusCreated). - JSON(types.NetworkCreateResponse{}) + JSON(network.CreateResponse{}) // Caches all dependencies imageUrl := utils.GetRegistryImageUrl(utils.Config.Db.Image) gock.New(utils.Docker.DaemonHost()). 
@@ -132,7 +150,7 @@ func TestDatabaseStart(t *testing.T) { utils.StorageId = "test-storage" apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Storage.Image), utils.StorageId) utils.ImgProxyId = "test-imgproxy" - apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Storage.ImageTransformation.Image), utils.ImgProxyId) + apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.Storage.ImgProxyImage), utils.ImgProxyId) utils.EdgeRuntimeId = "test-edge-runtime" apitest.MockDockerStart(utils.Docker, utils.GetRegistryImageUrl(utils.Config.EdgeRuntime.Image), utils.EdgeRuntimeId) utils.PgmetaId = "test-pgmeta" @@ -159,7 +177,7 @@ func TestDatabaseStart(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) } @@ -176,7 +194,7 @@ func TestDatabaseStart(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) gock.New(utils.Config.Api.ExternalUrl). @@ -201,7 +219,7 @@ func TestDatabaseStart(t *testing.T) { gock.New(utils.Docker.DaemonHost()). Post("/v" + utils.Docker.ClientVersion() + "/networks/create"). Reply(http.StatusCreated). - JSON(types.NetworkCreateResponse{}) + JSON(network.CreateResponse{}) // Caches all dependencies imageUrl := utils.GetRegistryImageUrl(utils.Config.Db.Image) gock.New(utils.Docker.DaemonHost()). 
@@ -224,7 +242,7 @@ func TestDatabaseStart(t *testing.T) { JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{ State: &types.ContainerState{ Running: true, - Health: &types.Health{Status: "healthy"}, + Health: &types.Health{Status: types.Healthy}, }, }}) // Run test diff --git a/internal/start/templates/kong.yml b/internal/start/templates/kong.yml index 3865eba1c..8f1d28fd8 100644 --- a/internal/start/templates/kong.yml +++ b/internal/start/templates/kong.yml @@ -76,7 +76,7 @@ services: headers: - "Content-Profile: graphql_public" - name: realtime-v1-ws - _comment: "Realtime: /realtime/v1/* -> ws://realtime:4000/socket/*" + _comment: "Realtime: /realtime/v1/* -> ws://realtime:4000/socket/websocket" url: http://{{ .RealtimeId }}:4000/socket protocol: ws routes: @@ -86,6 +86,17 @@ services: - /realtime/v1/ plugins: - name: cors + - name: realtime-v1-longpoll + _comment: "Realtime: /realtime/v1/* -> ws://realtime:4000/socket/longpoll" + url: http://{{ .RealtimeId }}:4000/socket + protocol: http + routes: + - name: realtime-v1-longpoll + strip_path: true + paths: + - /realtime/v1/ + plugins: + - name: cors - name: realtime-v1-rest _comment: "Realtime: /realtime/v1/* -> http://realtime:4000/api/*" url: http://{{ .RealtimeId }}:4000/api diff --git a/internal/status/status.go b/internal/status/status.go index 0963be09c..68eae2fe3 100644 --- a/internal/status/status.go +++ b/internal/status/status.go @@ -14,10 +14,12 @@ import ( "sync" "time" + "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" "github.com/go-errors/errors" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/fetcher" ) @@ -66,7 +68,7 @@ func (c *CustomName) toValues(exclude ...string) map[string]string { func Run(ctx context.Context, names CustomName, format string, fsys afero.Fs) error { // Sanity checks. 
- if err := utils.LoadConfigFS(fsys); err != nil { + if err := flags.LoadConfig(fsys); err != nil { return err } if err := assertContainerHealthy(ctx, utils.DbId); err != nil { @@ -89,7 +91,7 @@ func Run(ctx context.Context, names CustomName, format string, fsys afero.Fs) er func checkServiceHealth(ctx context.Context) ([]string, error) { resp, err := utils.Docker.ContainerList(ctx, container.ListOptions{ - Filters: utils.CliProjectFilter(), + Filters: utils.CliProjectFilter(utils.Config.ProjectId), }) if err != nil { return nil, errors.Errorf("failed to list running containers: %w", err) @@ -114,7 +116,7 @@ func assertContainerHealthy(ctx context.Context, container string) error { return errors.Errorf("failed to inspect container health: %w", err) } else if !resp.State.Running { return errors.Errorf("%s container is not running: %s", container, resp.State.Status) - } else if resp.State.Health != nil && resp.State.Health.Status != "healthy" { + } else if resp.State.Health != nil && resp.State.Health.Status != types.Healthy { return errors.Errorf("%s container is not ready: %s", container, resp.State.Health.Status) } return nil diff --git a/internal/status/status_test.go b/internal/status/status_test.go index 391a8c998..175d28ca0 100644 --- a/internal/status/status_test.go +++ b/internal/status/status_test.go @@ -59,7 +59,7 @@ func TestStatusCommand(t *testing.T) { // Run test err := Run(context.Background(), CustomName{}, utils.OutputPretty, fsys) // Check error - assert.ErrorContains(t, err, "toml: line 0: unexpected EOF; expected key separator '='") + assert.ErrorContains(t, err, "toml: expected = after a key, but the document ends there") }) t.Run("throws error on missing docker", func(t *testing.T) { diff --git a/internal/stop/stop.go b/internal/stop/stop.go index b4bd94c23..da98f1600 100644 --- a/internal/stop/stop.go +++ b/internal/stop/stop.go @@ -9,35 +9,45 @@ import ( "github.com/docker/docker/api/types/volume" "github.com/spf13/afero" 
"github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" ) -func Run(ctx context.Context, backup bool, projectId string, fsys afero.Fs) error { - // Sanity checks. - if len(projectId) > 0 { - utils.Config.ProjectId = projectId - } else if err := utils.LoadConfigFS(fsys); err != nil { - return err +func Run(ctx context.Context, backup bool, projectId string, all bool, fsys afero.Fs) error { + var searchProjectIdFilter string + if !all { + // Sanity checks. + if len(projectId) > 0 { + utils.Config.ProjectId = projectId + } else if err := flags.LoadConfig(fsys); err != nil { + return err + } + searchProjectIdFilter = utils.Config.ProjectId } // Stop all services if err := utils.RunProgram(ctx, func(p utils.Program, ctx context.Context) error { w := utils.StatusWriter{Program: p} - return stop(ctx, backup, w) + return stop(ctx, backup, w, searchProjectIdFilter) }); err != nil { return err } fmt.Println("Stopped " + utils.Aqua("supabase") + " local development setup.") if resp, err := utils.Docker.VolumeList(ctx, volume.ListOptions{ - Filters: utils.CliProjectFilter(), + Filters: utils.CliProjectFilter(searchProjectIdFilter), }); err == nil && len(resp.Volumes) > 0 { - listVolume := fmt.Sprintf("docker volume ls --filter label=%s=%s", utils.CliProjectLabel, utils.Config.ProjectId) - utils.CmdSuggestion = "Local data are backed up to docker volume. Use docker to show them: " + utils.Aqua(listVolume) + if len(searchProjectIdFilter) > 0 { + listVolume := fmt.Sprintf("docker volume ls --filter label=%s=%s", utils.CliProjectLabel, searchProjectIdFilter) + utils.CmdSuggestion = "Local data are backed up to docker volume. Use docker to show them: " + utils.Aqua(listVolume) + } else { + listVolume := fmt.Sprintf("docker volume ls --filter label=%s", utils.CliProjectLabel) + utils.CmdSuggestion = "Local data are backed up to docker volume. 
Use docker to show them: " + utils.Aqua(listVolume) + } } return nil } -func stop(ctx context.Context, backup bool, w io.Writer) error { +func stop(ctx context.Context, backup bool, w io.Writer, projectId string) error { utils.NoBackupVolume = !backup - return utils.DockerRemoveAll(ctx, w) + return utils.DockerRemoveAll(ctx, w, projectId) } diff --git a/internal/stop/stop_test.go b/internal/stop/stop_test.go index dbb8b9030..b607d87d7 100644 --- a/internal/stop/stop_test.go +++ b/internal/stop/stop_test.go @@ -3,13 +3,17 @@ package stop import ( "context" "errors" + "fmt" "io" "net/http" "os" "testing" "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/network" "github.com/docker/docker/api/types/volume" + "github.com/docker/docker/client" "github.com/h2non/gock" "github.com/spf13/afero" "github.com/stretchr/testify/assert" @@ -33,11 +37,11 @@ func TestStopCommand(t *testing.T) { gock.New(utils.Docker.DaemonHost()). Post("/v" + utils.Docker.ClientVersion() + "/containers/prune"). Reply(http.StatusOK). - JSON(types.ContainersPruneReport{}) + JSON(container.PruneReport{}) gock.New(utils.Docker.DaemonHost()). Post("/v" + utils.Docker.ClientVersion() + "/networks/prune"). Reply(http.StatusOK). - JSON(types.NetworksPruneReport{}) + JSON(network.PruneReport{}) gock.New(utils.Docker.DaemonHost()). Get("/v" + utils.Docker.ClientVersion() + "/volumes"). Reply(http.StatusOK). 
@@ -45,7 +49,87 @@ func TestStopCommand(t *testing.T) { Name: utils.DbId, }}}) // Run test - err := Run(context.Background(), true, "", fsys) + err := Run(context.Background(), true, "", false, fsys) + // Check error + assert.NoError(t, err) + assert.Empty(t, apitest.ListUnmatchedRequests()) + }) + + t.Run("stops all instances when --all flag is used", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + require.NoError(t, utils.WriteConfig(fsys, false)) + // Setup mock docker + require.NoError(t, apitest.MockDocker(utils.Docker)) + defer gock.OffAll() + + projects := []string{"project1", "project2"} + + // Mock initial ContainerList for all containers + gock.New(utils.Docker.DaemonHost()). + Get("/v"+utils.Docker.ClientVersion()+"/containers/json"). + MatchParam("all", "true"). + Reply(http.StatusOK). + JSON([]types.Container{ + {ID: "container1", Labels: map[string]string{utils.CliProjectLabel: "project1"}}, + {ID: "container2", Labels: map[string]string{utils.CliProjectLabel: "project2"}}, + }) + + // Mock initial VolumeList + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/volumes"). + Reply(http.StatusOK). + JSON(volume.ListResponse{ + Volumes: []*volume.Volume{ + {Name: "volume1", Labels: map[string]string{utils.CliProjectLabel: "project1"}}, + {Name: "volume2", Labels: map[string]string{utils.CliProjectLabel: "project2"}}, + }, + }) + + // Mock stopOneProject for each project + for _, projectId := range projects { + // Mock ContainerList for each project + gock.New(utils.Docker.DaemonHost()). + Get("/v"+utils.Docker.ClientVersion()+"/containers/json"). + MatchParam("all", "1"). + MatchParam("filters", fmt.Sprintf(`{"label":{"com.supabase.cli.project=%s":true}}`, projectId)). + Reply(http.StatusOK). + JSON([]types.Container{{ID: "container-" + projectId, State: "running"}}) + + // Mock container stop + gock.New(utils.Docker.DaemonHost()). 
+ Post("/v" + utils.Docker.ClientVersion() + "/containers/container-" + projectId + "/stop"). + Reply(http.StatusOK) + + gock.New(utils.Docker.DaemonHost()). + Post("/v" + utils.Docker.ClientVersion() + "/containers/prune"). + Reply(http.StatusOK). + JSON(container.PruneReport{}) + gock.New(utils.Docker.DaemonHost()). + Post("/v" + utils.Docker.ClientVersion() + "/networks/prune"). + Reply(http.StatusOK). + JSON(network.PruneReport{}) + gock.New(utils.Docker.DaemonHost()). + Get("/v"+utils.Docker.ClientVersion()+"/volumes"). + MatchParam("filters", fmt.Sprintf(`{"label":{"com.supabase.cli.project=%s":true}}`, projectId)). + Reply(http.StatusOK). + JSON(volume.ListResponse{Volumes: []*volume.Volume{{Name: "volume-" + projectId}}}) + } + + // Mock final ContainerList to verify all containers are stopped + gock.New(utils.Docker.DaemonHost()). + Get("/v"+utils.Docker.ClientVersion()+"/containers/json"). + MatchParam("all", "true"). + Reply(http.StatusOK). + JSON([]types.Container{}) + gock.New(utils.Docker.DaemonHost()). + Get("/v" + utils.Docker.ClientVersion() + "/containers/json"). + Reply(http.StatusOK). + JSON([]types.Container{}) + + // Run test + err := Run(context.Background(), true, "", true, fsys) + // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -55,7 +139,7 @@ func TestStopCommand(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() // Run test - err := Run(context.Background(), false, "", fsys) + err := Run(context.Background(), false, "", false, fsys) // Check error assert.ErrorIs(t, err, os.ErrNotExist) }) @@ -71,7 +155,7 @@ func TestStopCommand(t *testing.T) { Get("/v" + utils.Docker.ClientVersion() + "/containers/json"). 
Reply(http.StatusServiceUnavailable) // Run test - err := Run(context.Background(), false, "test", afero.NewReadOnlyFs(fsys)) + err := Run(context.Background(), false, "test", false, afero.NewReadOnlyFs(fsys)) // Check error assert.ErrorContains(t, err, "request returned Service Unavailable for API route and version") assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -94,13 +178,13 @@ func TestStopServices(t *testing.T) { gock.New(utils.Docker.DaemonHost()). Post("/v" + utils.Docker.ClientVersion() + "/containers/prune"). Reply(http.StatusOK). - JSON(types.ContainersPruneReport{}) + JSON(container.PruneReport{}) gock.New(utils.Docker.DaemonHost()). Post("/v" + utils.Docker.ClientVersion() + "/networks/prune"). Reply(http.StatusOK). - JSON(types.NetworksPruneReport{}) + JSON(network.PruneReport{}) // Run test - err := stop(context.Background(), true, io.Discard) + err := stop(context.Background(), true, io.Discard, utils.Config.ProjectId) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -117,7 +201,25 @@ func TestStopServices(t *testing.T) { defer gock.OffAll() apitest.MockDockerStop(utils.Docker) // Run test - err := stop(context.Background(), false, io.Discard) + err := stop(context.Background(), false, io.Discard, utils.Config.ProjectId) + // Check error + assert.NoError(t, err) + assert.Empty(t, apitest.ListUnmatchedRequests()) + }) + + t.Run("skips all filter when removing data volumes with Docker version pre-v1.42", func(t *testing.T) { + utils.DbId = "test-db" + utils.ConfigId = "test-config" + utils.StorageId = "test-storage" + utils.EdgeRuntimeId = "test-functions" + utils.InbucketId = "test-inbucket" + // Setup mock docker + require.NoError(t, apitest.MockDocker(utils.Docker)) + require.NoError(t, client.WithVersion("1.41")(utils.Docker)) + defer gock.OffAll() + apitest.MockDockerStop(utils.Docker) + // Run test + err := stop(context.Background(), false, io.Discard, utils.Config.ProjectId) // Check error 
assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -135,7 +237,7 @@ func TestStopServices(t *testing.T) { Post("/v" + utils.Docker.ClientVersion() + "/containers/prune"). ReplyError(errors.New("network error")) // Run test - err := stop(context.Background(), true, io.Discard) + err := stop(context.Background(), true, io.Discard, utils.Config.ProjectId) // Check error assert.ErrorContains(t, err, "network error") assert.Empty(t, apitest.ListUnmatchedRequests()) diff --git a/internal/storage/client/scheme.go b/internal/storage/client/scheme.go index 768c29036..bb3254f5b 100644 --- a/internal/storage/client/scheme.go +++ b/internal/storage/client/scheme.go @@ -16,7 +16,7 @@ func ParseStorageURL(objectURL string) (string, error) { if err != nil { return "", errors.Errorf("failed to parse storage url: %w", err) } - if strings.ToLower(parsed.Scheme) != STORAGE_SCHEME || len(parsed.Path) == 0 || len(parsed.Host) > 0 { + if !strings.EqualFold(parsed.Scheme, STORAGE_SCHEME) || len(parsed.Path) == 0 || len(parsed.Host) > 0 { return "", errors.New(ErrInvalidURL) } return parsed.Path, nil diff --git a/internal/storage/cp/cp.go b/internal/storage/cp/cp.go index 3676e0348..4ead44639 100644 --- a/internal/storage/cp/cp.go +++ b/internal/storage/cp/cp.go @@ -35,7 +35,7 @@ func Run(ctx context.Context, src, dst string, recursive bool, maxJobs uint, fsy if err != nil { return err } - if strings.ToLower(srcParsed.Scheme) == client.STORAGE_SCHEME && dstParsed.Scheme == "" { + if strings.EqualFold(srcParsed.Scheme, client.STORAGE_SCHEME) && dstParsed.Scheme == "" { localPath := dst if !filepath.IsAbs(dst) { localPath = filepath.Join(utils.CurrentDirAbs, dst) @@ -44,7 +44,7 @@ func Run(ctx context.Context, src, dst string, recursive bool, maxJobs uint, fsy return DownloadStorageObjectAll(ctx, api, srcParsed.Path, localPath, maxJobs, fsys) } return api.DownloadObject(ctx, srcParsed.Path, localPath, fsys) - } else if srcParsed.Scheme == "" && 
strings.ToLower(dstParsed.Scheme) == client.STORAGE_SCHEME { + } else if srcParsed.Scheme == "" && strings.EqualFold(dstParsed.Scheme, client.STORAGE_SCHEME) { localPath := src if !filepath.IsAbs(localPath) { localPath = filepath.Join(utils.CurrentDirAbs, localPath) @@ -52,8 +52,8 @@ func Run(ctx context.Context, src, dst string, recursive bool, maxJobs uint, fsy if recursive { return UploadStorageObjectAll(ctx, api, dstParsed.Path, localPath, maxJobs, fsys, opts...) } - return api.UploadObject(ctx, dstParsed.Path, src, fsys, opts...) - } else if strings.ToLower(srcParsed.Scheme) == client.STORAGE_SCHEME && strings.ToLower(dstParsed.Scheme) == client.STORAGE_SCHEME { + return api.UploadObject(ctx, dstParsed.Path, src, utils.NewRootFS(fsys), opts...) + } else if strings.EqualFold(srcParsed.Scheme, client.STORAGE_SCHEME) && strings.EqualFold(dstParsed.Scheme, client.STORAGE_SCHEME) { return errors.New("Copying between buckets is not supported") } utils.CmdSuggestion = fmt.Sprintf("Run %s to copy between local directories.", utils.Aqua("cp -r ")) @@ -148,7 +148,7 @@ func UploadStorageObjectAll(ctx context.Context, api storage.StorageAPI, remoteP } fmt.Fprintln(os.Stderr, "Uploading:", filePath, "=>", dstPath) job := func() error { - err := api.UploadObject(ctx, dstPath, filePath, fsys, opts...) + err := api.UploadObject(ctx, dstPath, filePath, utils.NewRootFS(fsys), opts...) if err != nil && strings.Contains(err.Error(), `"error":"Bucket not found"`) { // Retry after creating bucket if bucket, prefix := client.SplitBucketPrefix(dstPath); len(prefix) > 0 { @@ -161,7 +161,7 @@ func UploadStorageObjectAll(ctx context.Context, api storage.StorageAPI, remoteP if _, err := api.CreateBucket(ctx, body); err != nil { return err } - err = api.UploadObject(ctx, dstPath, filePath, fsys, opts...) + err = api.UploadObject(ctx, dstPath, filePath, utils.NewRootFS(fsys), opts...) 
} } return err diff --git a/internal/storage/cp/cp_test.go b/internal/storage/cp/cp_test.go index fe988c21c..75a0cf3cd 100644 --- a/internal/storage/cp/cp_test.go +++ b/internal/storage/cp/cp_test.go @@ -14,16 +14,17 @@ import ( "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" "github.com/supabase/cli/pkg/fetcher" "github.com/supabase/cli/pkg/storage" ) var mockFile = storage.ObjectResponse{ Name: "abstract.pdf", - Id: utils.Ptr("9b7f9f48-17a6-4ca8-b14a-39b0205a63e9"), - UpdatedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), - CreatedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), - LastAccessedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), + Id: cast.Ptr("9b7f9f48-17a6-4ca8-b14a-39b0205a63e9"), + UpdatedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), + CreatedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), + LastAccessedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), Metadata: &storage.ObjectMetadata{ ETag: `"887ea9be3c68e6f2fca7fd2d7c77d8fe"`, Size: 82702, diff --git a/internal/storage/ls/ls_test.go b/internal/storage/ls/ls_test.go index f1682759c..e0e2cd207 100644 --- a/internal/storage/ls/ls_test.go +++ b/internal/storage/ls/ls_test.go @@ -14,16 +14,17 @@ import ( "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" "github.com/supabase/cli/pkg/fetcher" "github.com/supabase/cli/pkg/storage" ) var mockFile = storage.ObjectResponse{ Name: "abstract.pdf", - Id: utils.Ptr("9b7f9f48-17a6-4ca8-b14a-39b0205a63e9"), - UpdatedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), - CreatedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), - LastAccessedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), + Id: cast.Ptr("9b7f9f48-17a6-4ca8-b14a-39b0205a63e9"), + UpdatedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), + CreatedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), + LastAccessedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), 
Metadata: &storage.ObjectMetadata{ ETag: `"887ea9be3c68e6f2fca7fd2d7c77d8fe"`, Size: 82702, diff --git a/internal/storage/mv/mv_test.go b/internal/storage/mv/mv_test.go index 2cb1c57e5..fd8ecfbcc 100644 --- a/internal/storage/mv/mv_test.go +++ b/internal/storage/mv/mv_test.go @@ -12,16 +12,17 @@ import ( "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" "github.com/supabase/cli/pkg/fetcher" "github.com/supabase/cli/pkg/storage" ) var mockFile = storage.ObjectResponse{ Name: "abstract.pdf", - Id: utils.Ptr("9b7f9f48-17a6-4ca8-b14a-39b0205a63e9"), - UpdatedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), - CreatedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), - LastAccessedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), + Id: cast.Ptr("9b7f9f48-17a6-4ca8-b14a-39b0205a63e9"), + UpdatedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), + CreatedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), + LastAccessedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), Metadata: &storage.ObjectMetadata{ ETag: `"887ea9be3c68e6f2fca7fd2d7c77d8fe"`, Size: 82702, diff --git a/internal/storage/rm/rm_test.go b/internal/storage/rm/rm_test.go index 6032c5b9b..46d204cf0 100644 --- a/internal/storage/rm/rm_test.go +++ b/internal/storage/rm/rm_test.go @@ -13,16 +13,17 @@ import ( "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" "github.com/supabase/cli/pkg/fetcher" "github.com/supabase/cli/pkg/storage" ) var mockFile = storage.ObjectResponse{ Name: "abstract.pdf", - Id: utils.Ptr("9b7f9f48-17a6-4ca8-b14a-39b0205a63e9"), - UpdatedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), - CreatedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), - LastAccessedAt: utils.Ptr("2023-10-13T18:08:22.068Z"), + Id: cast.Ptr("9b7f9f48-17a6-4ca8-b14a-39b0205a63e9"), + UpdatedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), + CreatedAt: 
cast.Ptr("2023-10-13T18:08:22.068Z"), + LastAccessedAt: cast.Ptr("2023-10-13T18:08:22.068Z"), Metadata: &storage.ObjectMetadata{ ETag: `"887ea9be3c68e6f2fca7fd2d7c77d8fe"`, Size: 82702, diff --git a/internal/testing/apitest/docker.go b/internal/testing/apitest/docker.go index 37f5549ee..556756dfd 100644 --- a/internal/testing/apitest/docker.go +++ b/internal/testing/apitest/docker.go @@ -8,9 +8,12 @@ import ( "github.com/docker/docker/api" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/network" + "github.com/docker/docker/api/types/versions" "github.com/docker/docker/api/types/volume" "github.com/docker/docker/client" "github.com/docker/docker/pkg/stdcopy" + "github.com/go-errors/errors" "github.com/h2non/gock" ) @@ -38,7 +41,7 @@ func MockDockerStart(docker *client.Client, image, containerID string) { gock.New(docker.DaemonHost()). Post("/v" + docker.ClientVersion() + "/networks/create"). Reply(http.StatusCreated). - JSON(types.NetworkCreateResponse{}) + JSON(network.CreateResponse{}) gock.New(docker.DaemonHost()). Post("/v" + docker.ClientVersion() + "/volumes/create"). Persist(). @@ -62,15 +65,21 @@ func MockDockerStop(docker *client.Client) { gock.New(docker.DaemonHost()). Post("/v" + docker.ClientVersion() + "/containers/prune"). Reply(http.StatusOK). - JSON(types.ContainersPruneReport{}) + JSON(container.PruneReport{}) + if !versions.GreaterThanOrEqualTo(docker.ClientVersion(), "1.42") { + gock.New(docker.DaemonHost()). + Post("/v"+docker.ClientVersion()+"/volumes/prune"). + MatchParam("filters", `"all":{"true":true}`). + ReplyError(errors.New(`failed to parse filters for all=true&label=com.supabase.cli.project%3Dtest: "all" is an invalid volume filter`)) + } gock.New(docker.DaemonHost()). Post("/v" + docker.ClientVersion() + "/volumes/prune"). Reply(http.StatusOK). - JSON(types.VolumesPruneReport{}) + JSON(volume.PruneReport{}) gock.New(docker.DaemonHost()). 
Post("/v" + docker.ClientVersion() + "/networks/prune"). Reply(http.StatusOK). - JSON(types.NetworksPruneReport{}) + JSON(network.PruneReport{}) } // Ref: internal/utils/docker.go::DockerRunOnce diff --git a/internal/testing/helper/history.go b/internal/testing/helper/history.go index b73d267e8..95c846b7a 100644 --- a/internal/testing/helper/history.go +++ b/internal/testing/helper/history.go @@ -17,3 +17,12 @@ func MockMigrationHistory(conn *pgtest.MockConn) *pgtest.MockConn { Reply("ALTER TABLE") return conn } + +func MockSeedHistory(conn *pgtest.MockConn) *pgtest.MockConn { + conn.Query(migration.SET_LOCK_TIMEOUT). + Query(migration.CREATE_VERSION_SCHEMA). + Reply("CREATE SCHEMA"). + Query(migration.CREATE_SEED_TABLE). + Reply("CREATE TABLE") + return conn +} diff --git a/internal/unlink/unlink.go b/internal/unlink/unlink.go index aa0f76871..b59f4d1f0 100644 --- a/internal/unlink/unlink.go +++ b/internal/unlink/unlink.go @@ -9,16 +9,15 @@ import ( "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/credentials" - "github.com/supabase/cli/internal/utils/flags" "github.com/zalando/go-keyring" ) func Run(ctx context.Context, fsys afero.Fs) error { - projectRef, err := flags.LoadProjectRef(fsys) - if err != nil { - return err - } - if err := Unlink(projectRef, fsys); err != nil { + if projectRef, err := afero.ReadFile(fsys, utils.ProjectRefPath); errors.Is(err, os.ErrNotExist) { + return errors.New(utils.ErrNotLinked) + } else if err != nil { + return errors.Errorf("failed to load project ref: %w", err) + } else if err := Unlink(string(projectRef), fsys); err != nil { return err } fmt.Fprintln(os.Stdout, "Finished "+utils.Aqua("supabase unlink")+".") @@ -34,7 +33,7 @@ func Unlink(projectRef string, fsys afero.Fs) error { allErrors = append(allErrors, wrapped) } // Remove linked credentials - if err := credentials.Delete(projectRef); err != nil && + if err := credentials.StoreProvider.Delete(projectRef); err != 
nil && !errors.Is(err, credentials.ErrNotSupported) && !errors.Is(err, keyring.ErrNotFound) { allErrors = append(allErrors, err) diff --git a/internal/unlink/unlink_test.go b/internal/unlink/unlink_test.go index e8bd284b6..9b526e109 100644 --- a/internal/unlink/unlink_test.go +++ b/internal/unlink/unlink_test.go @@ -23,7 +23,7 @@ func TestUnlinkCommand(t *testing.T) { fsys := afero.NewMemMapFs() require.NoError(t, afero.WriteFile(fsys, utils.ProjectRefPath, []byte(project), 0644)) // Save database password - require.NoError(t, credentials.Set(project, "test")) + require.NoError(t, credentials.StoreProvider.Set(project, "test")) // Run test err := Run(context.Background(), fsys) // Check error @@ -33,7 +33,7 @@ func TestUnlinkCommand(t *testing.T) { assert.NoError(t, err) assert.False(t, exists) // Check credentials does not exist - _, err = credentials.Get(project) + _, err = credentials.StoreProvider.Get(project) assert.ErrorIs(t, err, keyring.ErrNotFound) }) diff --git a/internal/utils/access_token.go b/internal/utils/access_token.go index c94b9de83..f7ef90a01 100644 --- a/internal/utils/access_token.go +++ b/internal/utils/access_token.go @@ -41,7 +41,7 @@ func loadAccessToken(fsys afero.Fs) (string, error) { return accessToken, nil } // Load from native credentials store - if accessToken, err := credentials.Get(AccessTokenKey); err == nil { + if accessToken, err := credentials.StoreProvider.Get(AccessTokenKey); err == nil { return accessToken, nil } // Fallback to token file @@ -68,7 +68,7 @@ func SaveAccessToken(accessToken string, fsys afero.Fs) error { return errors.New(ErrInvalidToken) } // Save to native credentials store - if err := credentials.Set(AccessTokenKey, accessToken); err == nil { + if err := credentials.StoreProvider.Set(AccessTokenKey, accessToken); err == nil { return nil } // Fallback to token file @@ -94,13 +94,13 @@ func DeleteAccessToken(fsys afero.Fs) error { if err := fallbackDeleteToken(fsys); err == nil { // Typically user system 
should only have either token file or keyring. // But we delete from both just in case. - _ = credentials.Delete(AccessTokenKey) + _ = credentials.StoreProvider.Delete(AccessTokenKey) return nil } else if !errors.Is(err, os.ErrNotExist) { return err } // Fallback not found, delete from native credentials store - err := credentials.Delete(AccessTokenKey) + err := credentials.StoreProvider.Delete(AccessTokenKey) if errors.Is(err, credentials.ErrNotSupported) || errors.Is(err, keyring.ErrNotFound) { return errors.New(ErrNotLoggedIn) } else if err != nil { diff --git a/internal/utils/access_token_test.go b/internal/utils/access_token_test.go index 846158487..f31988876 100644 --- a/internal/utils/access_token_test.go +++ b/internal/utils/access_token_test.go @@ -165,7 +165,7 @@ func TestSaveTokenFallback(t *testing.T) { func TestDeleteToken(t *testing.T) { t.Run("deletes both keyring and fallback", func(t *testing.T) { token := string(apitest.RandomAccessToken(t)) - require.NoError(t, credentials.Set(AccessTokenKey, token)) + require.NoError(t, credentials.StoreProvider.Set(AccessTokenKey, token)) // Setup in-memory fs fsys := afero.NewMemMapFs() require.NoError(t, fallbackSaveToken(token, fsys)) @@ -173,7 +173,7 @@ func TestDeleteToken(t *testing.T) { err := DeleteAccessToken(fsys) // Check error assert.NoError(t, err) - _, err = credentials.Get(AccessTokenKey) + _, err = credentials.StoreProvider.Get(AccessTokenKey) assert.ErrorIs(t, err, keyring.ErrNotFound) path, err := getAccessTokenPath() assert.NoError(t, err) diff --git a/internal/utils/api.go b/internal/utils/api.go index 3dc63d30d..32b199b69 100644 --- a/internal/utils/api.go +++ b/internal/utils/api.go @@ -16,6 +16,7 @@ import ( "github.com/spf13/viper" "github.com/supabase/cli/internal/utils/cloudflare" supabase "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" ) const ( @@ -60,7 +61,7 @@ func FallbackLookupIP(ctx context.Context, host string) ([]string, error) { func ResolveCNAME(ctx 
context.Context, host string) (string, error) { // Ref: https://developers.cloudflare.com/1.1.1.1/encryption/dns-over-https/make-api-requests/dns-json cf := cloudflare.NewCloudflareAPI() - data, err := cf.DNSQuery(ctx, cloudflare.DNSParams{Name: host, Type: Ptr(cloudflare.TypeCNAME)}) + data, err := cf.DNSQuery(ctx, cloudflare.DNSParams{Name: host, Type: cast.Ptr(cloudflare.TypeCNAME)}) if err != nil { return "", err } @@ -216,41 +217,6 @@ var RegionMap = map[string]string{ "us-west-2": "West US (Oregon)", } -var FlyRegions = map[string]string{ - "ams": "Amsterdam, Netherlands", - "arn": "Stockholm, Sweden", - "bog": "Bogotá, Colombia", - "bos": "Boston, Massachusetts (US)", - "cdg": "Paris, France", - "den": "Denver, Colorado (US)", - "dfw": "Dallas, Texas (US", - "ewr": "Secaucus, NJ (US)", - "fra": "Frankfurt, Germany", - "gdl": "Guadalajara, Mexico", - "gig": "Rio de Janeiro, Brazil", - "gru": "Sao Paulo, Brazil", - "hkg": "Hong Kong, Hong Kong", - "iad": "Ashburn, Virginia (US", - "jnb": "Johannesburg, South Africa", - "lax": "Los Angeles, California (US", - "lhr": "London, United Kingdom", - "maa": "Chennai (Madras), India", - "mad": "Madrid, Spain", - "mia": "Miami, Florida (US)", - "nrt": "Tokyo, Japan", - "ord": "Chicago, Illinois (US", - "otp": "Bucharest, Romania", - "qro": "Querétaro, Mexico", - "scl": "Santiago, Chile", - "sea": "Seattle, Washington (US", - "sin": "Singapore, Singapore", - "sjc": "San Jose, California (US", - "syd": "Sydney, Australia", - "waw": "Warsaw, Poland", - "yul": "Montreal, Canada", - "yyz": "Toronto, Canada", -} - func GetSupabaseAPIHost() string { apiHost := viper.GetString("INTERNAL_API_HOST") if apiHost == "" { diff --git a/internal/utils/config.go b/internal/utils/config.go index 3f89dc75c..06d42d443 100644 --- a/internal/utils/config.go +++ b/internal/utils/config.go @@ -2,7 +2,6 @@ package utils import ( _ "embed" - "fmt" "io/fs" "net" "net/url" @@ -101,17 +100,6 @@ func GetDockerIds() []string { var Config = 
config.NewConfig(config.WithHostname(GetHostname())) -func LoadConfigFS(fsys afero.Fs) error { - if err := Config.Load("", NewRootFS(fsys)); err != nil { - if errors.Is(err, os.ErrNotExist) { - CmdSuggestion = fmt.Sprintf("Have you set up the project with %s?", Aqua("supabase init")) - } - return err - } - UpdateDockerIds() - return nil -} - // Adapts fs.FS to support absolute paths type rootFS struct { fsys afero.Fs diff --git a/internal/utils/console.go b/internal/utils/console.go index dfd014afc..85bebdc1e 100644 --- a/internal/utils/console.go +++ b/internal/utils/console.go @@ -10,6 +10,7 @@ import ( "time" "github.com/go-errors/errors" + "github.com/supabase/cli/pkg/cast" "golang.org/x/term" ) @@ -78,10 +79,10 @@ func (c *Console) PromptYesNo(ctx context.Context, label string, def bool) (bool func parseYesNo(s string) *bool { s = strings.ToLower(s) if s == "y" || s == "yes" { - return Ptr(true) + return cast.Ptr(true) } if s == "n" || s == "no" { - return Ptr(false) + return cast.Ptr(false) } return nil } diff --git a/internal/utils/console_test.go b/internal/utils/console_test.go index 19857ee11..ffc9532e7 100644 --- a/internal/utils/console_test.go +++ b/internal/utils/console_test.go @@ -2,11 +2,9 @@ package utils import ( "context" - "os" "testing" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" "github.com/supabase/cli/internal/testing/fstest" ) @@ -34,15 +32,7 @@ func TestPromptYesNo(t *testing.T) { func TestPromptText(t *testing.T) { t.Run("defaults on timeout", func(t *testing.T) { - // Setup stdin - r, _, err := os.Pipe() - require.NoError(t, err) - // Replace stdin - oldStdin := os.Stdin - defer func() { - os.Stdin = oldStdin - }() - os.Stdin = r + t.Cleanup(fstest.MockStdin(t, "")) c := NewConsole() // Run test val, err := c.PromptText(context.Background(), "test") diff --git a/internal/utils/credentials/keyring_test.go b/internal/utils/credentials/keyring_test.go new file mode 100644 index 000000000..4f450078f --- 
/dev/null +++ b/internal/utils/credentials/keyring_test.go @@ -0,0 +1,28 @@ +package credentials + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/zalando/go-keyring" +) + +func TestDeleteAll(t *testing.T) { + service := "test-cli" + // Nothing to delete + err := keyring.DeleteAll(service) + assert.NoError(t, err) + // Setup 2 items + err = keyring.Set(service, "key1", "value") + assert.NoError(t, err) + err = keyring.Set(service, "key2", "value") + assert.NoError(t, err) + // Delete all items + err = keyring.DeleteAll(service) + assert.NoError(t, err) + // Check items are gone + _, err = keyring.Get(service, "key1") + assert.ErrorIs(t, err, keyring.ErrNotFound) + _, err = keyring.Get(service, "key2") + assert.ErrorIs(t, err, keyring.ErrNotFound) +} diff --git a/internal/utils/credentials/store.go b/internal/utils/credentials/store.go index 32a2abd24..d7bf9d2b2 100644 --- a/internal/utils/credentials/store.go +++ b/internal/utils/credentials/store.go @@ -13,8 +13,19 @@ const namespace = "Supabase CLI" var ErrNotSupported = errors.New("Keyring is not supported on WSL") -// Retrieves the stored password of a project and username -func Get(project string) (string, error) { +type Store interface { + Get(key string) (string, error) + Set(key, value string) error + Delete(project string) error + DeleteAll() error +} + +type KeyringStore struct{} + +var StoreProvider Store = &KeyringStore{} + +// Get retrieves the password for a project from the keyring. 
+func (ks *KeyringStore) Get(project string) (string, error) { if err := assertKeyringSupported(); err != nil { return "", err } @@ -27,32 +38,45 @@ func Get(project string) (string, error) { return val, nil } -// Stores the password of a project and username -func Set(project, password string) error { +func (ks *KeyringStore) Set(project, password string) error { if err := assertKeyringSupported(); err != nil { return err } - if err := keyring.Set(namespace, project, password); errors.Is(err, exec.ErrNotFound) { - return errors.New(ErrNotSupported) - } else if err != nil { + if err := keyring.Set(namespace, project, password); err != nil { + if errors.Is(err, exec.ErrNotFound) { + return ErrNotSupported + } return errors.Errorf("failed to set credentials: %w", err) } return nil } -// Erases the stored password of a project and username -func Delete(project string) error { +func (ks *KeyringStore) Delete(project string) error { if err := assertKeyringSupported(); err != nil { return err } - if err := keyring.Delete(namespace, project); errors.Is(err, exec.ErrNotFound) { - return errors.New(ErrNotSupported) - } else if err != nil { + if err := keyring.Delete(namespace, project); err != nil { + if errors.Is(err, exec.ErrNotFound) { + return ErrNotSupported + } return errors.Errorf("failed to delete credentials: %w", err) } return nil } +func (ks *KeyringStore) DeleteAll() error { + if err := assertKeyringSupported(); err != nil { + return err + } + if err := keyring.DeleteAll(namespace); err != nil { + if errors.Is(err, exec.ErrNotFound) { + return ErrNotSupported + } + return errors.Errorf("failed to delete all credentials in %s: %w", namespace, err) + } + return nil +} + func assertKeyringSupported() error { // Suggested check: https://github.com/microsoft/WSL/issues/423 if f, err := os.ReadFile("/proc/sys/kernel/osrelease"); err == nil { diff --git a/internal/utils/credentials/store_mock.go b/internal/utils/credentials/store_mock.go new file mode 100644 index 
000000000..32715fc37 --- /dev/null +++ b/internal/utils/credentials/store_mock.go @@ -0,0 +1,68 @@ +package credentials + +import ( + "github.com/zalando/go-keyring" +) + +type mockProvider struct { + mockStore map[string]map[string]string + mockError error +} + +// Get retrieves the password for a project from the mock store. +func (m *mockProvider) Get(project string) (string, error) { + if m.mockError != nil { + return "", m.mockError + } + if pass, ok := m.mockStore[namespace][project]; ok { + return pass, nil + } + return "", keyring.ErrNotFound +} + +// Set stores the password for a project in the mock store. +func (m *mockProvider) Set(project, password string) error { + if m.mockError != nil { + return m.mockError + } + if m.mockStore == nil { + m.mockStore = make(map[string]map[string]string) + } + if m.mockStore[namespace] == nil { + m.mockStore[namespace] = make(map[string]string) + } + m.mockStore[namespace][project] = password + return nil +} + +// Delete removes the password for a project from the mock store. +func (m *mockProvider) Delete(project string) error { + if m.mockError != nil { + return m.mockError + } + if _, ok := m.mockStore[namespace][project]; ok { + delete(m.mockStore[namespace], project) + return nil + } + return keyring.ErrNotFound +} + +// DeleteAll removes all passwords from the mock store. 
+func (m *mockProvider) DeleteAll() error { + if m.mockError != nil { + return m.mockError + } + delete(m.mockStore, namespace) + return nil +} + +func MockInit() func() { + oldStore := StoreProvider + teardown := func() { + StoreProvider = oldStore + } + StoreProvider = &mockProvider{ + mockStore: map[string]map[string]string{}, + } + return teardown +} diff --git a/internal/utils/deno.go b/internal/utils/deno.go index 7a6006202..978f0bb63 100644 --- a/internal/utils/deno.go +++ b/internal/utils/deno.go @@ -19,6 +19,7 @@ import ( "github.com/go-errors/errors" "github.com/spf13/afero" + "github.com/tidwall/jsonc" ) var ( @@ -215,13 +216,13 @@ type ImportMap struct { } func NewImportMap(absJsonPath string, fsys afero.Fs) (*ImportMap, error) { - contents, err := fsys.Open(absJsonPath) + data, err := afero.ReadFile(fsys, absJsonPath) if err != nil { return nil, errors.Errorf("failed to load import map: %w", err) } - defer contents.Close() + data = jsonc.ToJSONInPlace(data) result := ImportMap{} - decoder := json.NewDecoder(contents) + decoder := json.NewDecoder(bytes.NewReader(data)) if err := decoder.Decode(&result); err != nil { return nil, errors.Errorf("failed to parse import map: %w", err) } diff --git a/internal/utils/docker.go b/internal/utils/docker.go index 8a44c75d3..84911d07f 100644 --- a/internal/utils/docker.go +++ b/internal/utils/docker.go @@ -20,12 +20,12 @@ import ( dockerConfig "github.com/docker/cli/cli/config" dockerFlags "github.com/docker/cli/cli/flags" "github.com/docker/cli/cli/streams" - "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/filters" "github.com/docker/docker/api/types/image" "github.com/docker/docker/api/types/mount" "github.com/docker/docker/api/types/network" + "github.com/docker/docker/api/types/versions" "github.com/docker/docker/api/types/volume" "github.com/docker/docker/client" "github.com/docker/docker/errdefs" @@ -65,10 +65,7 @@ func 
DockerNetworkCreateIfNotExists(ctx context.Context, mode container.NetworkM if !isUserDefined(mode) { return nil } - _, err := Docker.NetworkCreate(ctx, mode.NetworkName(), types.NetworkCreate{ - CheckDuplicate: true, - Labels: labels, - }) + _, err := Docker.NetworkCreate(ctx, mode.NetworkName(), network.CreateOptions{Labels: labels}) // if error is network already exists, no need to propagate to user if errdefs.IsConflict(err) || errors.Is(err, podman.ErrNetworkExists) { return nil @@ -96,9 +93,9 @@ func WaitAll[T any](containers []T, exec func(container T) error) []error { // NoBackupVolume TODO: encapsulate this state in a class var NoBackupVolume = false -func DockerRemoveAll(ctx context.Context, w io.Writer) error { +func DockerRemoveAll(ctx context.Context, w io.Writer, projectId string) error { fmt.Fprintln(w, "Stopping containers...") - args := CliProjectFilter() + args := CliProjectFilter(projectId) containers, err := Docker.ContainerList(ctx, container.ListOptions{ All: true, Filters: args, @@ -129,10 +126,12 @@ func DockerRemoveAll(ctx context.Context, w io.Writer) error { } // Remove named volumes if NoBackupVolume { - // Since docker engine 25.0.3, all flag is required to include named volumes. - // https://github.com/docker/cli/blob/master/cli/command/volume/prune.go#L76 vargs := args.Clone() - vargs.Add("all", "true") + if versions.GreaterThanOrEqualTo(Docker.ClientVersion(), "1.42") { + // Since docker engine 25.0.3, all flag is required to include named volumes. 
+ // https://github.com/docker/cli/blob/master/cli/command/volume/prune.go#L76 + vargs.Add("all", "true") + } if report, err := Docker.VolumesPrune(ctx, vargs); err != nil { return errors.Errorf("failed to prune volumes: %w", err) } else if viper.GetBool("DEBUG") { @@ -148,9 +147,14 @@ func DockerRemoveAll(ctx context.Context, w io.Writer) error { return nil } -func CliProjectFilter() filters.Args { +func CliProjectFilter(projectId string) filters.Args { + if len(projectId) == 0 { + return filters.NewArgs( + filters.Arg("label", CliProjectLabel), + ) + } return filters.NewArgs( - filters.Arg("label", CliProjectLabel+"="+Config.ProjectId), + filters.Arg("label", CliProjectLabel+"="+projectId), ) } @@ -286,6 +290,9 @@ func DockerStart(ctx context.Context, config container.Config, hostConfig contai // Skip named volume for BitBucket pipeline if os.Getenv("BITBUCKET_CLONE_DIR") != "" { hostConfig.Binds = binds + // Bitbucket doesn't allow for --security-opt option to be set + // https://support.atlassian.com/bitbucket-cloud/docs/run-docker-commands-in-bitbucket-pipelines/#Full-list-of-restricted-commands + hostConfig.SecurityOpt = nil } else { // Create named volumes with labels for _, name := range sources { @@ -410,19 +417,19 @@ func DockerStreamLogsOnce(ctx context.Context, containerId string, stdout, stder } // Exec a command once inside a container, returning stdout and throwing error on non-zero exit code. 
-func DockerExecOnce(ctx context.Context, container string, env []string, cmd []string) (string, error) { +func DockerExecOnce(ctx context.Context, containerId string, env []string, cmd []string) (string, error) { stderr := io.Discard if viper.GetBool("DEBUG") { stderr = os.Stderr } var out bytes.Buffer - err := DockerExecOnceWithStream(ctx, container, "", env, cmd, &out, stderr) + err := DockerExecOnceWithStream(ctx, containerId, "", env, cmd, &out, stderr) return out.String(), err } -func DockerExecOnceWithStream(ctx context.Context, container, workdir string, env, cmd []string, stdout, stderr io.Writer) error { +func DockerExecOnceWithStream(ctx context.Context, containerId, workdir string, env, cmd []string, stdout, stderr io.Writer) error { // Reset shadow database - exec, err := Docker.ContainerExecCreate(ctx, container, types.ExecConfig{ + exec, err := Docker.ContainerExecCreate(ctx, containerId, container.ExecOptions{ Env: env, Cmd: cmd, WorkingDir: workdir, @@ -433,7 +440,7 @@ func DockerExecOnceWithStream(ctx context.Context, container, workdir string, en return errors.Errorf("failed to exec docker create: %w", err) } // Read exec output - resp, err := Docker.ContainerExecAttach(ctx, exec.ID, types.ExecStartCheck{}) + resp, err := Docker.ContainerExecAttach(ctx, exec.ID, container.ExecStartOptions{}) if err != nil { return errors.Errorf("failed to exec docker attach: %w", err) } diff --git a/internal/utils/docker_test.go b/internal/utils/docker_test.go index a3f92bd01..6a0cdf034 100644 --- a/internal/utils/docker_test.go +++ b/internal/utils/docker_test.go @@ -9,6 +9,7 @@ import ( "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/network" "github.com/docker/docker/pkg/jsonmessage" "github.com/docker/docker/pkg/stdcopy" "github.com/h2non/gock" @@ -133,7 +134,7 @@ func TestRunOnce(t *testing.T) { gock.New(Docker.DaemonHost()). Post("/v" + Docker.ClientVersion() + "/networks/create"). 
Reply(http.StatusCreated). - JSON(types.NetworkCreateResponse{}) + JSON(network.CreateResponse{}) gock.New(Docker.DaemonHost()). Post("/v" + Docker.ClientVersion() + "/containers/create"). Reply(http.StatusServiceUnavailable) @@ -155,7 +156,7 @@ func TestRunOnce(t *testing.T) { gock.New(Docker.DaemonHost()). Post("/v" + Docker.ClientVersion() + "/networks/create"). Reply(http.StatusCreated). - JSON(types.NetworkCreateResponse{}) + JSON(network.CreateResponse{}) gock.New(Docker.DaemonHost()). Post("/v" + Docker.ClientVersion() + "/containers/create"). Reply(http.StatusOK). diff --git a/internal/utils/flags/config_path.go b/internal/utils/flags/config_path.go new file mode 100644 index 000000000..d0ed105c7 --- /dev/null +++ b/internal/utils/flags/config_path.go @@ -0,0 +1,22 @@ +package flags + +import ( + "fmt" + "os" + + "github.com/go-errors/errors" + "github.com/spf13/afero" + "github.com/supabase/cli/internal/utils" +) + +func LoadConfig(fsys afero.Fs) error { + utils.Config.ProjectId = ProjectRef + if err := utils.Config.Load("", utils.NewRootFS(fsys)); err != nil { + if errors.Is(err, os.ErrNotExist) { + utils.CmdSuggestion = fmt.Sprintf("Have you set up the project with %s?", utils.Aqua("supabase init")) + } + return err + } + utils.UpdateDockerIds() + return nil +} diff --git a/internal/utils/flags/db_url.go b/internal/utils/flags/db_url.go index c183aa1ad..04565939d 100644 --- a/internal/utils/flags/db_url.go +++ b/internal/utils/flags/db_url.go @@ -50,6 +50,9 @@ func ParseDatabaseConfig(flagSet *pflag.FlagSet, fsys afero.Fs) error { // Update connection config switch connType { case direct: + if err := utils.Config.Load("", utils.NewRootFS(fsys)); err != nil && !errors.Is(err, os.ErrNotExist) { + return err + } if flag := flagSet.Lookup("db-url"); flag != nil { config, err := pgconn.ParseConfig(flag.Value.String()) if err != nil { @@ -58,7 +61,7 @@ func ParseDatabaseConfig(flagSet *pflag.FlagSet, fsys afero.Fs) error { DbConfig = *config } case local: - if 
err := utils.LoadConfigFS(fsys); err != nil { + if err := LoadConfig(fsys); err != nil { return err } // Ignore other PG settings @@ -68,28 +71,26 @@ func ParseDatabaseConfig(flagSet *pflag.FlagSet, fsys afero.Fs) error { DbConfig.Password = utils.Config.Db.Password DbConfig.Database = "postgres" case linked: - if err := utils.LoadConfigFS(fsys); err != nil { + if err := LoadProjectRef(fsys); err != nil { return err } - projectRef, err := LoadProjectRef(fsys) - if err != nil { + if err := LoadConfig(fsys); err != nil { return err } - DbConfig = NewDbConfigWithPassword(projectRef) + DbConfig = NewDbConfigWithPassword(ProjectRef) case proxy: token, err := utils.LoadAccessTokenFS(fsys) if err != nil { return err } - projectRef, err := LoadProjectRef(fsys) - if err != nil { + if err := LoadProjectRef(fsys); err != nil { return err } DbConfig.Host = utils.GetSupabaseAPIHost() DbConfig.Port = 443 DbConfig.User = "postgres" DbConfig.Password = token - DbConfig.Database = projectRef + DbConfig.Database = ProjectRef } return nil } @@ -104,7 +105,7 @@ func getPassword(projectRef string) string { if password := viper.GetString("DB_PASSWORD"); len(password) > 0 { return password } - if password, err := credentials.Get(projectRef); err == nil { + if password, err := credentials.StoreProvider.Get(projectRef); err == nil { return password } resetUrl := fmt.Sprintf("%s/project/%s/settings/database", utils.GetSupabaseDashboardURL(), projectRef) diff --git a/internal/utils/flags/project_ref.go b/internal/utils/flags/project_ref.go index c2d5012bc..c1629c0fa 100644 --- a/internal/utils/flags/project_ref.go +++ b/internal/utils/flags/project_ref.go @@ -6,6 +6,7 @@ import ( "fmt" "os" + tea "github.com/charmbracelet/bubbletea" "github.com/go-errors/errors" "github.com/spf13/afero" "github.com/spf13/viper" @@ -16,15 +17,7 @@ import ( var ProjectRef string func ParseProjectRef(ctx context.Context, fsys afero.Fs) error { - // Flag takes highest precedence - if len(ProjectRef) == 0 { - 
ProjectRef = viper.GetString("PROJECT_ID") - } - if len(ProjectRef) > 0 { - return utils.AssertProjectRefIsValid(ProjectRef) - } - // Followed by linked ref file - if _, err := LoadProjectRef(fsys); !errors.Is(err, utils.ErrNotLinked) { + if err := LoadProjectRef(fsys); !errors.Is(err, utils.ErrNotLinked) { return err } // Prompt as the last resort @@ -34,7 +27,7 @@ func ParseProjectRef(ctx context.Context, fsys afero.Fs) error { return errors.New(utils.ErrNotLinked) } -func PromptProjectRef(ctx context.Context, title string) error { +func PromptProjectRef(ctx context.Context, title string, opts ...tea.ProgramOption) error { resp, err := utils.GetSupabase().V1ListAllProjectsWithResponse(ctx) if err != nil { return errors.Errorf("failed to retrieve projects: %w", err) @@ -49,7 +42,7 @@ func PromptProjectRef(ctx context.Context, title string) error { Details: fmt.Sprintf("name: %s, org: %s, region: %s", project.Name, project.OrganizationId, project.Region), } } - choice, err := utils.PromptChoice(ctx, title, items) + choice, err := utils.PromptChoice(ctx, title, items, opts...) 
if err != nil { return err } @@ -58,16 +51,22 @@ func PromptProjectRef(ctx context.Context, title string) error { return nil } -func LoadProjectRef(fsys afero.Fs) (string, error) { +func LoadProjectRef(fsys afero.Fs) error { + // Flag takes highest precedence + if len(ProjectRef) > 0 { + return utils.AssertProjectRefIsValid(ProjectRef) + } + // Env var takes precedence over ref file + if ProjectRef = viper.GetString("PROJECT_ID"); len(ProjectRef) > 0 { + return utils.AssertProjectRefIsValid(ProjectRef) + } + // Load from local file last projectRefBytes, err := afero.ReadFile(fsys, utils.ProjectRefPath) if errors.Is(err, os.ErrNotExist) { - return "", errors.New(utils.ErrNotLinked) + return errors.New(utils.ErrNotLinked) } else if err != nil { - return "", errors.Errorf("failed to load project ref: %w", err) + return errors.Errorf("failed to load project ref: %w", err) } ProjectRef = string(bytes.TrimSpace(projectRefBytes)) - if err := utils.AssertProjectRefIsValid(ProjectRef); err != nil { - return "", err - } - return ProjectRef, nil + return utils.AssertProjectRefIsValid(ProjectRef) } diff --git a/internal/utils/flags/project_ref_test.go b/internal/utils/flags/project_ref_test.go index 9932d6176..ee6a5374a 100644 --- a/internal/utils/flags/project_ref_test.go +++ b/internal/utils/flags/project_ref_test.go @@ -4,8 +4,10 @@ import ( "context" "net/http" "os" + "strings" "testing" + tea "github.com/charmbracelet/bubbletea" "github.com/go-errors/errors" "github.com/h2non/gock" "github.com/spf13/afero" @@ -69,6 +71,7 @@ func TestProjectPrompt(t *testing.T) { t.Setenv("SUPABASE_ACCESS_TOKEN", string(token)) t.Run("validates prompt input", func(t *testing.T) { + input := tea.WithInput(strings.NewReader("\r")) // Setup mock api defer gock.OffAll() gock.New(utils.DefaultApiHost). 
@@ -80,9 +83,9 @@ func TestProjectPrompt(t *testing.T) { OrganizationId: "test-org", }}) // Run test - err := PromptProjectRef(context.Background(), "") + err := PromptProjectRef(context.Background(), "", input) // Check error - assert.ErrorContains(t, err, "failed to prompt choice:") + assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) }) diff --git a/internal/utils/misc.go b/internal/utils/misc.go index 8118b8213..32a2fdf15 100644 --- a/internal/utils/misc.go +++ b/internal/utils/misc.go @@ -31,17 +31,7 @@ func ShortContainerImageName(imageName string) string { return matches[1] } -const ( - // https://dba.stackexchange.com/a/11895 - // Args: dbname - TerminateDbSqlFmt = ` -SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '%[1]s'; --- Wait for WAL sender to drop replication slot. -DO 'BEGIN WHILE ( - SELECT COUNT(*) FROM pg_replication_slots WHERE database = ''%[1]s'' -) > 0 LOOP END LOOP; END';` - SuggestDebugFlag = "Try rerunning the command with --debug to troubleshoot the error." -) +const SuggestDebugFlag = "Try rerunning the command with --debug to troubleshoot the error." var ( CmdSuggestion string @@ -84,6 +74,7 @@ var ( "graphql", "graphql_public", "net", + "pgmq", "pgsodium", "pgsodium_masks", "pgtle", @@ -148,7 +139,6 @@ var ( FallbackImportMapPath = filepath.Join(FunctionsDir, "import_map.json") FallbackEnvFilePath = filepath.Join(FunctionsDir, ".env") DbTestsDir = filepath.Join(SupabaseDirPath, "tests") - SeedDataPath = filepath.Join(SupabaseDirPath, "seed.sql") CustomRolesPath = filepath.Join(SupabaseDirPath, "roles.sql") ErrNotLinked = errors.Errorf("Cannot find project ref. 
Have you run %s?", Aqua("supabase link")) @@ -296,10 +286,6 @@ func ValidateFunctionSlug(slug string) error { return nil } -func Ptr[T any](v T) *T { - return &v -} - func GetHostname() string { host := Docker.DaemonHost() if parsed, err := client.ParseHostURL(host); err == nil && parsed.Scheme == "tcp" { diff --git a/internal/utils/output.go b/internal/utils/output.go index 4b916c373..6d1a72aa4 100644 --- a/internal/utils/output.go +++ b/internal/utils/output.go @@ -29,6 +29,11 @@ var ( OutputToml, OutputYaml, } + + OutputFormat = EnumFlag{ + Allowed: OutputDefaultAllowed, + Value: OutputPretty, + } ) func EncodeOutput(format string, w io.Writer, value any) error { diff --git a/internal/utils/prompt.go b/internal/utils/prompt.go index 8ff67d97e..5589f49d6 100644 --- a/internal/utils/prompt.go +++ b/internal/utils/prompt.go @@ -103,7 +103,7 @@ func (m model) View() string { } // Prompt user to choose from a list of items, returns the chosen index. -func PromptChoice(ctx context.Context, title string, items []PromptItem) (PromptItem, error) { +func PromptChoice(ctx context.Context, title string, items []PromptItem, opts ...tea.ProgramOption) (PromptItem, error) { // Create list items var listItems []list.Item for _, v := range items { @@ -126,7 +126,7 @@ func PromptChoice(ctx context.Context, title string, items []PromptItem) (Prompt // Create our model ctx, cancel := context.WithCancel(ctx) initial := model{cancel: cancel, list: l} - prog := tea.NewProgram(initial) + prog := tea.NewProgram(initial, opts...) 
state, err := prog.Run() if err != nil { return initial.choice, errors.Errorf("failed to prompt choice: %w", err) diff --git a/internal/utils/release_test.go b/internal/utils/release_test.go index 00b44f49d..25aa920e8 100644 --- a/internal/utils/release_test.go +++ b/internal/utils/release_test.go @@ -10,6 +10,7 @@ import ( "github.com/h2non/gock" "github.com/stretchr/testify/assert" "github.com/supabase/cli/internal/testing/apitest" + "github.com/supabase/cli/pkg/cast" ) func TestLatestRelease(t *testing.T) { @@ -19,7 +20,7 @@ func TestLatestRelease(t *testing.T) { gock.New("https://api.github.com"). Get("/repos/supabase/cli/releases/latest"). Reply(http.StatusOK). - JSON(github.RepositoryRelease{TagName: Ptr("v2")}) + JSON(github.RepositoryRelease{TagName: cast.Ptr("v2")}) // Run test version, err := GetLatestRelease(context.Background()) // Check error diff --git a/internal/utils/tenant/client.go b/internal/utils/tenant/client.go index 9fc86a670..ad0ef6aa7 100644 --- a/internal/utils/tenant/client.go +++ b/internal/utils/tenant/client.go @@ -39,7 +39,7 @@ func NewApiKey(resp []api.ApiKeyResponse) ApiKey { } func GetApiKeys(ctx context.Context, projectRef string) (ApiKey, error) { - resp, err := utils.GetSupabase().V1GetProjectApiKeysWithResponse(ctx, projectRef) + resp, err := utils.GetSupabase().V1GetProjectApiKeysWithResponse(ctx, projectRef, &api.V1GetProjectApiKeysParams{}) if err != nil { return ApiKey{}, errors.Errorf("failed to get api keys: %w", err) } diff --git a/package.json b/package.json index 54a6ae29f..dd3014d22 100644 --- a/package.json +++ b/package.json @@ -21,7 +21,7 @@ "supabase": "bin/supabase" }, "dependencies": { - "bin-links": "^4.0.3", + "bin-links": "^5.0.0", "https-proxy-agent": "^7.0.2", "node-fetch": "^3.3.2", "tar": "7.4.3" diff --git a/pkg/api/client.gen.go b/pkg/api/client.gen.go index 594246049..8a5b498d4 100644 --- a/pkg/api/client.gen.go +++ b/pkg/api/client.gen.go @@ -14,6 +14,7 @@ import ( "strings" 
"github.com/oapi-codegen/runtime" + openapi_types "github.com/oapi-codegen/runtime/types" ) // RequestEditorFn is the function signature for the RequestEditor callback function @@ -100,12 +101,20 @@ type ClientInterface interface { V1UpdateABranchConfig(ctx context.Context, branchId string, body V1UpdateABranchConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1PushABranch request + V1PushABranch(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1ResetABranch request V1ResetABranch(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*http.Response, error) // V1AuthorizeUser request V1AuthorizeUser(ctx context.Context, params *V1AuthorizeUserParams, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1RevokeTokenWithBody request with any body + V1RevokeTokenWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + V1RevokeToken(ctx context.Context, body V1RevokeTokenJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1ExchangeOauthTokenWithBody request with any body V1ExchangeOauthTokenWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -136,8 +145,30 @@ type ClientInterface interface { // V1DeleteAProject request V1DeleteAProject(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1GetProject request + V1GetProject(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetLogs request + GetLogs(ctx context.Context, ref string, params *GetLogsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1GetProjectApiKeys request - V1GetProjectApiKeys(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + V1GetProjectApiKeys(ctx context.Context, ref string, params 
*V1GetProjectApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateApiKeyWithBody request with any body + CreateApiKeyWithBody(ctx context.Context, ref string, params *CreateApiKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateApiKey(ctx context.Context, ref string, params *CreateApiKeyParams, body CreateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteApiKey request + DeleteApiKey(ctx context.Context, ref string, id string, params *DeleteApiKeyParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetApiKey request + GetApiKey(ctx context.Context, ref string, id string, params *GetApiKeyParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateApiKeyWithBody request with any body + UpdateApiKeyWithBody(ctx context.Context, ref string, id string, params *UpdateApiKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateApiKey(ctx context.Context, ref string, id string, params *UpdateApiKeyParams, body UpdateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) // V1DisablePreviewBranching request V1DisablePreviewBranching(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -210,6 +241,14 @@ type ClientInterface interface { V1UpdatePostgresConfig(ctx context.Context, ref string, body V1UpdatePostgresConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1GetStorageConfig request + V1GetStorageConfig(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // V1UpdateStorageConfigWithBody request with any body + V1UpdateStorageConfigWithBody(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + V1UpdateStorageConfig(ctx context.Context, ref string, body 
V1UpdateStorageConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1DeleteHostnameConfig request V1DeleteHostnameConfig(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -246,10 +285,10 @@ type ClientInterface interface { // V1ListAllFunctions request V1ListAllFunctions(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) - // CreateFunctionWithBody request with any body - CreateFunctionWithBody(ctx context.Context, ref string, params *CreateFunctionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1CreateAFunctionWithBody request with any body + V1CreateAFunctionWithBody(ctx context.Context, ref string, params *V1CreateAFunctionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - CreateFunction(ctx context.Context, ref string, params *CreateFunctionParams, body CreateFunctionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + V1CreateAFunction(ctx context.Context, ref string, params *V1CreateAFunctionParams, body V1CreateAFunctionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) // V1DeleteAFunction request V1DeleteAFunction(ctx context.Context, ref string, functionSlug string, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -284,6 +323,9 @@ type ClientInterface interface { V1UpdateNetworkRestrictions(ctx context.Context, ref string, body V1UpdateNetworkRestrictionsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1PauseAProject request + V1PauseAProject(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1GetPgsodiumConfig request V1GetPgsodiumConfig(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -316,6 +358,17 @@ type ClientInterface interface { // V1DisableReadonlyModeTemporarily request 
V1DisableReadonlyModeTemporarily(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1ListAvailableRestoreVersions request + V1ListAvailableRestoreVersions(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // V1RestoreAProjectWithBody request with any body + V1RestoreAProjectWithBody(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + V1RestoreAProject(ctx context.Context, ref string, body V1RestoreAProjectJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // V1CancelAProjectRestoration request + V1CancelAProjectRestoration(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1BulkDeleteSecretsWithBody request with any body V1BulkDeleteSecretsWithBody(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -348,11 +401,11 @@ type ClientInterface interface { V1UpgradePostgresVersion(ctx context.Context, ref string, body V1UpgradePostgresVersionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - // V1GetPostgrestUpgradeEligibility request - V1GetPostgrestUpgradeEligibility(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1GetPostgresUpgradeEligibility request + V1GetPostgresUpgradeEligibility(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) - // V1GetPostgrestUpgradeStatus request - V1GetPostgrestUpgradeStatus(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1GetPostgresUpgradeStatus request + V1GetPostgresUpgradeStatus(ctx context.Context, ref string, params *V1GetPostgresUpgradeStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) // V1DeactivateVanitySubdomainConfig request 
V1DeactivateVanitySubdomainConfig(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -374,7 +427,7 @@ type ClientInterface interface { V1ListAllSnippets(ctx context.Context, params *V1ListAllSnippetsParams, reqEditors ...RequestEditorFn) (*http.Response, error) // V1GetASnippet request - V1GetASnippet(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + V1GetASnippet(ctx context.Context, id openapi_types.UUID, reqEditors ...RequestEditorFn) (*http.Response, error) } func (c *Client) V1DeleteABranch(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*http.Response, error) { @@ -425,6 +478,18 @@ func (c *Client) V1UpdateABranchConfig(ctx context.Context, branchId string, bod return c.Client.Do(req) } +func (c *Client) V1PushABranch(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1PushABranchRequest(c.Server, branchId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) V1ResetABranch(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1ResetABranchRequest(c.Server, branchId) if err != nil { @@ -449,6 +514,30 @@ func (c *Client) V1AuthorizeUser(ctx context.Context, params *V1AuthorizeUserPar return c.Client.Do(req) } +func (c *Client) V1RevokeTokenWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1RevokeTokenRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1RevokeToken(ctx context.Context, body 
V1RevokeTokenJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1RevokeTokenRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) V1ExchangeOauthTokenWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1ExchangeOauthTokenRequestWithBody(c.Server, contentType, body) if err != nil { @@ -581,8 +670,104 @@ func (c *Client) V1DeleteAProject(ctx context.Context, ref string, reqEditors .. return c.Client.Do(req) } -func (c *Client) V1GetProjectApiKeys(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewV1GetProjectApiKeysRequest(c.Server, ref) +func (c *Client) V1GetProject(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1GetProjectRequest(c.Server, ref) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetLogs(ctx context.Context, ref string, params *GetLogsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetLogsRequest(c.Server, ref, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1GetProjectApiKeys(ctx context.Context, ref string, params *V1GetProjectApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1GetProjectApiKeysRequest(c.Server, ref, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + 
return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateApiKeyWithBody(ctx context.Context, ref string, params *CreateApiKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateApiKeyRequestWithBody(c.Server, ref, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateApiKey(ctx context.Context, ref string, params *CreateApiKeyParams, body CreateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateApiKeyRequest(c.Server, ref, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteApiKey(ctx context.Context, ref string, id string, params *DeleteApiKeyParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteApiKeyRequest(c.Server, ref, id, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetApiKey(ctx context.Context, ref string, id string, params *GetApiKeyParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetApiKeyRequest(c.Server, ref, id, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateApiKeyWithBody(ctx context.Context, ref string, id string, params *UpdateApiKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewUpdateApiKeyRequestWithBody(c.Server, ref, id, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateApiKey(ctx context.Context, ref string, id string, params *UpdateApiKeyParams, body UpdateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateApiKeyRequest(c.Server, ref, id, params, body) if err != nil { return nil, err } @@ -905,6 +1090,42 @@ func (c *Client) V1UpdatePostgresConfig(ctx context.Context, ref string, body V1 return c.Client.Do(req) } +func (c *Client) V1GetStorageConfig(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1GetStorageConfigRequest(c.Server, ref) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1UpdateStorageConfigWithBody(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1UpdateStorageConfigRequestWithBody(c.Server, ref, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1UpdateStorageConfig(ctx context.Context, ref string, body V1UpdateStorageConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1UpdateStorageConfigRequest(c.Server, ref, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) V1DeleteHostnameConfig(ctx context.Context, ref 
string, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1DeleteHostnameConfigRequest(c.Server, ref) if err != nil { @@ -1061,8 +1282,8 @@ func (c *Client) V1ListAllFunctions(ctx context.Context, ref string, reqEditors return c.Client.Do(req) } -func (c *Client) CreateFunctionWithBody(ctx context.Context, ref string, params *CreateFunctionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateFunctionRequestWithBody(c.Server, ref, params, contentType, body) +func (c *Client) V1CreateAFunctionWithBody(ctx context.Context, ref string, params *V1CreateAFunctionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1CreateAFunctionRequestWithBody(c.Server, ref, params, contentType, body) if err != nil { return nil, err } @@ -1073,8 +1294,8 @@ func (c *Client) CreateFunctionWithBody(ctx context.Context, ref string, params return c.Client.Do(req) } -func (c *Client) CreateFunction(ctx context.Context, ref string, params *CreateFunctionParams, body CreateFunctionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateFunctionRequest(c.Server, ref, params, body) +func (c *Client) V1CreateAFunction(ctx context.Context, ref string, params *V1CreateAFunctionParams, body V1CreateAFunctionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1CreateAFunctionRequest(c.Server, ref, params, body) if err != nil { return nil, err } @@ -1229,6 +1450,18 @@ func (c *Client) V1UpdateNetworkRestrictions(ctx context.Context, ref string, bo return c.Client.Do(req) } +func (c *Client) V1PauseAProject(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1PauseAProjectRequest(c.Server, ref) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); 
err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) V1GetPgsodiumConfig(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1GetPgsodiumConfigRequest(c.Server, ref) if err != nil { @@ -1373,6 +1606,54 @@ func (c *Client) V1DisableReadonlyModeTemporarily(ctx context.Context, ref strin return c.Client.Do(req) } +func (c *Client) V1ListAvailableRestoreVersions(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1ListAvailableRestoreVersionsRequest(c.Server, ref) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1RestoreAProjectWithBody(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1RestoreAProjectRequestWithBody(c.Server, ref, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1RestoreAProject(ctx context.Context, ref string, body V1RestoreAProjectJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1RestoreAProjectRequest(c.Server, ref, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1CancelAProjectRestoration(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1CancelAProjectRestorationRequest(c.Server, ref) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } 
+ return c.Client.Do(req) +} + func (c *Client) V1BulkDeleteSecretsWithBody(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1BulkDeleteSecretsRequestWithBody(c.Server, ref, contentType, body) if err != nil { @@ -1517,8 +1798,8 @@ func (c *Client) V1UpgradePostgresVersion(ctx context.Context, ref string, body return c.Client.Do(req) } -func (c *Client) V1GetPostgrestUpgradeEligibility(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewV1GetPostgrestUpgradeEligibilityRequest(c.Server, ref) +func (c *Client) V1GetPostgresUpgradeEligibility(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1GetPostgresUpgradeEligibilityRequest(c.Server, ref) if err != nil { return nil, err } @@ -1529,8 +1810,8 @@ func (c *Client) V1GetPostgrestUpgradeEligibility(ctx context.Context, ref strin return c.Client.Do(req) } -func (c *Client) V1GetPostgrestUpgradeStatus(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewV1GetPostgrestUpgradeStatusRequest(c.Server, ref) +func (c *Client) V1GetPostgresUpgradeStatus(ctx context.Context, ref string, params *V1GetPostgresUpgradeStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1GetPostgresUpgradeStatusRequest(c.Server, ref, params) if err != nil { return nil, err } @@ -1625,7 +1906,7 @@ func (c *Client) V1ListAllSnippets(ctx context.Context, params *V1ListAllSnippet return c.Client.Do(req) } -func (c *Client) V1GetASnippet(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { +func (c *Client) V1GetASnippet(ctx context.Context, id openapi_types.UUID, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1GetASnippetRequest(c.Server, id) if err != nil { return nil, err @@ -1752,6 +2033,40 @@ func 
NewV1UpdateABranchConfigRequestWithBody(server string, branchId string, con return req, nil } +// NewV1PushABranchRequest generates requests for V1PushABranch +func NewV1PushABranchRequest(server string, branchId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "branch_id", runtime.ParamLocationPath, branchId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/branches/%s/push", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + // NewV1ResetABranchRequest generates requests for V1ResetABranch func NewV1ResetABranchRequest(server string, branchId string) (*http.Request, error) { var err error @@ -1935,19 +2250,19 @@ func NewV1AuthorizeUserRequest(server string, params *V1AuthorizeUserParams) (*h return req, nil } -// NewV1ExchangeOauthTokenRequestWithFormdataBody calls the generic V1ExchangeOauthToken builder with application/x-www-form-urlencoded body -func NewV1ExchangeOauthTokenRequestWithFormdataBody(server string, body V1ExchangeOauthTokenFormdataRequestBody) (*http.Request, error) { +// NewV1RevokeTokenRequest calls the generic V1RevokeToken builder with application/json body +func NewV1RevokeTokenRequest(server string, body V1RevokeTokenJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader - bodyStr, err := runtime.MarshalForm(body, nil) + buf, err := json.Marshal(body) if err != nil { return nil, err } - bodyReader = strings.NewReader(bodyStr.Encode()) - return NewV1ExchangeOauthTokenRequestWithBody(server, "application/x-www-form-urlencoded", bodyReader) + bodyReader = bytes.NewReader(buf) + 
return NewV1RevokeTokenRequestWithBody(server, "application/json", bodyReader) } -// NewV1ExchangeOauthTokenRequestWithBody generates requests for V1ExchangeOauthToken with any type of body -func NewV1ExchangeOauthTokenRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { +// NewV1RevokeTokenRequestWithBody generates requests for V1RevokeToken with any type of body +func NewV1RevokeTokenRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { var err error serverURL, err := url.Parse(server) @@ -1955,7 +2270,7 @@ func NewV1ExchangeOauthTokenRequestWithBody(server string, contentType string, b return nil, err } - operationPath := fmt.Sprintf("/v1/oauth/token") + operationPath := fmt.Sprintf("/v1/oauth/revoke") if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -1975,12 +2290,52 @@ func NewV1ExchangeOauthTokenRequestWithBody(server string, contentType string, b return req, nil } -// NewV1ListAllOrganizationsRequest generates requests for V1ListAllOrganizations -func NewV1ListAllOrganizationsRequest(server string) (*http.Request, error) { - var err error - - serverURL, err := url.Parse(server) - if err != nil { +// NewV1ExchangeOauthTokenRequestWithFormdataBody calls the generic V1ExchangeOauthToken builder with application/x-www-form-urlencoded body +func NewV1ExchangeOauthTokenRequestWithFormdataBody(server string, body V1ExchangeOauthTokenFormdataRequestBody) (*http.Request, error) { + var bodyReader io.Reader + bodyStr, err := runtime.MarshalForm(body, nil) + if err != nil { + return nil, err + } + bodyReader = strings.NewReader(bodyStr.Encode()) + return NewV1ExchangeOauthTokenRequestWithBody(server, "application/x-www-form-urlencoded", bodyReader) +} + +// NewV1ExchangeOauthTokenRequestWithBody generates requests for V1ExchangeOauthToken with any type of body +func NewV1ExchangeOauthTokenRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, 
error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/oauth/token") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewV1ListAllOrganizationsRequest generates requests for V1ListAllOrganizations +func NewV1ListAllOrganizationsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { return nil, err } @@ -2211,8 +2566,193 @@ func NewV1DeleteAProjectRequest(server string, ref string) (*http.Request, error return req, nil } +// NewV1GetProjectRequest generates requests for V1GetProject +func NewV1GetProjectRequest(server string, ref string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetLogsRequest generates requests for GetLogs +func NewGetLogsRequest(server string, ref string, params *GetLogsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/analytics/endpoints/logs.all", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.IsoTimestampEnd != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "iso_timestamp_end", runtime.ParamLocationQuery, *params.IsoTimestampEnd); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.IsoTimestampStart != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "iso_timestamp_start", runtime.ParamLocationQuery, *params.IsoTimestampStart); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Sql != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sql", runtime.ParamLocationQuery, *params.Sql); err != nil { + return nil, err + } else if parsed, 
err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + // NewV1GetProjectApiKeysRequest generates requests for V1GetProjectApiKeys -func NewV1GetProjectApiKeysRequest(server string, ref string) (*http.Request, error) { +func NewV1GetProjectApiKeysRequest(server string, ref string, params *V1GetProjectApiKeysParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/api-keys", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reveal", runtime.ParamLocationQuery, params.Reveal); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateApiKeyRequest calls the generic CreateApiKey builder with application/json body +func NewCreateApiKeyRequest(server string, ref string, params *CreateApiKeyParams, body CreateApiKeyJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateApiKeyRequestWithBody(server, ref, params, "application/json", bodyReader) +} + +// NewCreateApiKeyRequestWithBody generates requests for CreateApiKey with any type of body +func NewCreateApiKeyRequestWithBody(server string, ref string, params *CreateApiKeyParams, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -2237,6 +2777,144 @@ func NewV1GetProjectApiKeysRequest(server string, ref string) (*http.Request, er return nil, err } + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reveal", runtime.ParamLocationQuery, params.Reveal); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = 
queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteApiKeyRequest generates requests for DeleteApiKey +func NewDeleteApiKeyRequest(server string, ref string, id string, params *DeleteApiKeyParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/api-keys/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reveal", runtime.ParamLocationQuery, params.Reveal); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetApiKeyRequest generates requests for GetApiKey +func NewGetApiKeyRequest(server string, ref string, id string, params *GetApiKeyParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil 
{ + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/api-keys/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reveal", runtime.ParamLocationQuery, params.Reveal); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err @@ -2245,6 +2923,78 @@ func NewV1GetProjectApiKeysRequest(server string, ref string) (*http.Request, er return req, nil } +// NewUpdateApiKeyRequest calls the generic UpdateApiKey builder with application/json body +func NewUpdateApiKeyRequest(server string, ref string, id string, params *UpdateApiKeyParams, body UpdateApiKeyJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateApiKeyRequestWithBody(server, ref, id, params, "application/json", bodyReader) +} + +// NewUpdateApiKeyRequestWithBody generates requests for UpdateApiKey with any type of body +func NewUpdateApiKeyRequestWithBody(server string, ref string, id string, params *UpdateApiKeyParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = 
runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/api-keys/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reveal", runtime.ParamLocationQuery, params.Reveal); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + // NewV1DisablePreviewBranchingRequest generates requests for V1DisablePreviewBranching func NewV1DisablePreviewBranchingRequest(server string, ref string) (*http.Request, error) { var err error @@ -3017,6 +3767,87 @@ func NewV1UpdatePostgresConfigRequestWithBody(server string, ref string, content return req, nil } +// NewV1GetStorageConfigRequest generates requests for V1GetStorageConfig +func NewV1GetStorageConfigRequest(server string, ref string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return 
nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/config/storage", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewV1UpdateStorageConfigRequest calls the generic V1UpdateStorageConfig builder with application/json body +func NewV1UpdateStorageConfigRequest(server string, ref string, body V1UpdateStorageConfigJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewV1UpdateStorageConfigRequestWithBody(server, ref, "application/json", bodyReader) +} + +// NewV1UpdateStorageConfigRequestWithBody generates requests for V1UpdateStorageConfig with any type of body +func NewV1UpdateStorageConfigRequestWithBody(server string, ref string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/config/storage", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + // NewV1DeleteHostnameConfigRequest generates requests for V1DeleteHostnameConfig func NewV1DeleteHostnameConfigRequest(server string, ref string) (*http.Request, error) { var err error @@ -3396,19 +4227,19 @@ func NewV1ListAllFunctionsRequest(server string, ref string) (*http.Request, err return req, nil } -// NewCreateFunctionRequest calls the generic CreateFunction builder with application/json body -func NewCreateFunctionRequest(server string, ref string, params *CreateFunctionParams, body CreateFunctionJSONRequestBody) (*http.Request, error) { +// NewV1CreateAFunctionRequest calls the generic V1CreateAFunction builder with application/json body +func NewV1CreateAFunctionRequest(server string, ref string, params *V1CreateAFunctionParams, body V1CreateAFunctionJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewCreateFunctionRequestWithBody(server, ref, params, "application/json", bodyReader) + return NewV1CreateAFunctionRequestWithBody(server, ref, params, "application/json", bodyReader) } -// NewCreateFunctionRequestWithBody generates requests for CreateFunction with any type of body -func NewCreateFunctionRequestWithBody(server string, ref string, params *CreateFunctionParams, contentType string, body io.Reader) (*http.Request, error) { +// NewV1CreateAFunctionRequestWithBody generates requests for V1CreateAFunction with any type of body +func NewV1CreateAFunctionRequestWithBody(server string, ref string, params *V1CreateAFunctionParams, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -3532,6 
+4363,22 @@ func NewCreateFunctionRequestWithBody(server string, ref string, params *CreateF } + if params.ComputeMultiplier != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "compute_multiplier", runtime.ParamLocationQuery, *params.ComputeMultiplier); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + queryURL.RawQuery = queryValues.Encode() } @@ -3770,6 +4617,22 @@ func NewV1UpdateAFunctionRequestWithBody(server string, ref string, functionSlug } + if params.ComputeMultiplier != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "compute_multiplier", runtime.ParamLocationQuery, *params.ComputeMultiplier); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + queryURL.RawQuery = queryValues.Encode() } @@ -4054,6 +4917,40 @@ func NewV1UpdateNetworkRestrictionsRequestWithBody(server string, ref string, co return req, nil } +// NewV1PauseAProjectRequest generates requests for V1PauseAProject +func NewV1PauseAProjectRequest(server string, ref string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/pause", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + // NewV1GetPgsodiumConfigRequest generates requests for V1GetPgsodiumConfig func NewV1GetPgsodiumConfigRequest(server string, ref string) (*http.Request, error) { var err error @@ -4378,6 +5275,121 @@ func NewV1DisableReadonlyModeTemporarilyRequest(server string, ref string) (*htt return req, nil } +// NewV1ListAvailableRestoreVersionsRequest generates requests for V1ListAvailableRestoreVersions +func NewV1ListAvailableRestoreVersionsRequest(server string, ref string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/restore", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewV1RestoreAProjectRequest calls the generic V1RestoreAProject builder with application/json body +func NewV1RestoreAProjectRequest(server string, ref string, body V1RestoreAProjectJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewV1RestoreAProjectRequestWithBody(server, ref, "application/json", bodyReader) +} + +// NewV1RestoreAProjectRequestWithBody generates requests for V1RestoreAProject with any type of body +func NewV1RestoreAProjectRequestWithBody(server string, ref string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/restore", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewV1CancelAProjectRestorationRequest generates requests for V1CancelAProjectRestoration +func NewV1CancelAProjectRestorationRequest(server string, ref string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/restore/cancel", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + // NewV1BulkDeleteSecretsRequest calls the generic V1BulkDeleteSecrets builder with application/json body func NewV1BulkDeleteSecretsRequest(server string, ref string, body V1BulkDeleteSecretsJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader @@ -4724,8 +5736,8 @@ func NewV1UpgradePostgresVersionRequestWithBody(server string, ref string, conte return req, nil } -// NewV1GetPostgrestUpgradeEligibilityRequest generates requests for V1GetPostgrestUpgradeEligibility -func NewV1GetPostgrestUpgradeEligibilityRequest(server string, ref string) (*http.Request, error) { +// NewV1GetPostgresUpgradeEligibilityRequest generates requests for V1GetPostgresUpgradeEligibility +func NewV1GetPostgresUpgradeEligibilityRequest(server string, ref string) (*http.Request, error) { var err error var pathParam0 string @@ -4758,8 +5770,8 @@ func 
NewV1GetPostgrestUpgradeEligibilityRequest(server string, ref string) (*htt return req, nil } -// NewV1GetPostgrestUpgradeStatusRequest generates requests for V1GetPostgrestUpgradeStatus -func NewV1GetPostgrestUpgradeStatusRequest(server string, ref string) (*http.Request, error) { +// NewV1GetPostgresUpgradeStatusRequest generates requests for V1GetPostgresUpgradeStatus +func NewV1GetPostgresUpgradeStatusRequest(server string, ref string, params *V1GetPostgresUpgradeStatusParams) (*http.Request, error) { var err error var pathParam0 string @@ -4774,14 +5786,36 @@ func NewV1GetPostgrestUpgradeStatusRequest(server string, ref string) (*http.Req return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/upgrade/status", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } + operationPath := fmt.Sprintf("/v1/projects/%s/upgrade/status", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.TrackingId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "tracking_id", runtime.ParamLocationQuery, *params.TrackingId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err + queryURL.RawQuery = queryValues.Encode() } req, err := http.NewRequest("GET", queryURL.String(), nil) @@ -4976,6 +6010,70 @@ func NewV1ListAllSnippetsRequest(server string, params *V1ListAllSnippetsParams) if params != nil { queryValues := queryURL.Query() + if params.Cursor != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "cursor", runtime.ParamLocationQuery, 
*params.Cursor); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortBy != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_by", runtime.ParamLocationQuery, *params.SortBy); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + if params.ProjectRef != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "project_ref", runtime.ParamLocationQuery, *params.ProjectRef); err != nil { @@ -5004,7 +6102,7 @@ func NewV1ListAllSnippetsRequest(server string, params *V1ListAllSnippetsParams) } // NewV1GetASnippetRequest generates requests for V1GetASnippet -func NewV1GetASnippetRequest(server string, id string) (*http.Request, error) { +func NewV1GetASnippetRequest(server string, id openapi_types.UUID) (*http.Request, error) { var err error var pathParam0 string @@ -5091,12 +6189,20 @@ type ClientWithResponsesInterface 
interface { V1UpdateABranchConfigWithResponse(ctx context.Context, branchId string, body V1UpdateABranchConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpdateABranchConfigResponse, error) + // V1PushABranchWithResponse request + V1PushABranchWithResponse(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*V1PushABranchResponse, error) + // V1ResetABranchWithResponse request V1ResetABranchWithResponse(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*V1ResetABranchResponse, error) // V1AuthorizeUserWithResponse request V1AuthorizeUserWithResponse(ctx context.Context, params *V1AuthorizeUserParams, reqEditors ...RequestEditorFn) (*V1AuthorizeUserResponse, error) + // V1RevokeTokenWithBodyWithResponse request with any body + V1RevokeTokenWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1RevokeTokenResponse, error) + + V1RevokeTokenWithResponse(ctx context.Context, body V1RevokeTokenJSONRequestBody, reqEditors ...RequestEditorFn) (*V1RevokeTokenResponse, error) + // V1ExchangeOauthTokenWithBodyWithResponse request with any body V1ExchangeOauthTokenWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1ExchangeOauthTokenResponse, error) @@ -5127,8 +6233,30 @@ type ClientWithResponsesInterface interface { // V1DeleteAProjectWithResponse request V1DeleteAProjectWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DeleteAProjectResponse, error) + // V1GetProjectWithResponse request + V1GetProjectWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetProjectResponse, error) + + // GetLogsWithResponse request + GetLogsWithResponse(ctx context.Context, ref string, params *GetLogsParams, reqEditors ...RequestEditorFn) (*GetLogsResponse, error) + // V1GetProjectApiKeysWithResponse request - V1GetProjectApiKeysWithResponse(ctx context.Context, ref string, 
reqEditors ...RequestEditorFn) (*V1GetProjectApiKeysResponse, error) + V1GetProjectApiKeysWithResponse(ctx context.Context, ref string, params *V1GetProjectApiKeysParams, reqEditors ...RequestEditorFn) (*V1GetProjectApiKeysResponse, error) + + // CreateApiKeyWithBodyWithResponse request with any body + CreateApiKeyWithBodyWithResponse(ctx context.Context, ref string, params *CreateApiKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateApiKeyResponse, error) + + CreateApiKeyWithResponse(ctx context.Context, ref string, params *CreateApiKeyParams, body CreateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateApiKeyResponse, error) + + // DeleteApiKeyWithResponse request + DeleteApiKeyWithResponse(ctx context.Context, ref string, id string, params *DeleteApiKeyParams, reqEditors ...RequestEditorFn) (*DeleteApiKeyResponse, error) + + // GetApiKeyWithResponse request + GetApiKeyWithResponse(ctx context.Context, ref string, id string, params *GetApiKeyParams, reqEditors ...RequestEditorFn) (*GetApiKeyResponse, error) + + // UpdateApiKeyWithBodyWithResponse request with any body + UpdateApiKeyWithBodyWithResponse(ctx context.Context, ref string, id string, params *UpdateApiKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateApiKeyResponse, error) + + UpdateApiKeyWithResponse(ctx context.Context, ref string, id string, params *UpdateApiKeyParams, body UpdateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateApiKeyResponse, error) // V1DisablePreviewBranchingWithResponse request V1DisablePreviewBranchingWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DisablePreviewBranchingResponse, error) @@ -5201,6 +6329,14 @@ type ClientWithResponsesInterface interface { V1UpdatePostgresConfigWithResponse(ctx context.Context, ref string, body V1UpdatePostgresConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpdatePostgresConfigResponse, error) + // 
V1GetStorageConfigWithResponse request + V1GetStorageConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetStorageConfigResponse, error) + + // V1UpdateStorageConfigWithBodyWithResponse request with any body + V1UpdateStorageConfigWithBodyWithResponse(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1UpdateStorageConfigResponse, error) + + V1UpdateStorageConfigWithResponse(ctx context.Context, ref string, body V1UpdateStorageConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpdateStorageConfigResponse, error) + // V1DeleteHostnameConfigWithResponse request V1DeleteHostnameConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DeleteHostnameConfigResponse, error) @@ -5237,10 +6373,10 @@ type ClientWithResponsesInterface interface { // V1ListAllFunctionsWithResponse request V1ListAllFunctionsWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1ListAllFunctionsResponse, error) - // CreateFunctionWithBodyWithResponse request with any body - CreateFunctionWithBodyWithResponse(ctx context.Context, ref string, params *CreateFunctionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateFunctionResponse, error) + // V1CreateAFunctionWithBodyWithResponse request with any body + V1CreateAFunctionWithBodyWithResponse(ctx context.Context, ref string, params *V1CreateAFunctionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1CreateAFunctionResponse, error) - CreateFunctionWithResponse(ctx context.Context, ref string, params *CreateFunctionParams, body CreateFunctionJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateFunctionResponse, error) + V1CreateAFunctionWithResponse(ctx context.Context, ref string, params *V1CreateAFunctionParams, body V1CreateAFunctionJSONRequestBody, reqEditors ...RequestEditorFn) (*V1CreateAFunctionResponse, error) // 
V1DeleteAFunctionWithResponse request V1DeleteAFunctionWithResponse(ctx context.Context, ref string, functionSlug string, reqEditors ...RequestEditorFn) (*V1DeleteAFunctionResponse, error) @@ -5275,6 +6411,9 @@ type ClientWithResponsesInterface interface { V1UpdateNetworkRestrictionsWithResponse(ctx context.Context, ref string, body V1UpdateNetworkRestrictionsJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpdateNetworkRestrictionsResponse, error) + // V1PauseAProjectWithResponse request + V1PauseAProjectWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1PauseAProjectResponse, error) + // V1GetPgsodiumConfigWithResponse request V1GetPgsodiumConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetPgsodiumConfigResponse, error) @@ -5307,6 +6446,17 @@ type ClientWithResponsesInterface interface { // V1DisableReadonlyModeTemporarilyWithResponse request V1DisableReadonlyModeTemporarilyWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DisableReadonlyModeTemporarilyResponse, error) + // V1ListAvailableRestoreVersionsWithResponse request + V1ListAvailableRestoreVersionsWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1ListAvailableRestoreVersionsResponse, error) + + // V1RestoreAProjectWithBodyWithResponse request with any body + V1RestoreAProjectWithBodyWithResponse(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1RestoreAProjectResponse, error) + + V1RestoreAProjectWithResponse(ctx context.Context, ref string, body V1RestoreAProjectJSONRequestBody, reqEditors ...RequestEditorFn) (*V1RestoreAProjectResponse, error) + + // V1CancelAProjectRestorationWithResponse request + V1CancelAProjectRestorationWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1CancelAProjectRestorationResponse, error) + // V1BulkDeleteSecretsWithBodyWithResponse request with any body 
V1BulkDeleteSecretsWithBodyWithResponse(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1BulkDeleteSecretsResponse, error) @@ -5339,11 +6489,11 @@ type ClientWithResponsesInterface interface { V1UpgradePostgresVersionWithResponse(ctx context.Context, ref string, body V1UpgradePostgresVersionJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpgradePostgresVersionResponse, error) - // V1GetPostgrestUpgradeEligibilityWithResponse request - V1GetPostgrestUpgradeEligibilityWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetPostgrestUpgradeEligibilityResponse, error) + // V1GetPostgresUpgradeEligibilityWithResponse request + V1GetPostgresUpgradeEligibilityWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetPostgresUpgradeEligibilityResponse, error) - // V1GetPostgrestUpgradeStatusWithResponse request - V1GetPostgrestUpgradeStatusWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetPostgrestUpgradeStatusResponse, error) + // V1GetPostgresUpgradeStatusWithResponse request + V1GetPostgresUpgradeStatusWithResponse(ctx context.Context, ref string, params *V1GetPostgresUpgradeStatusParams, reqEditors ...RequestEditorFn) (*V1GetPostgresUpgradeStatusResponse, error) // V1DeactivateVanitySubdomainConfigWithResponse request V1DeactivateVanitySubdomainConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DeactivateVanitySubdomainConfigResponse, error) @@ -5365,7 +6515,7 @@ type ClientWithResponsesInterface interface { V1ListAllSnippetsWithResponse(ctx context.Context, params *V1ListAllSnippetsParams, reqEditors ...RequestEditorFn) (*V1ListAllSnippetsResponse, error) // V1GetASnippetWithResponse request - V1GetASnippetWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*V1GetASnippetResponse, error) + V1GetASnippetWithResponse(ctx context.Context, id openapi_types.UUID, 
reqEditors ...RequestEditorFn) (*V1GetASnippetResponse, error) } type V1DeleteABranchResponse struct { @@ -5434,10 +6584,32 @@ func (r V1UpdateABranchConfigResponse) StatusCode() int { return 0 } +type V1PushABranchResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *BranchUpdateResponse +} + +// Status returns HTTPResponse.Status +func (r V1PushABranchResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1PushABranchResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type V1ResetABranchResponse struct { Body []byte HTTPResponse *http.Response - JSON201 *BranchResetResponse + JSON201 *BranchUpdateResponse } // Status returns HTTPResponse.Status @@ -5477,6 +6649,27 @@ func (r V1AuthorizeUserResponse) StatusCode() int { return 0 } +type V1RevokeTokenResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r V1RevokeTokenResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1RevokeTokenResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type V1ExchangeOauthTokenResponse struct { Body []byte HTTPResponse *http.Response @@ -5524,11 +6717,143 @@ func (r V1ListAllOrganizationsResponse) StatusCode() int { type V1CreateAnOrganizationResponse struct { Body []byte HTTPResponse *http.Response - JSON201 *OrganizationResponseV1 + JSON201 *OrganizationResponseV1 +} + +// Status returns HTTPResponse.Status +func (r V1CreateAnOrganizationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r 
V1CreateAnOrganizationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1GetAnOrganizationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *V1OrganizationSlugResponse +} + +// Status returns HTTPResponse.Status +func (r V1GetAnOrganizationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1GetAnOrganizationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1ListOrganizationMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]V1OrganizationMemberResponse +} + +// Status returns HTTPResponse.Status +func (r V1ListOrganizationMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1ListOrganizationMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1ListAllProjectsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]V1ProjectWithDatabaseResponse +} + +// Status returns HTTPResponse.Status +func (r V1ListAllProjectsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1ListAllProjectsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1CreateAProjectResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *V1ProjectResponse +} + +// Status returns HTTPResponse.Status +func (r V1CreateAProjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// 
StatusCode returns HTTPResponse.StatusCode +func (r V1CreateAProjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1DeleteAProjectResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *V1ProjectRefResponse +} + +// Status returns HTTPResponse.Status +func (r V1DeleteAProjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1DeleteAProjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1GetProjectResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *V1ProjectWithDatabaseResponse } // Status returns HTTPResponse.Status -func (r V1CreateAnOrganizationResponse) Status() string { +func (r V1GetProjectResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5536,21 +6861,21 @@ func (r V1CreateAnOrganizationResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1CreateAnOrganizationResponse) StatusCode() int { +func (r V1GetProjectResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1GetAnOrganizationResponse struct { +type GetLogsResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *V1OrganizationSlugResponse + JSON200 *V1AnalyticsResponse } // Status returns HTTPResponse.Status -func (r V1GetAnOrganizationResponse) Status() string { +func (r GetLogsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5558,21 +6883,21 @@ func (r V1GetAnOrganizationResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1GetAnOrganizationResponse) StatusCode() int { +func (r GetLogsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } 
-type V1ListOrganizationMembersResponse struct { +type V1GetProjectApiKeysResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *[]V1OrganizationMemberResponse + JSON200 *[]ApiKeyResponse } // Status returns HTTPResponse.Status -func (r V1ListOrganizationMembersResponse) Status() string { +func (r V1GetProjectApiKeysResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5580,21 +6905,21 @@ func (r V1ListOrganizationMembersResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1ListOrganizationMembersResponse) StatusCode() int { +func (r V1GetProjectApiKeysResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1ListAllProjectsResponse struct { +type CreateApiKeyResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *[]V1ProjectResponse + JSON201 *ApiKeyResponse } // Status returns HTTPResponse.Status -func (r V1ListAllProjectsResponse) Status() string { +func (r CreateApiKeyResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5602,21 +6927,21 @@ func (r V1ListAllProjectsResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1ListAllProjectsResponse) StatusCode() int { +func (r CreateApiKeyResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1CreateAProjectResponse struct { +type DeleteApiKeyResponse struct { Body []byte HTTPResponse *http.Response - JSON201 *V1ProjectResponse + JSON200 *ApiKeyResponse } // Status returns HTTPResponse.Status -func (r V1CreateAProjectResponse) Status() string { +func (r DeleteApiKeyResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5624,21 +6949,21 @@ func (r V1CreateAProjectResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1CreateAProjectResponse) StatusCode() int { +func (r 
DeleteApiKeyResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1DeleteAProjectResponse struct { +type GetApiKeyResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *V1ProjectRefResponse + JSON200 *ApiKeyResponse } // Status returns HTTPResponse.Status -func (r V1DeleteAProjectResponse) Status() string { +func (r GetApiKeyResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5646,21 +6971,21 @@ func (r V1DeleteAProjectResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1DeleteAProjectResponse) StatusCode() int { +func (r GetApiKeyResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1GetProjectApiKeysResponse struct { +type UpdateApiKeyResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *[]ApiKeyResponse + JSON200 *ApiKeyResponse } // Status returns HTTPResponse.Status -func (r V1GetProjectApiKeysResponse) Status() string { +func (r UpdateApiKeyResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5668,7 +6993,7 @@ func (r V1GetProjectApiKeysResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1GetProjectApiKeysResponse) StatusCode() int { +func (r UpdateApiKeyResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -6092,6 +7417,49 @@ func (r V1UpdatePostgresConfigResponse) StatusCode() int { return 0 } +type V1GetStorageConfigResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *StorageConfigResponse +} + +// Status returns HTTPResponse.Status +func (r V1GetStorageConfigResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1GetStorageConfigResponse) StatusCode() int { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.StatusCode + } + return 0 +} + +type V1UpdateStorageConfigResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r V1UpdateStorageConfigResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1UpdateStorageConfigResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type V1DeleteHostnameConfigResponse struct { Body []byte HTTPResponse *http.Response @@ -6309,14 +7677,14 @@ func (r V1ListAllFunctionsResponse) StatusCode() int { return 0 } -type CreateFunctionResponse struct { +type V1CreateAFunctionResponse struct { Body []byte HTTPResponse *http.Response JSON201 *FunctionResponse } // Status returns HTTPResponse.Status -func (r CreateFunctionResponse) Status() string { +func (r V1CreateAFunctionResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -6324,7 +7692,7 @@ func (r CreateFunctionResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r CreateFunctionResponse) StatusCode() int { +func (r V1CreateAFunctionResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -6526,6 +7894,27 @@ func (r V1UpdateNetworkRestrictionsResponse) StatusCode() int { return 0 } +type V1PauseAProjectResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r V1PauseAProjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1PauseAProjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type V1GetPgsodiumConfigResponse struct { Body []byte HTTPResponse *http.Response @@ -6699,6 +8088,70 @@ 
func (r V1DisableReadonlyModeTemporarilyResponse) StatusCode() int { return 0 } +type V1ListAvailableRestoreVersionsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *GetProjectAvailableRestoreVersionsResponse +} + +// Status returns HTTPResponse.Status +func (r V1ListAvailableRestoreVersionsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1ListAvailableRestoreVersionsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1RestoreAProjectResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r V1RestoreAProjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1RestoreAProjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1CancelAProjectRestorationResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r V1CancelAProjectRestorationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1CancelAProjectRestorationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type V1BulkDeleteSecretsResponse struct { Body []byte HTTPResponse *http.Response @@ -6874,14 +8327,14 @@ func (r V1UpgradePostgresVersionResponse) StatusCode() int { return 0 } -type V1GetPostgrestUpgradeEligibilityResponse struct { +type V1GetPostgresUpgradeEligibilityResponse struct { Body []byte HTTPResponse *http.Response JSON200 *ProjectUpgradeEligibilityResponse } // Status returns 
HTTPResponse.Status -func (r V1GetPostgrestUpgradeEligibilityResponse) Status() string { +func (r V1GetPostgresUpgradeEligibilityResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -6889,21 +8342,21 @@ func (r V1GetPostgrestUpgradeEligibilityResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1GetPostgrestUpgradeEligibilityResponse) StatusCode() int { +func (r V1GetPostgresUpgradeEligibilityResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1GetPostgrestUpgradeStatusResponse struct { +type V1GetPostgresUpgradeStatusResponse struct { Body []byte HTTPResponse *http.Response JSON200 *DatabaseUpgradeStatusResponse } // Status returns HTTPResponse.Status -func (r V1GetPostgrestUpgradeStatusResponse) Status() string { +func (r V1GetPostgresUpgradeStatusResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -6911,7 +8364,7 @@ func (r V1GetPostgrestUpgradeStatusResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1GetPostgrestUpgradeStatusResponse) StatusCode() int { +func (r V1GetPostgresUpgradeStatusResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -7084,6 +8537,15 @@ func (c *ClientWithResponses) V1UpdateABranchConfigWithResponse(ctx context.Cont return ParseV1UpdateABranchConfigResponse(rsp) } +// V1PushABranchWithResponse request returning *V1PushABranchResponse +func (c *ClientWithResponses) V1PushABranchWithResponse(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*V1PushABranchResponse, error) { + rsp, err := c.V1PushABranch(ctx, branchId, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseV1PushABranchResponse(rsp) +} + // V1ResetABranchWithResponse request returning *V1ResetABranchResponse func (c *ClientWithResponses) V1ResetABranchWithResponse(ctx context.Context, branchId string, reqEditors ...RequestEditorFn) (*V1ResetABranchResponse, error) { rsp, err := c.V1ResetABranch(ctx, branchId, reqEditors...) @@ -7102,6 +8564,23 @@ func (c *ClientWithResponses) V1AuthorizeUserWithResponse(ctx context.Context, p return ParseV1AuthorizeUserResponse(rsp) } +// V1RevokeTokenWithBodyWithResponse request with arbitrary body returning *V1RevokeTokenResponse +func (c *ClientWithResponses) V1RevokeTokenWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1RevokeTokenResponse, error) { + rsp, err := c.V1RevokeTokenWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1RevokeTokenResponse(rsp) +} + +func (c *ClientWithResponses) V1RevokeTokenWithResponse(ctx context.Context, body V1RevokeTokenJSONRequestBody, reqEditors ...RequestEditorFn) (*V1RevokeTokenResponse, error) { + rsp, err := c.V1RevokeToken(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1RevokeTokenResponse(rsp) +} + // V1ExchangeOauthTokenWithBodyWithResponse request with arbitrary body returning *V1ExchangeOauthTokenResponse func (c *ClientWithResponses) V1ExchangeOauthTokenWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1ExchangeOauthTokenResponse, error) { rsp, err := c.V1ExchangeOauthTokenWithBody(ctx, contentType, body, reqEditors...) 
@@ -7198,15 +8677,85 @@ func (c *ClientWithResponses) V1DeleteAProjectWithResponse(ctx context.Context, return ParseV1DeleteAProjectResponse(rsp) } +// V1GetProjectWithResponse request returning *V1GetProjectResponse +func (c *ClientWithResponses) V1GetProjectWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetProjectResponse, error) { + rsp, err := c.V1GetProject(ctx, ref, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1GetProjectResponse(rsp) +} + +// GetLogsWithResponse request returning *GetLogsResponse +func (c *ClientWithResponses) GetLogsWithResponse(ctx context.Context, ref string, params *GetLogsParams, reqEditors ...RequestEditorFn) (*GetLogsResponse, error) { + rsp, err := c.GetLogs(ctx, ref, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetLogsResponse(rsp) +} + // V1GetProjectApiKeysWithResponse request returning *V1GetProjectApiKeysResponse -func (c *ClientWithResponses) V1GetProjectApiKeysWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetProjectApiKeysResponse, error) { - rsp, err := c.V1GetProjectApiKeys(ctx, ref, reqEditors...) +func (c *ClientWithResponses) V1GetProjectApiKeysWithResponse(ctx context.Context, ref string, params *V1GetProjectApiKeysParams, reqEditors ...RequestEditorFn) (*V1GetProjectApiKeysResponse, error) { + rsp, err := c.V1GetProjectApiKeys(ctx, ref, params, reqEditors...) if err != nil { return nil, err } return ParseV1GetProjectApiKeysResponse(rsp) } +// CreateApiKeyWithBodyWithResponse request with arbitrary body returning *CreateApiKeyResponse +func (c *ClientWithResponses) CreateApiKeyWithBodyWithResponse(ctx context.Context, ref string, params *CreateApiKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateApiKeyResponse, error) { + rsp, err := c.CreateApiKeyWithBody(ctx, ref, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseCreateApiKeyResponse(rsp) +} + +func (c *ClientWithResponses) CreateApiKeyWithResponse(ctx context.Context, ref string, params *CreateApiKeyParams, body CreateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateApiKeyResponse, error) { + rsp, err := c.CreateApiKey(ctx, ref, params, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateApiKeyResponse(rsp) +} + +// DeleteApiKeyWithResponse request returning *DeleteApiKeyResponse +func (c *ClientWithResponses) DeleteApiKeyWithResponse(ctx context.Context, ref string, id string, params *DeleteApiKeyParams, reqEditors ...RequestEditorFn) (*DeleteApiKeyResponse, error) { + rsp, err := c.DeleteApiKey(ctx, ref, id, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteApiKeyResponse(rsp) +} + +// GetApiKeyWithResponse request returning *GetApiKeyResponse +func (c *ClientWithResponses) GetApiKeyWithResponse(ctx context.Context, ref string, id string, params *GetApiKeyParams, reqEditors ...RequestEditorFn) (*GetApiKeyResponse, error) { + rsp, err := c.GetApiKey(ctx, ref, id, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetApiKeyResponse(rsp) +} + +// UpdateApiKeyWithBodyWithResponse request with arbitrary body returning *UpdateApiKeyResponse +func (c *ClientWithResponses) UpdateApiKeyWithBodyWithResponse(ctx context.Context, ref string, id string, params *UpdateApiKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateApiKeyResponse, error) { + rsp, err := c.UpdateApiKeyWithBody(ctx, ref, id, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseUpdateApiKeyResponse(rsp) +} + +func (c *ClientWithResponses) UpdateApiKeyWithResponse(ctx context.Context, ref string, id string, params *UpdateApiKeyParams, body UpdateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateApiKeyResponse, error) { + rsp, err := c.UpdateApiKey(ctx, ref, id, params, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateApiKeyResponse(rsp) +} + // V1DisablePreviewBranchingWithResponse request returning *V1DisablePreviewBranchingResponse func (c *ClientWithResponses) V1DisablePreviewBranchingWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DisablePreviewBranchingResponse, error) { rsp, err := c.V1DisablePreviewBranching(ctx, ref, reqEditors...) @@ -7434,6 +8983,32 @@ func (c *ClientWithResponses) V1UpdatePostgresConfigWithResponse(ctx context.Con return ParseV1UpdatePostgresConfigResponse(rsp) } +// V1GetStorageConfigWithResponse request returning *V1GetStorageConfigResponse +func (c *ClientWithResponses) V1GetStorageConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetStorageConfigResponse, error) { + rsp, err := c.V1GetStorageConfig(ctx, ref, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1GetStorageConfigResponse(rsp) +} + +// V1UpdateStorageConfigWithBodyWithResponse request with arbitrary body returning *V1UpdateStorageConfigResponse +func (c *ClientWithResponses) V1UpdateStorageConfigWithBodyWithResponse(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1UpdateStorageConfigResponse, error) { + rsp, err := c.V1UpdateStorageConfigWithBody(ctx, ref, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseV1UpdateStorageConfigResponse(rsp) +} + +func (c *ClientWithResponses) V1UpdateStorageConfigWithResponse(ctx context.Context, ref string, body V1UpdateStorageConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpdateStorageConfigResponse, error) { + rsp, err := c.V1UpdateStorageConfig(ctx, ref, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1UpdateStorageConfigResponse(rsp) +} + // V1DeleteHostnameConfigWithResponse request returning *V1DeleteHostnameConfigResponse func (c *ClientWithResponses) V1DeleteHostnameConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DeleteHostnameConfigResponse, error) { rsp, err := c.V1DeleteHostnameConfig(ctx, ref, reqEditors...) @@ -7548,21 +9123,21 @@ func (c *ClientWithResponses) V1ListAllFunctionsWithResponse(ctx context.Context return ParseV1ListAllFunctionsResponse(rsp) } -// CreateFunctionWithBodyWithResponse request with arbitrary body returning *CreateFunctionResponse -func (c *ClientWithResponses) CreateFunctionWithBodyWithResponse(ctx context.Context, ref string, params *CreateFunctionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateFunctionResponse, error) { - rsp, err := c.CreateFunctionWithBody(ctx, ref, params, contentType, body, reqEditors...) +// V1CreateAFunctionWithBodyWithResponse request with arbitrary body returning *V1CreateAFunctionResponse +func (c *ClientWithResponses) V1CreateAFunctionWithBodyWithResponse(ctx context.Context, ref string, params *V1CreateAFunctionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1CreateAFunctionResponse, error) { + rsp, err := c.V1CreateAFunctionWithBody(ctx, ref, params, contentType, body, reqEditors...) 
if err != nil { return nil, err } - return ParseCreateFunctionResponse(rsp) + return ParseV1CreateAFunctionResponse(rsp) } -func (c *ClientWithResponses) CreateFunctionWithResponse(ctx context.Context, ref string, params *CreateFunctionParams, body CreateFunctionJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateFunctionResponse, error) { - rsp, err := c.CreateFunction(ctx, ref, params, body, reqEditors...) +func (c *ClientWithResponses) V1CreateAFunctionWithResponse(ctx context.Context, ref string, params *V1CreateAFunctionParams, body V1CreateAFunctionJSONRequestBody, reqEditors ...RequestEditorFn) (*V1CreateAFunctionResponse, error) { + rsp, err := c.V1CreateAFunction(ctx, ref, params, body, reqEditors...) if err != nil { return nil, err } - return ParseCreateFunctionResponse(rsp) + return ParseV1CreateAFunctionResponse(rsp) } // V1DeleteAFunctionWithResponse request returning *V1DeleteAFunctionResponse @@ -7670,6 +9245,15 @@ func (c *ClientWithResponses) V1UpdateNetworkRestrictionsWithResponse(ctx contex return ParseV1UpdateNetworkRestrictionsResponse(rsp) } +// V1PauseAProjectWithResponse request returning *V1PauseAProjectResponse +func (c *ClientWithResponses) V1PauseAProjectWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1PauseAProjectResponse, error) { + rsp, err := c.V1PauseAProject(ctx, ref, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1PauseAProjectResponse(rsp) +} + // V1GetPgsodiumConfigWithResponse request returning *V1GetPgsodiumConfigResponse func (c *ClientWithResponses) V1GetPgsodiumConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetPgsodiumConfigResponse, error) { rsp, err := c.V1GetPgsodiumConfig(ctx, ref, reqEditors...) 
@@ -7774,6 +9358,41 @@ func (c *ClientWithResponses) V1DisableReadonlyModeTemporarilyWithResponse(ctx c return ParseV1DisableReadonlyModeTemporarilyResponse(rsp) } +// V1ListAvailableRestoreVersionsWithResponse request returning *V1ListAvailableRestoreVersionsResponse +func (c *ClientWithResponses) V1ListAvailableRestoreVersionsWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1ListAvailableRestoreVersionsResponse, error) { + rsp, err := c.V1ListAvailableRestoreVersions(ctx, ref, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1ListAvailableRestoreVersionsResponse(rsp) +} + +// V1RestoreAProjectWithBodyWithResponse request with arbitrary body returning *V1RestoreAProjectResponse +func (c *ClientWithResponses) V1RestoreAProjectWithBodyWithResponse(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1RestoreAProjectResponse, error) { + rsp, err := c.V1RestoreAProjectWithBody(ctx, ref, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1RestoreAProjectResponse(rsp) +} + +func (c *ClientWithResponses) V1RestoreAProjectWithResponse(ctx context.Context, ref string, body V1RestoreAProjectJSONRequestBody, reqEditors ...RequestEditorFn) (*V1RestoreAProjectResponse, error) { + rsp, err := c.V1RestoreAProject(ctx, ref, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1RestoreAProjectResponse(rsp) +} + +// V1CancelAProjectRestorationWithResponse request returning *V1CancelAProjectRestorationResponse +func (c *ClientWithResponses) V1CancelAProjectRestorationWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1CancelAProjectRestorationResponse, error) { + rsp, err := c.V1CancelAProjectRestoration(ctx, ref, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseV1CancelAProjectRestorationResponse(rsp) +} + // V1BulkDeleteSecretsWithBodyWithResponse request with arbitrary body returning *V1BulkDeleteSecretsResponse func (c *ClientWithResponses) V1BulkDeleteSecretsWithBodyWithResponse(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1BulkDeleteSecretsResponse, error) { rsp, err := c.V1BulkDeleteSecretsWithBody(ctx, ref, contentType, body, reqEditors...) @@ -7878,22 +9497,22 @@ func (c *ClientWithResponses) V1UpgradePostgresVersionWithResponse(ctx context.C return ParseV1UpgradePostgresVersionResponse(rsp) } -// V1GetPostgrestUpgradeEligibilityWithResponse request returning *V1GetPostgrestUpgradeEligibilityResponse -func (c *ClientWithResponses) V1GetPostgrestUpgradeEligibilityWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetPostgrestUpgradeEligibilityResponse, error) { - rsp, err := c.V1GetPostgrestUpgradeEligibility(ctx, ref, reqEditors...) +// V1GetPostgresUpgradeEligibilityWithResponse request returning *V1GetPostgresUpgradeEligibilityResponse +func (c *ClientWithResponses) V1GetPostgresUpgradeEligibilityWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetPostgresUpgradeEligibilityResponse, error) { + rsp, err := c.V1GetPostgresUpgradeEligibility(ctx, ref, reqEditors...) if err != nil { return nil, err } - return ParseV1GetPostgrestUpgradeEligibilityResponse(rsp) + return ParseV1GetPostgresUpgradeEligibilityResponse(rsp) } -// V1GetPostgrestUpgradeStatusWithResponse request returning *V1GetPostgrestUpgradeStatusResponse -func (c *ClientWithResponses) V1GetPostgrestUpgradeStatusWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetPostgrestUpgradeStatusResponse, error) { - rsp, err := c.V1GetPostgrestUpgradeStatus(ctx, ref, reqEditors...) 
+// V1GetPostgresUpgradeStatusWithResponse request returning *V1GetPostgresUpgradeStatusResponse +func (c *ClientWithResponses) V1GetPostgresUpgradeStatusWithResponse(ctx context.Context, ref string, params *V1GetPostgresUpgradeStatusParams, reqEditors ...RequestEditorFn) (*V1GetPostgresUpgradeStatusResponse, error) { + rsp, err := c.V1GetPostgresUpgradeStatus(ctx, ref, params, reqEditors...) if err != nil { return nil, err } - return ParseV1GetPostgrestUpgradeStatusResponse(rsp) + return ParseV1GetPostgresUpgradeStatusResponse(rsp) } // V1DeactivateVanitySubdomainConfigWithResponse request returning *V1DeactivateVanitySubdomainConfigResponse @@ -7958,7 +9577,7 @@ func (c *ClientWithResponses) V1ListAllSnippetsWithResponse(ctx context.Context, } // V1GetASnippetWithResponse request returning *V1GetASnippetResponse -func (c *ClientWithResponses) V1GetASnippetWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*V1GetASnippetResponse, error) { +func (c *ClientWithResponses) V1GetASnippetWithResponse(ctx context.Context, id openapi_types.UUID, reqEditors ...RequestEditorFn) (*V1GetASnippetResponse, error) { rsp, err := c.V1GetASnippet(ctx, id, reqEditors...) 
if err != nil { return nil, err @@ -8000,14 +9619,40 @@ func ParseV1GetABranchConfigResponse(rsp *http.Response) (*V1GetABranchConfigRes return nil, err } - response := &V1GetABranchConfigResponse{ + response := &V1GetABranchConfigResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest BranchDetailResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseV1UpdateABranchConfigResponse parses an HTTP response from a V1UpdateABranchConfigWithResponse call +func ParseV1UpdateABranchConfigResponse(rsp *http.Response) (*V1UpdateABranchConfigResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1UpdateABranchConfigResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest BranchDetailResponse + var dest BranchResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -8018,26 +9663,26 @@ func ParseV1GetABranchConfigResponse(rsp *http.Response) (*V1GetABranchConfigRes return response, nil } -// ParseV1UpdateABranchConfigResponse parses an HTTP response from a V1UpdateABranchConfigWithResponse call -func ParseV1UpdateABranchConfigResponse(rsp *http.Response) (*V1UpdateABranchConfigResponse, error) { +// ParseV1PushABranchResponse parses an HTTP response from a V1PushABranchWithResponse call +func ParseV1PushABranchResponse(rsp *http.Response) (*V1PushABranchResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &V1UpdateABranchConfigResponse{ + response := &V1PushABranchResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { - case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest BranchResponse + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest BranchUpdateResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON201 = &dest } @@ -8059,7 +9704,7 @@ func ParseV1ResetABranchResponse(rsp *http.Response) (*V1ResetABranchResponse, e switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: - var dest BranchResetResponse + var dest BranchUpdateResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -8086,6 +9731,22 @@ func ParseV1AuthorizeUserResponse(rsp *http.Response) (*V1AuthorizeUserResponse, return response, nil } +// ParseV1RevokeTokenResponse parses an HTTP response from a V1RevokeTokenWithResponse call +func ParseV1RevokeTokenResponse(rsp *http.Response) (*V1RevokeTokenResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1RevokeTokenResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + // ParseV1ExchangeOauthTokenResponse parses an HTTP response from a V1ExchangeOauthTokenWithResponse call func ParseV1ExchangeOauthTokenResponse(rsp *http.Response) (*V1ExchangeOauthTokenResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -8231,7 +9892,7 @@ func ParseV1ListAllProjectsResponse(rsp *http.Response) (*V1ListAllProjectsRespo switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest []V1ProjectResponse + var dest []V1ProjectWithDatabaseResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -8294,6 +9955,58 @@ func ParseV1DeleteAProjectResponse(rsp *http.Response) (*V1DeleteAProjectRespons return response, nil } +// ParseV1GetProjectResponse 
parses an HTTP response from a V1GetProjectWithResponse call +func ParseV1GetProjectResponse(rsp *http.Response) (*V1GetProjectResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1GetProjectResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest V1ProjectWithDatabaseResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseGetLogsResponse parses an HTTP response from a GetLogsWithResponse call +func ParseGetLogsResponse(rsp *http.Response) (*GetLogsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetLogsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest V1AnalyticsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + // ParseV1GetProjectApiKeysResponse parses an HTTP response from a V1GetProjectApiKeysWithResponse call func ParseV1GetProjectApiKeysResponse(rsp *http.Response) (*V1GetProjectApiKeysResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -8320,6 +10033,110 @@ func ParseV1GetProjectApiKeysResponse(rsp *http.Response) (*V1GetProjectApiKeysR return response, nil } +// ParseCreateApiKeyResponse parses an HTTP response from a CreateApiKeyWithResponse call +func ParseCreateApiKeyResponse(rsp *http.Response) (*CreateApiKeyResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CreateApiKeyResponse{ + Body: 
bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ApiKeyResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + } + + return response, nil +} + +// ParseDeleteApiKeyResponse parses an HTTP response from a DeleteApiKeyWithResponse call +func ParseDeleteApiKeyResponse(rsp *http.Response) (*DeleteApiKeyResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DeleteApiKeyResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ApiKeyResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseGetApiKeyResponse parses an HTTP response from a GetApiKeyWithResponse call +func ParseGetApiKeyResponse(rsp *http.Response) (*GetApiKeyResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetApiKeyResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ApiKeyResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseUpdateApiKeyResponse parses an HTTP response from a UpdateApiKeyWithResponse call +func ParseUpdateApiKeyResponse(rsp *http.Response) (*UpdateApiKeyResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &UpdateApiKeyResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { 
+ case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ApiKeyResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + // ParseV1DisablePreviewBranchingResponse parses an HTTP response from a V1DisablePreviewBranchingWithResponse call func ParseV1DisablePreviewBranchingResponse(rsp *http.Response) (*V1DisablePreviewBranchingResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -8804,6 +10621,48 @@ func ParseV1UpdatePostgresConfigResponse(rsp *http.Response) (*V1UpdatePostgresC return response, nil } +// ParseV1GetStorageConfigResponse parses an HTTP response from a V1GetStorageConfigWithResponse call +func ParseV1GetStorageConfigResponse(rsp *http.Response) (*V1GetStorageConfigResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1GetStorageConfigResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest StorageConfigResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseV1UpdateStorageConfigResponse parses an HTTP response from a V1UpdateStorageConfigWithResponse call +func ParseV1UpdateStorageConfigResponse(rsp *http.Response) (*V1UpdateStorageConfigResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1UpdateStorageConfigResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + // ParseV1DeleteHostnameConfigResponse parses an HTTP response from a V1DeleteHostnameConfigWithResponse call func ParseV1DeleteHostnameConfigResponse(rsp *http.Response) 
(*V1DeleteHostnameConfigResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -9034,15 +10893,15 @@ func ParseV1ListAllFunctionsResponse(rsp *http.Response) (*V1ListAllFunctionsRes return response, nil } -// ParseCreateFunctionResponse parses an HTTP response from a CreateFunctionWithResponse call -func ParseCreateFunctionResponse(rsp *http.Response) (*CreateFunctionResponse, error) { +// ParseV1CreateAFunctionResponse parses an HTTP response from a V1CreateAFunctionWithResponse call +func ParseV1CreateAFunctionResponse(rsp *http.Response) (*V1CreateAFunctionResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &CreateFunctionResponse{ + response := &V1CreateAFunctionResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -9264,6 +11123,22 @@ func ParseV1UpdateNetworkRestrictionsResponse(rsp *http.Response) (*V1UpdateNetw return response, nil } +// ParseV1PauseAProjectResponse parses an HTTP response from a V1PauseAProjectWithResponse call +func ParseV1PauseAProjectResponse(rsp *http.Response) (*V1PauseAProjectResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1PauseAProjectResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + // ParseV1GetPgsodiumConfigResponse parses an HTTP response from a V1GetPgsodiumConfigWithResponse call func ParseV1GetPgsodiumConfigResponse(rsp *http.Response) (*V1GetPgsodiumConfigResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -9442,6 +11317,64 @@ func ParseV1DisableReadonlyModeTemporarilyResponse(rsp *http.Response) (*V1Disab return response, nil } +// ParseV1ListAvailableRestoreVersionsResponse parses an HTTP response from a V1ListAvailableRestoreVersionsWithResponse call +func ParseV1ListAvailableRestoreVersionsResponse(rsp *http.Response) (*V1ListAvailableRestoreVersionsResponse, error) { + 
bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1ListAvailableRestoreVersionsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest GetProjectAvailableRestoreVersionsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseV1RestoreAProjectResponse parses an HTTP response from a V1RestoreAProjectWithResponse call +func ParseV1RestoreAProjectResponse(rsp *http.Response) (*V1RestoreAProjectResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1RestoreAProjectResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + +// ParseV1CancelAProjectRestorationResponse parses an HTTP response from a V1CancelAProjectRestorationWithResponse call +func ParseV1CancelAProjectRestorationResponse(rsp *http.Response) (*V1CancelAProjectRestorationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1CancelAProjectRestorationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + // ParseV1BulkDeleteSecretsResponse parses an HTTP response from a V1BulkDeleteSecretsWithResponse call func ParseV1BulkDeleteSecretsResponse(rsp *http.Response) (*V1BulkDeleteSecretsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -9640,15 +11573,15 @@ func ParseV1UpgradePostgresVersionResponse(rsp *http.Response) (*V1UpgradePostgr return response, nil } -// ParseV1GetPostgrestUpgradeEligibilityResponse parses an HTTP response from a V1GetPostgrestUpgradeEligibilityWithResponse call -func 
ParseV1GetPostgrestUpgradeEligibilityResponse(rsp *http.Response) (*V1GetPostgrestUpgradeEligibilityResponse, error) { +// ParseV1GetPostgresUpgradeEligibilityResponse parses an HTTP response from a V1GetPostgresUpgradeEligibilityWithResponse call +func ParseV1GetPostgresUpgradeEligibilityResponse(rsp *http.Response) (*V1GetPostgresUpgradeEligibilityResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &V1GetPostgrestUpgradeEligibilityResponse{ + response := &V1GetPostgresUpgradeEligibilityResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -9666,15 +11599,15 @@ func ParseV1GetPostgrestUpgradeEligibilityResponse(rsp *http.Response) (*V1GetPo return response, nil } -// ParseV1GetPostgrestUpgradeStatusResponse parses an HTTP response from a V1GetPostgrestUpgradeStatusWithResponse call -func ParseV1GetPostgrestUpgradeStatusResponse(rsp *http.Response) (*V1GetPostgrestUpgradeStatusResponse, error) { +// ParseV1GetPostgresUpgradeStatusResponse parses an HTTP response from a V1GetPostgresUpgradeStatusWithResponse call +func ParseV1GetPostgresUpgradeStatusResponse(rsp *http.Response) (*V1GetPostgresUpgradeStatusResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &V1GetPostgrestUpgradeStatusResponse{ + response := &V1GetPostgresUpgradeStatusResponse{ Body: bodyBytes, HTTPResponse: rsp, } diff --git a/pkg/api/types.gen.go b/pkg/api/types.gen.go index c59dae1a5..1bb326b91 100644 --- a/pkg/api/types.gen.go +++ b/pkg/api/types.gen.go @@ -7,6 +7,7 @@ import ( "encoding/json" "github.com/oapi-codegen/runtime" + openapi_types "github.com/oapi-codegen/runtime/types" ) const ( @@ -14,6 +15,18 @@ const ( Oauth2Scopes = "oauth2.Scopes" ) +// Defines values for ApiKeyResponseType. 
+const ( + ApiKeyResponseTypeLegacy ApiKeyResponseType = "legacy" + ApiKeyResponseTypePublishable ApiKeyResponseType = "publishable" + ApiKeyResponseTypeSecret ApiKeyResponseType = "secret" +) + +// Defines values for AuthHealthResponseName. +const ( + GoTrue AuthHealthResponseName = "GoTrue" +) + // Defines values for BillingPlanId. const ( BillingPlanIdEnterprise BillingPlanId = "enterprise" @@ -33,6 +46,7 @@ const ( BranchDetailResponseStatusPAUSEFAILED BranchDetailResponseStatus = "PAUSE_FAILED" BranchDetailResponseStatusPAUSING BranchDetailResponseStatus = "PAUSING" BranchDetailResponseStatusREMOVED BranchDetailResponseStatus = "REMOVED" + BranchDetailResponseStatusRESIZING BranchDetailResponseStatus = "RESIZING" BranchDetailResponseStatusRESTARTING BranchDetailResponseStatus = "RESTARTING" BranchDetailResponseStatusRESTOREFAILED BranchDetailResponseStatus = "RESTORE_FAILED" BranchDetailResponseStatusRESTORING BranchDetailResponseStatus = "RESTORING" @@ -50,6 +64,12 @@ const ( BranchResponseStatusRUNNINGMIGRATIONS BranchResponseStatus = "RUNNING_MIGRATIONS" ) +// Defines values for CreateApiKeyBodyType. +const ( + CreateApiKeyBodyTypePublishable CreateApiKeyBodyType = "publishable" + CreateApiKeyBodyTypeSecret CreateApiKeyBodyType = "secret" +) + // Defines values for CreateProviderBodyType. const ( Saml CreateProviderBodyType = "saml" @@ -92,16 +112,16 @@ const ( // Defines values for DesiredInstanceSize. 
const ( - Large DesiredInstanceSize = "large" - Medium DesiredInstanceSize = "medium" - Micro DesiredInstanceSize = "micro" - N12xlarge DesiredInstanceSize = "12xlarge" - N16xlarge DesiredInstanceSize = "16xlarge" - N2xlarge DesiredInstanceSize = "2xlarge" - N4xlarge DesiredInstanceSize = "4xlarge" - N8xlarge DesiredInstanceSize = "8xlarge" - Small DesiredInstanceSize = "small" - Xlarge DesiredInstanceSize = "xlarge" + DesiredInstanceSizeLarge DesiredInstanceSize = "large" + DesiredInstanceSizeMedium DesiredInstanceSize = "medium" + DesiredInstanceSizeMicro DesiredInstanceSize = "micro" + DesiredInstanceSizeN12xlarge DesiredInstanceSize = "12xlarge" + DesiredInstanceSizeN16xlarge DesiredInstanceSize = "16xlarge" + DesiredInstanceSizeN2xlarge DesiredInstanceSize = "2xlarge" + DesiredInstanceSizeN4xlarge DesiredInstanceSize = "4xlarge" + DesiredInstanceSizeN8xlarge DesiredInstanceSize = "8xlarge" + DesiredInstanceSizeSmall DesiredInstanceSize = "small" + DesiredInstanceSizeXlarge DesiredInstanceSize = "xlarge" ) // Defines values for FunctionResponseStatus. @@ -148,6 +168,56 @@ const ( PostgresConfigResponseSessionReplicationRoleReplica PostgresConfigResponseSessionReplicationRole = "replica" ) +// Defines values for PostgresEngine. +const ( + PostgresEngineN15 PostgresEngine = "15" + PostgresEngineN17Oriole PostgresEngine = "17-oriole" +) + +// Defines values for ProjectAvailableRestoreVersionPostgresEngine. +const ( + ProjectAvailableRestoreVersionPostgresEngineN13 ProjectAvailableRestoreVersionPostgresEngine = "13" + ProjectAvailableRestoreVersionPostgresEngineN14 ProjectAvailableRestoreVersionPostgresEngine = "14" + ProjectAvailableRestoreVersionPostgresEngineN15 ProjectAvailableRestoreVersionPostgresEngine = "15" + ProjectAvailableRestoreVersionPostgresEngineN17Oriole ProjectAvailableRestoreVersionPostgresEngine = "17-oriole" +) + +// Defines values for ProjectAvailableRestoreVersionReleaseChannel. 
+const ( + ProjectAvailableRestoreVersionReleaseChannelAlpha ProjectAvailableRestoreVersionReleaseChannel = "alpha" + ProjectAvailableRestoreVersionReleaseChannelBeta ProjectAvailableRestoreVersionReleaseChannel = "beta" + ProjectAvailableRestoreVersionReleaseChannelGa ProjectAvailableRestoreVersionReleaseChannel = "ga" + ProjectAvailableRestoreVersionReleaseChannelInternal ProjectAvailableRestoreVersionReleaseChannel = "internal" + ProjectAvailableRestoreVersionReleaseChannelPreview ProjectAvailableRestoreVersionReleaseChannel = "preview" + ProjectAvailableRestoreVersionReleaseChannelWithdrawn ProjectAvailableRestoreVersionReleaseChannel = "withdrawn" +) + +// Defines values for ReleaseChannel. +const ( + ReleaseChannelAlpha ReleaseChannel = "alpha" + ReleaseChannelBeta ReleaseChannel = "beta" + ReleaseChannelGa ReleaseChannel = "ga" + ReleaseChannelInternal ReleaseChannel = "internal" + ReleaseChannelPreview ReleaseChannel = "preview" + ReleaseChannelWithdrawn ReleaseChannel = "withdrawn" +) + +// Defines values for RestoreProjectBodyDtoPostgresEngine. +const ( + RestoreProjectBodyDtoPostgresEngineN15 RestoreProjectBodyDtoPostgresEngine = "15" + RestoreProjectBodyDtoPostgresEngineN17Oriole RestoreProjectBodyDtoPostgresEngine = "17-oriole" +) + +// Defines values for RestoreProjectBodyDtoReleaseChannel. +const ( + RestoreProjectBodyDtoReleaseChannelAlpha RestoreProjectBodyDtoReleaseChannel = "alpha" + RestoreProjectBodyDtoReleaseChannelBeta RestoreProjectBodyDtoReleaseChannel = "beta" + RestoreProjectBodyDtoReleaseChannelGa RestoreProjectBodyDtoReleaseChannel = "ga" + RestoreProjectBodyDtoReleaseChannelInternal RestoreProjectBodyDtoReleaseChannel = "internal" + RestoreProjectBodyDtoReleaseChannelPreview RestoreProjectBodyDtoReleaseChannel = "preview" + RestoreProjectBodyDtoReleaseChannelWithdrawn RestoreProjectBodyDtoReleaseChannel = "withdrawn" +) + // Defines values for SetUpReadReplicaBodyReadReplicaRegion. 
const ( SetUpReadReplicaBodyReadReplicaRegionApEast1 SetUpReadReplicaBodyReadReplicaRegion = "ap-east-1" @@ -158,11 +228,14 @@ const ( SetUpReadReplicaBodyReadReplicaRegionApSoutheast2 SetUpReadReplicaBodyReadReplicaRegion = "ap-southeast-2" SetUpReadReplicaBodyReadReplicaRegionCaCentral1 SetUpReadReplicaBodyReadReplicaRegion = "ca-central-1" SetUpReadReplicaBodyReadReplicaRegionEuCentral1 SetUpReadReplicaBodyReadReplicaRegion = "eu-central-1" + SetUpReadReplicaBodyReadReplicaRegionEuCentral2 SetUpReadReplicaBodyReadReplicaRegion = "eu-central-2" + SetUpReadReplicaBodyReadReplicaRegionEuNorth1 SetUpReadReplicaBodyReadReplicaRegion = "eu-north-1" SetUpReadReplicaBodyReadReplicaRegionEuWest1 SetUpReadReplicaBodyReadReplicaRegion = "eu-west-1" SetUpReadReplicaBodyReadReplicaRegionEuWest2 SetUpReadReplicaBodyReadReplicaRegion = "eu-west-2" SetUpReadReplicaBodyReadReplicaRegionEuWest3 SetUpReadReplicaBodyReadReplicaRegion = "eu-west-3" SetUpReadReplicaBodyReadReplicaRegionSaEast1 SetUpReadReplicaBodyReadReplicaRegion = "sa-east-1" SetUpReadReplicaBodyReadReplicaRegionUsEast1 SetUpReadReplicaBodyReadReplicaRegion = "us-east-1" + SetUpReadReplicaBodyReadReplicaRegionUsEast2 SetUpReadReplicaBodyReadReplicaRegion = "us-east-2" SetUpReadReplicaBodyReadReplicaRegionUsWest1 SetUpReadReplicaBodyReadReplicaRegion = "us-west-1" SetUpReadReplicaBodyReadReplicaRegionUsWest2 SetUpReadReplicaBodyReadReplicaRegion = "us-west-2" ) @@ -261,29 +334,62 @@ const ( V1BackupStatusREMOVED V1BackupStatus = "REMOVED" ) -// Defines values for V1CreateProjectBodyPlan. +// Defines values for V1CreateProjectBodyDtoDesiredInstanceSize. 
+const ( + V1CreateProjectBodyDtoDesiredInstanceSizeLarge V1CreateProjectBodyDtoDesiredInstanceSize = "large" + V1CreateProjectBodyDtoDesiredInstanceSizeMedium V1CreateProjectBodyDtoDesiredInstanceSize = "medium" + V1CreateProjectBodyDtoDesiredInstanceSizeMicro V1CreateProjectBodyDtoDesiredInstanceSize = "micro" + V1CreateProjectBodyDtoDesiredInstanceSizeN12xlarge V1CreateProjectBodyDtoDesiredInstanceSize = "12xlarge" + V1CreateProjectBodyDtoDesiredInstanceSizeN16xlarge V1CreateProjectBodyDtoDesiredInstanceSize = "16xlarge" + V1CreateProjectBodyDtoDesiredInstanceSizeN2xlarge V1CreateProjectBodyDtoDesiredInstanceSize = "2xlarge" + V1CreateProjectBodyDtoDesiredInstanceSizeN4xlarge V1CreateProjectBodyDtoDesiredInstanceSize = "4xlarge" + V1CreateProjectBodyDtoDesiredInstanceSizeN8xlarge V1CreateProjectBodyDtoDesiredInstanceSize = "8xlarge" + V1CreateProjectBodyDtoDesiredInstanceSizeSmall V1CreateProjectBodyDtoDesiredInstanceSize = "small" + V1CreateProjectBodyDtoDesiredInstanceSizeXlarge V1CreateProjectBodyDtoDesiredInstanceSize = "xlarge" +) + +// Defines values for V1CreateProjectBodyDtoPlan. +const ( + V1CreateProjectBodyDtoPlanFree V1CreateProjectBodyDtoPlan = "free" + V1CreateProjectBodyDtoPlanPro V1CreateProjectBodyDtoPlan = "pro" +) + +// Defines values for V1CreateProjectBodyDtoPostgresEngine. +const ( + V1CreateProjectBodyDtoPostgresEngineN15 V1CreateProjectBodyDtoPostgresEngine = "15" + V1CreateProjectBodyDtoPostgresEngineN17Oriole V1CreateProjectBodyDtoPostgresEngine = "17-oriole" +) + +// Defines values for V1CreateProjectBodyDtoRegion. 
const ( - V1CreateProjectBodyPlanFree V1CreateProjectBodyPlan = "free" - V1CreateProjectBodyPlanPro V1CreateProjectBodyPlan = "pro" + V1CreateProjectBodyDtoRegionApEast1 V1CreateProjectBodyDtoRegion = "ap-east-1" + V1CreateProjectBodyDtoRegionApNortheast1 V1CreateProjectBodyDtoRegion = "ap-northeast-1" + V1CreateProjectBodyDtoRegionApNortheast2 V1CreateProjectBodyDtoRegion = "ap-northeast-2" + V1CreateProjectBodyDtoRegionApSouth1 V1CreateProjectBodyDtoRegion = "ap-south-1" + V1CreateProjectBodyDtoRegionApSoutheast1 V1CreateProjectBodyDtoRegion = "ap-southeast-1" + V1CreateProjectBodyDtoRegionApSoutheast2 V1CreateProjectBodyDtoRegion = "ap-southeast-2" + V1CreateProjectBodyDtoRegionCaCentral1 V1CreateProjectBodyDtoRegion = "ca-central-1" + V1CreateProjectBodyDtoRegionEuCentral1 V1CreateProjectBodyDtoRegion = "eu-central-1" + V1CreateProjectBodyDtoRegionEuCentral2 V1CreateProjectBodyDtoRegion = "eu-central-2" + V1CreateProjectBodyDtoRegionEuNorth1 V1CreateProjectBodyDtoRegion = "eu-north-1" + V1CreateProjectBodyDtoRegionEuWest1 V1CreateProjectBodyDtoRegion = "eu-west-1" + V1CreateProjectBodyDtoRegionEuWest2 V1CreateProjectBodyDtoRegion = "eu-west-2" + V1CreateProjectBodyDtoRegionEuWest3 V1CreateProjectBodyDtoRegion = "eu-west-3" + V1CreateProjectBodyDtoRegionSaEast1 V1CreateProjectBodyDtoRegion = "sa-east-1" + V1CreateProjectBodyDtoRegionUsEast1 V1CreateProjectBodyDtoRegion = "us-east-1" + V1CreateProjectBodyDtoRegionUsEast2 V1CreateProjectBodyDtoRegion = "us-east-2" + V1CreateProjectBodyDtoRegionUsWest1 V1CreateProjectBodyDtoRegion = "us-west-1" + V1CreateProjectBodyDtoRegionUsWest2 V1CreateProjectBodyDtoRegion = "us-west-2" ) -// Defines values for V1CreateProjectBodyRegion. +// Defines values for V1CreateProjectBodyDtoReleaseChannel. 
const ( - V1CreateProjectBodyRegionApEast1 V1CreateProjectBodyRegion = "ap-east-1" - V1CreateProjectBodyRegionApNortheast1 V1CreateProjectBodyRegion = "ap-northeast-1" - V1CreateProjectBodyRegionApNortheast2 V1CreateProjectBodyRegion = "ap-northeast-2" - V1CreateProjectBodyRegionApSouth1 V1CreateProjectBodyRegion = "ap-south-1" - V1CreateProjectBodyRegionApSoutheast1 V1CreateProjectBodyRegion = "ap-southeast-1" - V1CreateProjectBodyRegionApSoutheast2 V1CreateProjectBodyRegion = "ap-southeast-2" - V1CreateProjectBodyRegionCaCentral1 V1CreateProjectBodyRegion = "ca-central-1" - V1CreateProjectBodyRegionEuCentral1 V1CreateProjectBodyRegion = "eu-central-1" - V1CreateProjectBodyRegionEuWest1 V1CreateProjectBodyRegion = "eu-west-1" - V1CreateProjectBodyRegionEuWest2 V1CreateProjectBodyRegion = "eu-west-2" - V1CreateProjectBodyRegionEuWest3 V1CreateProjectBodyRegion = "eu-west-3" - V1CreateProjectBodyRegionSaEast1 V1CreateProjectBodyRegion = "sa-east-1" - V1CreateProjectBodyRegionUsEast1 V1CreateProjectBodyRegion = "us-east-1" - V1CreateProjectBodyRegionUsWest1 V1CreateProjectBodyRegion = "us-west-1" - V1CreateProjectBodyRegionUsWest2 V1CreateProjectBodyRegion = "us-west-2" + V1CreateProjectBodyDtoReleaseChannelAlpha V1CreateProjectBodyDtoReleaseChannel = "alpha" + V1CreateProjectBodyDtoReleaseChannelBeta V1CreateProjectBodyDtoReleaseChannel = "beta" + V1CreateProjectBodyDtoReleaseChannelGa V1CreateProjectBodyDtoReleaseChannel = "ga" + V1CreateProjectBodyDtoReleaseChannelInternal V1CreateProjectBodyDtoReleaseChannel = "internal" + V1CreateProjectBodyDtoReleaseChannelPreview V1CreateProjectBodyDtoReleaseChannel = "preview" + V1CreateProjectBodyDtoReleaseChannelWithdrawn V1CreateProjectBodyDtoReleaseChannel = "withdrawn" ) // Defines values for V1OrganizationSlugResponseOptInTags. 
@@ -309,6 +415,7 @@ const ( V1ProjectResponseStatusPAUSEFAILED V1ProjectResponseStatus = "PAUSE_FAILED" V1ProjectResponseStatusPAUSING V1ProjectResponseStatus = "PAUSING" V1ProjectResponseStatusREMOVED V1ProjectResponseStatus = "REMOVED" + V1ProjectResponseStatusRESIZING V1ProjectResponseStatus = "RESIZING" V1ProjectResponseStatusRESTARTING V1ProjectResponseStatus = "RESTARTING" V1ProjectResponseStatusRESTOREFAILED V1ProjectResponseStatus = "RESTORE_FAILED" V1ProjectResponseStatusRESTORING V1ProjectResponseStatus = "RESTORING" @@ -316,6 +423,25 @@ const ( V1ProjectResponseStatusUPGRADING V1ProjectResponseStatus = "UPGRADING" ) +// Defines values for V1ProjectWithDatabaseResponseStatus. +const ( + V1ProjectWithDatabaseResponseStatusACTIVEHEALTHY V1ProjectWithDatabaseResponseStatus = "ACTIVE_HEALTHY" + V1ProjectWithDatabaseResponseStatusACTIVEUNHEALTHY V1ProjectWithDatabaseResponseStatus = "ACTIVE_UNHEALTHY" + V1ProjectWithDatabaseResponseStatusCOMINGUP V1ProjectWithDatabaseResponseStatus = "COMING_UP" + V1ProjectWithDatabaseResponseStatusGOINGDOWN V1ProjectWithDatabaseResponseStatus = "GOING_DOWN" + V1ProjectWithDatabaseResponseStatusINACTIVE V1ProjectWithDatabaseResponseStatus = "INACTIVE" + V1ProjectWithDatabaseResponseStatusINITFAILED V1ProjectWithDatabaseResponseStatus = "INIT_FAILED" + V1ProjectWithDatabaseResponseStatusPAUSEFAILED V1ProjectWithDatabaseResponseStatus = "PAUSE_FAILED" + V1ProjectWithDatabaseResponseStatusPAUSING V1ProjectWithDatabaseResponseStatus = "PAUSING" + V1ProjectWithDatabaseResponseStatusREMOVED V1ProjectWithDatabaseResponseStatus = "REMOVED" + V1ProjectWithDatabaseResponseStatusRESIZING V1ProjectWithDatabaseResponseStatus = "RESIZING" + V1ProjectWithDatabaseResponseStatusRESTARTING V1ProjectWithDatabaseResponseStatus = "RESTARTING" + V1ProjectWithDatabaseResponseStatusRESTOREFAILED V1ProjectWithDatabaseResponseStatus = "RESTORE_FAILED" + V1ProjectWithDatabaseResponseStatusRESTORING V1ProjectWithDatabaseResponseStatus = "RESTORING" + 
V1ProjectWithDatabaseResponseStatusUNKNOWN V1ProjectWithDatabaseResponseStatus = "UNKNOWN" + V1ProjectWithDatabaseResponseStatusUPGRADING V1ProjectWithDatabaseResponseStatus = "UPGRADING" +) + // Defines values for V1ServiceHealthResponseName. const ( V1ServiceHealthResponseNameAuth V1ServiceHealthResponseName = "auth" @@ -328,9 +454,9 @@ const ( // Defines values for V1ServiceHealthResponseStatus. const ( - V1ServiceHealthResponseStatusACTIVEHEALTHY V1ServiceHealthResponseStatus = "ACTIVE_HEALTHY" - V1ServiceHealthResponseStatusCOMINGUP V1ServiceHealthResponseStatus = "COMING_UP" - V1ServiceHealthResponseStatusUNHEALTHY V1ServiceHealthResponseStatus = "UNHEALTHY" + ACTIVEHEALTHY V1ServiceHealthResponseStatus = "ACTIVE_HEALTHY" + COMINGUP V1ServiceHealthResponseStatus = "COMING_UP" + UNHEALTHY V1ServiceHealthResponseStatus = "UNHEALTHY" ) // Defines values for VanitySubdomainConfigResponseStatus. @@ -364,6 +490,18 @@ const ( V1GetServicesHealthParamsServicesStorage V1GetServicesHealthParamsServices = "storage" ) +// Defines values for V1ListAllSnippetsParamsSortBy. +const ( + InsertedAt V1ListAllSnippetsParamsSortBy = "inserted_at" + Name V1ListAllSnippetsParamsSortBy = "name" +) + +// Defines values for V1ListAllSnippetsParamsSortOrder. +const ( + Asc V1ListAllSnippetsParamsSortOrder = "asc" + Desc V1ListAllSnippetsParamsSortOrder = "desc" +) + // ActivateVanitySubdomainResponse defines model for ActivateVanitySubdomainResponse. type ActivateVanitySubdomainResponse struct { CustomDomain string `json:"custom_domain"` @@ -371,8 +509,24 @@ type ActivateVanitySubdomainResponse struct { // ApiKeyResponse defines model for ApiKeyResponse. 
type ApiKeyResponse struct { - ApiKey string `json:"api_key"` - Name string `json:"name"` + ApiKey string `json:"api_key"` + Description *string `json:"description"` + Hash *string `json:"hash"` + Id *string `json:"id"` + InsertedAt *string `json:"inserted_at"` + Name string `json:"name"` + Prefix *string `json:"prefix"` + SecretJwtTemplate *ApiKeySecretJWTTemplate `json:"secret_jwt_template"` + Type *ApiKeyResponseType `json:"type"` + UpdatedAt *string `json:"updated_at"` +} + +// ApiKeyResponseType defines model for ApiKeyResponse.Type. +type ApiKeyResponseType string + +// ApiKeySecretJWTTemplate defines model for ApiKeySecretJWTTemplate. +type ApiKeySecretJWTTemplate struct { + Role string `json:"role"` } // AttributeMapping defines model for AttributeMapping. @@ -407,182 +561,186 @@ type AttributeValue_Default struct { // AuthConfigResponse defines model for AuthConfigResponse. type AuthConfigResponse struct { - ApiMaxRequestDuration *float32 `json:"api_max_request_duration"` - DbMaxPoolSize *float32 `json:"db_max_pool_size"` - DisableSignup *bool `json:"disable_signup"` - ExternalAnonymousUsersEnabled *bool `json:"external_anonymous_users_enabled"` - ExternalAppleAdditionalClientIds *string `json:"external_apple_additional_client_ids"` - ExternalAppleClientId *string `json:"external_apple_client_id"` - ExternalAppleEnabled *bool `json:"external_apple_enabled"` - ExternalAppleSecret *string `json:"external_apple_secret"` - ExternalAzureClientId *string `json:"external_azure_client_id"` - ExternalAzureEnabled *bool `json:"external_azure_enabled"` - ExternalAzureSecret *string `json:"external_azure_secret"` - ExternalAzureUrl *string `json:"external_azure_url"` - ExternalBitbucketClientId *string `json:"external_bitbucket_client_id"` - ExternalBitbucketEnabled *bool `json:"external_bitbucket_enabled"` - ExternalBitbucketSecret *string `json:"external_bitbucket_secret"` - ExternalDiscordClientId *string `json:"external_discord_client_id"` - ExternalDiscordEnabled 
*bool `json:"external_discord_enabled"` - ExternalDiscordSecret *string `json:"external_discord_secret"` - ExternalEmailEnabled *bool `json:"external_email_enabled"` - ExternalFacebookClientId *string `json:"external_facebook_client_id"` - ExternalFacebookEnabled *bool `json:"external_facebook_enabled"` - ExternalFacebookSecret *string `json:"external_facebook_secret"` - ExternalFigmaClientId *string `json:"external_figma_client_id"` - ExternalFigmaEnabled *bool `json:"external_figma_enabled"` - ExternalFigmaSecret *string `json:"external_figma_secret"` - ExternalGithubClientId *string `json:"external_github_client_id"` - ExternalGithubEnabled *bool `json:"external_github_enabled"` - ExternalGithubSecret *string `json:"external_github_secret"` - ExternalGitlabClientId *string `json:"external_gitlab_client_id"` - ExternalGitlabEnabled *bool `json:"external_gitlab_enabled"` - ExternalGitlabSecret *string `json:"external_gitlab_secret"` - ExternalGitlabUrl *string `json:"external_gitlab_url"` - ExternalGoogleAdditionalClientIds *string `json:"external_google_additional_client_ids"` - ExternalGoogleClientId *string `json:"external_google_client_id"` - ExternalGoogleEnabled *bool `json:"external_google_enabled"` - ExternalGoogleSecret *string `json:"external_google_secret"` - ExternalGoogleSkipNonceCheck *bool `json:"external_google_skip_nonce_check"` - ExternalKakaoClientId *string `json:"external_kakao_client_id"` - ExternalKakaoEnabled *bool `json:"external_kakao_enabled"` - ExternalKakaoSecret *string `json:"external_kakao_secret"` - ExternalKeycloakClientId *string `json:"external_keycloak_client_id"` - ExternalKeycloakEnabled *bool `json:"external_keycloak_enabled"` - ExternalKeycloakSecret *string `json:"external_keycloak_secret"` - ExternalKeycloakUrl *string `json:"external_keycloak_url"` - ExternalLinkedinOidcClientId *string `json:"external_linkedin_oidc_client_id"` - ExternalLinkedinOidcEnabled *bool `json:"external_linkedin_oidc_enabled"` - 
ExternalLinkedinOidcSecret *string `json:"external_linkedin_oidc_secret"` - ExternalNotionClientId *string `json:"external_notion_client_id"` - ExternalNotionEnabled *bool `json:"external_notion_enabled"` - ExternalNotionSecret *string `json:"external_notion_secret"` - ExternalPhoneEnabled *bool `json:"external_phone_enabled"` - ExternalSlackClientId *string `json:"external_slack_client_id"` - ExternalSlackEnabled *bool `json:"external_slack_enabled"` - ExternalSlackOidcClientId *string `json:"external_slack_oidc_client_id"` - ExternalSlackOidcEnabled *bool `json:"external_slack_oidc_enabled"` - ExternalSlackOidcSecret *string `json:"external_slack_oidc_secret"` - ExternalSlackSecret *string `json:"external_slack_secret"` - ExternalSpotifyClientId *string `json:"external_spotify_client_id"` - ExternalSpotifyEnabled *bool `json:"external_spotify_enabled"` - ExternalSpotifySecret *string `json:"external_spotify_secret"` - ExternalTwitchClientId *string `json:"external_twitch_client_id"` - ExternalTwitchEnabled *bool `json:"external_twitch_enabled"` - ExternalTwitchSecret *string `json:"external_twitch_secret"` - ExternalTwitterClientId *string `json:"external_twitter_client_id"` - ExternalTwitterEnabled *bool `json:"external_twitter_enabled"` - ExternalTwitterSecret *string `json:"external_twitter_secret"` - ExternalWorkosClientId *string `json:"external_workos_client_id"` - ExternalWorkosEnabled *bool `json:"external_workos_enabled"` - ExternalWorkosSecret *string `json:"external_workos_secret"` - ExternalWorkosUrl *string `json:"external_workos_url"` - ExternalZoomClientId *string `json:"external_zoom_client_id"` - ExternalZoomEnabled *bool `json:"external_zoom_enabled"` - ExternalZoomSecret *string `json:"external_zoom_secret"` - HookCustomAccessTokenEnabled *bool `json:"hook_custom_access_token_enabled"` - HookCustomAccessTokenSecrets *string `json:"hook_custom_access_token_secrets"` - HookCustomAccessTokenUri *string `json:"hook_custom_access_token_uri"` - 
HookMfaVerificationAttemptEnabled *bool `json:"hook_mfa_verification_attempt_enabled"` - HookMfaVerificationAttemptSecrets *string `json:"hook_mfa_verification_attempt_secrets"` - HookMfaVerificationAttemptUri *string `json:"hook_mfa_verification_attempt_uri"` - HookPasswordVerificationAttemptEnabled *bool `json:"hook_password_verification_attempt_enabled"` - HookPasswordVerificationAttemptSecrets *string `json:"hook_password_verification_attempt_secrets"` - HookPasswordVerificationAttemptUri *string `json:"hook_password_verification_attempt_uri"` - HookSendEmailEnabled *bool `json:"hook_send_email_enabled"` - HookSendEmailSecrets *string `json:"hook_send_email_secrets"` - HookSendEmailUri *string `json:"hook_send_email_uri"` - HookSendSmsEnabled *bool `json:"hook_send_sms_enabled"` - HookSendSmsSecrets *string `json:"hook_send_sms_secrets"` - HookSendSmsUri *string `json:"hook_send_sms_uri"` - JwtExp *float32 `json:"jwt_exp"` - MailerAllowUnverifiedEmailSignIns *bool `json:"mailer_allow_unverified_email_sign_ins"` - MailerAutoconfirm *bool `json:"mailer_autoconfirm"` - MailerOtpExp float32 `json:"mailer_otp_exp"` - MailerOtpLength *float32 `json:"mailer_otp_length"` - MailerSecureEmailChangeEnabled *bool `json:"mailer_secure_email_change_enabled"` - MailerSubjectsConfirmation *string `json:"mailer_subjects_confirmation"` - MailerSubjectsEmailChange *string `json:"mailer_subjects_email_change"` - MailerSubjectsInvite *string `json:"mailer_subjects_invite"` - MailerSubjectsMagicLink *string `json:"mailer_subjects_magic_link"` - MailerSubjectsReauthentication *string `json:"mailer_subjects_reauthentication"` - MailerSubjectsRecovery *string `json:"mailer_subjects_recovery"` - MailerTemplatesConfirmationContent *string `json:"mailer_templates_confirmation_content"` - MailerTemplatesEmailChangeContent *string `json:"mailer_templates_email_change_content"` - MailerTemplatesInviteContent *string `json:"mailer_templates_invite_content"` - MailerTemplatesMagicLinkContent 
*string `json:"mailer_templates_magic_link_content"` - MailerTemplatesReauthenticationContent *string `json:"mailer_templates_reauthentication_content"` - MailerTemplatesRecoveryContent *string `json:"mailer_templates_recovery_content"` - MfaMaxEnrolledFactors *float32 `json:"mfa_max_enrolled_factors"` - MfaPhoneEnrollEnabled *bool `json:"mfa_phone_enroll_enabled"` - MfaPhoneMaxFrequency *float32 `json:"mfa_phone_max_frequency"` - MfaPhoneOtpLength float32 `json:"mfa_phone_otp_length"` - MfaPhoneTemplate *string `json:"mfa_phone_template"` - MfaPhoneVerifyEnabled *bool `json:"mfa_phone_verify_enabled"` - MfaTotpEnrollEnabled *bool `json:"mfa_totp_enroll_enabled"` - MfaTotpVerifyEnabled *bool `json:"mfa_totp_verify_enabled"` - PasswordHibpEnabled *bool `json:"password_hibp_enabled"` - PasswordMinLength *float32 `json:"password_min_length"` - PasswordRequiredCharacters *string `json:"password_required_characters"` - RateLimitAnonymousUsers *float32 `json:"rate_limit_anonymous_users"` - RateLimitEmailSent *float32 `json:"rate_limit_email_sent"` - RateLimitOtp *float32 `json:"rate_limit_otp"` - RateLimitSmsSent *float32 `json:"rate_limit_sms_sent"` - RateLimitTokenRefresh *float32 `json:"rate_limit_token_refresh"` - RateLimitVerify *float32 `json:"rate_limit_verify"` - RefreshTokenRotationEnabled *bool `json:"refresh_token_rotation_enabled"` - SamlEnabled *bool `json:"saml_enabled"` - SamlExternalUrl *string `json:"saml_external_url"` - SecurityCaptchaEnabled *bool `json:"security_captcha_enabled"` - SecurityCaptchaProvider *string `json:"security_captcha_provider"` - SecurityCaptchaSecret *string `json:"security_captcha_secret"` - SecurityManualLinkingEnabled *bool `json:"security_manual_linking_enabled"` - SecurityRefreshTokenReuseInterval *float32 `json:"security_refresh_token_reuse_interval"` - SecurityUpdatePasswordRequireReauthentication *bool `json:"security_update_password_require_reauthentication"` - SessionsInactivityTimeout *float32 
`json:"sessions_inactivity_timeout"` - SessionsSinglePerUser *bool `json:"sessions_single_per_user"` - SessionsTags *string `json:"sessions_tags"` - SessionsTimebox *float32 `json:"sessions_timebox"` - SiteUrl *string `json:"site_url"` - SmsAutoconfirm *bool `json:"sms_autoconfirm"` - SmsMaxFrequency *float32 `json:"sms_max_frequency"` - SmsMessagebirdAccessKey *string `json:"sms_messagebird_access_key"` - SmsMessagebirdOriginator *string `json:"sms_messagebird_originator"` - SmsOtpExp *float32 `json:"sms_otp_exp"` - SmsOtpLength float32 `json:"sms_otp_length"` - SmsProvider *string `json:"sms_provider"` - SmsTemplate *string `json:"sms_template"` - SmsTestOtp *string `json:"sms_test_otp"` - SmsTestOtpValidUntil *string `json:"sms_test_otp_valid_until"` - SmsTextlocalApiKey *string `json:"sms_textlocal_api_key"` - SmsTextlocalSender *string `json:"sms_textlocal_sender"` - SmsTwilioAccountSid *string `json:"sms_twilio_account_sid"` - SmsTwilioAuthToken *string `json:"sms_twilio_auth_token"` - SmsTwilioContentSid *string `json:"sms_twilio_content_sid"` - SmsTwilioMessageServiceSid *string `json:"sms_twilio_message_service_sid"` - SmsTwilioVerifyAccountSid *string `json:"sms_twilio_verify_account_sid"` - SmsTwilioVerifyAuthToken *string `json:"sms_twilio_verify_auth_token"` - SmsTwilioVerifyMessageServiceSid *string `json:"sms_twilio_verify_message_service_sid"` - SmsVonageApiKey *string `json:"sms_vonage_api_key"` - SmsVonageApiSecret *string `json:"sms_vonage_api_secret"` - SmsVonageFrom *string `json:"sms_vonage_from"` - SmtpAdminEmail *string `json:"smtp_admin_email"` - SmtpHost *string `json:"smtp_host"` - SmtpMaxFrequency *float32 `json:"smtp_max_frequency"` - SmtpPass *string `json:"smtp_pass"` - SmtpPort *string `json:"smtp_port"` - SmtpSenderName *string `json:"smtp_sender_name"` - SmtpUser *string `json:"smtp_user"` - UriAllowList *string `json:"uri_allow_list"` + ApiMaxRequestDuration *int `json:"api_max_request_duration"` + DbMaxPoolSize *int 
`json:"db_max_pool_size"` + DisableSignup *bool `json:"disable_signup"` + ExternalAnonymousUsersEnabled *bool `json:"external_anonymous_users_enabled"` + ExternalAppleAdditionalClientIds *string `json:"external_apple_additional_client_ids"` + ExternalAppleClientId *string `json:"external_apple_client_id"` + ExternalAppleEnabled *bool `json:"external_apple_enabled"` + ExternalAppleSecret *string `json:"external_apple_secret"` + ExternalAzureClientId *string `json:"external_azure_client_id"` + ExternalAzureEnabled *bool `json:"external_azure_enabled"` + ExternalAzureSecret *string `json:"external_azure_secret"` + ExternalAzureUrl *string `json:"external_azure_url"` + ExternalBitbucketClientId *string `json:"external_bitbucket_client_id"` + ExternalBitbucketEnabled *bool `json:"external_bitbucket_enabled"` + ExternalBitbucketSecret *string `json:"external_bitbucket_secret"` + ExternalDiscordClientId *string `json:"external_discord_client_id"` + ExternalDiscordEnabled *bool `json:"external_discord_enabled"` + ExternalDiscordSecret *string `json:"external_discord_secret"` + ExternalEmailEnabled *bool `json:"external_email_enabled"` + ExternalFacebookClientId *string `json:"external_facebook_client_id"` + ExternalFacebookEnabled *bool `json:"external_facebook_enabled"` + ExternalFacebookSecret *string `json:"external_facebook_secret"` + ExternalFigmaClientId *string `json:"external_figma_client_id"` + ExternalFigmaEnabled *bool `json:"external_figma_enabled"` + ExternalFigmaSecret *string `json:"external_figma_secret"` + ExternalGithubClientId *string `json:"external_github_client_id"` + ExternalGithubEnabled *bool `json:"external_github_enabled"` + ExternalGithubSecret *string `json:"external_github_secret"` + ExternalGitlabClientId *string `json:"external_gitlab_client_id"` + ExternalGitlabEnabled *bool `json:"external_gitlab_enabled"` + ExternalGitlabSecret *string `json:"external_gitlab_secret"` + ExternalGitlabUrl *string `json:"external_gitlab_url"` + 
ExternalGoogleAdditionalClientIds *string `json:"external_google_additional_client_ids"` + ExternalGoogleClientId *string `json:"external_google_client_id"` + ExternalGoogleEnabled *bool `json:"external_google_enabled"` + ExternalGoogleSecret *string `json:"external_google_secret"` + ExternalGoogleSkipNonceCheck *bool `json:"external_google_skip_nonce_check"` + ExternalKakaoClientId *string `json:"external_kakao_client_id"` + ExternalKakaoEnabled *bool `json:"external_kakao_enabled"` + ExternalKakaoSecret *string `json:"external_kakao_secret"` + ExternalKeycloakClientId *string `json:"external_keycloak_client_id"` + ExternalKeycloakEnabled *bool `json:"external_keycloak_enabled"` + ExternalKeycloakSecret *string `json:"external_keycloak_secret"` + ExternalKeycloakUrl *string `json:"external_keycloak_url"` + ExternalLinkedinOidcClientId *string `json:"external_linkedin_oidc_client_id"` + ExternalLinkedinOidcEnabled *bool `json:"external_linkedin_oidc_enabled"` + ExternalLinkedinOidcSecret *string `json:"external_linkedin_oidc_secret"` + ExternalNotionClientId *string `json:"external_notion_client_id"` + ExternalNotionEnabled *bool `json:"external_notion_enabled"` + ExternalNotionSecret *string `json:"external_notion_secret"` + ExternalPhoneEnabled *bool `json:"external_phone_enabled"` + ExternalSlackClientId *string `json:"external_slack_client_id"` + ExternalSlackEnabled *bool `json:"external_slack_enabled"` + ExternalSlackOidcClientId *string `json:"external_slack_oidc_client_id"` + ExternalSlackOidcEnabled *bool `json:"external_slack_oidc_enabled"` + ExternalSlackOidcSecret *string `json:"external_slack_oidc_secret"` + ExternalSlackSecret *string `json:"external_slack_secret"` + ExternalSpotifyClientId *string `json:"external_spotify_client_id"` + ExternalSpotifyEnabled *bool `json:"external_spotify_enabled"` + ExternalSpotifySecret *string `json:"external_spotify_secret"` + ExternalTwitchClientId *string `json:"external_twitch_client_id"` + ExternalTwitchEnabled 
*bool `json:"external_twitch_enabled"` + ExternalTwitchSecret *string `json:"external_twitch_secret"` + ExternalTwitterClientId *string `json:"external_twitter_client_id"` + ExternalTwitterEnabled *bool `json:"external_twitter_enabled"` + ExternalTwitterSecret *string `json:"external_twitter_secret"` + ExternalWorkosClientId *string `json:"external_workos_client_id"` + ExternalWorkosEnabled *bool `json:"external_workos_enabled"` + ExternalWorkosSecret *string `json:"external_workos_secret"` + ExternalWorkosUrl *string `json:"external_workos_url"` + ExternalZoomClientId *string `json:"external_zoom_client_id"` + ExternalZoomEnabled *bool `json:"external_zoom_enabled"` + ExternalZoomSecret *string `json:"external_zoom_secret"` + HookCustomAccessTokenEnabled *bool `json:"hook_custom_access_token_enabled"` + HookCustomAccessTokenSecrets *string `json:"hook_custom_access_token_secrets"` + HookCustomAccessTokenUri *string `json:"hook_custom_access_token_uri"` + HookMfaVerificationAttemptEnabled *bool `json:"hook_mfa_verification_attempt_enabled"` + HookMfaVerificationAttemptSecrets *string `json:"hook_mfa_verification_attempt_secrets"` + HookMfaVerificationAttemptUri *string `json:"hook_mfa_verification_attempt_uri"` + HookPasswordVerificationAttemptEnabled *bool `json:"hook_password_verification_attempt_enabled"` + HookPasswordVerificationAttemptSecrets *string `json:"hook_password_verification_attempt_secrets"` + HookPasswordVerificationAttemptUri *string `json:"hook_password_verification_attempt_uri"` + HookSendEmailEnabled *bool `json:"hook_send_email_enabled"` + HookSendEmailSecrets *string `json:"hook_send_email_secrets"` + HookSendEmailUri *string `json:"hook_send_email_uri"` + HookSendSmsEnabled *bool `json:"hook_send_sms_enabled"` + HookSendSmsSecrets *string `json:"hook_send_sms_secrets"` + HookSendSmsUri *string `json:"hook_send_sms_uri"` + JwtExp *int `json:"jwt_exp"` + MailerAllowUnverifiedEmailSignIns *bool `json:"mailer_allow_unverified_email_sign_ins"` + 
MailerAutoconfirm *bool `json:"mailer_autoconfirm"` + MailerOtpExp int `json:"mailer_otp_exp"` + MailerOtpLength *int `json:"mailer_otp_length"` + MailerSecureEmailChangeEnabled *bool `json:"mailer_secure_email_change_enabled"` + MailerSubjectsConfirmation *string `json:"mailer_subjects_confirmation"` + MailerSubjectsEmailChange *string `json:"mailer_subjects_email_change"` + MailerSubjectsInvite *string `json:"mailer_subjects_invite"` + MailerSubjectsMagicLink *string `json:"mailer_subjects_magic_link"` + MailerSubjectsReauthentication *string `json:"mailer_subjects_reauthentication"` + MailerSubjectsRecovery *string `json:"mailer_subjects_recovery"` + MailerTemplatesConfirmationContent *string `json:"mailer_templates_confirmation_content"` + MailerTemplatesEmailChangeContent *string `json:"mailer_templates_email_change_content"` + MailerTemplatesInviteContent *string `json:"mailer_templates_invite_content"` + MailerTemplatesMagicLinkContent *string `json:"mailer_templates_magic_link_content"` + MailerTemplatesReauthenticationContent *string `json:"mailer_templates_reauthentication_content"` + MailerTemplatesRecoveryContent *string `json:"mailer_templates_recovery_content"` + MfaMaxEnrolledFactors *int `json:"mfa_max_enrolled_factors"` + MfaPhoneEnrollEnabled *bool `json:"mfa_phone_enroll_enabled"` + MfaPhoneMaxFrequency *int `json:"mfa_phone_max_frequency"` + MfaPhoneOtpLength int `json:"mfa_phone_otp_length"` + MfaPhoneTemplate *string `json:"mfa_phone_template"` + MfaPhoneVerifyEnabled *bool `json:"mfa_phone_verify_enabled"` + MfaTotpEnrollEnabled *bool `json:"mfa_totp_enroll_enabled"` + MfaTotpVerifyEnabled *bool `json:"mfa_totp_verify_enabled"` + MfaWebAuthnEnrollEnabled *bool `json:"mfa_web_authn_enroll_enabled"` + MfaWebAuthnVerifyEnabled *bool `json:"mfa_web_authn_verify_enabled"` + PasswordHibpEnabled *bool `json:"password_hibp_enabled"` + PasswordMinLength *int `json:"password_min_length"` + PasswordRequiredCharacters *string 
`json:"password_required_characters"` + RateLimitAnonymousUsers *int `json:"rate_limit_anonymous_users"` + RateLimitEmailSent *int `json:"rate_limit_email_sent"` + RateLimitOtp *int `json:"rate_limit_otp"` + RateLimitSmsSent *int `json:"rate_limit_sms_sent"` + RateLimitTokenRefresh *int `json:"rate_limit_token_refresh"` + RateLimitVerify *int `json:"rate_limit_verify"` + RefreshTokenRotationEnabled *bool `json:"refresh_token_rotation_enabled"` + SamlAllowEncryptedAssertions *bool `json:"saml_allow_encrypted_assertions"` + SamlEnabled *bool `json:"saml_enabled"` + SamlExternalUrl *string `json:"saml_external_url"` + SecurityCaptchaEnabled *bool `json:"security_captcha_enabled"` + SecurityCaptchaProvider *string `json:"security_captcha_provider"` + SecurityCaptchaSecret *string `json:"security_captcha_secret"` + SecurityManualLinkingEnabled *bool `json:"security_manual_linking_enabled"` + SecurityRefreshTokenReuseInterval *int `json:"security_refresh_token_reuse_interval"` + SecurityUpdatePasswordRequireReauthentication *bool `json:"security_update_password_require_reauthentication"` + SessionsInactivityTimeout *int `json:"sessions_inactivity_timeout"` + SessionsSinglePerUser *bool `json:"sessions_single_per_user"` + SessionsTags *string `json:"sessions_tags"` + SessionsTimebox *int `json:"sessions_timebox"` + SiteUrl *string `json:"site_url"` + SmsAutoconfirm *bool `json:"sms_autoconfirm"` + SmsMaxFrequency *int `json:"sms_max_frequency"` + SmsMessagebirdAccessKey *string `json:"sms_messagebird_access_key"` + SmsMessagebirdOriginator *string `json:"sms_messagebird_originator"` + SmsOtpExp *int `json:"sms_otp_exp"` + SmsOtpLength int `json:"sms_otp_length"` + SmsProvider *string `json:"sms_provider"` + SmsTemplate *string `json:"sms_template"` + SmsTestOtp *string `json:"sms_test_otp"` + SmsTestOtpValidUntil *string `json:"sms_test_otp_valid_until"` + SmsTextlocalApiKey *string `json:"sms_textlocal_api_key"` + SmsTextlocalSender *string `json:"sms_textlocal_sender"` 
+ SmsTwilioAccountSid *string `json:"sms_twilio_account_sid"` + SmsTwilioAuthToken *string `json:"sms_twilio_auth_token"` + SmsTwilioContentSid *string `json:"sms_twilio_content_sid"` + SmsTwilioMessageServiceSid *string `json:"sms_twilio_message_service_sid"` + SmsTwilioVerifyAccountSid *string `json:"sms_twilio_verify_account_sid"` + SmsTwilioVerifyAuthToken *string `json:"sms_twilio_verify_auth_token"` + SmsTwilioVerifyMessageServiceSid *string `json:"sms_twilio_verify_message_service_sid"` + SmsVonageApiKey *string `json:"sms_vonage_api_key"` + SmsVonageApiSecret *string `json:"sms_vonage_api_secret"` + SmsVonageFrom *string `json:"sms_vonage_from"` + SmtpAdminEmail *string `json:"smtp_admin_email"` + SmtpHost *string `json:"smtp_host"` + SmtpMaxFrequency *int `json:"smtp_max_frequency"` + SmtpPass *string `json:"smtp_pass"` + SmtpPort *string `json:"smtp_port"` + SmtpSenderName *string `json:"smtp_sender_name"` + SmtpUser *string `json:"smtp_user"` + UriAllowList *string `json:"uri_allow_list"` } // AuthHealthResponse defines model for AuthHealthResponse. type AuthHealthResponse struct { - Description string `json:"description"` - Name string `json:"name"` - Version string `json:"version"` + Name AuthHealthResponseName `json:"name"` } +// AuthHealthResponseName defines model for AuthHealthResponse.Name. +type AuthHealthResponseName string + // BillingPlanId defines model for BillingPlanId. type BillingPlanId string @@ -598,32 +756,31 @@ type BranchDetailResponse struct { DbPort int `json:"db_port"` DbUser *string `json:"db_user,omitempty"` JwtSecret *string `json:"jwt_secret,omitempty"` + PostgresEngine string `json:"postgres_engine"` PostgresVersion string `json:"postgres_version"` Ref string `json:"ref"` + ReleaseChannel string `json:"release_channel"` Status BranchDetailResponseStatus `json:"status"` } // BranchDetailResponseStatus defines model for BranchDetailResponse.Status. 
type BranchDetailResponseStatus string -// BranchResetResponse defines model for BranchResetResponse. -type BranchResetResponse struct { - Message string `json:"message"` -} - // BranchResponse defines model for BranchResponse. type BranchResponse struct { - CreatedAt string `json:"created_at"` - GitBranch *string `json:"git_branch,omitempty"` - Id string `json:"id"` - IsDefault bool `json:"is_default"` + CreatedAt string `json:"created_at"` + GitBranch *string `json:"git_branch,omitempty"` + Id string `json:"id"` + IsDefault bool `json:"is_default"` + + // LatestCheckRunId This field is deprecated and will not be populated. + // Deprecated: LatestCheckRunId *float32 `json:"latest_check_run_id,omitempty"` Name string `json:"name"` ParentProjectRef string `json:"parent_project_ref"` Persistent bool `json:"persistent"` - PrNumber *float32 `json:"pr_number,omitempty"` + PrNumber *int32 `json:"pr_number,omitempty"` ProjectRef string `json:"project_ref"` - ResetOnPush bool `json:"reset_on_push"` Status BranchResponseStatus `json:"status"` UpdatedAt string `json:"updated_at"` } @@ -631,6 +788,12 @@ type BranchResponse struct { // BranchResponseStatus defines model for BranchResponse.Status. type BranchResponseStatus string +// BranchUpdateResponse defines model for BranchUpdateResponse. +type BranchUpdateResponse struct { + Message string `json:"message"` + WorkflowRunId string `json:"workflow_run_id"` +} + // CfResponse defines model for CfResponse. type CfResponse struct { Errors []map[string]interface{} `json:"errors"` @@ -639,17 +802,31 @@ type CfResponse struct { Success bool `json:"success"` } +// CreateApiKeyBody defines model for CreateApiKeyBody. +type CreateApiKeyBody struct { + Description *string `json:"description"` + SecretJwtTemplate *ApiKeySecretJWTTemplate `json:"secret_jwt_template"` + Type CreateApiKeyBodyType `json:"type"` +} + +// CreateApiKeyBodyType defines model for CreateApiKeyBody.Type. 
+type CreateApiKeyBodyType string + // CreateBranchBody defines model for CreateBranchBody. type CreateBranchBody struct { BranchName string `json:"branch_name"` DesiredInstanceSize *DesiredInstanceSize `json:"desired_instance_size,omitempty"` GitBranch *string `json:"git_branch,omitempty"` Persistent *bool `json:"persistent,omitempty"` - Region *string `json:"region,omitempty"` + + // PostgresEngine Postgres engine version. If not provided, the latest version will be used. + PostgresEngine *PostgresEngine `json:"postgres_engine,omitempty"` + Region *string `json:"region,omitempty"` + ReleaseChannel *ReleaseChannel `json:"release_channel,omitempty"` } -// CreateOrganizationBodyV1 defines model for CreateOrganizationBodyV1. -type CreateOrganizationBodyV1 struct { +// CreateOrganizationV1Dto defines model for CreateOrganizationV1Dto. +type CreateOrganizationV1Dto struct { Name string `json:"name"` } @@ -708,7 +885,7 @@ type DatabaseUpgradeStatus struct { LatestStatusAt string `json:"latest_status_at"` Progress *DatabaseUpgradeStatusProgress `json:"progress,omitempty"` Status DatabaseUpgradeStatusStatus `json:"status"` - TargetVersion float32 `json:"target_version"` + TargetVersion int `json:"target_version"` } // DatabaseUpgradeStatusError defines model for DatabaseUpgradeStatus.Error. @@ -718,7 +895,7 @@ type DatabaseUpgradeStatusError string type DatabaseUpgradeStatusProgress string // DatabaseUpgradeStatusStatus defines model for DatabaseUpgradeStatus.Status. -type DatabaseUpgradeStatusStatus float32 +type DatabaseUpgradeStatusStatus int // DatabaseUpgradeStatusResponse defines model for DatabaseUpgradeStatusResponse. type DatabaseUpgradeStatusResponse struct { @@ -747,17 +924,18 @@ type Domain struct { // FunctionResponse defines model for FunctionResponse. 
type FunctionResponse struct { - CreatedAt float32 `json:"created_at"` - EntrypointPath *string `json:"entrypoint_path,omitempty"` - Id string `json:"id"` - ImportMap *bool `json:"import_map,omitempty"` - ImportMapPath *string `json:"import_map_path,omitempty"` - Name string `json:"name"` - Slug string `json:"slug"` - Status FunctionResponseStatus `json:"status"` - UpdatedAt float32 `json:"updated_at"` - VerifyJwt *bool `json:"verify_jwt,omitempty"` - Version float32 `json:"version"` + ComputeMultiplier *float32 `json:"compute_multiplier,omitempty"` + CreatedAt int64 `json:"created_at"` + EntrypointPath *string `json:"entrypoint_path,omitempty"` + Id string `json:"id"` + ImportMap *bool `json:"import_map,omitempty"` + ImportMapPath *string `json:"import_map_path,omitempty"` + Name string `json:"name"` + Slug string `json:"slug"` + Status FunctionResponseStatus `json:"status"` + UpdatedAt int64 `json:"updated_at"` + VerifyJwt *bool `json:"verify_jwt,omitempty"` + Version int `json:"version"` } // FunctionResponseStatus defines model for FunctionResponse.Status. @@ -765,22 +943,28 @@ type FunctionResponseStatus string // FunctionSlugResponse defines model for FunctionSlugResponse. 
type FunctionSlugResponse struct { - CreatedAt float32 `json:"created_at"` - EntrypointPath *string `json:"entrypoint_path,omitempty"` - Id string `json:"id"` - ImportMap *bool `json:"import_map,omitempty"` - ImportMapPath *string `json:"import_map_path,omitempty"` - Name string `json:"name"` - Slug string `json:"slug"` - Status FunctionSlugResponseStatus `json:"status"` - UpdatedAt float32 `json:"updated_at"` - VerifyJwt *bool `json:"verify_jwt,omitempty"` - Version float32 `json:"version"` + ComputeMultiplier *float32 `json:"compute_multiplier,omitempty"` + CreatedAt int64 `json:"created_at"` + EntrypointPath *string `json:"entrypoint_path,omitempty"` + Id string `json:"id"` + ImportMap *bool `json:"import_map,omitempty"` + ImportMapPath *string `json:"import_map_path,omitempty"` + Name string `json:"name"` + Slug string `json:"slug"` + Status FunctionSlugResponseStatus `json:"status"` + UpdatedAt int64 `json:"updated_at"` + VerifyJwt *bool `json:"verify_jwt,omitempty"` + Version int `json:"version"` } // FunctionSlugResponseStatus defines model for FunctionSlugResponse.Status. type FunctionSlugResponseStatus string +// GetProjectAvailableRestoreVersionsResponse defines model for GetProjectAvailableRestoreVersionsResponse. +type GetProjectAvailableRestoreVersionsResponse struct { + AvailableVersions []ProjectAvailableRestoreVersion `json:"available_versions"` +} + // GetProviderResponse defines model for GetProviderResponse. type GetProviderResponse struct { CreatedAt *string `json:"created_at,omitempty"` @@ -820,6 +1004,13 @@ type NetworkRestrictionsResponseEntitlement string // NetworkRestrictionsResponseStatus defines model for NetworkRestrictionsResponse.Status. type NetworkRestrictionsResponseStatus string +// OAuthRevokeTokenBodyDto defines model for OAuthRevokeTokenBodyDto. 
+type OAuthRevokeTokenBodyDto struct { + ClientId openapi_types.UUID `json:"client_id"` + ClientSecret string `json:"client_secret"` + RefreshToken string `json:"refresh_token"` +} + // OAuthTokenBody defines model for OAuthTokenBody. type OAuthTokenBody struct { ClientId string `json:"client_id"` @@ -837,7 +1028,7 @@ type OAuthTokenBodyGrantType string // OAuthTokenResponse defines model for OAuthTokenResponse. type OAuthTokenResponse struct { AccessToken string `json:"access_token"` - ExpiresIn float32 `json:"expires_in"` + ExpiresIn int64 `json:"expires_in"` RefreshToken string `json:"refresh_token"` TokenType OAuthTokenResponseTokenType `json:"token_type"` } @@ -866,24 +1057,36 @@ type PgsodiumConfigResponse struct { // PostgresConfigResponse defines model for PostgresConfigResponse. type PostgresConfigResponse struct { EffectiveCacheSize *string `json:"effective_cache_size,omitempty"` + LogicalDecodingWorkMem *string `json:"logical_decoding_work_mem,omitempty"` MaintenanceWorkMem *string `json:"maintenance_work_mem,omitempty"` MaxConnections *int `json:"max_connections,omitempty"` MaxLocksPerTransaction *int `json:"max_locks_per_transaction,omitempty"` MaxParallelMaintenanceWorkers *int `json:"max_parallel_maintenance_workers,omitempty"` MaxParallelWorkers *int `json:"max_parallel_workers,omitempty"` MaxParallelWorkersPerGather *int `json:"max_parallel_workers_per_gather,omitempty"` + MaxReplicationSlots *int `json:"max_replication_slots,omitempty"` + MaxSlotWalKeepSize *string `json:"max_slot_wal_keep_size,omitempty"` MaxStandbyArchiveDelay *string `json:"max_standby_archive_delay,omitempty"` MaxStandbyStreamingDelay *string `json:"max_standby_streaming_delay,omitempty"` + MaxWalSenders *int `json:"max_wal_senders,omitempty"` + MaxWalSize *string `json:"max_wal_size,omitempty"` MaxWorkerProcesses *int `json:"max_worker_processes,omitempty"` SessionReplicationRole *PostgresConfigResponseSessionReplicationRole `json:"session_replication_role,omitempty"` 
SharedBuffers *string `json:"shared_buffers,omitempty"` StatementTimeout *string `json:"statement_timeout,omitempty"` + TrackActivityQuerySize *string `json:"track_activity_query_size,omitempty"` + TrackCommitTimestamp *bool `json:"track_commit_timestamp,omitempty"` + WalKeepSize *string `json:"wal_keep_size,omitempty"` + WalSenderTimeout *string `json:"wal_sender_timeout,omitempty"` WorkMem *string `json:"work_mem,omitempty"` } // PostgresConfigResponseSessionReplicationRole defines model for PostgresConfigResponse.SessionReplicationRole. type PostgresConfigResponseSessionReplicationRole string +// PostgresEngine Postgres engine version. If not provided, the latest version will be used. +type PostgresEngine string + // PostgrestConfigWithJWTSecretResponse defines model for PostgrestConfigWithJWTSecretResponse. type PostgrestConfigWithJWTSecretResponse struct { DbExtraSearchPath string `json:"db_extra_search_path"` @@ -895,16 +1098,30 @@ type PostgrestConfigWithJWTSecretResponse struct { MaxRows int `json:"max_rows"` } +// ProjectAvailableRestoreVersion defines model for ProjectAvailableRestoreVersion. +type ProjectAvailableRestoreVersion struct { + PostgresEngine ProjectAvailableRestoreVersionPostgresEngine `json:"postgres_engine"` + ReleaseChannel ProjectAvailableRestoreVersionReleaseChannel `json:"release_channel"` + Version string `json:"version"` +} + +// ProjectAvailableRestoreVersionPostgresEngine defines model for ProjectAvailableRestoreVersion.PostgresEngine. +type ProjectAvailableRestoreVersionPostgresEngine string + +// ProjectAvailableRestoreVersionReleaseChannel defines model for ProjectAvailableRestoreVersion.ReleaseChannel. +type ProjectAvailableRestoreVersionReleaseChannel string + // ProjectUpgradeEligibilityResponse defines model for ProjectUpgradeEligibilityResponse. 
type ProjectUpgradeEligibilityResponse struct { - CurrentAppVersion string `json:"current_app_version"` - DurationEstimateHours float32 `json:"duration_estimate_hours"` - Eligible bool `json:"eligible"` - ExtensionDependentObjects []string `json:"extension_dependent_objects"` - LatestAppVersion string `json:"latest_app_version"` - LegacyAuthCustomRoles []string `json:"legacy_auth_custom_roles"` - PotentialBreakingChanges []string `json:"potential_breaking_changes"` - TargetUpgradeVersions []ProjectVersion `json:"target_upgrade_versions"` + CurrentAppVersion string `json:"current_app_version"` + CurrentAppVersionReleaseChannel ReleaseChannel `json:"current_app_version_release_channel"` + DurationEstimateHours int `json:"duration_estimate_hours"` + Eligible bool `json:"eligible"` + ExtensionDependentObjects []string `json:"extension_dependent_objects"` + LatestAppVersion string `json:"latest_app_version"` + LegacyAuthCustomRoles []string `json:"legacy_auth_custom_roles"` + PotentialBreakingChanges []string `json:"potential_breaking_changes"` + TargetUpgradeVersions []ProjectVersion `json:"target_upgrade_versions"` } // ProjectUpgradeInitiateResponse defines model for ProjectUpgradeInitiateResponse. @@ -914,8 +1131,11 @@ type ProjectUpgradeInitiateResponse struct { // ProjectVersion defines model for ProjectVersion. type ProjectVersion struct { - AppVersion string `json:"app_version"` - PostgresVersion float32 `json:"postgres_version"` + AppVersion string `json:"app_version"` + + // PostgresVersion Postgres engine version. If not provided, the latest version will be used. + PostgresVersion PostgresEngine `json:"postgres_version"` + ReleaseChannel ReleaseChannel `json:"release_channel"` } // Provider defines model for Provider. @@ -936,11 +1156,12 @@ type ReadOnlyStatusResponse struct { // RealtimeHealthResponse defines model for RealtimeHealthResponse. 
type RealtimeHealthResponse struct { - ConnectedCluster float32 `json:"connected_cluster"` - DbConnected bool `json:"db_connected"` - Healthy bool `json:"healthy"` + ConnectedCluster int `json:"connected_cluster"` } +// ReleaseChannel defines model for ReleaseChannel. +type ReleaseChannel string + // RemoveNetworkBanRequest defines model for RemoveNetworkBanRequest. type RemoveNetworkBanRequest struct { Ipv4Addresses []string `json:"ipv4_addresses"` @@ -951,6 +1172,21 @@ type RemoveReadReplicaBody struct { DatabaseIdentifier string `json:"database_identifier"` } +// RestoreProjectBodyDto defines model for RestoreProjectBodyDto. +type RestoreProjectBodyDto struct { + // PostgresEngine Postgres engine version. If not provided, the latest version from the given release channel will be used. + PostgresEngine *RestoreProjectBodyDtoPostgresEngine `json:"postgres_engine,omitempty"` + + // ReleaseChannel Release channel version. If not provided, GeneralAvailability will be used. + ReleaseChannel *RestoreProjectBodyDtoReleaseChannel `json:"release_channel,omitempty"` +} + +// RestoreProjectBodyDtoPostgresEngine Postgres engine version. If not provided, the latest version from the given release channel will be used. +type RestoreProjectBodyDtoPostgresEngine string + +// RestoreProjectBodyDtoReleaseChannel Release channel version. If not provided, GeneralAvailability will be used. +type RestoreProjectBodyDtoReleaseChannel string + // SamlDescriptor defines model for SamlDescriptor. type SamlDescriptor struct { AttributeMapping *AttributeMapping `json:"attribute_mapping,omitempty"` @@ -984,12 +1220,13 @@ type SnippetContent struct { // SnippetList defines model for SnippetList. type SnippetList struct { - Data []SnippetMeta `json:"data"` + Cursor *string `json:"cursor,omitempty"` + Data []SnippetMeta `json:"data"` } // SnippetMeta defines model for SnippetMeta. 
type SnippetMeta struct { - Description *string `json:"description,omitempty"` + Description *string `json:"description"` Id string `json:"id"` InsertedAt string `json:"inserted_at"` Name string `json:"name"` @@ -1009,14 +1246,14 @@ type SnippetMetaVisibility string // SnippetProject defines model for SnippetProject. type SnippetProject struct { - Id float32 `json:"id"` - Name string `json:"name"` + Id int64 `json:"id"` + Name string `json:"name"` } // SnippetResponse defines model for SnippetResponse. type SnippetResponse struct { Content SnippetContent `json:"content"` - Description *string `json:"description,omitempty"` + Description *string `json:"description"` Id string `json:"id"` InsertedAt string `json:"inserted_at"` Name string `json:"name"` @@ -1036,8 +1273,8 @@ type SnippetResponseVisibility string // SnippetUser defines model for SnippetUser. type SnippetUser struct { - Id float32 `json:"id"` - Username string `json:"username"` + Id int64 `json:"id"` + Username string `json:"username"` } // SslEnforcementRequest defines model for SslEnforcementRequest. @@ -1063,6 +1300,28 @@ type SslValidation struct { ValidationRecords []ValidationRecord `json:"validation_records"` } +// StorageConfigResponse defines model for StorageConfigResponse. +type StorageConfigResponse struct { + Features StorageFeatures `json:"features"` + FileSizeLimit int64 `json:"fileSizeLimit"` +} + +// StorageFeatureImageTransformation defines model for StorageFeatureImageTransformation. +type StorageFeatureImageTransformation struct { + Enabled bool `json:"enabled"` +} + +// StorageFeatureS3Protocol defines model for StorageFeatureS3Protocol. +type StorageFeatureS3Protocol struct { + Enabled bool `json:"enabled"` +} + +// StorageFeatures defines model for StorageFeatures. 
+type StorageFeatures struct { + ImageTransformation StorageFeatureImageTransformation `json:"imageTransformation"` + S3Protocol StorageFeatureS3Protocol `json:"s3Protocol"` +} + // SubdomainAvailabilityResponse defines model for SubdomainAvailabilityResponse. type SubdomainAvailabilityResponse struct { Available bool `json:"available"` @@ -1074,12 +1333,12 @@ type SupavisorConfigResponse struct { DatabaseType SupavisorConfigResponseDatabaseType `json:"database_type"` DbHost string `json:"db_host"` DbName string `json:"db_name"` - DbPort float32 `json:"db_port"` + DbPort int `json:"db_port"` DbUser string `json:"db_user"` - DefaultPoolSize *float32 `json:"default_pool_size"` + DefaultPoolSize *int `json:"default_pool_size"` Identifier string `json:"identifier"` IsUsingScramAuth bool `json:"is_using_scram_auth"` - MaxClientConn *float32 `json:"max_client_conn"` + MaxClientConn *int `json:"max_client_conn"` PoolMode SupavisorConfigResponsePoolMode `json:"pool_mode"` } @@ -1107,10 +1366,16 @@ type TypescriptResponse struct { Types string `json:"types"` } +// UpdateApiKeyBody defines model for UpdateApiKeyBody. +type UpdateApiKeyBody struct { + Description *string `json:"description"` + SecretJwtTemplate *ApiKeySecretJWTTemplate `json:"secret_jwt_template"` +} + // UpdateAuthConfigBody defines model for UpdateAuthConfigBody. 
type UpdateAuthConfigBody struct { - ApiMaxRequestDuration *float32 `json:"api_max_request_duration,omitempty"` - DbMaxPoolSize *float32 `json:"db_max_pool_size,omitempty"` + ApiMaxRequestDuration *int `json:"api_max_request_duration,omitempty"` + DbMaxPoolSize *int `json:"db_max_pool_size,omitempty"` DisableSignup *bool `json:"disable_signup,omitempty"` ExternalAnonymousUsersEnabled *bool `json:"external_anonymous_users_enabled,omitempty"` ExternalAppleAdditionalClientIds *string `json:"external_apple_additional_client_ids,omitempty"` @@ -1197,11 +1462,11 @@ type UpdateAuthConfigBody struct { HookSendSmsEnabled *bool `json:"hook_send_sms_enabled,omitempty"` HookSendSmsSecrets *string `json:"hook_send_sms_secrets,omitempty"` HookSendSmsUri *string `json:"hook_send_sms_uri,omitempty"` - JwtExp *float32 `json:"jwt_exp,omitempty"` + JwtExp *int `json:"jwt_exp,omitempty"` MailerAllowUnverifiedEmailSignIns *bool `json:"mailer_allow_unverified_email_sign_ins,omitempty"` MailerAutoconfirm *bool `json:"mailer_autoconfirm,omitempty"` - MailerOtpExp *float32 `json:"mailer_otp_exp,omitempty"` - MailerOtpLength *float32 `json:"mailer_otp_length,omitempty"` + MailerOtpExp *int `json:"mailer_otp_exp,omitempty"` + MailerOtpLength *int `json:"mailer_otp_length,omitempty"` MailerSecureEmailChangeEnabled *bool `json:"mailer_secure_email_change_enabled,omitempty"` MailerSubjectsConfirmation *string `json:"mailer_subjects_confirmation,omitempty"` MailerSubjectsEmailChange *string `json:"mailer_subjects_email_change,omitempty"` @@ -1215,23 +1480,25 @@ type UpdateAuthConfigBody struct { MailerTemplatesMagicLinkContent *string `json:"mailer_templates_magic_link_content,omitempty"` MailerTemplatesReauthenticationContent *string `json:"mailer_templates_reauthentication_content,omitempty"` MailerTemplatesRecoveryContent *string `json:"mailer_templates_recovery_content,omitempty"` - MfaMaxEnrolledFactors *float32 `json:"mfa_max_enrolled_factors,omitempty"` + MfaMaxEnrolledFactors *int 
`json:"mfa_max_enrolled_factors,omitempty"` MfaPhoneEnrollEnabled *bool `json:"mfa_phone_enroll_enabled,omitempty"` - MfaPhoneMaxFrequency *float32 `json:"mfa_phone_max_frequency,omitempty"` - MfaPhoneOtpLength *float32 `json:"mfa_phone_otp_length,omitempty"` + MfaPhoneMaxFrequency *int `json:"mfa_phone_max_frequency,omitempty"` + MfaPhoneOtpLength *int `json:"mfa_phone_otp_length,omitempty"` MfaPhoneTemplate *string `json:"mfa_phone_template,omitempty"` MfaPhoneVerifyEnabled *bool `json:"mfa_phone_verify_enabled,omitempty"` MfaTotpEnrollEnabled *bool `json:"mfa_totp_enroll_enabled,omitempty"` MfaTotpVerifyEnabled *bool `json:"mfa_totp_verify_enabled,omitempty"` + MfaWebAuthnEnrollEnabled *bool `json:"mfa_web_authn_enroll_enabled,omitempty"` + MfaWebAuthnVerifyEnabled *bool `json:"mfa_web_authn_verify_enabled,omitempty"` PasswordHibpEnabled *bool `json:"password_hibp_enabled,omitempty"` - PasswordMinLength *float32 `json:"password_min_length,omitempty"` + PasswordMinLength *int `json:"password_min_length,omitempty"` PasswordRequiredCharacters *UpdateAuthConfigBodyPasswordRequiredCharacters `json:"password_required_characters,omitempty"` - RateLimitAnonymousUsers *float32 `json:"rate_limit_anonymous_users,omitempty"` - RateLimitEmailSent *float32 `json:"rate_limit_email_sent,omitempty"` - RateLimitOtp *float32 `json:"rate_limit_otp,omitempty"` - RateLimitSmsSent *float32 `json:"rate_limit_sms_sent,omitempty"` - RateLimitTokenRefresh *float32 `json:"rate_limit_token_refresh,omitempty"` - RateLimitVerify *float32 `json:"rate_limit_verify,omitempty"` + RateLimitAnonymousUsers *int `json:"rate_limit_anonymous_users,omitempty"` + RateLimitEmailSent *int `json:"rate_limit_email_sent,omitempty"` + RateLimitOtp *int `json:"rate_limit_otp,omitempty"` + RateLimitSmsSent *int `json:"rate_limit_sms_sent,omitempty"` + RateLimitTokenRefresh *int `json:"rate_limit_token_refresh,omitempty"` + RateLimitVerify *int `json:"rate_limit_verify,omitempty"` RefreshTokenRotationEnabled 
*bool `json:"refresh_token_rotation_enabled,omitempty"` SamlEnabled *bool `json:"saml_enabled,omitempty"` SamlExternalUrl *string `json:"saml_external_url,omitempty"` @@ -1239,19 +1506,19 @@ type UpdateAuthConfigBody struct { SecurityCaptchaProvider *string `json:"security_captcha_provider,omitempty"` SecurityCaptchaSecret *string `json:"security_captcha_secret,omitempty"` SecurityManualLinkingEnabled *bool `json:"security_manual_linking_enabled,omitempty"` - SecurityRefreshTokenReuseInterval *float32 `json:"security_refresh_token_reuse_interval,omitempty"` + SecurityRefreshTokenReuseInterval *int `json:"security_refresh_token_reuse_interval,omitempty"` SecurityUpdatePasswordRequireReauthentication *bool `json:"security_update_password_require_reauthentication,omitempty"` - SessionsInactivityTimeout *float32 `json:"sessions_inactivity_timeout,omitempty"` + SessionsInactivityTimeout *int `json:"sessions_inactivity_timeout,omitempty"` SessionsSinglePerUser *bool `json:"sessions_single_per_user,omitempty"` SessionsTags *string `json:"sessions_tags,omitempty"` - SessionsTimebox *float32 `json:"sessions_timebox,omitempty"` + SessionsTimebox *int `json:"sessions_timebox,omitempty"` SiteUrl *string `json:"site_url,omitempty"` SmsAutoconfirm *bool `json:"sms_autoconfirm,omitempty"` - SmsMaxFrequency *float32 `json:"sms_max_frequency,omitempty"` + SmsMaxFrequency *int `json:"sms_max_frequency,omitempty"` SmsMessagebirdAccessKey *string `json:"sms_messagebird_access_key,omitempty"` SmsMessagebirdOriginator *string `json:"sms_messagebird_originator,omitempty"` - SmsOtpExp *float32 `json:"sms_otp_exp,omitempty"` - SmsOtpLength *float32 `json:"sms_otp_length,omitempty"` + SmsOtpExp *int `json:"sms_otp_exp,omitempty"` + SmsOtpLength *int `json:"sms_otp_length,omitempty"` SmsProvider *string `json:"sms_provider,omitempty"` SmsTemplate *string `json:"sms_template,omitempty"` SmsTestOtp *string `json:"sms_test_otp,omitempty"` @@ -1270,7 +1537,7 @@ type UpdateAuthConfigBody struct { 
SmsVonageFrom *string `json:"sms_vonage_from,omitempty"` SmtpAdminEmail *string `json:"smtp_admin_email,omitempty"` SmtpHost *string `json:"smtp_host,omitempty"` - SmtpMaxFrequency *float32 `json:"smtp_max_frequency,omitempty"` + SmtpMaxFrequency *int `json:"smtp_max_frequency,omitempty"` SmtpPass *string `json:"smtp_pass,omitempty"` SmtpPort *string `json:"smtp_port,omitempty"` SmtpSenderName *string `json:"smtp_sender_name,omitempty"` @@ -1283,9 +1550,12 @@ type UpdateAuthConfigBodyPasswordRequiredCharacters string // UpdateBranchBody defines model for UpdateBranchBody. type UpdateBranchBody struct { - BranchName *string `json:"branch_name,omitempty"` - GitBranch *string `json:"git_branch,omitempty"` - Persistent *bool `json:"persistent,omitempty"` + BranchName *string `json:"branch_name,omitempty"` + GitBranch *string `json:"git_branch,omitempty"` + Persistent *bool `json:"persistent,omitempty"` + + // ResetOnPush This field is deprecated and will be ignored. Use v1-reset-a-branch endpoint directly instead. + // Deprecated: ResetOnPush *bool `json:"reset_on_push,omitempty"` Status *UpdateBranchBodyStatus `json:"status,omitempty"` } @@ -1316,18 +1586,28 @@ type UpdatePgsodiumConfigBody struct { // UpdatePostgresConfigBody defines model for UpdatePostgresConfigBody. 
type UpdatePostgresConfigBody struct { EffectiveCacheSize *string `json:"effective_cache_size,omitempty"` + LogicalDecodingWorkMem *string `json:"logical_decoding_work_mem,omitempty"` MaintenanceWorkMem *string `json:"maintenance_work_mem,omitempty"` MaxConnections *int `json:"max_connections,omitempty"` MaxLocksPerTransaction *int `json:"max_locks_per_transaction,omitempty"` MaxParallelMaintenanceWorkers *int `json:"max_parallel_maintenance_workers,omitempty"` MaxParallelWorkers *int `json:"max_parallel_workers,omitempty"` MaxParallelWorkersPerGather *int `json:"max_parallel_workers_per_gather,omitempty"` + MaxReplicationSlots *int `json:"max_replication_slots,omitempty"` + MaxSlotWalKeepSize *string `json:"max_slot_wal_keep_size,omitempty"` MaxStandbyArchiveDelay *string `json:"max_standby_archive_delay,omitempty"` MaxStandbyStreamingDelay *string `json:"max_standby_streaming_delay,omitempty"` + MaxWalSenders *int `json:"max_wal_senders,omitempty"` + MaxWalSize *string `json:"max_wal_size,omitempty"` MaxWorkerProcesses *int `json:"max_worker_processes,omitempty"` + RestartDatabase *bool `json:"restart_database,omitempty"` SessionReplicationRole *UpdatePostgresConfigBodySessionReplicationRole `json:"session_replication_role,omitempty"` SharedBuffers *string `json:"shared_buffers,omitempty"` StatementTimeout *string `json:"statement_timeout,omitempty"` + TrackActivityQuerySize *string `json:"track_activity_query_size,omitempty"` + TrackCommitTimestamp *bool `json:"track_commit_timestamp,omitempty"` + WalKeepSize *string `json:"wal_keep_size,omitempty"` + WalSenderTimeout *string `json:"wal_sender_timeout,omitempty"` WorkMem *string `json:"work_mem,omitempty"` } @@ -1359,6 +1639,12 @@ type UpdateProviderResponse struct { UpdatedAt *string `json:"updated_at,omitempty"` } +// UpdateStorageConfigBody defines model for UpdateStorageConfigBody. 
+type UpdateStorageConfigBody struct { + Features *StorageFeatures `json:"features,omitempty"` + FileSizeLimit *int64 `json:"fileSizeLimit,omitempty"` +} + // UpdateSupavisorConfigBody defines model for UpdateSupavisorConfigBody. type UpdateSupavisorConfigBody struct { DefaultPoolSize *int `json:"default_pool_size"` @@ -1373,7 +1659,7 @@ type UpdateSupavisorConfigBodyPoolMode string // UpdateSupavisorConfigResponse defines model for UpdateSupavisorConfigResponse. type UpdateSupavisorConfigResponse struct { - DefaultPoolSize *float32 `json:"default_pool_size"` + DefaultPoolSize *int `json:"default_pool_size"` PoolMode UpdateSupavisorConfigResponsePoolMode `json:"pool_mode"` } @@ -1382,7 +1668,36 @@ type UpdateSupavisorConfigResponsePoolMode string // UpgradeDatabaseBody defines model for UpgradeDatabaseBody. type UpgradeDatabaseBody struct { - TargetVersion float32 `json:"target_version"` + ReleaseChannel ReleaseChannel `json:"release_channel"` + TargetVersion string `json:"target_version"` +} + +// V1AnalyticsResponse defines model for V1AnalyticsResponse. +type V1AnalyticsResponse struct { + Error *V1AnalyticsResponse_Error `json:"error,omitempty"` + Result *[]map[string]interface{} `json:"result,omitempty"` +} + +// V1AnalyticsResponseError0 defines model for . +type V1AnalyticsResponseError0 struct { + Code *float32 `json:"code,omitempty"` + Errors *[]struct { + Domain *string `json:"domain,omitempty"` + Location *string `json:"location,omitempty"` + LocationType *string `json:"locationType,omitempty"` + Message *string `json:"message,omitempty"` + Reason *string `json:"reason,omitempty"` + } `json:"errors,omitempty"` + Message *string `json:"message,omitempty"` + Status *string `json:"status,omitempty"` +} + +// V1AnalyticsResponseError1 defines model for . +type V1AnalyticsResponseError1 = string + +// V1AnalyticsResponse_Error defines model for V1AnalyticsResponse.Error. 
+type V1AnalyticsResponse_Error struct { + union json.RawMessage } // V1Backup defines model for V1Backup. @@ -1406,23 +1721,24 @@ type V1BackupsResponse struct { // V1CreateFunctionBody defines model for V1CreateFunctionBody. type V1CreateFunctionBody struct { - Body string `json:"body"` - Name string `json:"name"` - Slug string `json:"slug"` - VerifyJwt *bool `json:"verify_jwt,omitempty"` + Body string `json:"body"` + ComputeMultiplier *float32 `json:"compute_multiplier,omitempty"` + Name string `json:"name"` + Slug string `json:"slug"` + VerifyJwt *bool `json:"verify_jwt,omitempty"` } -// V1CreateProjectBody defines model for V1CreateProjectBody. -type V1CreateProjectBody struct { +// V1CreateProjectBodyDto defines model for V1CreateProjectBodyDto. +type V1CreateProjectBodyDto struct { // DbPass Database password - DbPass string `json:"db_pass"` - DesiredInstanceSize *DesiredInstanceSize `json:"desired_instance_size,omitempty"` + DbPass string `json:"db_pass"` + DesiredInstanceSize *V1CreateProjectBodyDtoDesiredInstanceSize `json:"desired_instance_size,omitempty"` // KpsEnabled This field is deprecated and is ignored in this request // Deprecated: KpsEnabled *bool `json:"kps_enabled,omitempty"` - // Name Name of your project, should not contain dots + // Name Name of your project Name string `json:"name"` // OrganizationId Slug of your organization @@ -1430,26 +1746,47 @@ type V1CreateProjectBody struct { // Plan Subscription Plan is now set on organization level and is ignored in this request // Deprecated: - Plan *V1CreateProjectBodyPlan `json:"plan,omitempty"` + Plan *V1CreateProjectBodyDtoPlan `json:"plan,omitempty"` + + // PostgresEngine Postgres engine version. If not provided, the latest version will be used. 
+ PostgresEngine *V1CreateProjectBodyDtoPostgresEngine `json:"postgres_engine,omitempty"` // Region Region you want your server to reside in - Region V1CreateProjectBodyRegion `json:"region"` + Region V1CreateProjectBodyDtoRegion `json:"region"` + + // ReleaseChannel Release channel. If not provided, GA will be used. + ReleaseChannel *V1CreateProjectBodyDtoReleaseChannel `json:"release_channel,omitempty"` // TemplateUrl Template URL used to create the project from the CLI. TemplateUrl *string `json:"template_url,omitempty"` } -// V1CreateProjectBodyPlan Subscription Plan is now set on organization level and is ignored in this request -type V1CreateProjectBodyPlan string +// V1CreateProjectBodyDtoDesiredInstanceSize defines model for V1CreateProjectBodyDto.DesiredInstanceSize. +type V1CreateProjectBodyDtoDesiredInstanceSize string + +// V1CreateProjectBodyDtoPlan Subscription Plan is now set on organization level and is ignored in this request +type V1CreateProjectBodyDtoPlan string + +// V1CreateProjectBodyDtoPostgresEngine Postgres engine version. If not provided, the latest version will be used. +type V1CreateProjectBodyDtoPostgresEngine string -// V1CreateProjectBodyRegion Region you want your server to reside in -type V1CreateProjectBodyRegion string +// V1CreateProjectBodyDtoRegion Region you want your server to reside in +type V1CreateProjectBodyDtoRegion string + +// V1CreateProjectBodyDtoReleaseChannel Release channel. If not provided, GA will be used. +type V1CreateProjectBodyDtoReleaseChannel string // V1DatabaseResponse defines model for V1DatabaseResponse. 
type V1DatabaseResponse struct { // Host Database host Host string `json:"host"` + // PostgresEngine Database engine + PostgresEngine string `json:"postgres_engine"` + + // ReleaseChannel Release channel + ReleaseChannel string `json:"release_channel"` + // Version Database version Version string `json:"version"` } @@ -1465,10 +1802,11 @@ type V1OrganizationMemberResponse struct { // V1OrganizationSlugResponse defines model for V1OrganizationSlugResponse. type V1OrganizationSlugResponse struct { - Id string `json:"id"` - Name string `json:"name"` - OptInTags []V1OrganizationSlugResponseOptInTags `json:"opt_in_tags"` - Plan *BillingPlanId `json:"plan,omitempty"` + AllowedReleaseChannels []ReleaseChannel `json:"allowed_release_channels"` + Id string `json:"id"` + Name string `json:"name"` + OptInTags []V1OrganizationSlugResponseOptInTags `json:"opt_in_tags"` + Plan *BillingPlanId `json:"plan,omitempty"` } // V1OrganizationSlugResponseOptInTags defines model for V1OrganizationSlugResponse.OptInTags. @@ -1488,8 +1826,8 @@ type V1PgbouncerConfigResponsePoolMode string // V1PhysicalBackup defines model for V1PhysicalBackup. type V1PhysicalBackup struct { - EarliestPhysicalBackupDateUnix *float32 `json:"earliest_physical_backup_date_unix,omitempty"` - LatestPhysicalBackupDateUnix *float32 `json:"latest_physical_backup_date_unix,omitempty"` + EarliestPhysicalBackupDateUnix *int64 `json:"earliest_physical_backup_date_unix,omitempty"` + LatestPhysicalBackupDateUnix *int64 `json:"latest_physical_backup_date_unix,omitempty"` } // V1PostgrestConfigResponse defines model for V1PostgrestConfigResponse. @@ -1504,16 +1842,15 @@ type V1PostgrestConfigResponse struct { // V1ProjectRefResponse defines model for V1ProjectRefResponse. type V1ProjectRefResponse struct { - Id float32 `json:"id"` - Name string `json:"name"` - Ref string `json:"ref"` + Id int64 `json:"id"` + Name string `json:"name"` + Ref string `json:"ref"` } // V1ProjectResponse defines model for V1ProjectResponse. 
type V1ProjectResponse struct { // CreatedAt Creation timestamp - CreatedAt string `json:"created_at"` - Database *V1DatabaseResponse `json:"database,omitempty"` + CreatedAt string `json:"created_at"` // Id Id of your project Id string `json:"id"` @@ -1532,9 +1869,32 @@ type V1ProjectResponse struct { // V1ProjectResponseStatus defines model for V1ProjectResponse.Status. type V1ProjectResponseStatus string +// V1ProjectWithDatabaseResponse defines model for V1ProjectWithDatabaseResponse. +type V1ProjectWithDatabaseResponse struct { + // CreatedAt Creation timestamp + CreatedAt string `json:"created_at"` + Database V1DatabaseResponse `json:"database"` + + // Id Id of your project + Id string `json:"id"` + + // Name Name of your project + Name string `json:"name"` + + // OrganizationId Slug of your organization + OrganizationId string `json:"organization_id"` + + // Region Region of your project + Region string `json:"region"` + Status V1ProjectWithDatabaseResponseStatus `json:"status"` +} + +// V1ProjectWithDatabaseResponseStatus defines model for V1ProjectWithDatabaseResponse.Status. +type V1ProjectWithDatabaseResponseStatus string + // V1RestorePitrBody defines model for V1RestorePitrBody. type V1RestorePitrBody struct { - RecoveryTimeTargetUnix float32 `json:"recovery_time_target_unix"` + RecoveryTimeTargetUnix int64 `json:"recovery_time_target_unix"` } // V1RunQueryBody defines model for V1RunQueryBody. @@ -1574,9 +1934,10 @@ type V1StorageBucketResponse struct { // V1UpdateFunctionBody defines model for V1UpdateFunctionBody. type V1UpdateFunctionBody struct { - Body *string `json:"body,omitempty"` - Name *string `json:"name,omitempty"` - VerifyJwt *bool `json:"verify_jwt,omitempty"` + Body *string `json:"body,omitempty"` + ComputeMultiplier *float32 `json:"compute_multiplier,omitempty"` + Name *string `json:"name,omitempty"` + VerifyJwt *bool `json:"verify_jwt,omitempty"` } // ValidationError defines model for ValidationError. 
@@ -1622,24 +1983,58 @@ type V1AuthorizeUserParamsResponseType string // V1AuthorizeUserParamsCodeChallengeMethod defines parameters for V1AuthorizeUser. type V1AuthorizeUserParamsCodeChallengeMethod string -// CreateFunctionParams defines parameters for CreateFunction. -type CreateFunctionParams struct { - Slug *string `form:"slug,omitempty" json:"slug,omitempty"` - Name *string `form:"name,omitempty" json:"name,omitempty"` - VerifyJwt *bool `form:"verify_jwt,omitempty" json:"verify_jwt,omitempty"` - ImportMap *bool `form:"import_map,omitempty" json:"import_map,omitempty"` - EntrypointPath *string `form:"entrypoint_path,omitempty" json:"entrypoint_path,omitempty"` - ImportMapPath *string `form:"import_map_path,omitempty" json:"import_map_path,omitempty"` +// GetLogsParams defines parameters for GetLogs. +type GetLogsParams struct { + IsoTimestampEnd *string `form:"iso_timestamp_end,omitempty" json:"iso_timestamp_end,omitempty"` + IsoTimestampStart *string `form:"iso_timestamp_start,omitempty" json:"iso_timestamp_start,omitempty"` + Sql *string `form:"sql,omitempty" json:"sql,omitempty"` +} + +// V1GetProjectApiKeysParams defines parameters for V1GetProjectApiKeys. +type V1GetProjectApiKeysParams struct { + Reveal bool `form:"reveal" json:"reveal"` +} + +// CreateApiKeyParams defines parameters for CreateApiKey. +type CreateApiKeyParams struct { + Reveal bool `form:"reveal" json:"reveal"` +} + +// DeleteApiKeyParams defines parameters for DeleteApiKey. +type DeleteApiKeyParams struct { + Reveal bool `form:"reveal" json:"reveal"` +} + +// GetApiKeyParams defines parameters for GetApiKey. +type GetApiKeyParams struct { + Reveal bool `form:"reveal" json:"reveal"` +} + +// UpdateApiKeyParams defines parameters for UpdateApiKey. +type UpdateApiKeyParams struct { + Reveal bool `form:"reveal" json:"reveal"` +} + +// V1CreateAFunctionParams defines parameters for V1CreateAFunction. 
+type V1CreateAFunctionParams struct { + Slug *string `form:"slug,omitempty" json:"slug,omitempty"` + Name *string `form:"name,omitempty" json:"name,omitempty"` + VerifyJwt *bool `form:"verify_jwt,omitempty" json:"verify_jwt,omitempty"` + ImportMap *bool `form:"import_map,omitempty" json:"import_map,omitempty"` + EntrypointPath *string `form:"entrypoint_path,omitempty" json:"entrypoint_path,omitempty"` + ImportMapPath *string `form:"import_map_path,omitempty" json:"import_map_path,omitempty"` + ComputeMultiplier *float32 `form:"compute_multiplier,omitempty" json:"compute_multiplier,omitempty"` } // V1UpdateAFunctionParams defines parameters for V1UpdateAFunction. type V1UpdateAFunctionParams struct { - Slug *string `form:"slug,omitempty" json:"slug,omitempty"` - Name *string `form:"name,omitempty" json:"name,omitempty"` - VerifyJwt *bool `form:"verify_jwt,omitempty" json:"verify_jwt,omitempty"` - ImportMap *bool `form:"import_map,omitempty" json:"import_map,omitempty"` - EntrypointPath *string `form:"entrypoint_path,omitempty" json:"entrypoint_path,omitempty"` - ImportMapPath *string `form:"import_map_path,omitempty" json:"import_map_path,omitempty"` + Slug *string `form:"slug,omitempty" json:"slug,omitempty"` + Name *string `form:"name,omitempty" json:"name,omitempty"` + VerifyJwt *bool `form:"verify_jwt,omitempty" json:"verify_jwt,omitempty"` + ImportMap *bool `form:"import_map,omitempty" json:"import_map,omitempty"` + EntrypointPath *string `form:"entrypoint_path,omitempty" json:"entrypoint_path,omitempty"` + ImportMapPath *string `form:"import_map_path,omitempty" json:"import_map_path,omitempty"` + ComputeMultiplier *float32 `form:"compute_multiplier,omitempty" json:"compute_multiplier,omitempty"` } // V1GetServicesHealthParams defines parameters for V1GetServicesHealth. 
@@ -1662,22 +2057,46 @@ type V1GenerateTypescriptTypesParams struct { IncludedSchemas *string `form:"included_schemas,omitempty" json:"included_schemas,omitempty"` } +// V1GetPostgresUpgradeStatusParams defines parameters for V1GetPostgresUpgradeStatus. +type V1GetPostgresUpgradeStatusParams struct { + TrackingId *string `form:"tracking_id,omitempty" json:"tracking_id,omitempty"` +} + // V1ListAllSnippetsParams defines parameters for V1ListAllSnippets. type V1ListAllSnippetsParams struct { - ProjectRef *string `form:"project_ref,omitempty" json:"project_ref,omitempty"` + Cursor *string `form:"cursor,omitempty" json:"cursor,omitempty"` + Limit *string `form:"limit,omitempty" json:"limit,omitempty"` + SortBy *V1ListAllSnippetsParamsSortBy `form:"sort_by,omitempty" json:"sort_by,omitempty"` + SortOrder *V1ListAllSnippetsParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` + ProjectRef *string `form:"project_ref,omitempty" json:"project_ref,omitempty"` } +// V1ListAllSnippetsParamsSortBy defines parameters for V1ListAllSnippets. +type V1ListAllSnippetsParamsSortBy string + +// V1ListAllSnippetsParamsSortOrder defines parameters for V1ListAllSnippets. +type V1ListAllSnippetsParamsSortOrder string + // V1UpdateABranchConfigJSONRequestBody defines body for V1UpdateABranchConfig for application/json ContentType. type V1UpdateABranchConfigJSONRequestBody = UpdateBranchBody +// V1RevokeTokenJSONRequestBody defines body for V1RevokeToken for application/json ContentType. +type V1RevokeTokenJSONRequestBody = OAuthRevokeTokenBodyDto + // V1ExchangeOauthTokenFormdataRequestBody defines body for V1ExchangeOauthToken for application/x-www-form-urlencoded ContentType. type V1ExchangeOauthTokenFormdataRequestBody = OAuthTokenBody // V1CreateAnOrganizationJSONRequestBody defines body for V1CreateAnOrganization for application/json ContentType. 
-type V1CreateAnOrganizationJSONRequestBody = CreateOrganizationBodyV1 +type V1CreateAnOrganizationJSONRequestBody = CreateOrganizationV1Dto // V1CreateAProjectJSONRequestBody defines body for V1CreateAProject for application/json ContentType. -type V1CreateAProjectJSONRequestBody = V1CreateProjectBody +type V1CreateAProjectJSONRequestBody = V1CreateProjectBodyDto + +// CreateApiKeyJSONRequestBody defines body for CreateApiKey for application/json ContentType. +type CreateApiKeyJSONRequestBody = CreateApiKeyBody + +// UpdateApiKeyJSONRequestBody defines body for UpdateApiKey for application/json ContentType. +type UpdateApiKeyJSONRequestBody = UpdateApiKeyBody // V1CreateABranchJSONRequestBody defines body for V1CreateABranch for application/json ContentType. type V1CreateABranchJSONRequestBody = CreateBranchBody @@ -1700,6 +2119,9 @@ type V1UpdateSupavisorConfigJSONRequestBody = UpdateSupavisorConfigBody // V1UpdatePostgresConfigJSONRequestBody defines body for V1UpdatePostgresConfig for application/json ContentType. type V1UpdatePostgresConfigJSONRequestBody = UpdatePostgresConfigBody +// V1UpdateStorageConfigJSONRequestBody defines body for V1UpdateStorageConfig for application/json ContentType. +type V1UpdateStorageConfigJSONRequestBody = UpdateStorageConfigBody + // V1UpdateHostnameConfigJSONRequestBody defines body for V1UpdateHostnameConfig for application/json ContentType. type V1UpdateHostnameConfigJSONRequestBody = UpdateCustomHostnameBody @@ -1709,8 +2131,8 @@ type V1RestorePitrBackupJSONRequestBody = V1RestorePitrBody // V1RunAQueryJSONRequestBody defines body for V1RunAQuery for application/json ContentType. type V1RunAQueryJSONRequestBody = V1RunQueryBody -// CreateFunctionJSONRequestBody defines body for CreateFunction for application/json ContentType. -type CreateFunctionJSONRequestBody = V1CreateFunctionBody +// V1CreateAFunctionJSONRequestBody defines body for V1CreateAFunction for application/json ContentType. 
+type V1CreateAFunctionJSONRequestBody = V1CreateFunctionBody // V1UpdateAFunctionJSONRequestBody defines body for V1UpdateAFunction for application/json ContentType. type V1UpdateAFunctionJSONRequestBody = V1UpdateFunctionBody @@ -1733,6 +2155,9 @@ type V1RemoveAReadReplicaJSONRequestBody = RemoveReadReplicaBody // V1SetupAReadReplicaJSONRequestBody defines body for V1SetupAReadReplica for application/json ContentType. type V1SetupAReadReplicaJSONRequestBody = SetUpReadReplicaBody +// V1RestoreAProjectJSONRequestBody defines body for V1RestoreAProject for application/json ContentType. +type V1RestoreAProjectJSONRequestBody = RestoreProjectBodyDto + // V1BulkDeleteSecretsJSONRequestBody defines body for V1BulkDeleteSecrets for application/json ContentType. type V1BulkDeleteSecretsJSONRequestBody = V1BulkDeleteSecretsJSONBody @@ -1865,6 +2290,68 @@ func (t *AttributeValue_Default) UnmarshalJSON(b []byte) error { return err } +// AsV1AnalyticsResponseError0 returns the union data inside the V1AnalyticsResponse_Error as a V1AnalyticsResponseError0 +func (t V1AnalyticsResponse_Error) AsV1AnalyticsResponseError0() (V1AnalyticsResponseError0, error) { + var body V1AnalyticsResponseError0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromV1AnalyticsResponseError0 overwrites any union data inside the V1AnalyticsResponse_Error as the provided V1AnalyticsResponseError0 +func (t *V1AnalyticsResponse_Error) FromV1AnalyticsResponseError0(v V1AnalyticsResponseError0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeV1AnalyticsResponseError0 performs a merge with any union data inside the V1AnalyticsResponse_Error, using the provided V1AnalyticsResponseError0 +func (t *V1AnalyticsResponse_Error) MergeV1AnalyticsResponseError0(v V1AnalyticsResponseError0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// 
AsV1AnalyticsResponseError1 returns the union data inside the V1AnalyticsResponse_Error as a V1AnalyticsResponseError1 +func (t V1AnalyticsResponse_Error) AsV1AnalyticsResponseError1() (V1AnalyticsResponseError1, error) { + var body V1AnalyticsResponseError1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromV1AnalyticsResponseError1 overwrites any union data inside the V1AnalyticsResponse_Error as the provided V1AnalyticsResponseError1 +func (t *V1AnalyticsResponse_Error) FromV1AnalyticsResponseError1(v V1AnalyticsResponseError1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeV1AnalyticsResponseError1 performs a merge with any union data inside the V1AnalyticsResponse_Error, using the provided V1AnalyticsResponseError1 +func (t *V1AnalyticsResponse_Error) MergeV1AnalyticsResponseError1(v V1AnalyticsResponseError1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t V1AnalyticsResponse_Error) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *V1AnalyticsResponse_Error) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + // AsAuthHealthResponse returns the union data inside the V1ServiceHealthResponse_Info as a AuthHealthResponse func (t V1ServiceHealthResponse_Info) AsAuthHealthResponse() (AuthHealthResponse, error) { var body AuthHealthResponse diff --git a/pkg/cast/cast.go b/pkg/cast/cast.go new file mode 100644 index 000000000..89840c895 --- /dev/null +++ b/pkg/cast/cast.go @@ -0,0 +1,52 @@ +package cast + +import "math" + +// UintToInt converts a uint to an int, handling potential overflow +func UintToInt(value uint) int { + if value <= math.MaxInt { + return int(value) + } + return math.MaxInt +} + +// UIntToUInt16 converts a uint to an uint16, handling potential overflow +func UIntToUInt16(value uint) uint16 { + if value 
<= math.MaxUint16 { + return uint16(value) + } + return math.MaxUint16 +} + +// IntToUint converts an int to a uint, handling negative values +func IntToUint(value int) uint { + if value < 0 { + return 0 + } + return uint(value) +} + +func UintToIntPtr(value *uint) *int { + if value == nil { + return nil + } + return Ptr(UintToInt(*value)) +} + +func IntToUintPtr(value *int) *uint { + if value == nil { + return nil + } + return Ptr(IntToUint(*value)) +} + +func Ptr[T any](v T) *T { + return &v +} + +func Val[T any](v *T, def T) T { + if v == nil { + return def + } + return *v +} diff --git a/pkg/config/api.go b/pkg/config/api.go new file mode 100644 index 000000000..ec7c4f86d --- /dev/null +++ b/pkg/config/api.go @@ -0,0 +1,93 @@ +package config + +import ( + "strings" + + v1API "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" + "github.com/supabase/cli/pkg/diff" +) + +type ( + api struct { + Enabled bool `toml:"enabled"` + Schemas []string `toml:"schemas"` + ExtraSearchPath []string `toml:"extra_search_path"` + MaxRows uint `toml:"max_rows"` + // Local only config + Image string `toml:"-"` + KongImage string `toml:"-"` + Port uint16 `toml:"port"` + Tls tlsKong `toml:"tls"` + // TODO: replace [auth|studio].api_url + ExternalUrl string `toml:"external_url"` + } + + tlsKong struct { + Enabled bool `toml:"enabled"` + } +) + +func (a *api) ToUpdatePostgrestConfigBody() v1API.UpdatePostgrestConfigBody { + body := v1API.UpdatePostgrestConfigBody{} + + // When the api is disabled, remote side it just set the dbSchema to an empty value + if !a.Enabled { + body.DbSchema = cast.Ptr("") + return body + } + + // Convert Schemas to a comma-separated string + if len(a.Schemas) > 0 { + schemas := strings.Join(a.Schemas, ",") + body.DbSchema = &schemas + } + + // Convert ExtraSearchPath to a comma-separated string + body.DbExtraSearchPath = cast.Ptr(strings.Join(a.ExtraSearchPath, ",")) + + // Convert MaxRows to int pointer + if a.MaxRows > 0 { + body.MaxRows 
= cast.Ptr(cast.UintToInt(a.MaxRows)) + } + + // Note: DbPool is not present in the Api struct, so it's not set here + return body +} + +func (a *api) FromRemoteApiConfig(remoteConfig v1API.PostgrestConfigWithJWTSecretResponse) { + if a.Enabled = len(remoteConfig.DbSchema) > 0; !a.Enabled { + return + } + + // Update Schemas if present in remoteConfig + a.Schemas = strToArr(remoteConfig.DbSchema) + // TODO: use slices.Map when upgrade go version + for i, schema := range a.Schemas { + a.Schemas[i] = strings.TrimSpace(schema) + } + + // Update ExtraSearchPath if present in remoteConfig + a.ExtraSearchPath = strToArr(remoteConfig.DbExtraSearchPath) + for i, path := range a.ExtraSearchPath { + a.ExtraSearchPath[i] = strings.TrimSpace(path) + } + + // Update MaxRows if present in remoteConfig + a.MaxRows = cast.IntToUint(remoteConfig.MaxRows) +} + +func (a *api) DiffWithRemote(remoteConfig v1API.PostgrestConfigWithJWTSecretResponse) ([]byte, error) { + copy := *a + // Convert the config values into easily comparable remoteConfig values + currentValue, err := ToTomlBytes(copy) + if err != nil { + return nil, err + } + copy.FromRemoteApiConfig(remoteConfig) + remoteCompare, err := ToTomlBytes(copy) + if err != nil { + return nil, err + } + return diff.Diff("remote[api]", remoteCompare, "local[api]", currentValue), nil +} diff --git a/pkg/config/api_test.go b/pkg/config/api_test.go new file mode 100644 index 000000000..92859c671 --- /dev/null +++ b/pkg/config/api_test.go @@ -0,0 +1,136 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + v1API "github.com/supabase/cli/pkg/api" +) + +func TestApiToUpdatePostgrestConfigBody(t *testing.T) { + t.Run("converts all fields correctly", func(t *testing.T) { + api := &api{ + Enabled: true, + Schemas: []string{"public", "private"}, + ExtraSearchPath: []string{"extensions", "public"}, + MaxRows: 1000, + } + + body := api.ToUpdatePostgrestConfigBody() + + assert.Equal(t, "public,private", 
*body.DbSchema) + assert.Equal(t, "extensions,public", *body.DbExtraSearchPath) + assert.Equal(t, 1000, *body.MaxRows) + }) + + t.Run("handles empty fields", func(t *testing.T) { + api := &api{} + + body := api.ToUpdatePostgrestConfigBody() + + // remote api will be false by default, leading to an empty schema on api side + assert.Equal(t, "", *body.DbSchema) + }) +} + +func TestApiDiff(t *testing.T) { + t.Run("detects differences", func(t *testing.T) { + api := &api{ + Enabled: true, + Schemas: []string{"public", "private"}, + ExtraSearchPath: []string{"extensions", "public"}, + MaxRows: 1000, + } + + remoteConfig := v1API.PostgrestConfigWithJWTSecretResponse{ + DbSchema: "public", + DbExtraSearchPath: "public", + MaxRows: 500, + } + + diff, err := api.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assertSnapshotEqual(t, diff) + }) + + t.Run("handles no differences", func(t *testing.T) { + api := &api{ + Enabled: true, + Schemas: []string{"public"}, + ExtraSearchPath: []string{"public"}, + MaxRows: 500, + } + + remoteConfig := v1API.PostgrestConfigWithJWTSecretResponse{ + DbSchema: "public", + DbExtraSearchPath: "public", + MaxRows: 500, + } + + diff, err := api.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assert.Empty(t, diff) + }) + + t.Run("handles multiple schemas and search paths with spaces", func(t *testing.T) { + api := &api{ + Enabled: true, + Schemas: []string{"public", "private"}, + ExtraSearchPath: []string{"extensions", "public"}, + MaxRows: 500, + } + + remoteConfig := v1API.PostgrestConfigWithJWTSecretResponse{ + DbSchema: "public, private", + DbExtraSearchPath: "extensions, public", + MaxRows: 500, + } + + diff, err := api.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assert.Empty(t, diff) + }) + + t.Run("handles api disabled on remote side", func(t *testing.T) { + api := &api{ + Enabled: true, + Schemas: []string{"public", "private"}, + ExtraSearchPath: []string{"extensions", "public"}, + MaxRows: 500, + } + + 
remoteConfig := v1API.PostgrestConfigWithJWTSecretResponse{ + DbSchema: "", + DbExtraSearchPath: "", + MaxRows: 0, + } + + diff, err := api.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assertSnapshotEqual(t, diff) + }) + + t.Run("handles api disabled on local side", func(t *testing.T) { + api := &api{ + Enabled: false, + Schemas: []string{"public"}, + ExtraSearchPath: []string{"public"}, + MaxRows: 500, + } + + remoteConfig := v1API.PostgrestConfigWithJWTSecretResponse{ + DbSchema: "public", + DbExtraSearchPath: "public", + MaxRows: 500, + } + + diff, err := api.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assertSnapshotEqual(t, diff) + }) +} diff --git a/pkg/config/auth.go b/pkg/config/auth.go new file mode 100644 index 000000000..33b6f9ea2 --- /dev/null +++ b/pkg/config/auth.go @@ -0,0 +1,1045 @@ +package config + +import ( + "strconv" + "strings" + "time" + + v1API "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" + "github.com/supabase/cli/pkg/diff" +) + +type PasswordRequirements string + +const ( + NoRequirements PasswordRequirements = "" + LettersDigits PasswordRequirements = "letters_digits" + LowerUpperLettersDigits PasswordRequirements = "lower_upper_letters_digits" + LowerUpperLettersDigitsSymbols PasswordRequirements = "lower_upper_letters_digits_symbols" +) + +func (r PasswordRequirements) ToChar() v1API.UpdateAuthConfigBodyPasswordRequiredCharacters { + switch r { + case LettersDigits: + return v1API.AbcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 + case LowerUpperLettersDigits: + return v1API.AbcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567891 + case LowerUpperLettersDigitsSymbols: + return v1API.AbcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567892 + } + return v1API.Empty +} + +func NewPasswordRequirement(c v1API.UpdateAuthConfigBodyPasswordRequiredCharacters) PasswordRequirements { + switch c { + case 
v1API.AbcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789: + return LettersDigits + case v1API.AbcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567891: + return LowerUpperLettersDigits + case v1API.AbcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567892: + return LowerUpperLettersDigitsSymbols + } + return NoRequirements +} + +type ( + auth struct { + Enabled bool `toml:"enabled"` + Image string `toml:"-"` + + SiteUrl string `toml:"site_url" mapstructure:"site_url"` + AdditionalRedirectUrls []string `toml:"additional_redirect_urls"` + JwtExpiry uint `toml:"jwt_expiry"` + EnableRefreshTokenRotation bool `toml:"enable_refresh_token_rotation"` + RefreshTokenReuseInterval uint `toml:"refresh_token_reuse_interval"` + EnableManualLinking bool `toml:"enable_manual_linking"` + EnableSignup bool `toml:"enable_signup"` + EnableAnonymousSignIns bool `toml:"enable_anonymous_sign_ins"` + MinimumPasswordLength uint `toml:"minimum_password_length"` + PasswordRequirements PasswordRequirements `toml:"password_requirements"` + + Hook hook `toml:"hook"` + MFA mfa `toml:"mfa"` + Sessions sessions `toml:"sessions"` + Email email `toml:"email"` + Sms sms `toml:"sms"` + External external `toml:"external"` + + // Custom secrets can be injected from .env file + JwtSecret string `toml:"-" mapstructure:"jwt_secret"` + AnonKey string `toml:"-" mapstructure:"anon_key"` + ServiceRoleKey string `toml:"-" mapstructure:"service_role_key"` + + ThirdParty thirdParty `toml:"third_party"` + } + + external map[string]provider + + thirdParty struct { + Firebase tpaFirebase `toml:"firebase"` + Auth0 tpaAuth0 `toml:"auth0"` + Cognito tpaCognito `toml:"aws_cognito"` + } + + tpaFirebase struct { + Enabled bool `toml:"enabled"` + + ProjectID string `toml:"project_id"` + } + + tpaAuth0 struct { + Enabled bool `toml:"enabled"` + + Tenant string `toml:"tenant"` + TenantRegion string `toml:"tenant_region"` + } + + tpaCognito struct { + Enabled bool `toml:"enabled"` + + UserPoolID 
string `toml:"user_pool_id"` + UserPoolRegion string `toml:"user_pool_region"` + } + + email struct { + EnableSignup bool `toml:"enable_signup"` + DoubleConfirmChanges bool `toml:"double_confirm_changes"` + EnableConfirmations bool `toml:"enable_confirmations"` + SecurePasswordChange bool `toml:"secure_password_change"` + Template map[string]emailTemplate `toml:"template"` + Smtp *smtp `toml:"smtp"` + MaxFrequency time.Duration `toml:"max_frequency"` + OtpLength uint `toml:"otp_length"` + OtpExpiry uint `toml:"otp_expiry"` + } + + smtp struct { + Enabled *bool `toml:"enabled"` + Host string `toml:"host"` + Port uint16 `toml:"port"` + User string `toml:"user"` + Pass Secret `toml:"pass"` + AdminEmail string `toml:"admin_email"` + SenderName string `toml:"sender_name"` + } + + emailTemplate struct { + Subject *string `toml:"subject"` + Content *string `toml:"content"` + // Only content path is accepted in config.toml + ContentPath string `toml:"content_path"` + } + + sms struct { + EnableSignup bool `toml:"enable_signup"` + EnableConfirmations bool `toml:"enable_confirmations"` + Template string `toml:"template"` + Twilio twilioConfig `toml:"twilio" mapstructure:"twilio"` + TwilioVerify twilioConfig `toml:"twilio_verify" mapstructure:"twilio_verify"` + Messagebird messagebirdConfig `toml:"messagebird" mapstructure:"messagebird"` + Textlocal textlocalConfig `toml:"textlocal" mapstructure:"textlocal"` + Vonage vonageConfig `toml:"vonage" mapstructure:"vonage"` + TestOTP map[string]string `toml:"test_otp"` + MaxFrequency time.Duration `toml:"max_frequency"` + } + + hook struct { + MFAVerificationAttempt *hookConfig `toml:"mfa_verification_attempt"` + PasswordVerificationAttempt *hookConfig `toml:"password_verification_attempt"` + CustomAccessToken *hookConfig `toml:"custom_access_token"` + SendSMS *hookConfig `toml:"send_sms"` + SendEmail *hookConfig `toml:"send_email"` + } + + factorTypeConfiguration struct { + EnrollEnabled bool `toml:"enroll_enabled"` + VerifyEnabled 
bool `toml:"verify_enabled"` + } + + phoneFactorTypeConfiguration struct { + factorTypeConfiguration + OtpLength uint `toml:"otp_length"` + Template string `toml:"template"` + MaxFrequency time.Duration `toml:"max_frequency"` + } + + mfa struct { + TOTP factorTypeConfiguration `toml:"totp"` + Phone phoneFactorTypeConfiguration `toml:"phone"` + WebAuthn factorTypeConfiguration `toml:"web_authn"` + MaxEnrolledFactors uint `toml:"max_enrolled_factors"` + } + + hookConfig struct { + Enabled bool `toml:"enabled"` + URI string `toml:"uri"` + Secrets Secret `toml:"secrets"` + } + + sessions struct { + Timebox time.Duration `toml:"timebox"` + InactivityTimeout time.Duration `toml:"inactivity_timeout"` + } + + twilioConfig struct { + Enabled bool `toml:"enabled"` + AccountSid string `toml:"account_sid"` + MessageServiceSid string `toml:"message_service_sid"` + AuthToken Secret `toml:"auth_token" mapstructure:"auth_token"` + } + + messagebirdConfig struct { + Enabled bool `toml:"enabled"` + Originator string `toml:"originator"` + AccessKey Secret `toml:"access_key" mapstructure:"access_key"` + } + + textlocalConfig struct { + Enabled bool `toml:"enabled"` + Sender string `toml:"sender"` + ApiKey Secret `toml:"api_key" mapstructure:"api_key"` + } + + vonageConfig struct { + Enabled bool `toml:"enabled"` + From string `toml:"from"` + ApiKey string `toml:"api_key" mapstructure:"api_key"` + ApiSecret Secret `toml:"api_secret" mapstructure:"api_secret"` + } + + provider struct { + Enabled bool `toml:"enabled"` + ClientId string `toml:"client_id"` + Secret Secret `toml:"secret"` + Url string `toml:"url"` + RedirectUri string `toml:"redirect_uri"` + SkipNonceCheck bool `toml:"skip_nonce_check"` + } +) + +func (a *auth) ToUpdateAuthConfigBody() v1API.UpdateAuthConfigBody { + body := v1API.UpdateAuthConfigBody{ + SiteUrl: &a.SiteUrl, + UriAllowList: cast.Ptr(strings.Join(a.AdditionalRedirectUrls, ",")), + JwtExp: cast.UintToIntPtr(&a.JwtExpiry), + RefreshTokenRotationEnabled: 
&a.EnableRefreshTokenRotation, + SecurityRefreshTokenReuseInterval: cast.UintToIntPtr(&a.RefreshTokenReuseInterval), + SecurityManualLinkingEnabled: &a.EnableManualLinking, + DisableSignup: cast.Ptr(!a.EnableSignup), + ExternalAnonymousUsersEnabled: &a.EnableAnonymousSignIns, + PasswordMinLength: cast.UintToIntPtr(&a.MinimumPasswordLength), + PasswordRequiredCharacters: cast.Ptr(a.PasswordRequirements.ToChar()), + } + a.Hook.toAuthConfigBody(&body) + a.MFA.toAuthConfigBody(&body) + a.Sessions.toAuthConfigBody(&body) + a.Email.toAuthConfigBody(&body) + a.Sms.toAuthConfigBody(&body) + a.External.toAuthConfigBody(&body) + return body +} + +func (a *auth) FromRemoteAuthConfig(remoteConfig v1API.AuthConfigResponse) { + a.SiteUrl = cast.Val(remoteConfig.SiteUrl, "") + a.AdditionalRedirectUrls = strToArr(cast.Val(remoteConfig.UriAllowList, "")) + a.JwtExpiry = cast.IntToUint(cast.Val(remoteConfig.JwtExp, 0)) + a.EnableRefreshTokenRotation = cast.Val(remoteConfig.RefreshTokenRotationEnabled, false) + a.RefreshTokenReuseInterval = cast.IntToUint(cast.Val(remoteConfig.SecurityRefreshTokenReuseInterval, 0)) + a.EnableManualLinking = cast.Val(remoteConfig.SecurityManualLinkingEnabled, false) + a.EnableSignup = !cast.Val(remoteConfig.DisableSignup, false) + a.EnableAnonymousSignIns = cast.Val(remoteConfig.ExternalAnonymousUsersEnabled, false) + a.MinimumPasswordLength = cast.IntToUint(cast.Val(remoteConfig.PasswordMinLength, 0)) + prc := cast.Val(remoteConfig.PasswordRequiredCharacters, "") + a.PasswordRequirements = NewPasswordRequirement(v1API.UpdateAuthConfigBodyPasswordRequiredCharacters(prc)) + a.Hook.fromAuthConfig(remoteConfig) + a.MFA.fromAuthConfig(remoteConfig) + a.Sessions.fromAuthConfig(remoteConfig) + a.Email.fromAuthConfig(remoteConfig) + a.Sms.fromAuthConfig(remoteConfig) + a.External.fromAuthConfig(remoteConfig) +} + +func (h hook) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) { + // When local config is not set, we assume platform defaults should not 
change + if hook := h.CustomAccessToken; hook != nil { + if body.HookCustomAccessTokenEnabled = &hook.Enabled; hook.Enabled { + body.HookCustomAccessTokenUri = &hook.URI + if len(hook.Secrets.SHA256) > 0 { + body.HookCustomAccessTokenSecrets = &hook.Secrets.Value + } + } + } + if hook := h.SendEmail; hook != nil { + if body.HookSendEmailEnabled = &hook.Enabled; hook.Enabled { + body.HookSendEmailUri = &hook.URI + if len(hook.Secrets.SHA256) > 0 { + body.HookSendEmailSecrets = &hook.Secrets.Value + } + } + } + if hook := h.SendSMS; hook != nil { + if body.HookSendSmsEnabled = &hook.Enabled; hook.Enabled { + body.HookSendSmsUri = &hook.URI + if len(hook.Secrets.SHA256) > 0 { + body.HookSendSmsSecrets = &hook.Secrets.Value + } + } + } + // Enterprise and team only features + if hook := h.MFAVerificationAttempt; hook != nil { + if body.HookMfaVerificationAttemptEnabled = &hook.Enabled; hook.Enabled { + body.HookMfaVerificationAttemptUri = &hook.URI + if len(hook.Secrets.SHA256) > 0 { + body.HookMfaVerificationAttemptSecrets = &hook.Secrets.Value + } + } + } + if hook := h.PasswordVerificationAttempt; hook != nil { + if body.HookPasswordVerificationAttemptEnabled = &hook.Enabled; hook.Enabled { + body.HookPasswordVerificationAttemptUri = &hook.URI + if len(hook.Secrets.SHA256) > 0 { + body.HookPasswordVerificationAttemptSecrets = &hook.Secrets.Value + } + } + } +} +func (h *hook) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) { + // When local config is not set, we assume platform defaults should not change + if hook := h.CustomAccessToken; hook != nil { + // Ignore disabled hooks because their envs are not loaded + if hook.Enabled { + hook.URI = cast.Val(remoteConfig.HookCustomAccessTokenUri, "") + if len(hook.Secrets.SHA256) > 0 { + hook.Secrets.SHA256 = cast.Val(remoteConfig.HookCustomAccessTokenSecrets, "") + } + } + hook.Enabled = cast.Val(remoteConfig.HookCustomAccessTokenEnabled, false) + } + if hook := h.SendEmail; hook != nil { + if hook.Enabled { + 
hook.URI = cast.Val(remoteConfig.HookSendEmailUri, "") + if len(hook.Secrets.SHA256) > 0 { + hook.Secrets.SHA256 = cast.Val(remoteConfig.HookSendEmailSecrets, "") + } + } + hook.Enabled = cast.Val(remoteConfig.HookSendEmailEnabled, false) + } + if hook := h.SendSMS; hook != nil { + if hook.Enabled { + hook.URI = cast.Val(remoteConfig.HookSendSmsUri, "") + if len(hook.Secrets.SHA256) > 0 { + hook.Secrets.SHA256 = cast.Val(remoteConfig.HookSendSmsSecrets, "") + } + } + hook.Enabled = cast.Val(remoteConfig.HookSendSmsEnabled, false) + } + // Enterprise and team only features + if hook := h.MFAVerificationAttempt; hook != nil { + if hook.Enabled { + hook.URI = cast.Val(remoteConfig.HookMfaVerificationAttemptUri, "") + if len(hook.Secrets.SHA256) > 0 { + hook.Secrets.SHA256 = cast.Val(remoteConfig.HookMfaVerificationAttemptSecrets, "") + } + } + hook.Enabled = cast.Val(remoteConfig.HookMfaVerificationAttemptEnabled, false) + } + if hook := h.PasswordVerificationAttempt; hook != nil { + if hook.Enabled { + hook.URI = cast.Val(remoteConfig.HookPasswordVerificationAttemptUri, "") + if len(hook.Secrets.SHA256) > 0 { + hook.Secrets.SHA256 = cast.Val(remoteConfig.HookPasswordVerificationAttemptSecrets, "") + } + } + hook.Enabled = cast.Val(remoteConfig.HookPasswordVerificationAttemptEnabled, false) + } +} + +func (m mfa) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) { + body.MfaMaxEnrolledFactors = cast.UintToIntPtr(&m.MaxEnrolledFactors) + body.MfaTotpEnrollEnabled = &m.TOTP.EnrollEnabled + body.MfaTotpVerifyEnabled = &m.TOTP.VerifyEnabled + body.MfaPhoneEnrollEnabled = &m.Phone.EnrollEnabled + body.MfaPhoneVerifyEnabled = &m.Phone.VerifyEnabled + body.MfaPhoneOtpLength = cast.UintToIntPtr(&m.Phone.OtpLength) + body.MfaPhoneTemplate = &m.Phone.Template + body.MfaPhoneMaxFrequency = cast.Ptr(int(m.Phone.MaxFrequency.Seconds())) + body.MfaWebAuthnEnrollEnabled = &m.WebAuthn.EnrollEnabled + body.MfaWebAuthnVerifyEnabled = &m.WebAuthn.VerifyEnabled +} + +func (m *mfa) 
fromAuthConfig(remoteConfig v1API.AuthConfigResponse) { + m.MaxEnrolledFactors = cast.IntToUint(cast.Val(remoteConfig.MfaMaxEnrolledFactors, 0)) + m.TOTP.EnrollEnabled = cast.Val(remoteConfig.MfaTotpEnrollEnabled, false) + m.TOTP.VerifyEnabled = cast.Val(remoteConfig.MfaTotpVerifyEnabled, false) + m.Phone.EnrollEnabled = cast.Val(remoteConfig.MfaPhoneEnrollEnabled, false) + m.Phone.VerifyEnabled = cast.Val(remoteConfig.MfaPhoneVerifyEnabled, false) + m.Phone.OtpLength = cast.IntToUint(remoteConfig.MfaPhoneOtpLength) + m.Phone.Template = cast.Val(remoteConfig.MfaPhoneTemplate, "") + m.Phone.MaxFrequency = time.Duration(cast.Val(remoteConfig.MfaPhoneMaxFrequency, 0)) * time.Second + m.WebAuthn.EnrollEnabled = cast.Val(remoteConfig.MfaWebAuthnEnrollEnabled, false) + m.WebAuthn.VerifyEnabled = cast.Val(remoteConfig.MfaWebAuthnVerifyEnabled, false) +} + +func (s sessions) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) { + body.SessionsTimebox = cast.Ptr(int(s.Timebox.Seconds())) + body.SessionsInactivityTimeout = cast.Ptr(int(s.InactivityTimeout.Seconds())) +} + +func (s *sessions) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) { + s.Timebox = time.Duration(cast.Val(remoteConfig.SessionsTimebox, 0)) * time.Second + s.InactivityTimeout = time.Duration(cast.Val(remoteConfig.SessionsInactivityTimeout, 0)) * time.Second +} + +func (e email) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) { + body.ExternalEmailEnabled = &e.EnableSignup + body.MailerSecureEmailChangeEnabled = &e.DoubleConfirmChanges + body.MailerAutoconfirm = cast.Ptr(!e.EnableConfirmations) + body.MailerOtpLength = cast.UintToIntPtr(&e.OtpLength) + body.MailerOtpExp = cast.UintToIntPtr(&e.OtpExpiry) + body.SecurityUpdatePasswordRequireReauthentication = &e.SecurePasswordChange + body.SmtpMaxFrequency = cast.Ptr(int(e.MaxFrequency.Seconds())) + // When local config is not set, we assume platform defaults should not change + if e.Smtp != nil { + e.Smtp.toAuthConfigBody(body) + } + if 
len(e.Template) == 0 { + return + } + var tmpl *emailTemplate + tmpl = cast.Ptr(e.Template["invite"]) + body.MailerSubjectsInvite = tmpl.Subject + body.MailerTemplatesInviteContent = tmpl.Content + tmpl = cast.Ptr(e.Template["confirmation"]) + body.MailerSubjectsConfirmation = tmpl.Subject + body.MailerTemplatesConfirmationContent = tmpl.Content + tmpl = cast.Ptr(e.Template["recovery"]) + body.MailerSubjectsRecovery = tmpl.Subject + body.MailerTemplatesRecoveryContent = tmpl.Content + tmpl = cast.Ptr(e.Template["magic_link"]) + body.MailerSubjectsMagicLink = tmpl.Subject + body.MailerTemplatesMagicLinkContent = tmpl.Content + tmpl = cast.Ptr(e.Template["email_change"]) + body.MailerSubjectsEmailChange = tmpl.Subject + body.MailerTemplatesEmailChangeContent = tmpl.Content + tmpl = cast.Ptr(e.Template["reauthentication"]) + body.MailerSubjectsReauthentication = tmpl.Subject + body.MailerTemplatesReauthenticationContent = tmpl.Content +} + +func (e *email) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) { + e.EnableSignup = cast.Val(remoteConfig.ExternalEmailEnabled, false) + e.DoubleConfirmChanges = cast.Val(remoteConfig.MailerSecureEmailChangeEnabled, false) + e.EnableConfirmations = !cast.Val(remoteConfig.MailerAutoconfirm, false) + e.OtpLength = cast.IntToUint(cast.Val(remoteConfig.MailerOtpLength, 0)) + e.OtpExpiry = cast.IntToUint(remoteConfig.MailerOtpExp) + e.SecurePasswordChange = cast.Val(remoteConfig.SecurityUpdatePasswordRequireReauthentication, false) + e.MaxFrequency = time.Duration(cast.Val(remoteConfig.SmtpMaxFrequency, 0)) * time.Second + // When local config is not set, we assume platform defaults should not change + if e.Smtp != nil { + e.Smtp.fromAuthConfig(remoteConfig) + } + if len(e.Template) == 0 { + return + } + if t, ok := e.Template["invite"]; ok { + if t.Subject != nil { + t.Subject = remoteConfig.MailerSubjectsInvite + } + if t.Content != nil { + t.Content = remoteConfig.MailerTemplatesInviteContent + } + e.Template["invite"] = t + } 
+ if t, ok := e.Template["confirmation"]; ok { + if t.Subject != nil { + t.Subject = remoteConfig.MailerSubjectsConfirmation + } + if t.Content != nil { + t.Content = remoteConfig.MailerTemplatesConfirmationContent + } + e.Template["confirmation"] = t + } + if t, ok := e.Template["recovery"]; ok { + if t.Subject != nil { + t.Subject = remoteConfig.MailerSubjectsRecovery + } + if t.Content != nil { + t.Content = remoteConfig.MailerTemplatesRecoveryContent + } + e.Template["recovery"] = t + } + if t, ok := e.Template["magic_link"]; ok { + if t.Subject != nil { + t.Subject = remoteConfig.MailerSubjectsMagicLink + } + if t.Content != nil { + t.Content = remoteConfig.MailerTemplatesMagicLinkContent + } + e.Template["magic_link"] = t + } + if t, ok := e.Template["email_change"]; ok { + if t.Subject != nil { + t.Subject = remoteConfig.MailerSubjectsEmailChange + } + if t.Content != nil { + t.Content = remoteConfig.MailerTemplatesEmailChangeContent + } + e.Template["email_change"] = t + } + if t, ok := e.Template["reauthentication"]; ok { + if t.Subject != nil { + t.Subject = remoteConfig.MailerSubjectsReauthentication + } + if t.Content != nil { + t.Content = remoteConfig.MailerTemplatesReauthenticationContent + } + e.Template["reauthentication"] = t + } +} + +func (s smtp) IsEnabled() bool { + // If Enabled is not defined, or defined and set to true + return cast.Val(s.Enabled, true) +} + +func (s smtp) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) { + if !s.IsEnabled() { + // Setting a single empty string disables SMTP + body.SmtpHost = cast.Ptr("") + return + } + body.SmtpHost = &s.Host + body.SmtpPort = cast.Ptr(strconv.Itoa(int(s.Port))) + body.SmtpUser = &s.User + if len(s.Pass.SHA256) > 0 { + body.SmtpPass = &s.Pass.Value + } + body.SmtpAdminEmail = &s.AdminEmail + body.SmtpSenderName = &s.SenderName +} + +func (s *smtp) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) { + showDiff := s.IsEnabled() + // Api resets all values when SMTP is disabled + if 
enabled := remoteConfig.SmtpHost != nil; s.Enabled != nil { + *s.Enabled = enabled + } + if !showDiff { + return + } + s.Host = cast.Val(remoteConfig.SmtpHost, "") + s.User = cast.Val(remoteConfig.SmtpUser, "") + if len(s.Pass.SHA256) > 0 { + s.Pass.SHA256 = cast.Val(remoteConfig.SmtpPass, "") + } + s.AdminEmail = cast.Val(remoteConfig.SmtpAdminEmail, "") + s.SenderName = cast.Val(remoteConfig.SmtpSenderName, "") + portStr := cast.Val(remoteConfig.SmtpPort, "0") + if port, err := strconv.ParseUint(portStr, 10, 16); err == nil { + s.Port = uint16(port) + } +} + +func (s sms) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) { + body.ExternalPhoneEnabled = &s.EnableSignup + body.SmsMaxFrequency = cast.Ptr(int(s.MaxFrequency.Seconds())) + body.SmsAutoconfirm = &s.EnableConfirmations + body.SmsTemplate = &s.Template + if otpString := mapToEnv(s.TestOTP); len(otpString) > 0 { + body.SmsTestOtp = &otpString + // Set a 10 year validity for test OTP + timestamp := time.Now().UTC().AddDate(10, 0, 0).Format(time.RFC3339) + body.SmsTestOtpValidUntil = ×tamp + } + // Api only overrides configs of enabled providers + switch { + case s.Twilio.Enabled: + body.SmsProvider = cast.Ptr("twilio") + if len(s.Twilio.AuthToken.SHA256) > 0 { + body.SmsTwilioAuthToken = &s.Twilio.AuthToken.Value + } + body.SmsTwilioAccountSid = &s.Twilio.AccountSid + body.SmsTwilioMessageServiceSid = &s.Twilio.MessageServiceSid + case s.TwilioVerify.Enabled: + body.SmsProvider = cast.Ptr("twilio_verify") + if len(s.TwilioVerify.AuthToken.SHA256) > 0 { + body.SmsTwilioVerifyAuthToken = &s.TwilioVerify.AuthToken.Value + } + body.SmsTwilioVerifyAccountSid = &s.TwilioVerify.AccountSid + body.SmsTwilioVerifyMessageServiceSid = &s.TwilioVerify.MessageServiceSid + case s.Messagebird.Enabled: + body.SmsProvider = cast.Ptr("messagebird") + if len(s.Messagebird.AccessKey.SHA256) > 0 { + body.SmsMessagebirdAccessKey = &s.Messagebird.AccessKey.Value + } + body.SmsMessagebirdOriginator = &s.Messagebird.Originator + 
case s.Textlocal.Enabled: + body.SmsProvider = cast.Ptr("textlocal") + if len(s.Textlocal.ApiKey.SHA256) > 0 { + body.SmsTextlocalApiKey = &s.Textlocal.ApiKey.Value + } + body.SmsTextlocalSender = &s.Textlocal.Sender + case s.Vonage.Enabled: + body.SmsProvider = cast.Ptr("vonage") + if len(s.Vonage.ApiSecret.SHA256) > 0 { + body.SmsVonageApiSecret = &s.Vonage.ApiSecret.Value + } + body.SmsVonageApiKey = &s.Vonage.ApiKey + body.SmsVonageFrom = &s.Vonage.From + } +} + +func (s *sms) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) { + s.EnableSignup = cast.Val(remoteConfig.ExternalPhoneEnabled, false) + s.MaxFrequency = time.Duration(cast.Val(remoteConfig.SmsMaxFrequency, 0)) * time.Second + s.EnableConfirmations = cast.Val(remoteConfig.SmsAutoconfirm, false) + s.Template = cast.Val(remoteConfig.SmsTemplate, "") + s.TestOTP = envToMap(cast.Val(remoteConfig.SmsTestOtp, "")) + // We are only interested in the provider that's enabled locally + switch { + case s.Twilio.Enabled: + if len(s.Twilio.AuthToken.SHA256) > 0 { + s.Twilio.AuthToken.SHA256 = cast.Val(remoteConfig.SmsTwilioAuthToken, "") + } + s.Twilio.AccountSid = cast.Val(remoteConfig.SmsTwilioAccountSid, "") + s.Twilio.MessageServiceSid = cast.Val(remoteConfig.SmsTwilioMessageServiceSid, "") + case s.TwilioVerify.Enabled: + if len(s.TwilioVerify.AuthToken.SHA256) > 0 { + s.TwilioVerify.AuthToken.SHA256 = cast.Val(remoteConfig.SmsTwilioVerifyAuthToken, "") + } + s.TwilioVerify.AccountSid = cast.Val(remoteConfig.SmsTwilioVerifyAccountSid, "") + s.TwilioVerify.MessageServiceSid = cast.Val(remoteConfig.SmsTwilioVerifyMessageServiceSid, "") + case s.Messagebird.Enabled: + if len(s.Messagebird.AccessKey.SHA256) > 0 { + s.Messagebird.AccessKey.SHA256 = cast.Val(remoteConfig.SmsMessagebirdAccessKey, "") + } + s.Messagebird.Originator = cast.Val(remoteConfig.SmsMessagebirdOriginator, "") + case s.Textlocal.Enabled: + if len(s.Textlocal.ApiKey.SHA256) > 0 { + s.Textlocal.ApiKey.SHA256 = 
cast.Val(remoteConfig.SmsTextlocalApiKey, "") + } + s.Textlocal.Sender = cast.Val(remoteConfig.SmsTextlocalSender, "") + case s.Vonage.Enabled: + if len(s.Vonage.ApiSecret.SHA256) > 0 { + s.Vonage.ApiSecret.SHA256 = cast.Val(remoteConfig.SmsVonageApiSecret, "") + } + s.Vonage.ApiKey = cast.Val(remoteConfig.SmsVonageApiKey, "") + s.Vonage.From = cast.Val(remoteConfig.SmsVonageFrom, "") + case !s.EnableSignup: + // Nothing to do if both local and remote providers are disabled. + return + } + if provider := cast.Val(remoteConfig.SmsProvider, ""); len(provider) > 0 { + s.Twilio.Enabled = provider == "twilio" + s.TwilioVerify.Enabled = provider == "twilio_verify" + s.Messagebird.Enabled = provider == "messagebird" + s.Textlocal.Enabled = provider == "textlocal" + s.Vonage.Enabled = provider == "vonage" + } +} + +func (e external) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) { + if len(e) == 0 { + return + } + // Ignore configs of disabled providers because their envs are not loaded + if p, ok := e["apple"]; ok { + if body.ExternalAppleEnabled = &p.Enabled; *body.ExternalAppleEnabled { + body.ExternalAppleClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalAppleSecret = &p.Secret.Value + } + } + } + if p, ok := e["azure"]; ok { + if body.ExternalAzureEnabled = &p.Enabled; *body.ExternalAzureEnabled { + body.ExternalAzureClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalAzureSecret = &p.Secret.Value + } + body.ExternalAzureUrl = &p.Url + } + } + if p, ok := e["bitbucket"]; ok { + if body.ExternalBitbucketEnabled = &p.Enabled; *body.ExternalBitbucketEnabled { + body.ExternalBitbucketClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalBitbucketSecret = &p.Secret.Value + } + } + } + if p, ok := e["discord"]; ok { + if body.ExternalDiscordEnabled = &p.Enabled; *body.ExternalDiscordEnabled { + body.ExternalDiscordClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalDiscordSecret = &p.Secret.Value + 
} + } + } + if p, ok := e["facebook"]; ok { + if body.ExternalFacebookEnabled = &p.Enabled; *body.ExternalFacebookEnabled { + body.ExternalFacebookClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalFacebookSecret = &p.Secret.Value + } + } + } + if p, ok := e["figma"]; ok { + if body.ExternalFigmaEnabled = &p.Enabled; *body.ExternalFigmaEnabled { + body.ExternalFigmaClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalFigmaSecret = &p.Secret.Value + } + } + } + if p, ok := e["github"]; ok { + if body.ExternalGithubEnabled = &p.Enabled; *body.ExternalGithubEnabled { + body.ExternalGithubClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalGithubSecret = &p.Secret.Value + } + } + } + if p, ok := e["gitlab"]; ok { + if body.ExternalGitlabEnabled = &p.Enabled; *body.ExternalGitlabEnabled { + body.ExternalGitlabClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalGitlabSecret = &p.Secret.Value + } + body.ExternalGitlabUrl = &p.Url + } + } + if p, ok := e["google"]; ok { + if body.ExternalGoogleEnabled = &p.Enabled; *body.ExternalGoogleEnabled { + body.ExternalGoogleClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalGoogleSecret = &p.Secret.Value + } + body.ExternalGoogleSkipNonceCheck = &p.SkipNonceCheck + } + } + if p, ok := e["kakao"]; ok { + if body.ExternalKakaoEnabled = &p.Enabled; *body.ExternalKakaoEnabled { + body.ExternalKakaoClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalKakaoSecret = &p.Secret.Value + } + } + } + if p, ok := e["keycloak"]; ok { + if body.ExternalKeycloakEnabled = &p.Enabled; *body.ExternalKeycloakEnabled { + body.ExternalKeycloakClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalKeycloakSecret = &p.Secret.Value + } + body.ExternalKeycloakUrl = &p.Url + } + } + if p, ok := e["linkedin_oidc"]; ok { + if body.ExternalLinkedinOidcEnabled = &p.Enabled; *body.ExternalLinkedinOidcEnabled { + 
body.ExternalLinkedinOidcClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalLinkedinOidcSecret = &p.Secret.Value + } + } + } + if p, ok := e["notion"]; ok { + if body.ExternalNotionEnabled = &p.Enabled; *body.ExternalNotionEnabled { + body.ExternalNotionClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalNotionSecret = &p.Secret.Value + } + } + } + if p, ok := e["slack_oidc"]; ok { + if body.ExternalSlackOidcEnabled = &p.Enabled; *body.ExternalSlackOidcEnabled { + body.ExternalSlackOidcClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalSlackOidcSecret = &p.Secret.Value + } + } + } + if p, ok := e["spotify"]; ok { + if body.ExternalSpotifyEnabled = &p.Enabled; *body.ExternalSpotifyEnabled { + body.ExternalSpotifyClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalSpotifySecret = &p.Secret.Value + } + } + } + if p, ok := e["twitch"]; ok { + if body.ExternalTwitchEnabled = &p.Enabled; *body.ExternalTwitchEnabled { + body.ExternalTwitchClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalTwitchSecret = &p.Secret.Value + } + } + } + if p, ok := e["twitter"]; ok { + if body.ExternalTwitterEnabled = &p.Enabled; *body.ExternalTwitterEnabled { + body.ExternalTwitterClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalTwitterSecret = &p.Secret.Value + } + } + } + if p, ok := e["workos"]; ok { + if body.ExternalWorkosEnabled = &p.Enabled; *body.ExternalWorkosEnabled { + body.ExternalWorkosClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalWorkosSecret = &p.Secret.Value + } + body.ExternalWorkosUrl = &p.Url + } + } + if p, ok := e["zoom"]; ok { + if body.ExternalZoomEnabled = &p.Enabled; *body.ExternalZoomEnabled { + body.ExternalZoomClientId = &p.ClientId + if len(p.Secret.SHA256) > 0 { + body.ExternalZoomSecret = &p.Secret.Value + } + } + } +} + +func (e external) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) { + if len(e) == 0 { + return + } 
+ // Ignore configs of disabled providers because their envs are not loaded + if p, ok := e["apple"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalAppleClientId, "") + if ids := cast.Val(remoteConfig.ExternalAppleAdditionalClientIds, ""); len(ids) > 0 { + p.ClientId += "," + ids + } + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalAppleSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalAppleEnabled, false) + e["apple"] = p + } + + if p, ok := e["azure"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalAzureClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalAzureSecret, "") + } + p.Url = cast.Val(remoteConfig.ExternalAzureUrl, "") + } + p.Enabled = cast.Val(remoteConfig.ExternalAzureEnabled, false) + e["azure"] = p + } + + if p, ok := e["bitbucket"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalBitbucketClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalBitbucketSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalBitbucketEnabled, false) + e["bitbucket"] = p + } + + if p, ok := e["discord"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalDiscordClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalDiscordSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalDiscordEnabled, false) + e["discord"] = p + } + + if p, ok := e["facebook"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalFacebookClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalFacebookSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalFacebookEnabled, false) + e["facebook"] = p + } + + if p, ok := e["figma"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalFigmaClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = 
cast.Val(remoteConfig.ExternalFigmaSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalFigmaEnabled, false) + e["figma"] = p + } + + if p, ok := e["github"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalGithubClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalGithubSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalGithubEnabled, false) + e["github"] = p + } + + if p, ok := e["gitlab"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalGitlabClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalGitlabSecret, "") + } + p.Url = cast.Val(remoteConfig.ExternalGitlabUrl, "") + } + p.Enabled = cast.Val(remoteConfig.ExternalGitlabEnabled, false) + e["gitlab"] = p + } + + if p, ok := e["google"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalGoogleClientId, "") + if ids := cast.Val(remoteConfig.ExternalGoogleAdditionalClientIds, ""); len(ids) > 0 { + p.ClientId += "," + ids + } + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalGoogleSecret, "") + } + p.SkipNonceCheck = cast.Val(remoteConfig.ExternalGoogleSkipNonceCheck, false) + } + p.Enabled = cast.Val(remoteConfig.ExternalGoogleEnabled, false) + e["google"] = p + } + + if p, ok := e["kakao"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalKakaoClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalKakaoSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalKakaoEnabled, false) + e["kakao"] = p + } + + if p, ok := e["keycloak"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalKeycloakClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalKeycloakSecret, "") + } + p.Url = cast.Val(remoteConfig.ExternalKeycloakUrl, "") + } + p.Enabled = cast.Val(remoteConfig.ExternalKeycloakEnabled, false) + 
e["keycloak"] = p + } + + if p, ok := e["linkedin_oidc"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalLinkedinOidcClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalLinkedinOidcSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalLinkedinOidcEnabled, false) + e["linkedin_oidc"] = p + } + + if p, ok := e["notion"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalNotionClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalNotionSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalNotionEnabled, false) + e["notion"] = p + } + + if p, ok := e["slack_oidc"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalSlackOidcClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalSlackOidcSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalSlackOidcEnabled, false) + e["slack_oidc"] = p + } + + if p, ok := e["spotify"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalSpotifyClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalSpotifySecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalSpotifyEnabled, false) + e["spotify"] = p + } + + if p, ok := e["twitch"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalTwitchClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalTwitchSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalTwitchEnabled, false) + e["twitch"] = p + } + + if p, ok := e["twitter"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalTwitterClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalTwitterSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalTwitterEnabled, false) + e["twitter"] = p + } + + if p, ok := e["workos"]; ok { + if 
p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalWorkosClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalWorkosSecret, "") + } + p.Url = cast.Val(remoteConfig.ExternalWorkosUrl, "") + } + p.Enabled = cast.Val(remoteConfig.ExternalWorkosEnabled, false) + e["workos"] = p + } + + if p, ok := e["zoom"]; ok { + if p.Enabled { + p.ClientId = cast.Val(remoteConfig.ExternalZoomClientId, "") + if len(p.Secret.SHA256) > 0 { + p.Secret.SHA256 = cast.Val(remoteConfig.ExternalZoomSecret, "") + } + } + p.Enabled = cast.Val(remoteConfig.ExternalZoomEnabled, false) + e["zoom"] = p + } +} + +func (a *auth) DiffWithRemote(remoteConfig v1API.AuthConfigResponse) ([]byte, error) { + copy := a.Clone() + // Convert the config values into easily comparable remoteConfig values + currentValue, err := ToTomlBytes(copy) + if err != nil { + return nil, err + } + copy.FromRemoteAuthConfig(remoteConfig) + remoteCompare, err := ToTomlBytes(copy) + if err != nil { + return nil, err + } + return diff.Diff("remote[auth]", remoteCompare, "local[auth]", currentValue), nil +} diff --git a/pkg/config/auth_test.go b/pkg/config/auth_test.go new file mode 100644 index 000000000..b34139715 --- /dev/null +++ b/pkg/config/auth_test.go @@ -0,0 +1,1023 @@ +package config + +import ( + "os" + "path/filepath" + "testing" + "time" + + "github.com/go-errors/errors" + "github.com/stretchr/testify/assert" + v1API "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" +) + +func newWithDefaults() auth { + return auth{ + EnableSignup: true, + Email: email{ + EnableConfirmations: true, + }, + Sms: sms{ + TestOTP: map[string]string{}, + }, + } +} + +func assertSnapshotEqual(t *testing.T, actual []byte) { + snapshot := filepath.Join("testdata", filepath.FromSlash(t.Name())) + ".diff" + expected, err := os.ReadFile(snapshot) + if errors.Is(err, os.ErrNotExist) { + assert.NoError(t, os.MkdirAll(filepath.Dir(snapshot), 0755)) + assert.NoError(t, 
os.WriteFile(snapshot, actual, 0600)) + } + assert.Equal(t, string(expected), string(actual)) +} + +func TestAuthDiff(t *testing.T) { + t.Run("local and remote enabled", func(t *testing.T) { + c := newWithDefaults() + c.SiteUrl = "http://127.0.0.1:3000" + c.AdditionalRedirectUrls = []string{"https://127.0.0.1:3000"} + c.JwtExpiry = 3600 + c.EnableRefreshTokenRotation = true + c.RefreshTokenReuseInterval = 10 + c.EnableManualLinking = true + c.EnableSignup = true + c.EnableAnonymousSignIns = true + c.MinimumPasswordLength = 6 + c.PasswordRequirements = LettersDigits + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + SiteUrl: cast.Ptr("http://127.0.0.1:3000"), + UriAllowList: cast.Ptr("https://127.0.0.1:3000"), + JwtExp: cast.Ptr(3600), + RefreshTokenRotationEnabled: cast.Ptr(true), + SecurityRefreshTokenReuseInterval: cast.Ptr(10), + SecurityManualLinkingEnabled: cast.Ptr(true), + DisableSignup: cast.Ptr(false), + ExternalAnonymousUsersEnabled: cast.Ptr(true), + PasswordMinLength: cast.Ptr(6), + PasswordRequiredCharacters: cast.Ptr(string(v1API.AbcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789)), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) + + t.Run("local enabled and disabled", func(t *testing.T) { + c := newWithDefaults() + c.SiteUrl = "http://127.0.0.1:3000" + c.AdditionalRedirectUrls = []string{"https://127.0.0.1:3000"} + c.JwtExpiry = 3600 + c.EnableRefreshTokenRotation = false + c.RefreshTokenReuseInterval = 10 + c.EnableManualLinking = false + c.EnableSignup = false + c.EnableAnonymousSignIns = false + c.MinimumPasswordLength = 6 + c.PasswordRequirements = LowerUpperLettersDigitsSymbols + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + SiteUrl: cast.Ptr(""), + UriAllowList: cast.Ptr("https://127.0.0.1:3000,https://ref.supabase.co"), + JwtExp: cast.Ptr(0), + RefreshTokenRotationEnabled: cast.Ptr(true), + SecurityRefreshTokenReuseInterval: cast.Ptr(0), + 
SecurityManualLinkingEnabled: cast.Ptr(true), + DisableSignup: cast.Ptr(false), + ExternalAnonymousUsersEnabled: cast.Ptr(true), + PasswordMinLength: cast.Ptr(8), + PasswordRequiredCharacters: cast.Ptr(string(v1API.AbcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789)), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local and remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.EnableSignup = false + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + SiteUrl: cast.Ptr(""), + UriAllowList: cast.Ptr(""), + JwtExp: cast.Ptr(0), + RefreshTokenRotationEnabled: cast.Ptr(false), + SecurityRefreshTokenReuseInterval: cast.Ptr(0), + SecurityManualLinkingEnabled: cast.Ptr(false), + DisableSignup: cast.Ptr(true), + ExternalAnonymousUsersEnabled: cast.Ptr(false), + PasswordMinLength: cast.Ptr(0), + PasswordRequiredCharacters: cast.Ptr(""), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) +} + +func TestHookDiff(t *testing.T) { + t.Run("local and remote enabled", func(t *testing.T) { + c := newWithDefaults() + c.Hook = hook{ + CustomAccessToken: &hookConfig{ + Enabled: true, + URI: "http://example.com", + Secrets: Secret{ + Value: "test-secret", + SHA256: "ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252", + }, + }, + SendSMS: &hookConfig{ + Enabled: true, + URI: "http://example.com", + Secrets: Secret{ + Value: "test-secret", + SHA256: "ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252", + }, + }, + SendEmail: &hookConfig{ + Enabled: true, + URI: "https://example.com", + Secrets: Secret{ + Value: "test-secret", + SHA256: "ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252", + }, + }, + MFAVerificationAttempt: &hookConfig{ + Enabled: true, + URI: "https://example.com", + Secrets: Secret{ + Value: "test-secret", + SHA256: "ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252", + }, + }, + 
PasswordVerificationAttempt: &hookConfig{ + Enabled: true, + URI: "pg-functions://verifyPassword", + }, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + HookCustomAccessTokenEnabled: cast.Ptr(true), + HookCustomAccessTokenUri: cast.Ptr("http://example.com"), + HookCustomAccessTokenSecrets: cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + HookSendSmsEnabled: cast.Ptr(true), + HookSendSmsUri: cast.Ptr("http://example.com"), + HookSendSmsSecrets: cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + HookSendEmailEnabled: cast.Ptr(true), + HookSendEmailUri: cast.Ptr("https://example.com"), + HookSendEmailSecrets: cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + HookMfaVerificationAttemptEnabled: cast.Ptr(true), + HookMfaVerificationAttemptUri: cast.Ptr("https://example.com"), + HookMfaVerificationAttemptSecrets: cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + HookPasswordVerificationAttemptEnabled: cast.Ptr(true), + HookPasswordVerificationAttemptUri: cast.Ptr("pg-functions://verifyPassword"), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) + + t.Run("local disabled remote enabled", func(t *testing.T) { + c := newWithDefaults() + c.Hook = hook{ + CustomAccessToken: &hookConfig{ + Enabled: false, + }, + SendSMS: &hookConfig{ + Enabled: false, + URI: "https://example.com", + Secrets: Secret{Value: "test-secret"}, + }, + SendEmail: &hookConfig{ + Enabled: false, + }, + MFAVerificationAttempt: &hookConfig{ + Enabled: false, + URI: "pg-functions://postgres/public/verifyMFA", + }, + PasswordVerificationAttempt: nil, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + HookCustomAccessTokenEnabled: cast.Ptr(true), + HookCustomAccessTokenUri: cast.Ptr("http://example.com"), + HookCustomAccessTokenSecrets: 
cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + HookSendSmsEnabled: cast.Ptr(true), + HookSendSmsUri: cast.Ptr("https://example.com"), + HookSendSmsSecrets: cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + HookSendEmailEnabled: cast.Ptr(true), + HookSendEmailUri: cast.Ptr("pg-functions://postgres/public/sendEmail"), + HookMfaVerificationAttemptEnabled: cast.Ptr(true), + HookMfaVerificationAttemptUri: cast.Ptr("pg-functions://postgres/public/verifyMFA"), + HookPasswordVerificationAttemptEnabled: cast.Ptr(true), + HookPasswordVerificationAttemptUri: cast.Ptr("https://example.com"), + HookPasswordVerificationAttemptSecrets: cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local enabled remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.Hook = hook{ + CustomAccessToken: &hookConfig{ + Enabled: true, + URI: "http://example.com", + Secrets: Secret{ + Value: "test-secret", + SHA256: "ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252", + }, + }, + SendSMS: &hookConfig{ + Enabled: true, + URI: "https://example.com", + Secrets: Secret{ + Value: "test-secret", + SHA256: "ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252", + }, + }, + SendEmail: &hookConfig{ + Enabled: true, + URI: "pg-functions://postgres/public/sendEmail", + }, + MFAVerificationAttempt: &hookConfig{ + Enabled: true, + URI: "pg-functions://postgres/public/verifyMFA", + }, + PasswordVerificationAttempt: nil, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + HookCustomAccessTokenEnabled: cast.Ptr(false), + HookCustomAccessTokenUri: cast.Ptr("pg-functions://postgres/public/customToken"), + HookSendSmsEnabled: cast.Ptr(false), + HookSendSmsUri: cast.Ptr("https://example.com"), + HookSendSmsSecrets: 
cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + HookSendEmailEnabled: cast.Ptr(false), + HookSendEmailUri: cast.Ptr("https://example.com"), + HookSendEmailSecrets: cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + HookMfaVerificationAttemptEnabled: cast.Ptr(false), + HookMfaVerificationAttemptUri: cast.Ptr("pg-functions://postgres/public/verifyMFA"), + HookPasswordVerificationAttemptEnabled: cast.Ptr(false), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local and remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.Hook = hook{ + CustomAccessToken: &hookConfig{Enabled: false}, + SendSMS: &hookConfig{Enabled: false}, + SendEmail: &hookConfig{Enabled: false}, + MFAVerificationAttempt: &hookConfig{Enabled: false}, + PasswordVerificationAttempt: &hookConfig{Enabled: false}, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + HookCustomAccessTokenEnabled: cast.Ptr(false), + HookSendSmsEnabled: cast.Ptr(false), + HookSendEmailEnabled: cast.Ptr(false), + HookMfaVerificationAttemptEnabled: cast.Ptr(false), + HookPasswordVerificationAttemptEnabled: cast.Ptr(false), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) +} + +func TestMfaDiff(t *testing.T) { + t.Run("local and remote enabled", func(t *testing.T) { + c := newWithDefaults() + c.MFA = mfa{ + TOTP: factorTypeConfiguration{ + EnrollEnabled: true, + VerifyEnabled: true, + }, + Phone: phoneFactorTypeConfiguration{ + factorTypeConfiguration: factorTypeConfiguration{ + EnrollEnabled: true, + VerifyEnabled: true, + }, + OtpLength: 6, + Template: "Your code is {{ .Code }}", + MaxFrequency: 5 * time.Second, + }, + WebAuthn: factorTypeConfiguration{ + EnrollEnabled: true, + VerifyEnabled: true, + }, + MaxEnrolledFactors: 10, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + MfaMaxEnrolledFactors: cast.Ptr(10), + 
MfaTotpEnrollEnabled: cast.Ptr(true), + MfaTotpVerifyEnabled: cast.Ptr(true), + MfaPhoneEnrollEnabled: cast.Ptr(true), + MfaPhoneVerifyEnabled: cast.Ptr(true), + MfaPhoneOtpLength: 6, + MfaPhoneTemplate: cast.Ptr("Your code is {{ .Code }}"), + MfaPhoneMaxFrequency: cast.Ptr(5), + MfaWebAuthnEnrollEnabled: cast.Ptr(true), + MfaWebAuthnVerifyEnabled: cast.Ptr(true), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) + + t.Run("local enabled and disabled", func(t *testing.T) { + c := newWithDefaults() + c.MFA = mfa{ + TOTP: factorTypeConfiguration{ + EnrollEnabled: false, + VerifyEnabled: false, + }, + Phone: phoneFactorTypeConfiguration{ + factorTypeConfiguration: factorTypeConfiguration{ + EnrollEnabled: true, + VerifyEnabled: true, + }, + }, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + MfaMaxEnrolledFactors: cast.Ptr(10), + MfaTotpEnrollEnabled: cast.Ptr(false), + MfaTotpVerifyEnabled: cast.Ptr(false), + MfaPhoneEnrollEnabled: cast.Ptr(false), + MfaPhoneVerifyEnabled: cast.Ptr(false), + MfaPhoneOtpLength: 6, + MfaPhoneTemplate: cast.Ptr("Your code is {{ .Code }}"), + MfaPhoneMaxFrequency: cast.Ptr(5), + MfaWebAuthnEnrollEnabled: cast.Ptr(false), + MfaWebAuthnVerifyEnabled: cast.Ptr(false), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local and remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.MFA = mfa{ + MaxEnrolledFactors: 10, + Phone: phoneFactorTypeConfiguration{ + OtpLength: 6, + Template: "Your code is {{ .Code }}", + MaxFrequency: 5 * time.Second, + }, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + MfaMaxEnrolledFactors: cast.Ptr(10), + MfaTotpEnrollEnabled: cast.Ptr(false), + MfaTotpVerifyEnabled: cast.Ptr(false), + MfaPhoneEnrollEnabled: cast.Ptr(false), + MfaPhoneVerifyEnabled: cast.Ptr(false), + MfaPhoneOtpLength: 6, + MfaPhoneTemplate: cast.Ptr("Your code is {{ .Code }}"), + 
MfaPhoneMaxFrequency: cast.Ptr(5), + MfaWebAuthnEnrollEnabled: cast.Ptr(false), + MfaWebAuthnVerifyEnabled: cast.Ptr(false), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) +} + +func TestEmailDiff(t *testing.T) { + t.Run("local enabled remote enabled", func(t *testing.T) { + c := newWithDefaults() + c.Email = email{ + EnableSignup: true, + DoubleConfirmChanges: true, + EnableConfirmations: true, + SecurePasswordChange: true, + Template: map[string]emailTemplate{ + "invite": { + Subject: cast.Ptr("invite-subject"), + Content: cast.Ptr("invite-content"), + }, + "confirmation": { + Subject: cast.Ptr("confirmation-subject"), + Content: cast.Ptr("confirmation-content"), + }, + "recovery": { + Subject: cast.Ptr("recovery-subject"), + Content: cast.Ptr("recovery-content"), + }, + "magic_link": { + Subject: cast.Ptr("magic-link-subject"), + Content: cast.Ptr("magic-link-content"), + }, + "email_change": { + Subject: cast.Ptr("email-change-subject"), + Content: cast.Ptr("email-change-content"), + }, + "reauthentication": { + Subject: cast.Ptr("reauthentication-subject"), + Content: cast.Ptr("reauthentication-content"), + }, + }, + Smtp: &smtp{ + Enabled: cast.Ptr(true), + Host: "smtp.sendgrid.net", + Port: 587, + User: "apikey", + Pass: Secret{ + Value: "test-key", + SHA256: "ed64b7695a606bc6ab4fcb41fe815b5ddf1063ccbc87afe1fa89756635db520e", + }, + AdminEmail: "admin@email.com", + SenderName: "Admin", + }, + MaxFrequency: time.Second, + OtpLength: 6, + OtpExpiry: 3600, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalEmailEnabled: cast.Ptr(true), + MailerSecureEmailChangeEnabled: cast.Ptr(true), + MailerAutoconfirm: cast.Ptr(false), + MailerOtpLength: cast.Ptr(6), + MailerOtpExp: 3600, + SecurityUpdatePasswordRequireReauthentication: cast.Ptr(true), + SmtpHost: cast.Ptr("smtp.sendgrid.net"), + SmtpPort: cast.Ptr("587"), + SmtpUser: cast.Ptr("apikey"), + SmtpPass: 
cast.Ptr("ed64b7695a606bc6ab4fcb41fe815b5ddf1063ccbc87afe1fa89756635db520e"), + SmtpAdminEmail: cast.Ptr("admin@email.com"), + SmtpSenderName: cast.Ptr("Admin"), + SmtpMaxFrequency: cast.Ptr(1), + // Custom templates + MailerSubjectsInvite: cast.Ptr("invite-subject"), + MailerTemplatesInviteContent: cast.Ptr("invite-content"), + MailerSubjectsConfirmation: cast.Ptr("confirmation-subject"), + MailerTemplatesConfirmationContent: cast.Ptr("confirmation-content"), + MailerSubjectsRecovery: cast.Ptr("recovery-subject"), + MailerTemplatesRecoveryContent: cast.Ptr("recovery-content"), + MailerSubjectsMagicLink: cast.Ptr("magic-link-subject"), + MailerTemplatesMagicLinkContent: cast.Ptr("magic-link-content"), + MailerSubjectsEmailChange: cast.Ptr("email-change-subject"), + MailerTemplatesEmailChangeContent: cast.Ptr("email-change-content"), + MailerSubjectsReauthentication: cast.Ptr("reauthentication-subject"), + MailerTemplatesReauthenticationContent: cast.Ptr("reauthentication-content"), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) + + t.Run("local enabled remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.Email = email{ + EnableSignup: true, + DoubleConfirmChanges: true, + EnableConfirmations: true, + SecurePasswordChange: true, + Template: map[string]emailTemplate{ + "invite": { + Subject: cast.Ptr("invite-subject"), + Content: cast.Ptr("invite-content"), + }, + "confirmation": { + Subject: cast.Ptr("confirmation-subject"), + }, + "recovery": { + Content: cast.Ptr("recovery-content"), + }, + "magic_link": { + Subject: cast.Ptr("magic-link-subject"), + Content: cast.Ptr("magic-link-content"), + }, + "email_change": { + Subject: cast.Ptr("email-change-subject"), + Content: cast.Ptr("email-change-content"), + }, + "reauthentication": { + Subject: cast.Ptr(""), + Content: cast.Ptr(""), + }, + }, + Smtp: &smtp{ + Host: "smtp.sendgrid.net", + Port: 587, + User: "apikey", + Pass: Secret{ + Value: "test-key", + SHA256: 
"ed64b7695a606bc6ab4fcb41fe815b5ddf1063ccbc87afe1fa89756635db520e", + }, + AdminEmail: "admin@email.com", + SenderName: "Admin", + }, + MaxFrequency: time.Second, + OtpLength: 8, + OtpExpiry: 86400, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalEmailEnabled: cast.Ptr(false), + MailerSecureEmailChangeEnabled: cast.Ptr(false), + MailerAutoconfirm: cast.Ptr(true), + MailerOtpLength: cast.Ptr(6), + MailerOtpExp: 3600, + SecurityUpdatePasswordRequireReauthentication: cast.Ptr(false), + SmtpMaxFrequency: cast.Ptr(60), + // Custom templates + MailerTemplatesConfirmationContent: cast.Ptr("confirmation-content"), + MailerSubjectsRecovery: cast.Ptr("recovery-subject"), + MailerSubjectsMagicLink: cast.Ptr("magic-link-subject"), + MailerTemplatesEmailChangeContent: cast.Ptr("email-change-content"), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local disabled remote enabled", func(t *testing.T) { + c := newWithDefaults() + c.Email = email{ + EnableConfirmations: false, + Template: map[string]emailTemplate{ + "invite": {}, + "confirmation": {}, + "recovery": {}, + "magic_link": {}, + "email_change": {}, + "reauthentication": {}, + }, + MaxFrequency: time.Minute, + OtpLength: 8, + OtpExpiry: 86400, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalEmailEnabled: cast.Ptr(true), + MailerSecureEmailChangeEnabled: cast.Ptr(true), + MailerAutoconfirm: cast.Ptr(false), + MailerOtpLength: cast.Ptr(6), + MailerOtpExp: 3600, + SecurityUpdatePasswordRequireReauthentication: cast.Ptr(true), + SmtpHost: cast.Ptr("smtp.sendgrid.net"), + SmtpPort: cast.Ptr("587"), + SmtpUser: cast.Ptr("apikey"), + SmtpPass: cast.Ptr("ed64b7695a606bc6ab4fcb41fe815b5ddf1063ccbc87afe1fa89756635db520e"), + SmtpAdminEmail: cast.Ptr("admin@email.com"), + SmtpSenderName: cast.Ptr("Admin"), + SmtpMaxFrequency: cast.Ptr(1), + // Custom templates + MailerSubjectsInvite: cast.Ptr("invite-subject"), 
+ MailerTemplatesInviteContent: cast.Ptr("invite-content"), + MailerSubjectsConfirmation: cast.Ptr("confirmation-subject"), + MailerTemplatesConfirmationContent: cast.Ptr("confirmation-content"), + MailerSubjectsRecovery: cast.Ptr("recovery-subject"), + MailerTemplatesRecoveryContent: cast.Ptr("recovery-content"), + MailerSubjectsMagicLink: cast.Ptr("magic-link-subject"), + MailerTemplatesMagicLinkContent: cast.Ptr("magic-link-content"), + MailerSubjectsEmailChange: cast.Ptr("email-change-subject"), + MailerTemplatesEmailChangeContent: cast.Ptr("email-change-content"), + MailerSubjectsReauthentication: cast.Ptr("reauthentication-subject"), + MailerTemplatesReauthenticationContent: cast.Ptr("reauthentication-content"), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local disabled remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.Email = email{ + EnableConfirmations: false, + Template: map[string]emailTemplate{ + "invite": {}, + "confirmation": {}, + "recovery": {}, + "magic_link": {}, + "email_change": {}, + "reauthentication": {}, + }, + Smtp: &smtp{ + Enabled: cast.Ptr(false), + Host: "smtp.sendgrid.net", + Port: 587, + User: "apikey", + Pass: Secret{ + Value: "test-key", + SHA256: "ed64b7695a606bc6ab4fcb41fe815b5ddf1063ccbc87afe1fa89756635db520e", + }, + AdminEmail: "admin@email.com", + SenderName: "Admin", + }, + MaxFrequency: time.Minute, + OtpLength: 6, + OtpExpiry: 3600, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalEmailEnabled: cast.Ptr(false), + MailerSecureEmailChangeEnabled: cast.Ptr(false), + MailerAutoconfirm: cast.Ptr(true), + MailerOtpLength: cast.Ptr(6), + MailerOtpExp: 3600, + SecurityUpdatePasswordRequireReauthentication: cast.Ptr(false), + SmtpMaxFrequency: cast.Ptr(60), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) +} + +func TestSmsDiff(t *testing.T) { + t.Run("local enabled remote enabled", func(t 
*testing.T) { + c := newWithDefaults() + c.Sms = sms{ + EnableSignup: true, + EnableConfirmations: true, + Template: "Your code is {{ .Code }}", + TestOTP: map[string]string{"123": "456"}, + MaxFrequency: time.Minute, + Twilio: twilioConfig{ + Enabled: true, + AccountSid: "test-account", + MessageServiceSid: "test-service", + AuthToken: Secret{ + Value: "test-token", + SHA256: "c84443bc59b92caef8ec8500ff443584793756749523811eb333af2bbc74fc88", + }, + }, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalPhoneEnabled: cast.Ptr(true), + SmsAutoconfirm: cast.Ptr(true), + SmsMaxFrequency: cast.Ptr(60), + SmsOtpExp: cast.Ptr(3600), + SmsOtpLength: 6, + SmsProvider: cast.Ptr("twilio"), + SmsTemplate: cast.Ptr("Your code is {{ .Code }}"), + SmsTestOtp: cast.Ptr("123=456"), + SmsTestOtpValidUntil: cast.Ptr("2050-01-01T01:00:00Z"), + SmsTwilioAccountSid: cast.Ptr("test-account"), + SmsTwilioAuthToken: cast.Ptr("c84443bc59b92caef8ec8500ff443584793756749523811eb333af2bbc74fc88"), + SmsTwilioContentSid: cast.Ptr("test-content"), + SmsTwilioMessageServiceSid: cast.Ptr("test-service"), + // Extra configs returned from api can be ignored + SmsMessagebirdAccessKey: cast.Ptr("test-messagebird-key"), + SmsMessagebirdOriginator: cast.Ptr("test-messagebird-originator"), + SmsTextlocalApiKey: cast.Ptr("test-textlocal-key"), + SmsTextlocalSender: cast.Ptr("test-textlocal-sencer"), + SmsTwilioVerifyAccountSid: cast.Ptr("test-verify-account"), + SmsTwilioVerifyAuthToken: cast.Ptr("test-verify-token"), + SmsTwilioVerifyMessageServiceSid: cast.Ptr("test-verify-service"), + SmsVonageApiKey: cast.Ptr("test-vonage-key"), + SmsVonageApiSecret: cast.Ptr("test-vonage-secret"), + SmsVonageFrom: cast.Ptr("test-vonage-from"), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) + + t.Run("local disabled remote enabled", func(t *testing.T) { + c := newWithDefaults() + // Run test + diff, err := 
c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalPhoneEnabled: cast.Ptr(true), + SmsAutoconfirm: cast.Ptr(true), + SmsMaxFrequency: cast.Ptr(60), + SmsOtpExp: cast.Ptr(3600), + SmsOtpLength: 6, + SmsProvider: cast.Ptr("twilio"), + SmsTemplate: cast.Ptr("Your code is {{ .Code }}"), + SmsTestOtp: cast.Ptr("123=456,456=123"), + SmsTestOtpValidUntil: cast.Ptr("2050-01-01T01:00:00Z"), + SmsTwilioAccountSid: cast.Ptr("test-account"), + SmsTwilioAuthToken: cast.Ptr("c84443bc59b92caef8ec8500ff443584793756749523811eb333af2bbc74fc88"), + SmsTwilioContentSid: cast.Ptr("test-content"), + SmsTwilioMessageServiceSid: cast.Ptr("test-service"), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local enabled remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.Sms = sms{ + EnableSignup: true, + EnableConfirmations: true, + Template: "Your code is {{ .Code }}", + TestOTP: map[string]string{"123": "456"}, + MaxFrequency: time.Minute, + Messagebird: messagebirdConfig{ + Enabled: true, + Originator: "test-originator", + AccessKey: Secret{ + Value: "test-access-key", + SHA256: "ab60d03fc809fb02dae838582f3ddc13d1d6cb32ffba77c4b969dd3caa496f13", + }, + }, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalPhoneEnabled: cast.Ptr(false), + SmsAutoconfirm: cast.Ptr(false), + SmsMaxFrequency: cast.Ptr(0), + SmsOtpExp: cast.Ptr(3600), + SmsOtpLength: 6, + SmsProvider: cast.Ptr("twilio"), + SmsTemplate: cast.Ptr(""), + SmsTwilioAccountSid: cast.Ptr("test-account"), + SmsTwilioAuthToken: cast.Ptr("c84443bc59b92caef8ec8500ff443584793756749523811eb333af2bbc74fc88"), + SmsTwilioContentSid: cast.Ptr("test-content"), + SmsTwilioMessageServiceSid: cast.Ptr("test-service"), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local disabled remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.Sms = sms{ + EnableSignup: false, + EnableConfirmations: 
true, + Template: "Your code is {{ .Code }}", + TestOTP: map[string]string{"123": "456"}, + MaxFrequency: time.Minute, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalPhoneEnabled: cast.Ptr(false), + SmsAutoconfirm: cast.Ptr(true), + SmsMaxFrequency: cast.Ptr(60), + SmsOtpExp: cast.Ptr(3600), + SmsOtpLength: 6, + SmsTemplate: cast.Ptr("Your code is {{ .Code }}"), + SmsTestOtp: cast.Ptr("123=456"), + SmsTestOtpValidUntil: cast.Ptr("2050-01-01T01:00:00Z"), + SmsProvider: cast.Ptr("messagebird"), + SmsMessagebirdAccessKey: cast.Ptr("test-messagebird-key"), + SmsMessagebirdOriginator: cast.Ptr("test-messagebird-originator"), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) + + t.Run("enable sign up without provider", func(t *testing.T) { + // This is not a valid config because platform requires a SMS provider. + // For consistency, we handle this in config.Load and emit a warning. + c := newWithDefaults() + c.Sms.EnableSignup = true + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalPhoneEnabled: cast.Ptr(false), + SmsProvider: cast.Ptr("twilio"), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("enable provider without sign up", func(t *testing.T) { + c := newWithDefaults() + c.Sms.Messagebird.Enabled = true + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalPhoneEnabled: cast.Ptr(false), + SmsProvider: cast.Ptr("messagebird"), + SmsMessagebirdAccessKey: cast.Ptr(""), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) +} + +func TestExternalDiff(t *testing.T) { + t.Run("local and remote enabled", func(t *testing.T) { + c := newWithDefaults() + c.External = map[string]provider{ + "apple": {Enabled: true}, + "azure": {Enabled: true}, + "bitbucket": {Enabled: true}, + "discord": {Enabled: true}, + "facebook": {Enabled: true}, + "figma": {Enabled: true}, + 
"github": {Enabled: true}, + "gitlab": {Enabled: true}, + "google": {Enabled: true}, + "kakao": {Enabled: true}, + "keycloak": {Enabled: true}, + "linkedin_oidc": {Enabled: true}, + "notion": {Enabled: true}, + "slack_oidc": {Enabled: true}, + "spotify": {Enabled: true}, + "twitch": {Enabled: true}, + "twitter": {Enabled: true}, + "workos": {Enabled: true}, + "zoom": {Enabled: true}, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalAppleAdditionalClientIds: cast.Ptr(""), + ExternalAppleClientId: cast.Ptr(""), + ExternalAppleEnabled: cast.Ptr(true), + ExternalAppleSecret: cast.Ptr(""), + ExternalAzureClientId: cast.Ptr(""), + ExternalAzureEnabled: cast.Ptr(true), + ExternalAzureSecret: cast.Ptr(""), + ExternalAzureUrl: cast.Ptr(""), + ExternalBitbucketClientId: cast.Ptr(""), + ExternalBitbucketEnabled: cast.Ptr(true), + ExternalBitbucketSecret: cast.Ptr(""), + ExternalDiscordClientId: cast.Ptr(""), + ExternalDiscordEnabled: cast.Ptr(true), + ExternalDiscordSecret: cast.Ptr(""), + ExternalFacebookClientId: cast.Ptr(""), + ExternalFacebookEnabled: cast.Ptr(true), + ExternalFacebookSecret: cast.Ptr(""), + ExternalFigmaClientId: cast.Ptr(""), + ExternalFigmaEnabled: cast.Ptr(true), + ExternalFigmaSecret: cast.Ptr(""), + ExternalGithubClientId: cast.Ptr(""), + ExternalGithubEnabled: cast.Ptr(true), + ExternalGithubSecret: cast.Ptr(""), + ExternalGitlabClientId: cast.Ptr(""), + ExternalGitlabEnabled: cast.Ptr(true), + ExternalGitlabSecret: cast.Ptr(""), + ExternalGitlabUrl: cast.Ptr(""), + ExternalGoogleAdditionalClientIds: cast.Ptr(""), + ExternalGoogleClientId: cast.Ptr(""), + ExternalGoogleEnabled: cast.Ptr(true), + ExternalGoogleSecret: cast.Ptr(""), + ExternalGoogleSkipNonceCheck: cast.Ptr(false), + ExternalKakaoClientId: cast.Ptr(""), + ExternalKakaoEnabled: cast.Ptr(true), + ExternalKakaoSecret: cast.Ptr(""), + ExternalKeycloakClientId: cast.Ptr(""), + ExternalKeycloakEnabled: cast.Ptr(true), + ExternalKeycloakSecret: 
cast.Ptr(""), + ExternalKeycloakUrl: cast.Ptr(""), + ExternalLinkedinOidcClientId: cast.Ptr(""), + ExternalLinkedinOidcEnabled: cast.Ptr(true), + ExternalLinkedinOidcSecret: cast.Ptr(""), + ExternalNotionClientId: cast.Ptr(""), + ExternalNotionEnabled: cast.Ptr(true), + ExternalNotionSecret: cast.Ptr(""), + ExternalSlackOidcClientId: cast.Ptr(""), + ExternalSlackOidcEnabled: cast.Ptr(true), + ExternalSlackOidcSecret: cast.Ptr(""), + ExternalSpotifyClientId: cast.Ptr(""), + ExternalSpotifyEnabled: cast.Ptr(true), + ExternalSpotifySecret: cast.Ptr(""), + ExternalTwitchClientId: cast.Ptr(""), + ExternalTwitchEnabled: cast.Ptr(true), + ExternalTwitchSecret: cast.Ptr(""), + ExternalTwitterClientId: cast.Ptr(""), + ExternalTwitterEnabled: cast.Ptr(true), + ExternalTwitterSecret: cast.Ptr(""), + ExternalWorkosClientId: cast.Ptr(""), + ExternalWorkosEnabled: cast.Ptr(true), + ExternalWorkosSecret: cast.Ptr(""), + ExternalWorkosUrl: cast.Ptr(""), + ExternalZoomClientId: cast.Ptr(""), + ExternalZoomEnabled: cast.Ptr(true), + ExternalZoomSecret: cast.Ptr(""), + // Deprecated fields should be ignored + ExternalSlackClientId: cast.Ptr(""), + ExternalSlackEnabled: cast.Ptr(true), + ExternalSlackSecret: cast.Ptr(""), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) + + t.Run("local enabled and disabled", func(t *testing.T) { + c := newWithDefaults() + c.External = map[string]provider{ + "apple": { + Enabled: true, + ClientId: "test-client-1,test-client-2", + Secret: Secret{ + Value: "test-secret", + SHA256: "ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252", + }, + }, + "azure": { + Enabled: true, + ClientId: "test-client-1", + Secret: Secret{ + Value: "test-secret", + SHA256: "ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252", + }, + }, + "bitbucket": {}, + "discord": {}, + "facebook": {}, + "figma": {}, + "github": {}, + "gitlab": {}, + "google": { + Enabled: false, + ClientId: "test-client-2", + Secret: 
Secret{Value: "env(test_secret)"}, + SkipNonceCheck: false, + }, + // "kakao": {}, + "keycloak": {}, + "linkedin_oidc": {}, + "notion": {}, + "slack_oidc": {}, + "spotify": {}, + "twitch": {}, + "twitter": {}, + "workos": {}, + "zoom": {}, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalAppleAdditionalClientIds: cast.Ptr("test-client-2"), + ExternalAppleClientId: cast.Ptr("test-client-1"), + ExternalAppleEnabled: cast.Ptr(false), + ExternalAppleSecret: cast.Ptr("ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252"), + ExternalGoogleAdditionalClientIds: cast.Ptr("test-client-2"), + ExternalGoogleClientId: cast.Ptr("test-client-1"), + ExternalGoogleEnabled: cast.Ptr(true), + ExternalGoogleSecret: cast.Ptr("b613679a0814d9ec772f95d778c35fc5ff1697c493715653c6c712144292c5ad"), + ExternalGoogleSkipNonceCheck: cast.Ptr(true), + ExternalKakaoClientId: cast.Ptr("test-client-2"), + ExternalKakaoEnabled: cast.Ptr(true), + ExternalKakaoSecret: cast.Ptr("b613679a0814d9ec772f95d778c35fc5ff1697c493715653c6c712144292c5ad"), + }) + // Check error + assert.NoError(t, err) + assertSnapshotEqual(t, diff) + }) + + t.Run("local and remote disabled", func(t *testing.T) { + c := newWithDefaults() + c.External = map[string]provider{ + "apple": {}, + "azure": {}, + "bitbucket": {}, + "discord": {}, + "facebook": {}, + "figma": {}, + "github": {}, + "gitlab": {}, + "google": {}, + "kakao": {}, + "keycloak": {}, + "linkedin_oidc": {}, + "notion": {}, + "slack_oidc": {}, + "spotify": {}, + "twitch": {}, + "twitter": {}, + "workos": {}, + "zoom": {}, + } + // Run test + diff, err := c.DiffWithRemote(v1API.AuthConfigResponse{ + ExternalAppleEnabled: cast.Ptr(false), + ExternalAzureEnabled: cast.Ptr(false), + ExternalBitbucketEnabled: cast.Ptr(false), + ExternalDiscordEnabled: cast.Ptr(false), + ExternalFacebookEnabled: cast.Ptr(false), + ExternalFigmaEnabled: cast.Ptr(false), + ExternalGithubEnabled: cast.Ptr(false), + ExternalGitlabEnabled: 
cast.Ptr(false), + ExternalGoogleEnabled: cast.Ptr(false), + ExternalGoogleSkipNonceCheck: cast.Ptr(false), + ExternalKakaoEnabled: cast.Ptr(false), + ExternalKeycloakEnabled: cast.Ptr(false), + ExternalLinkedinOidcEnabled: cast.Ptr(false), + ExternalNotionEnabled: cast.Ptr(false), + ExternalSlackOidcEnabled: cast.Ptr(false), + ExternalSpotifyEnabled: cast.Ptr(false), + ExternalTwitchEnabled: cast.Ptr(false), + ExternalTwitterEnabled: cast.Ptr(false), + ExternalWorkosEnabled: cast.Ptr(false), + ExternalZoomEnabled: cast.Ptr(false), + // Deprecated fields should be ignored + ExternalSlackEnabled: cast.Ptr(false), + }) + // Check error + assert.NoError(t, err) + assert.Empty(t, string(diff)) + }) +} diff --git a/pkg/config/config.go b/pkg/config/config.go index 3d3be2f27..5d7616575 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -9,12 +9,16 @@ import ( "fmt" "io" "io/fs" + "maps" "net" "net/http" "net/url" "os" + "path" "path/filepath" + "reflect" "regexp" + "sort" "strconv" "strings" "text/template" @@ -25,10 +29,11 @@ import ( "github.com/go-errors/errors" "github.com/golang-jwt/jwt/v5" "github.com/joho/godotenv" + "github.com/mitchellh/mapstructure" "github.com/spf13/viper" - "golang.org/x/mod/semver" - + "github.com/supabase/cli/pkg/cast" "github.com/supabase/cli/pkg/fetcher" + "golang.org/x/mod/semver" ) // Type for turning human-friendly bytes string ("5MB", "32kB") into an int64 during toml decoding. @@ -53,13 +58,6 @@ const ( LogflareBigQuery LogflareBackend = "bigquery" ) -type PoolMode string - -const ( - TransactionMode PoolMode = "transaction" - SessionMode PoolMode = "session" -) - type AddressFamily string const ( @@ -119,7 +117,8 @@ func (c CustomClaims) NewToken() *jwt.Token { // // Default values for internal configs should be added to `var Config` initializer. 
type ( - config struct { + // Common config fields between our "base" config and any "remote" branch specific + baseConfig struct { ProjectId string `toml:"project_id"` Hostname string `toml:"-"` Api api `toml:"api"` @@ -132,47 +131,12 @@ type ( EdgeRuntime edgeRuntime `toml:"edge_runtime"` Functions FunctionConfig `toml:"functions"` Analytics analytics `toml:"analytics"` - Experimental experimental `toml:"experimental" mapstructure:"-"` + Experimental experimental `toml:"experimental"` } - api struct { - Enabled bool `toml:"enabled"` - Image string `toml:"-"` - KongImage string `toml:"-"` - Port uint16 `toml:"port"` - Schemas []string `toml:"schemas"` - ExtraSearchPath []string `toml:"extra_search_path"` - MaxRows uint `toml:"max_rows"` - Tls tlsKong `toml:"tls"` - // TODO: replace [auth|studio].api_url - ExternalUrl string `toml:"external_url"` - } - - tlsKong struct { - Enabled bool `toml:"enabled"` - } - - db struct { - Image string `toml:"-"` - Port uint16 `toml:"port"` - ShadowPort uint16 `toml:"shadow_port"` - MajorVersion uint `toml:"major_version"` - Password string `toml:"-"` - RootKey string `toml:"-" mapstructure:"root_key"` - Pooler pooler `toml:"pooler"` - } - - pooler struct { - Enabled bool `toml:"enabled"` - Image string `toml:"-"` - Port uint16 `toml:"port"` - PoolMode PoolMode `toml:"pool_mode"` - DefaultPoolSize uint `toml:"default_pool_size"` - MaxClientConn uint `toml:"max_client_conn"` - ConnectionString string `toml:"-"` - TenantId string `toml:"-"` - EncryptionKey string `toml:"-"` - SecretKeyBase string `toml:"-"` + config struct { + baseConfig `mapstructure:",squash"` + Remotes map[string]baseConfig `toml:"remotes"` } realtime struct { @@ -195,205 +159,15 @@ type ( } inbucket struct { - Enabled bool `toml:"enabled"` - Image string `toml:"-"` - Port uint16 `toml:"port"` - SmtpPort uint16 `toml:"smtp_port"` - Pop3Port uint16 `toml:"pop3_port"` - } - - storage struct { - Enabled bool `toml:"enabled"` - Image string `toml:"-"` - FileSizeLimit 
sizeInBytes `toml:"file_size_limit"` - S3Credentials storageS3Credentials `toml:"-"` - ImageTransformation imageTransformation `toml:"image_transformation"` - Buckets BucketConfig `toml:"buckets"` - } - - BucketConfig map[string]bucket - - bucket struct { - Public *bool `toml:"public"` - FileSizeLimit sizeInBytes `toml:"file_size_limit"` - AllowedMimeTypes []string `toml:"allowed_mime_types"` - ObjectsPath string `toml:"objects_path"` - } - - imageTransformation struct { - Enabled bool `toml:"enabled"` - Image string `toml:"-"` - } - - storageS3Credentials struct { - AccessKeyId string `toml:"-"` - SecretAccessKey string `toml:"-"` - Region string `toml:"-"` - } - - auth struct { - Enabled bool `toml:"enabled"` - Image string `toml:"-"` - SiteUrl string `toml:"site_url"` - AdditionalRedirectUrls []string `toml:"additional_redirect_urls"` - - JwtExpiry uint `toml:"jwt_expiry"` - EnableRefreshTokenRotation bool `toml:"enable_refresh_token_rotation"` - RefreshTokenReuseInterval uint `toml:"refresh_token_reuse_interval"` - EnableManualLinking bool `toml:"enable_manual_linking"` - - Hook hook `toml:"hook"` - MFA mfa `toml:"mfa"` - Sessions sessions `toml:"sessions"` - - EnableSignup bool `toml:"enable_signup"` - EnableAnonymousSignIns bool `toml:"enable_anonymous_sign_ins"` - Email email `toml:"email"` - Sms sms `toml:"sms"` - External map[string]provider - - // Custom secrets can be injected from .env file - JwtSecret string `toml:"-" mapstructure:"jwt_secret"` - AnonKey string `toml:"-" mapstructure:"anon_key"` - ServiceRoleKey string `toml:"-" mapstructure:"service_role_key"` - - ThirdParty thirdParty `toml:"third_party"` - } - - thirdParty struct { - Firebase tpaFirebase `toml:"firebase"` - Auth0 tpaAuth0 `toml:"auth0"` - Cognito tpaCognito `toml:"aws_cognito"` - } - - tpaFirebase struct { - Enabled bool `toml:"enabled"` - - ProjectID string `toml:"project_id"` - } - - tpaAuth0 struct { - Enabled bool `toml:"enabled"` - - Tenant string `toml:"tenant"` - TenantRegion 
string `toml:"tenant_region"` - } - - tpaCognito struct { - Enabled bool `toml:"enabled"` - - UserPoolID string `toml:"user_pool_id"` - UserPoolRegion string `toml:"user_pool_region"` - } - - email struct { - EnableSignup bool `toml:"enable_signup"` - DoubleConfirmChanges bool `toml:"double_confirm_changes"` - EnableConfirmations bool `toml:"enable_confirmations"` - SecurePasswordChange bool `toml:"secure_password_change"` - Template map[string]emailTemplate `toml:"template"` - Smtp smtp `toml:"smtp"` - MaxFrequency time.Duration `toml:"max_frequency"` - } - - smtp struct { - Host string `toml:"host"` + Enabled bool `toml:"enabled"` + Image string `toml:"-"` Port uint16 `toml:"port"` - User string `toml:"user"` - Pass string `toml:"pass"` + SmtpPort uint16 `toml:"smtp_port"` + Pop3Port uint16 `toml:"pop3_port"` AdminEmail string `toml:"admin_email"` SenderName string `toml:"sender_name"` } - emailTemplate struct { - Subject string `toml:"subject"` - ContentPath string `toml:"content_path"` - } - - sms struct { - EnableSignup bool `toml:"enable_signup"` - EnableConfirmations bool `toml:"enable_confirmations"` - Template string `toml:"template"` - Twilio twilioConfig `toml:"twilio" mapstructure:"twilio"` - TwilioVerify twilioConfig `toml:"twilio_verify" mapstructure:"twilio_verify"` - Messagebird messagebirdConfig `toml:"messagebird" mapstructure:"messagebird"` - Textlocal textlocalConfig `toml:"textlocal" mapstructure:"textlocal"` - Vonage vonageConfig `toml:"vonage" mapstructure:"vonage"` - TestOTP map[string]string `toml:"test_otp"` - MaxFrequency time.Duration `toml:"max_frequency"` - } - - hook struct { - MFAVerificationAttempt hookConfig `toml:"mfa_verification_attempt"` - PasswordVerificationAttempt hookConfig `toml:"password_verification_attempt"` - CustomAccessToken hookConfig `toml:"custom_access_token"` - SendSMS hookConfig `toml:"send_sms"` - SendEmail hookConfig `toml:"send_email"` - } - factorTypeConfiguration struct { - EnrollEnabled bool 
`toml:"enroll_enabled"` - VerifyEnabled bool `toml:"verify_enabled"` - } - - phoneFactorTypeConfiguration struct { - factorTypeConfiguration - OtpLength uint `toml:"otp_length"` - Template string `toml:"template"` - MaxFrequency time.Duration `toml:"max_frequency"` - } - - mfa struct { - TOTP factorTypeConfiguration `toml:"totp"` - Phone phoneFactorTypeConfiguration `toml:"phone"` - MaxEnrolledFactors uint `toml:"max_enrolled_factors"` - } - - hookConfig struct { - Enabled bool `toml:"enabled"` - URI string `toml:"uri"` - Secrets string `toml:"secrets"` - } - - sessions struct { - Timebox time.Duration `toml:"timebox"` - InactivityTimeout time.Duration `toml:"inactivity_timeout"` - } - - twilioConfig struct { - Enabled bool `toml:"enabled"` - AccountSid string `toml:"account_sid"` - MessageServiceSid string `toml:"message_service_sid"` - AuthToken string `toml:"auth_token" mapstructure:"auth_token"` - } - - messagebirdConfig struct { - Enabled bool `toml:"enabled"` - Originator string `toml:"originator"` - AccessKey string `toml:"access_key" mapstructure:"access_key"` - } - - textlocalConfig struct { - Enabled bool `toml:"enabled"` - Sender string `toml:"sender"` - ApiKey string `toml:"api_key" mapstructure:"api_key"` - } - - vonageConfig struct { - Enabled bool `toml:"enabled"` - From string `toml:"from"` - ApiKey string `toml:"api_key" mapstructure:"api_key"` - ApiSecret string `toml:"api_secret" mapstructure:"api_secret"` - } - - provider struct { - Enabled bool `toml:"enabled"` - ClientId string `toml:"client_id"` - Secret string `toml:"secret"` - Url string `toml:"url"` - RedirectUri string `toml:"redirect_uri"` - SkipNonceCheck bool `toml:"skip_nonce_check"` - } - edgeRuntime struct { Enabled bool `toml:"enabled"` Image string `toml:"-"` @@ -404,9 +178,11 @@ type ( FunctionConfig map[string]function function struct { - VerifyJWT *bool `toml:"verify_jwt" json:"verifyJWT"` - ImportMap string `toml:"import_map" json:"importMapPath,omitempty"` - Entrypoint string 
`json:"-"` + Enabled *bool `toml:"enabled" json:"-"` + VerifyJWT *bool `toml:"verify_jwt" json:"verifyJWT"` + ImportMap string `toml:"import_map" json:"importMapPath,omitempty"` + Entrypoint string `toml:"entrypoint" json:"entrypointPath,omitempty"` + StaticFiles []string `toml:"static_files" json:"staticFiles,omitempty"` } analytics struct { @@ -423,15 +199,69 @@ type ( VectorPort uint16 `toml:"vector_port"` } + webhooks struct { + Enabled bool `toml:"enabled"` + } + experimental struct { - OrioleDBVersion string `toml:"orioledb_version"` - S3Host string `toml:"s3_host"` - S3Region string `toml:"s3_region"` - S3AccessKey string `toml:"s3_access_key"` - S3SecretKey string `toml:"s3_secret_key"` + OrioleDBVersion string `toml:"orioledb_version"` + S3Host string `toml:"s3_host"` + S3Region string `toml:"s3_region"` + S3AccessKey string `toml:"s3_access_key"` + S3SecretKey string `toml:"s3_secret_key"` + Webhooks *webhooks `toml:"webhooks"` } ) +func (f function) IsEnabled() bool { + // If Enabled is not defined, or defined and set to true + return f.Enabled == nil || *f.Enabled +} + +func (a *auth) Clone() auth { + copy := *a + copy.External = maps.Clone(a.External) + if a.Email.Smtp != nil { + mailer := *a.Email.Smtp + copy.Email.Smtp = &mailer + } + if a.Hook.MFAVerificationAttempt != nil { + hook := *a.Hook.MFAVerificationAttempt + copy.Hook.MFAVerificationAttempt = &hook + } + if a.Hook.PasswordVerificationAttempt != nil { + hook := *a.Hook.PasswordVerificationAttempt + copy.Hook.PasswordVerificationAttempt = &hook + } + if a.Hook.CustomAccessToken != nil { + hook := *a.Hook.CustomAccessToken + copy.Hook.CustomAccessToken = &hook + } + if a.Hook.SendSMS != nil { + hook := *a.Hook.SendSMS + copy.Hook.SendSMS = &hook + } + if a.Hook.SendEmail != nil { + hook := *a.Hook.SendEmail + copy.Hook.SendEmail = &hook + } + copy.Email.Template = maps.Clone(a.Email.Template) + copy.Sms.TestOTP = maps.Clone(a.Sms.TestOTP) + return copy +} + +func (c *baseConfig) Clone() 
baseConfig { + copy := *c + copy.Storage.Buckets = maps.Clone(c.Storage.Buckets) + copy.Functions = maps.Clone(c.Functions) + copy.Auth = c.Auth.Clone() + if c.Experimental.Webhooks != nil { + webhooks := *c.Experimental.Webhooks + copy.Experimental.Webhooks = &webhooks + } + return copy +} + type ConfigEditor func(*config) func WithHostname(hostname string) ConfigEditor { @@ -441,7 +271,7 @@ func WithHostname(hostname string) ConfigEditor { } func NewConfig(editors ...ConfigEditor) config { - initial := config{ + initial := config{baseConfig: baseConfig{ Hostname: "127.0.0.1", Api: api{ Image: postgrestImage, @@ -457,6 +287,10 @@ func NewConfig(editors ...ConfigEditor) config { EncryptionKey: "12345678901234567890123456789032", SecretKeyBase: "EAx3IQ/wRG1v47ZD4NE4/9RzBI8Jmil3x0yhcW4V2NHBP6c2iPIzwjofi2Ep4HIG", }, + Seed: seed{ + Enabled: true, + GlobPatterns: []string{"./seed.sql"}, + }, }, Realtime: realtime{ Image: realtimeImage, @@ -467,58 +301,29 @@ func NewConfig(editors ...ConfigEditor) config { SecretKeyBase: "EAx3IQ/wRG1v47ZD4NE4/9RzBI8Jmil3x0yhcW4V2NHBP6c2iPIzwjofi2Ep4HIG", }, Storage: storage{ - Image: storageImage, + Image: storageImage, + ImgProxyImage: imageProxyImage, S3Credentials: storageS3Credentials{ AccessKeyId: "625729a08b95bf1b7ff351a663f3a23c", SecretAccessKey: "850181e4652dd023b7a98c58ae0d2d34bd487ee0cc3254aed6eda37307425907", Region: "local", }, - ImageTransformation: imageTransformation{ - Enabled: true, - Image: imageProxyImage, - }, }, Auth: auth{ Image: gotrueImage, Email: email{ - Template: map[string]emailTemplate{ - "invite": {}, - "confirmation": {}, - "recovery": {}, - "magic_link": {}, - "email_change": {}, - }, - Smtp: smtp{ - Host: "inbucket", - Port: 2500, - AdminEmail: "admin@email.com", - }, + Template: map[string]emailTemplate{}, }, - External: map[string]provider{ - "apple": {}, - "azure": {}, - "bitbucket": {}, - "discord": {}, - "facebook": {}, - "github": {}, - "gitlab": {}, - "google": {}, - "keycloak": {}, - "linkedin": 
{}, // TODO: remove this field in v2 - "linkedin_oidc": {}, - "notion": {}, - "twitch": {}, - "twitter": {}, - "slack": {}, // TODO: remove this field in v2 - "slack_oidc": {}, - "spotify": {}, - "workos": {}, - "zoom": {}, + Sms: sms{ + TestOTP: map[string]string{}, }, + External: map[string]provider{}, JwtSecret: defaultJwtSecret, }, Inbucket: inbucket{ - Image: inbucketImage, + Image: inbucketImage, + AdminEmail: "admin@email.com", + SenderName: "Admin", }, Studio: studio{ Image: studioImage, @@ -534,7 +339,7 @@ func NewConfig(editors ...ConfigEditor) config { EdgeRuntime: edgeRuntime{ Image: edgeRuntimeImage, }, - } + }} for _, apply := range editors { apply(&initial) } @@ -548,6 +353,7 @@ var ( invalidProjectId = regexp.MustCompile("[^a-zA-Z0-9_.-]+") envPattern = regexp.MustCompile(`^env\((.*)\)$`) + refPattern = regexp.MustCompile(`^[a-z]{20}$`) ) func (c *config) Eject(w io.Writer) error { @@ -567,33 +373,117 @@ func (c *config) Eject(w io.Writer) error { return nil } -func (c *config) Load(path string, fsys fs.FS) error { - builder := NewPathBuilder(path) +// Loads custom config file to struct fields tagged with toml. 
+func (c *config) loadFromFile(filename string, fsys fs.FS) error { + v := viper.New() + v.SetConfigType("toml") // Load default values var buf bytes.Buffer if err := initConfigTemplate.Option("missingkey=zero").Execute(&buf, c); err != nil { - return errors.Errorf("failed to initialise config template: %w", err) + return errors.Errorf("failed to initialise template config: %w", err) + } else if err := c.loadFromReader(v, &buf); err != nil { + return err + } + // Load custom config + if ext := filepath.Ext(filename); len(ext) > 0 { + v.SetConfigType(ext[1:]) + } + f, err := fsys.Open(filename) + if err != nil { + return errors.Errorf("failed to read file config: %w", err) } - dec := toml.NewDecoder(&buf) - if _, err := dec.Decode(c); err != nil { - return errors.Errorf("failed to decode config template: %w", err) + defer f.Close() + return c.loadFromReader(v, f) +} + +func (c *config) loadFromReader(v *viper.Viper, r io.Reader) error { + if err := v.MergeConfig(r); err != nil { + return errors.Errorf("failed to merge config: %w", err) + } + // Find [remotes.*] block to override base config + baseId := v.GetString("project_id") + idToName := map[string]string{baseId: "base"} + for name, remote := range v.GetStringMap("remotes") { + projectId := v.GetString(fmt.Sprintf("remotes.%s.project_id", name)) + // Track remote project_id to check for duplication + if other, exists := idToName[projectId]; exists { + return errors.Errorf("duplicate project_id for [remotes.%s] and %s", name, other) + } + idToName[projectId] = fmt.Sprintf("[remotes.%s]", name) + if projectId == c.ProjectId { + fmt.Fprintln(os.Stderr, "Loading config override:", idToName[projectId]) + if err := v.MergeConfigMap(remote.(map[string]any)); err != nil { + return err + } + v.Set("project_id", baseId) + } } - // Load user defined config - if metadata, err := toml.DecodeFS(fsys, builder.ConfigPath, c); err != nil { - cwd, osErr := os.Getwd() - if osErr != nil { - cwd = "current directory" + // Manually 
parse [functions.*] to empty struct for backwards compatibility + for key, value := range v.GetStringMap("functions") { + if m, ok := value.(map[string]any); ok && len(m) == 0 { + v.Set("functions."+key, function{}) } - return errors.Errorf("cannot read config in %s: %w", cwd, err) - } else if undecoded := metadata.Undecoded(); len(undecoded) > 0 { - fmt.Fprintf(os.Stderr, "Unknown config fields: %+v\n", undecoded) } + if err := v.UnmarshalExact(c, func(dc *mapstructure.DecoderConfig) { + dc.TagName = "toml" + dc.Squash = true + dc.ZeroFields = true + dc.DecodeHook = c.newDecodeHook(LoadEnvHook) + }); err != nil { + return errors.Errorf("failed to parse config: %w", err) + } + return nil +} + +func (c *config) newDecodeHook(fs ...mapstructure.DecodeHookFunc) mapstructure.DecodeHookFunc { + fs = append(fs, + mapstructure.StringToTimeDurationHookFunc(), + mapstructure.StringToIPHookFunc(), + mapstructure.StringToSliceHookFunc(","), + mapstructure.TextUnmarshallerHookFunc(), + DecryptSecretHookFunc(c.ProjectId), + ) + return mapstructure.ComposeDecodeHookFunc(fs...) +} + +// Loads envs prefixed with supabase_ to struct fields tagged with mapstructure. +func (c *config) loadFromEnv() error { + v := viper.New() + v.SetEnvPrefix("SUPABASE") + v.SetEnvKeyReplacer(strings.NewReplacer(".", "_")) + v.AutomaticEnv() + // Viper does not parse env vars automatically. Instead of calling viper.BindEnv + // per key, we decode all keys from an existing struct, and merge them to viper. 
+ // Ref: https://github.com/spf13/viper/issues/761#issuecomment-859306364 + envKeysMap := map[string]interface{}{} + if dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ + Result: &envKeysMap, + IgnoreUntaggedFields: true, + }); err != nil { + return errors.Errorf("failed to create decoder: %w", err) + } else if err := dec.Decode(c.baseConfig); err != nil { + return errors.Errorf("failed to decode env: %w", err) + } else if err := v.MergeConfigMap(envKeysMap); err != nil { + return errors.Errorf("failed to merge env config: %w", err) + } + // Writes viper state back to config struct, with automatic env substitution + if err := v.UnmarshalExact(c, viper.DecodeHook(c.newDecodeHook())); err != nil { + return errors.Errorf("failed to parse env override: %w", err) + } + return nil +} + +func (c *config) Load(path string, fsys fs.FS) error { + builder := NewPathBuilder(path) // Load secrets from .env file - if err := loadDefaultEnv(); err != nil { + if err := loadNestedEnv(builder.SupabaseDirPath); err != nil { + return err + } + if err := c.loadFromFile(builder.ConfigPath, fsys); err != nil { return err } - if err := viper.Unmarshal(c); err != nil { - return errors.Errorf("failed to parse env to config: %w", err) + if err := c.loadFromEnv(); err != nil { + return err } // Generate JWT tokens if len(c.Auth.AnonKey) == 0 { @@ -655,37 +545,75 @@ func (c *config) Load(path string, fsys fs.FS) error { if version, err := fs.ReadFile(fsys, builder.PgmetaVersionPath); err == nil && len(version) > 0 { c.Studio.PgmetaImage = replaceImageTag(pgmetaImage, string(version)) } + // TODO: replace derived config resolution with viper decode hooks + if err := c.baseConfig.resolve(builder, fsys); err != nil { + return err + } + return c.Validate(fsys) +} + +func (c *baseConfig) resolve(builder pathBuilder, fsys fs.FS) error { + // Update content paths + for name, tmpl := range c.Auth.Email.Template { + // FIXME: only email template is relative to repo directory + cwd := 
filepath.Dir(builder.SupabaseDirPath) + if len(tmpl.ContentPath) > 0 && !filepath.IsAbs(tmpl.ContentPath) { + tmpl.ContentPath = filepath.Join(cwd, tmpl.ContentPath) + } + c.Auth.Email.Template[name] = tmpl + } // Update fallback configs for name, bucket := range c.Storage.Buckets { if bucket.FileSizeLimit == 0 { bucket.FileSizeLimit = c.Storage.FileSizeLimit } + if len(bucket.ObjectsPath) > 0 && !filepath.IsAbs(bucket.ObjectsPath) { + bucket.ObjectsPath = filepath.Join(builder.SupabaseDirPath, bucket.ObjectsPath) + } c.Storage.Buckets[name] = bucket } + // Resolve functions config for slug, function := range c.Functions { - // TODO: support configuring alternative entrypoint path, such as index.js if len(function.Entrypoint) == 0 { function.Entrypoint = filepath.Join(builder.FunctionsDir, slug, "index.ts") } else if !filepath.IsAbs(function.Entrypoint) { // Append supabase/ because paths in configs are specified relative to config.toml function.Entrypoint = filepath.Join(builder.SupabaseDirPath, function.Entrypoint) } - // Functions may not use import map so we don't set a default value - if len(function.ImportMap) > 0 && !filepath.IsAbs(function.ImportMap) { + if len(function.ImportMap) == 0 { + functionDir := filepath.Dir(function.Entrypoint) + denoJsonPath := filepath.Join(functionDir, "deno.json") + denoJsoncPath := filepath.Join(functionDir, "deno.jsonc") + if _, err := fs.Stat(fsys, denoJsonPath); err == nil { + function.ImportMap = denoJsonPath + } else if _, err := fs.Stat(fsys, denoJsoncPath); err == nil { + function.ImportMap = denoJsoncPath + } + // Functions may not use import map so we don't set a default value + } else if !filepath.IsAbs(function.ImportMap) { function.ImportMap = filepath.Join(builder.SupabaseDirPath, function.ImportMap) } + for i, val := range function.StaticFiles { + function.StaticFiles[i] = filepath.Join(builder.SupabaseDirPath, val) + } c.Functions[slug] = function } - return c.Validate() + return 
c.Db.Seed.loadSeedPaths(builder.SupabaseDirPath, fsys) } -func (c *config) Validate() error { +func (c *config) Validate(fsys fs.FS) error { if c.ProjectId == "" { return errors.New("Missing required field in config: project_id") } else if sanitized := sanitizeProjectId(c.ProjectId); sanitized != c.ProjectId { - fmt.Fprintln(os.Stderr, "WARNING:", "project_id field in config is invalid. Auto-fixing to", sanitized) + fmt.Fprintln(os.Stderr, "WARN: project_id field in config is invalid. Auto-fixing to", sanitized) c.ProjectId = sanitized } + // Since remote config is merged to base, we only need to validate the project_id field. + for name, remote := range c.Remotes { + if !refPattern.MatchString(remote.ProjectId) { + return errors.Errorf("Invalid config for remotes.%s.project_id. Must be like: abcdefghijklmnopqrst", name) + } + } // Validate api config if c.Api.Enabled { if c.Api.Port == 0 { @@ -693,6 +621,12 @@ func (c *config) Validate() error { } } // Validate db config + if c.Db.Settings.SessionReplicationRole != nil { + allowedRoles := []SessionReplicationRole{SessionReplicationRoleOrigin, SessionReplicationRoleReplica, SessionReplicationRoleLocal} + if !sliceContains(allowedRoles, *c.Db.Settings.SessionReplicationRole) { + return errors.Errorf("Invalid config for db.session_replication_role. 
Must be one of: %v", allowedRoles) + } + } if c.Db.Port == 0 { return errors.New("Missing required field in config: db.port") } @@ -708,17 +642,16 @@ func (c *config) Validate() error { case 15: if len(c.Experimental.OrioleDBVersion) > 0 { c.Db.Image = "supabase/postgres:orioledb-" + c.Experimental.OrioleDBVersion - var err error - if c.Experimental.S3Host, err = maybeLoadEnv(c.Experimental.S3Host); err != nil { + if err := assertEnvLoaded(c.Experimental.S3Host); err != nil { return err } - if c.Experimental.S3Region, err = maybeLoadEnv(c.Experimental.S3Region); err != nil { + if err := assertEnvLoaded(c.Experimental.S3Region); err != nil { return err } - if c.Experimental.S3AccessKey, err = maybeLoadEnv(c.Experimental.S3AccessKey); err != nil { + if err := assertEnvLoaded(c.Experimental.S3AccessKey); err != nil { return err } - if c.Experimental.S3SecretKey, err = maybeLoadEnv(c.Experimental.S3SecretKey); err != nil { + if err := assertEnvLoaded(c.Experimental.S3SecretKey); err != nil { return err } } @@ -755,7 +688,6 @@ func (c *config) Validate() error { } else if parsed.Host == "" || parsed.Host == c.Hostname { c.Studio.ApiUrl = c.Api.ExternalUrl } - c.Studio.OpenaiApiKey, _ = maybeLoadEnv(c.Studio.OpenaiApiKey) } // Validate smtp config if c.Inbucket.Enabled { @@ -768,132 +700,37 @@ func (c *config) Validate() error { if c.Auth.SiteUrl == "" { return errors.New("Missing required field in config: auth.site_url") } - var err error - if c.Auth.SiteUrl, err = maybeLoadEnv(c.Auth.SiteUrl); err != nil { + if err := assertEnvLoaded(c.Auth.SiteUrl); err != nil { return err } - // Validate email config - for name, tmpl := range c.Auth.Email.Template { - if len(tmpl.ContentPath) > 0 && !fs.ValidPath(filepath.Clean(tmpl.ContentPath)) { - return errors.Errorf("Invalid config for auth.email.%s.content_path: %s", name, tmpl.ContentPath) - } - } - if c.Auth.Email.Smtp.Pass, err = maybeLoadEnv(c.Auth.Email.Smtp.Pass); err != nil { - return err - } - // Validate sms config - 
if c.Auth.Sms.Twilio.Enabled { - if len(c.Auth.Sms.Twilio.AccountSid) == 0 { - return errors.New("Missing required field in config: auth.sms.twilio.account_sid") - } - if len(c.Auth.Sms.Twilio.MessageServiceSid) == 0 { - return errors.New("Missing required field in config: auth.sms.twilio.message_service_sid") - } - if len(c.Auth.Sms.Twilio.AuthToken) == 0 { - return errors.New("Missing required field in config: auth.sms.twilio.auth_token") - } - if c.Auth.Sms.Twilio.AuthToken, err = maybeLoadEnv(c.Auth.Sms.Twilio.AuthToken); err != nil { - return err - } - } - if c.Auth.Sms.TwilioVerify.Enabled { - if len(c.Auth.Sms.TwilioVerify.AccountSid) == 0 { - return errors.New("Missing required field in config: auth.sms.twilio_verify.account_sid") - } - if len(c.Auth.Sms.TwilioVerify.MessageServiceSid) == 0 { - return errors.New("Missing required field in config: auth.sms.twilio_verify.message_service_sid") - } - if len(c.Auth.Sms.TwilioVerify.AuthToken) == 0 { - return errors.New("Missing required field in config: auth.sms.twilio_verify.auth_token") - } - if c.Auth.Sms.TwilioVerify.AuthToken, err = maybeLoadEnv(c.Auth.Sms.TwilioVerify.AuthToken); err != nil { - return err - } - } - if c.Auth.Sms.Messagebird.Enabled { - if len(c.Auth.Sms.Messagebird.Originator) == 0 { - return errors.New("Missing required field in config: auth.sms.messagebird.originator") - } - if len(c.Auth.Sms.Messagebird.AccessKey) == 0 { - return errors.New("Missing required field in config: auth.sms.messagebird.access_key") - } - if c.Auth.Sms.Messagebird.AccessKey, err = maybeLoadEnv(c.Auth.Sms.Messagebird.AccessKey); err != nil { - return err - } - } - if c.Auth.Sms.Textlocal.Enabled { - if len(c.Auth.Sms.Textlocal.Sender) == 0 { - return errors.New("Missing required field in config: auth.sms.textlocal.sender") - } - if len(c.Auth.Sms.Textlocal.ApiKey) == 0 { - return errors.New("Missing required field in config: auth.sms.textlocal.api_key") - } - if c.Auth.Sms.Textlocal.ApiKey, err = 
maybeLoadEnv(c.Auth.Sms.Textlocal.ApiKey); err != nil { - return err + for i, url := range c.Auth.AdditionalRedirectUrls { + if err := assertEnvLoaded(url); err != nil { + return errors.Errorf("Invalid config for auth.additional_redirect_urls[%d]: %v", i, err) } } - if c.Auth.Sms.Vonage.Enabled { - if len(c.Auth.Sms.Vonage.From) == 0 { - return errors.New("Missing required field in config: auth.sms.vonage.from") - } - if len(c.Auth.Sms.Vonage.ApiKey) == 0 { - return errors.New("Missing required field in config: auth.sms.vonage.api_key") - } - if len(c.Auth.Sms.Vonage.ApiSecret) == 0 { - return errors.New("Missing required field in config: auth.sms.vonage.api_secret") - } - if c.Auth.Sms.Vonage.ApiKey, err = maybeLoadEnv(c.Auth.Sms.Vonage.ApiKey); err != nil { - return err - } - if c.Auth.Sms.Vonage.ApiSecret, err = maybeLoadEnv(c.Auth.Sms.Vonage.ApiSecret); err != nil { - return err - } + allowed := []PasswordRequirements{NoRequirements, LettersDigits, LowerUpperLettersDigits, LowerUpperLettersDigitsSymbols} + if !sliceContains(allowed, c.Auth.PasswordRequirements) { + return errors.Errorf("Invalid config for auth.password_requirements. 
Must be one of: %v", allowed) } - if err := c.Auth.Hook.MFAVerificationAttempt.HandleHook("mfa_verification_attempt"); err != nil { + if err := c.Auth.Hook.validate(); err != nil { return err } - if err := c.Auth.Hook.PasswordVerificationAttempt.HandleHook("password_verification_attempt"); err != nil { + if err := c.Auth.MFA.validate(); err != nil { return err } - if err := c.Auth.Hook.CustomAccessToken.HandleHook("custom_access_token"); err != nil { + if err := c.Auth.Email.validate(fsys); err != nil { return err } - if err := c.Auth.Hook.SendSMS.HandleHook("send_sms"); err != nil { + if err := c.Auth.Sms.validate(); err != nil { return err } - if err := c.Auth.Hook.SendEmail.HandleHook("send_email"); err != nil { + if err := c.Auth.External.validate(); err != nil { return err } - // Validate oauth config - for ext, provider := range c.Auth.External { - if !provider.Enabled { - continue - } - if provider.ClientId == "" { - return errors.Errorf("Missing required field in config: auth.external.%s.client_id", ext) - } - if !sliceContains([]string{"apple", "google"}, ext) && provider.Secret == "" { - return errors.Errorf("Missing required field in config: auth.external.%s.secret", ext) - } - if provider.ClientId, err = maybeLoadEnv(provider.ClientId); err != nil { - return err - } - if provider.Secret, err = maybeLoadEnv(provider.Secret); err != nil { - return err - } - if provider.RedirectUri, err = maybeLoadEnv(provider.RedirectUri); err != nil { - return err - } - if provider.Url, err = maybeLoadEnv(provider.Url); err != nil { - return err - } - c.Auth.External[ext] = provider + if err := c.Auth.ThirdParty.validate(); err != nil { + return err } } - // Validate Third-Party Auth config - if err := c.Auth.ThirdParty.validate(); err != nil { - return err - } // Validate functions config if c.EdgeRuntime.Enabled { allowed := []RequestPolicy{PolicyPerWorker, PolicyOneshot} @@ -926,21 +763,30 @@ func (c *config) Validate() error { return errors.Errorf("Invalid config for 
analytics.backend. Must be one of: %v", allowed) } } + if err := c.Experimental.validate(); err != nil { + return err + } return nil } -func maybeLoadEnv(s string) (string, error) { - matches := envPattern.FindStringSubmatch(s) - if len(matches) == 0 { - return s, nil +func assertEnvLoaded(s string) error { + if matches := envPattern.FindStringSubmatch(s); len(matches) > 1 { + fmt.Fprintln(os.Stderr, "WARN: environment variable is unset:", matches[1]) } + return nil +} - envName := matches[1] - if value := os.Getenv(envName); value != "" { - return value, nil +func LoadEnvHook(f reflect.Kind, t reflect.Kind, data interface{}) (interface{}, error) { + if f != reflect.String { + return data, nil } - - return "", errors.Errorf(`Error evaluating "%s": environment variable %s is unset.`, s, envName) + value := data.(string) + if matches := envPattern.FindStringSubmatch(value); len(matches) > 1 { + if env := os.Getenv(matches[1]); len(env) > 0 { + value = env + } + } + return value, nil } func truncateText(text string, maxLen int) string { @@ -962,8 +808,30 @@ func sanitizeProjectId(src string) string { return truncateText(sanitized, maxProjectIdLength) } -func loadDefaultEnv() error { +func loadNestedEnv(basePath string) error { + repoDir, err := os.Getwd() + if err != nil { + return errors.Errorf("failed to get repo directory: %w", err) + } + if !filepath.IsAbs(basePath) { + basePath = filepath.Join(repoDir, basePath) + } env := viper.GetString("ENV") + for cwd := basePath; cwd != filepath.Dir(repoDir); cwd = filepath.Dir(cwd) { + if err := os.Chdir(cwd); err != nil && !errors.Is(err, os.ErrNotExist) { + return errors.Errorf("failed to change directory: %w", err) + } + if err := loadDefaultEnv(env); err != nil { + return err + } + } + if err := os.Chdir(repoDir); err != nil { + return errors.Errorf("failed to restore directory: %w", err) + } + return nil +} + +func loadDefaultEnv(env string) error { if env == "" { env = "development" } @@ -987,31 +855,259 @@ func 
loadEnvIfExists(path string) error { return nil } -func (h *hookConfig) HandleHook(hookType string) error { +// Match the glob patterns from the config to get a deduplicated +// array of all migrations files to apply in the declared order. +func (c *seed) loadSeedPaths(basePath string, fsys fs.FS) error { + if !c.Enabled { + return nil + } + if c.SqlPaths != nil { + // Reuse already allocated array + c.SqlPaths = c.SqlPaths[:0] + } + set := make(map[string]struct{}) + for _, pattern := range c.GlobPatterns { + // Glob expects / as path separator on windows + pattern = filepath.ToSlash(pattern) + if !filepath.IsAbs(pattern) { + pattern = path.Join(basePath, pattern) + } + matches, err := fs.Glob(fsys, pattern) + if err != nil { + return errors.Errorf("failed to apply glob pattern: %w", err) + } + if len(matches) == 0 { + fmt.Fprintln(os.Stderr, "WARN: no seed files matched pattern:", pattern) + } + sort.Strings(matches) + // Remove duplicates + for _, item := range matches { + if _, exists := set[item]; !exists { + set[item] = struct{}{} + c.SqlPaths = append(c.SqlPaths, item) + } + } + } + return nil +} + +func (e *email) validate(fsys fs.FS) (err error) { + for name, tmpl := range e.Template { + if len(tmpl.ContentPath) == 0 { + if tmpl.Content != nil { + return errors.Errorf("Invalid config for auth.email.%s.content: please use content_path instead", name) + } + continue + } + if content, err := fs.ReadFile(fsys, tmpl.ContentPath); err != nil { + return errors.Errorf("Invalid config for auth.email.%s.content_path: %w", name, err) + } else { + tmpl.Content = cast.Ptr(string(content)) + } + e.Template[name] = tmpl + } + if e.Smtp != nil && e.Smtp.IsEnabled() { + if len(e.Smtp.Host) == 0 { + return errors.New("Missing required field in config: auth.email.smtp.host") + } + if e.Smtp.Port == 0 { + return errors.New("Missing required field in config: auth.email.smtp.port") + } + if len(e.Smtp.User) == 0 { + return errors.New("Missing required field in config: 
auth.email.smtp.user") + } + if len(e.Smtp.Pass.Value) == 0 { + return errors.New("Missing required field in config: auth.email.smtp.pass") + } + if len(e.Smtp.AdminEmail) == 0 { + return errors.New("Missing required field in config: auth.email.smtp.admin_email") + } + if err := assertEnvLoaded(e.Smtp.Pass.Value); err != nil { + return err + } + } + return nil +} + +func (s *sms) validate() (err error) { + switch { + case s.Twilio.Enabled: + if len(s.Twilio.AccountSid) == 0 { + return errors.New("Missing required field in config: auth.sms.twilio.account_sid") + } + if len(s.Twilio.MessageServiceSid) == 0 { + return errors.New("Missing required field in config: auth.sms.twilio.message_service_sid") + } + if len(s.Twilio.AuthToken.Value) == 0 { + return errors.New("Missing required field in config: auth.sms.twilio.auth_token") + } + if err := assertEnvLoaded(s.Twilio.AuthToken.Value); err != nil { + return err + } + case s.TwilioVerify.Enabled: + if len(s.TwilioVerify.AccountSid) == 0 { + return errors.New("Missing required field in config: auth.sms.twilio_verify.account_sid") + } + if len(s.TwilioVerify.MessageServiceSid) == 0 { + return errors.New("Missing required field in config: auth.sms.twilio_verify.message_service_sid") + } + if len(s.TwilioVerify.AuthToken.Value) == 0 { + return errors.New("Missing required field in config: auth.sms.twilio_verify.auth_token") + } + if err := assertEnvLoaded(s.TwilioVerify.AuthToken.Value); err != nil { + return err + } + case s.Messagebird.Enabled: + if len(s.Messagebird.Originator) == 0 { + return errors.New("Missing required field in config: auth.sms.messagebird.originator") + } + if len(s.Messagebird.AccessKey.Value) == 0 { + return errors.New("Missing required field in config: auth.sms.messagebird.access_key") + } + if err := assertEnvLoaded(s.Messagebird.AccessKey.Value); err != nil { + return err + } + case s.Textlocal.Enabled: + if len(s.Textlocal.Sender) == 0 { + return errors.New("Missing required field in config: 
auth.sms.textlocal.sender") + } + if len(s.Textlocal.ApiKey.Value) == 0 { + return errors.New("Missing required field in config: auth.sms.textlocal.api_key") + } + if err := assertEnvLoaded(s.Textlocal.ApiKey.Value); err != nil { + return err + } + case s.Vonage.Enabled: + if len(s.Vonage.From) == 0 { + return errors.New("Missing required field in config: auth.sms.vonage.from") + } + if len(s.Vonage.ApiKey) == 0 { + return errors.New("Missing required field in config: auth.sms.vonage.api_key") + } + if len(s.Vonage.ApiSecret.Value) == 0 { + return errors.New("Missing required field in config: auth.sms.vonage.api_secret") + } + if err := assertEnvLoaded(s.Vonage.ApiKey); err != nil { + return err + } + if err := assertEnvLoaded(s.Vonage.ApiSecret.Value); err != nil { + return err + } + case s.EnableSignup: + s.EnableSignup = false + fmt.Fprintln(os.Stderr, "WARN: no SMS provider is enabled. Disabling phone login") + } + return nil +} + +func (e external) validate() (err error) { + for _, ext := range []string{"linkedin", "slack"} { + if e[ext].Enabled { + fmt.Fprintf(os.Stderr, `WARN: disabling deprecated "%[1]s" provider. 
Please use [auth.external.%[1]s_oidc] instead\n`, ext) + } + delete(e, ext) + } + for ext, provider := range e { + if !provider.Enabled { + continue + } + if provider.ClientId == "" { + return errors.Errorf("Missing required field in config: auth.external.%s.client_id", ext) + } + if !sliceContains([]string{"apple", "google"}, ext) && len(provider.Secret.Value) == 0 { + return errors.Errorf("Missing required field in config: auth.external.%s.secret", ext) + } + if err := assertEnvLoaded(provider.ClientId); err != nil { + return err + } + if err := assertEnvLoaded(provider.Secret.Value); err != nil { + return err + } + if err := assertEnvLoaded(provider.RedirectUri); err != nil { + return err + } + if err := assertEnvLoaded(provider.Url); err != nil { + return err + } + e[ext] = provider + } + return nil +} + +func (h *hook) validate() error { + if hook := h.MFAVerificationAttempt; hook != nil { + if err := hook.validate("mfa_verification_attempt"); err != nil { + return err + } + } + if hook := h.PasswordVerificationAttempt; hook != nil { + if err := hook.validate("password_verification_attempt"); err != nil { + return err + } + } + if hook := h.CustomAccessToken; hook != nil { + if err := hook.validate("custom_access_token"); err != nil { + return err + } + } + if hook := h.SendSMS; hook != nil { + if err := hook.validate("send_sms"); err != nil { + return err + } + } + if hook := h.SendEmail; hook != nil { + if err := h.SendEmail.validate("send_email"); err != nil { + return err + } + } + return nil +} + +var hookSecretPattern = regexp.MustCompile(`^v1,whsec_[A-Za-z0-9+/=]{32,88}$`) + +func (h *hookConfig) validate(hookType string) (err error) { // If not enabled do nothing if !h.Enabled { return nil } if h.URI == "" { - return errors.Errorf("missing required field in config: auth.hook.%s.uri", hookType) + return errors.Errorf("Missing required field in config: auth.hook.%s.uri", hookType) } - if err := validateHookURI(h.URI, hookType); err != nil { - return err 
+ parsed, err := url.Parse(h.URI) + if err != nil { + return errors.Errorf("failed to parse template url: %w", err) } - var err error - if h.Secrets, err = maybeLoadEnv(h.Secrets); err != nil { - return errors.Errorf("missing required field in config: auth.hook.%s.secrets", hookType) + switch strings.ToLower(parsed.Scheme) { + case "http", "https": + if len(h.Secrets.Value) == 0 { + return errors.Errorf("Missing required field in config: auth.hook.%s.secrets", hookType) + } else if err := assertEnvLoaded(h.Secrets.Value); err != nil { + return err + } + for _, secret := range strings.Split(h.Secrets.Value, "|") { + if !hookSecretPattern.MatchString(secret) { + return errors.Errorf(`Invalid hook config: auth.hook.%s.secrets must be formatted as "v1,whsec_" with a minimum length of 32 characters.`, hookType) + } + } + case "pg-functions": + if len(h.Secrets.Value) > 0 { + return errors.Errorf("Invalid hook config: auth.hook.%s.secrets is unsupported for pg-functions URI", hookType) + } + default: + return errors.Errorf("Invalid hook config: auth.hook.%s.uri should be a HTTP, HTTPS, or pg-functions URI", hookType) } return nil } -func validateHookURI(uri, hookName string) error { - parsed, err := url.Parse(uri) - if err != nil { - return errors.Errorf("failed to parse template url: %w", err) +func (m *mfa) validate() error { + if m.TOTP.EnrollEnabled && !m.TOTP.VerifyEnabled { + return errors.Errorf("Invalid MFA config: auth.mfa.totp.enroll_enabled requires verify_enabled") + } + if m.Phone.EnrollEnabled && !m.Phone.VerifyEnabled { + return errors.Errorf("Invalid MFA config: auth.mfa.phone.enroll_enabled requires verify_enabled") } - if !(parsed.Scheme == "http" || parsed.Scheme == "https" || parsed.Scheme == "pg-functions") { - return errors.Errorf("Invalid HTTP hook config: auth.hook.%v should be a Postgres function URI, or a HTTP or HTTPS URL", hookName) + if m.WebAuthn.EnrollEnabled && !m.WebAuthn.VerifyEnabled { + return errors.Errorf("Invalid MFA config: 
auth.mfa.web_authn.enroll_enabled requires verify_enabled") } return nil } @@ -1068,13 +1164,17 @@ func (c *tpaCognito) issuerURL() string { return fmt.Sprintf("https://cognito-idp.%s.amazonaws.com/%s", c.UserPoolRegion, c.UserPoolID) } -func (c *tpaCognito) validate() error { +func (c *tpaCognito) validate() (err error) { if c.UserPoolID == "" { return errors.New("Invalid config: auth.third_party.cognito is enabled but without a user_pool_id.") + } else if err := assertEnvLoaded(c.UserPoolID); err != nil { + return err } if c.UserPoolRegion == "" { return errors.New("Invalid config: auth.third_party.cognito is enabled but without a user_pool_region.") + } else if err := assertEnvLoaded(c.UserPoolRegion); err != nil { + return err } return nil @@ -1216,3 +1316,48 @@ func (a *auth) ResolveJWKS(ctx context.Context) (string, error) { return string(jwksEncoded), nil } + +func (c *baseConfig) GetServiceImages() []string { + return []string{ + c.Db.Image, + c.Auth.Image, + c.Api.Image, + c.Realtime.Image, + c.Storage.Image, + c.EdgeRuntime.Image, + c.Studio.Image, + c.Studio.PgmetaImage, + c.Analytics.Image, + c.Db.Pooler.Image, + } +} + +// Retrieve the final base config to use taking into account the remotes override +// Pre: config must be loaded after setting config.ProjectID = "ref" +func (c *config) GetRemoteByProjectRef(projectRef string) (baseConfig, error) { + base := c.baseConfig.Clone() + for _, remote := range c.Remotes { + if remote.ProjectId == projectRef { + base.ProjectId = projectRef + return base, nil + } + } + return base, errors.Errorf("no remote found for project_id: %s", projectRef) +} + +func ToTomlBytes(config any) ([]byte, error) { + var buf bytes.Buffer + enc := toml.NewEncoder(&buf) + enc.Indent = "" + if err := enc.Encode(config); err != nil { + return nil, errors.Errorf("failed to marshal toml config: %w", err) + } + return buf.Bytes(), nil +} + +func (e *experimental) validate() error { + if e.Webhooks != nil && !e.Webhooks.Enabled { + 
return errors.Errorf("Webhooks cannot be deactivated. [experimental.webhooks] enabled can either be true or left undefined") + } + return nil +} diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index 29a605059..e13e67e59 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -3,6 +3,7 @@ package config import ( "bytes" _ "embed" + "path" "strings" "testing" fs "testing/fstest" @@ -40,10 +41,15 @@ func TestConfigParsing(t *testing.T) { t.Setenv("AZURE_SECRET", "this is cool") t.Setenv("AUTH_SEND_SMS_SECRETS", "v1,whsec_aWxpa2VzdXBhYmFzZXZlcnltdWNoYW5kaWhvcGV5b3Vkb3Rvbw==") t.Setenv("SENDGRID_API_KEY", "sendgrid") + t.Setenv("AUTH_CALLBACK_URL", "http://localhost:3000/auth/callback") assert.NoError(t, config.Load("", fsys)) // Check error assert.Equal(t, "hello", config.Auth.External["azure"].ClientId) - assert.Equal(t, "this is cool", config.Auth.External["azure"].Secret) + assert.Equal(t, "this is cool", config.Auth.External["azure"].Secret.Value) + assert.Equal(t, []string{ + "https://127.0.0.1:3000", + "http://localhost:3000/auth/callback", + }, config.Auth.AdditionalRedirectUrls) }) t.Run("config file with environment variables fails when unset", func(t *testing.T) { @@ -55,6 +61,45 @@ func TestConfigParsing(t *testing.T) { // Run test assert.Error(t, config.Load("", fsys)) }) + + t.Run("config file with remotes", func(t *testing.T) { + config := NewConfig() + // Setup in-memory fs + fsys := fs.MapFS{ + "supabase/config.toml": &fs.MapFile{Data: testInitConfigEmbed}, + "supabase/templates/invite.html": &fs.MapFile{}, + } + // Run test + t.Setenv("TWILIO_AUTH_TOKEN", "token") + t.Setenv("AZURE_CLIENT_ID", "hello") + t.Setenv("AZURE_SECRET", "this is cool") + t.Setenv("AUTH_SEND_SMS_SECRETS", "v1,whsec_aWxpa2VzdXBhYmFzZXZlcnltdWNoYW5kaWhvcGV5b3Vkb3Rvbw==") + t.Setenv("SENDGRID_API_KEY", "sendgrid") + t.Setenv("AUTH_CALLBACK_URL", "http://localhost:3000/auth/callback") + assert.NoError(t, config.Load("", fsys)) + // Check the 
default value in the config + assert.Equal(t, "http://127.0.0.1:3000", config.Auth.SiteUrl) + assert.Equal(t, true, config.Auth.EnableSignup) + assert.Equal(t, true, config.Auth.External["azure"].Enabled) + assert.Equal(t, []string{"image/png", "image/jpeg"}, config.Storage.Buckets["images"].AllowedMimeTypes) + // Check the values for remotes override + production, ok := config.Remotes["production"] + assert.True(t, ok) + staging, ok := config.Remotes["staging"] + assert.True(t, ok) + // Check the values for production override + assert.Equal(t, "vpefcjyosynxeiebfscx", production.ProjectId) + assert.Equal(t, "http://feature-auth-branch.com/", production.Auth.SiteUrl) + assert.Equal(t, false, production.Auth.EnableSignup) + assert.Equal(t, false, production.Auth.External["azure"].Enabled) + assert.Equal(t, "nope", production.Auth.External["azure"].ClientId) + // Check seed should be disabled by default for remote configs + assert.Equal(t, false, production.Db.Seed.Enabled) + // Check the values for the staging override + assert.Equal(t, "bvikqvbczudanvggcord", staging.ProjectId) + assert.Equal(t, []string{"image/png"}, staging.Storage.Buckets["images"].AllowedMimeTypes) + assert.Equal(t, true, staging.Db.Seed.Enabled) + }) } func TestFileSizeLimitConfigParsing(t *testing.T) { @@ -160,50 +205,74 @@ func TestSigningJWT(t *testing.T) { func TestValidateHookURI(t *testing.T) { tests := []struct { - name string - uri string - hookName string - shouldErr bool - errorMsg string + hookConfig + name string + errorMsg string }{ { - name: "valid http URL", - uri: "http://example.com", - hookName: "testHook", - shouldErr: false, + name: "valid http URL", + hookConfig: hookConfig{ + Enabled: true, + URI: "http://example.com", + Secrets: Secret{Value: "v1,whsec_aWxpa2VzdXBhYmFzZXZlcnltdWNoYW5kaWhvcGV5b3Vkb3Rvbw=="}, + }, + }, + { + name: "valid https URL", + hookConfig: hookConfig{ + Enabled: true, + URI: "https://example.com", + Secrets: Secret{Value: 
"v1,whsec_aWxpa2VzdXBhYmFzZXZlcnltdWNoYW5kaWhvcGV5b3Vkb3Rvbw=="}, + }, + }, + { + name: "valid pg-functions URI", + hookConfig: hookConfig{ + Enabled: true, + URI: "pg-functions://functionName", + }, }, { - name: "valid https URL", - uri: "https://example.com", - hookName: "testHook", - shouldErr: false, + name: "invalid URI with unsupported scheme", + hookConfig: hookConfig{ + Enabled: true, + URI: "ftp://example.com", + Secrets: Secret{Value: "v1,whsec_aWxpa2VzdXBhYmFzZXZlcnltdWNoYW5kaWhvcGV5b3Vkb3Rvbw=="}, + }, + errorMsg: "Invalid hook config: auth.hook.invalid URI with unsupported scheme.uri should be a HTTP, HTTPS, or pg-functions URI", }, { - name: "valid pg-functions URI", - uri: "pg-functions://functionName", - hookName: "pgHook", - shouldErr: false, + name: "invalid URI with parsing error", + hookConfig: hookConfig{ + Enabled: true, + URI: "http://a b.com", + Secrets: Secret{Value: "v1,whsec_aWxpa2VzdXBhYmFzZXZlcnltdWNoYW5kaWhvcGV5b3Vkb3Rvbw=="}, + }, + errorMsg: "failed to parse template url: parse \"http://a b.com\": invalid character \" \" in host name", }, { - name: "invalid URI with unsupported scheme", - uri: "ftp://example.com", - hookName: "malformedHook", - shouldErr: true, - errorMsg: "Invalid HTTP hook config: auth.hook.malformedHook should be a Postgres function URI, or a HTTP or HTTPS URL", + name: "valid http URL with missing secrets", + hookConfig: hookConfig{ + Enabled: true, + URI: "http://example.com", + }, + errorMsg: "Missing required field in config: auth.hook.valid http URL with missing secrets.secrets", }, { - name: "invalid URI with parsing error", - uri: "http://a b.com", - hookName: "errorHook", - shouldErr: true, - errorMsg: "failed to parse template url: parse \"http://a b.com\": invalid character \" \" in host name", + name: "valid pg-functions URI with unsupported secrets", + hookConfig: hookConfig{ + Enabled: true, + URI: "pg-functions://functionName", + Secrets: Secret{Value: "test-secret"}, + }, + errorMsg: "Invalid hook 
config: auth.hook.valid pg-functions URI with unsupported secrets.secrets is unsupported for pg-functions URI", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - err := validateHookURI(tt.uri, tt.hookName) - if tt.shouldErr { + err := tt.hookConfig.validate(tt.name) + if len(tt.errorMsg) > 0 { assert.Error(t, err, "Expected an error for %v", tt.name) assert.EqualError(t, err, tt.errorMsg, "Expected error message does not match for %v", tt.name) } else { @@ -212,3 +281,153 @@ func TestValidateHookURI(t *testing.T) { }) } } + +func TestLoadSeedPaths(t *testing.T) { + t.Run("returns seed files matching patterns", func(t *testing.T) { + // Setup in-memory fs + fsys := fs.MapFS{ + "supabase/seeds/seed1.sql": &fs.MapFile{Data: []byte("INSERT INTO table1 VALUES (1);")}, + "supabase/seeds/seed2.sql": &fs.MapFile{Data: []byte("INSERT INTO table2 VALUES (2);")}, + "supabase/seeds/seed3.sql": &fs.MapFile{Data: []byte("INSERT INTO table2 VALUES (2);")}, + "supabase/seeds/another.sql": &fs.MapFile{Data: []byte("INSERT INTO table2 VALUES (2);")}, + "supabase/seeds/ignore.sql": &fs.MapFile{Data: []byte("INSERT INTO table3 VALUES (3);")}, + } + // Mock config patterns + config := seed{ + Enabled: true, + GlobPatterns: []string{ + "seeds/seed[12].sql", + "seeds/ano*.sql", + }, + } + // Run test + err := config.loadSeedPaths("supabase", fsys) + // Check error + assert.NoError(t, err) + // Validate files + assert.ElementsMatch(t, []string{ + "supabase/seeds/seed1.sql", + "supabase/seeds/seed2.sql", + "supabase/seeds/another.sql", + }, config.SqlPaths) + }) + t.Run("returns seed files matching patterns skip duplicates", func(t *testing.T) { + // Setup in-memory fs + fsys := fs.MapFS{ + "supabase/seeds/seed1.sql": &fs.MapFile{Data: []byte("INSERT INTO table1 VALUES (1);")}, + "supabase/seeds/seed2.sql": &fs.MapFile{Data: []byte("INSERT INTO table2 VALUES (2);")}, + "supabase/seeds/seed3.sql": &fs.MapFile{Data: []byte("INSERT INTO table2 VALUES (2);")}, + 
"supabase/seeds/another.sql": &fs.MapFile{Data: []byte("INSERT INTO table2 VALUES (2);")}, + "supabase/seeds/ignore.sql": &fs.MapFile{Data: []byte("INSERT INTO table3 VALUES (3);")}, + } + // Mock config patterns + config := seed{ + Enabled: true, + GlobPatterns: []string{ + "seeds/seed[12].sql", + "seeds/ano*.sql", + "seeds/seed*.sql", + }, + } + // Run test + err := config.loadSeedPaths("supabase", fsys) + // Check error + assert.NoError(t, err) + // Validate files + assert.ElementsMatch(t, []string{ + "supabase/seeds/seed1.sql", + "supabase/seeds/seed2.sql", + "supabase/seeds/another.sql", + "supabase/seeds/seed3.sql", + }, config.SqlPaths) + }) + + t.Run("returns error on invalid pattern", func(t *testing.T) { + // Setup in-memory fs + fsys := fs.MapFS{} + // Mock config patterns + config := seed{Enabled: true, GlobPatterns: []string{"[*!#@D#"}} + // Run test + err := config.loadSeedPaths("", fsys) + // Check error + assert.ErrorIs(t, err, path.ErrBadPattern) + // The resuling seed list should be empty + assert.Empty(t, config.SqlPaths) + }) + + t.Run("returns empty list if no files match", func(t *testing.T) { + // Setup in-memory fs + fsys := fs.MapFS{} + // Mock config patterns + config := seed{Enabled: true, GlobPatterns: []string{"seeds/*.sql"}} + // Run test + err := config.loadSeedPaths("", fsys) + // Check error + assert.NoError(t, err) + // Validate files + assert.Empty(t, config.SqlPaths) + }) +} + +func TestLoadEnv(t *testing.T) { + t.Setenv("SUPABASE_AUTH_JWT_SECRET", "test-secret") + t.Setenv("SUPABASE_DB_ROOT_KEY", "test-root-key") + config := NewConfig() + // Run test + err := config.loadFromEnv() + // Check error + assert.NoError(t, err) + assert.Equal(t, "test-secret", config.Auth.JwtSecret) + assert.Equal(t, "test-root-key", config.Db.RootKey) +} + +func TestLoadFunctionImportMap(t *testing.T) { + t.Run("uses deno.json as import map when present", func(t *testing.T) { + config := NewConfig() + fsys := fs.MapFS{ + "supabase/config.toml": 
&fs.MapFile{Data: []byte(` + project_id = "bvikqvbczudanvggcord" + [functions.hello] + `)}, + "supabase/functions/hello/deno.json": &fs.MapFile{}, + "supabase/functions/hello/index.ts": &fs.MapFile{}, + } + // Run test + assert.NoError(t, config.Load("", fsys)) + // Check that deno.json was set as import map + assert.Equal(t, "supabase/functions/hello/deno.json", config.Functions["hello"].ImportMap) + }) + + t.Run("uses deno.jsonc as import map when present", func(t *testing.T) { + config := NewConfig() + fsys := fs.MapFS{ + "supabase/config.toml": &fs.MapFile{Data: []byte(` + project_id = "bvikqvbczudanvggcord" + [functions.hello] + `)}, + "supabase/functions/hello/deno.jsonc": &fs.MapFile{}, + "supabase/functions/hello/index.ts": &fs.MapFile{}, + } + // Run test + assert.NoError(t, config.Load("", fsys)) + // Check that deno.jsonc was set as import map + assert.Equal(t, "supabase/functions/hello/deno.jsonc", config.Functions["hello"].ImportMap) + }) + + t.Run("config.toml takes precedence over deno.json", func(t *testing.T) { + config := NewConfig() + fsys := fs.MapFS{ + "supabase/config.toml": &fs.MapFile{Data: []byte(` + project_id = "bvikqvbczudanvggcord" + [functions] + hello.import_map = "custom_import_map.json" + `)}, + "supabase/functions/hello/deno.json": &fs.MapFile{}, + "supabase/functions/hello/index.ts": &fs.MapFile{}, + } + // Run test + assert.NoError(t, config.Load("", fsys)) + // Check that config.toml takes precedence over deno.json + assert.Equal(t, "supabase/custom_import_map.json", config.Functions["hello"].ImportMap) + }) +} diff --git a/pkg/config/constants.go b/pkg/config/constants.go index f81e21a40..56be17077 100644 --- a/pkg/config/constants.go +++ b/pkg/config/constants.go @@ -3,21 +3,21 @@ package config const ( pg13Image = "supabase/postgres:13.3.0" pg14Image = "supabase/postgres:14.1.0.89" - Pg15Image = "supabase/postgres:15.1.1.78" + Pg15Image = "supabase/postgres:15.8.1.020" // Append to ServiceImages when adding new dependencies 
below // TODO: try https://github.com/axllent/mailpit kongImage = "library/kong:2.8.1" inbucketImage = "inbucket/inbucket:3.0.3" postgrestImage = "postgrest/postgrest:v12.2.0" - pgmetaImage = "supabase/postgres-meta:v0.83.2" - studioImage = "supabase/studio:20240729-ce42139" + pgmetaImage = "supabase/postgres-meta:v0.84.2" + studioImage = "supabase/studio:20250130-b048539" imageProxyImage = "darthsim/imgproxy:v3.8.0" - edgeRuntimeImage = "supabase/edge-runtime:v1.58.2" + edgeRuntimeImage = "supabase/edge-runtime:v1.66.5" vectorImage = "timberio/vector:0.28.1-alpine" supavisorImage = "supabase/supavisor:1.1.56" - gotrueImage = "supabase/gotrue:v2.158.1" - realtimeImage = "supabase/realtime:v2.30.34" - storageImage = "supabase/storage-api:v1.10.1" + gotrueImage = "supabase/gotrue:v2.167.0" + realtimeImage = "supabase/realtime:v2.34.7" + storageImage = "supabase/storage-api:v1.14.5" logflareImage = "supabase/logflare:1.4.0" // Append to JobImages when adding new dependencies below DifferImage = "supabase/pgadmin-schema-diff:cli-0.0.5" diff --git a/pkg/config/db.go b/pkg/config/db.go new file mode 100644 index 000000000..eaa02e036 --- /dev/null +++ b/pkg/config/db.go @@ -0,0 +1,168 @@ +package config + +import ( + "bytes" + + v1API "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" + "github.com/supabase/cli/pkg/diff" +) + +type PoolMode string + +const ( + TransactionMode PoolMode = "transaction" + SessionMode PoolMode = "session" +) + +type SessionReplicationRole string + +const ( + SessionReplicationRoleOrigin SessionReplicationRole = "origin" + SessionReplicationRoleReplica SessionReplicationRole = "replica" + SessionReplicationRoleLocal SessionReplicationRole = "local" +) + +type ( + settings struct { + EffectiveCacheSize *string `toml:"effective_cache_size"` + LogicalDecodingWorkMem *string `toml:"logical_decoding_work_mem"` + MaintenanceWorkMem *string `toml:"maintenance_work_mem"` + MaxConnections *uint `toml:"max_connections"` + 
MaxLocksPerTransaction *uint `toml:"max_locks_per_transaction"` + MaxParallelMaintenanceWorkers *uint `toml:"max_parallel_maintenance_workers"` + MaxParallelWorkers *uint `toml:"max_parallel_workers"` + MaxParallelWorkersPerGather *uint `toml:"max_parallel_workers_per_gather"` + MaxReplicationSlots *uint `toml:"max_replication_slots"` + MaxSlotWalKeepSize *string `toml:"max_slot_wal_keep_size"` + MaxStandbyArchiveDelay *string `toml:"max_standby_archive_delay"` + MaxStandbyStreamingDelay *string `toml:"max_standby_streaming_delay"` + MaxWalSize *string `toml:"max_wal_size"` + MaxWalSenders *uint `toml:"max_wal_senders"` + MaxWorkerProcesses *uint `toml:"max_worker_processes"` + SessionReplicationRole *SessionReplicationRole `toml:"session_replication_role"` + SharedBuffers *string `toml:"shared_buffers"` + StatementTimeout *string `toml:"statement_timeout"` + TrackActivityQuerySize *string `toml:"track_activity_query_size"` + TrackCommitTimestamp *bool `toml:"track_commit_timestamp"` + WalKeepSize *string `toml:"wal_keep_size"` + WalSenderTimeout *string `toml:"wal_sender_timeout"` + WorkMem *string `toml:"work_mem"` + } + + db struct { + Image string `toml:"-"` + Port uint16 `toml:"port"` + ShadowPort uint16 `toml:"shadow_port"` + MajorVersion uint `toml:"major_version"` + Password string `toml:"-"` + RootKey string `toml:"-" mapstructure:"root_key"` + Pooler pooler `toml:"pooler"` + Seed seed `toml:"seed"` + Settings settings `toml:"settings"` + Vault map[string]Secret `toml:"vault"` + } + + seed struct { + Enabled bool `toml:"enabled"` + GlobPatterns []string `toml:"sql_paths"` + SqlPaths []string `toml:"-"` + } + + pooler struct { + Enabled bool `toml:"enabled"` + Image string `toml:"-"` + Port uint16 `toml:"port"` + PoolMode PoolMode `toml:"pool_mode"` + DefaultPoolSize uint `toml:"default_pool_size"` + MaxClientConn uint `toml:"max_client_conn"` + ConnectionString string `toml:"-"` + TenantId string `toml:"-"` + EncryptionKey string `toml:"-"` + SecretKeyBase 
string `toml:"-"` + } +) + +func (a *settings) ToUpdatePostgresConfigBody() v1API.UpdatePostgresConfigBody { + body := v1API.UpdatePostgresConfigBody{} + + // Parameters that require restart + body.MaxConnections = cast.UintToIntPtr(a.MaxConnections) + body.MaxWorkerProcesses = cast.UintToIntPtr(a.MaxWorkerProcesses) + body.MaxParallelWorkers = cast.UintToIntPtr(a.MaxParallelWorkers) + body.MaxWalSenders = cast.UintToIntPtr(a.MaxWalSenders) + body.MaxReplicationSlots = cast.UintToIntPtr(a.MaxReplicationSlots) + body.SharedBuffers = a.SharedBuffers + + // Parameters that can be changed without restart + body.EffectiveCacheSize = a.EffectiveCacheSize + body.LogicalDecodingWorkMem = a.LogicalDecodingWorkMem + body.MaintenanceWorkMem = a.MaintenanceWorkMem + body.MaxLocksPerTransaction = cast.UintToIntPtr(a.MaxLocksPerTransaction) + body.MaxParallelMaintenanceWorkers = cast.UintToIntPtr(a.MaxParallelMaintenanceWorkers) + body.MaxParallelWorkersPerGather = cast.UintToIntPtr(a.MaxParallelWorkersPerGather) + body.MaxSlotWalKeepSize = a.MaxSlotWalKeepSize + body.MaxStandbyArchiveDelay = a.MaxStandbyArchiveDelay + body.MaxStandbyStreamingDelay = a.MaxStandbyStreamingDelay + body.MaxWalSize = a.MaxWalSize + body.SessionReplicationRole = (*v1API.UpdatePostgresConfigBodySessionReplicationRole)(a.SessionReplicationRole) + body.StatementTimeout = a.StatementTimeout + body.TrackActivityQuerySize = a.TrackActivityQuerySize + body.TrackCommitTimestamp = a.TrackCommitTimestamp + body.WalKeepSize = a.WalKeepSize + body.WalSenderTimeout = a.WalSenderTimeout + body.WorkMem = a.WorkMem + return body +} + +func (a *settings) FromRemotePostgresConfig(remoteConfig v1API.PostgresConfigResponse) { + a.EffectiveCacheSize = remoteConfig.EffectiveCacheSize + a.LogicalDecodingWorkMem = remoteConfig.LogicalDecodingWorkMem + a.MaintenanceWorkMem = remoteConfig.MaintenanceWorkMem + a.MaxConnections = cast.IntToUintPtr(remoteConfig.MaxConnections) + a.MaxLocksPerTransaction = 
cast.IntToUintPtr(remoteConfig.MaxLocksPerTransaction) + a.MaxParallelMaintenanceWorkers = cast.IntToUintPtr(remoteConfig.MaxParallelMaintenanceWorkers) + a.MaxParallelWorkers = cast.IntToUintPtr(remoteConfig.MaxParallelWorkers) + a.MaxParallelWorkersPerGather = cast.IntToUintPtr(remoteConfig.MaxParallelWorkersPerGather) + a.MaxReplicationSlots = cast.IntToUintPtr(remoteConfig.MaxReplicationSlots) + a.MaxSlotWalKeepSize = remoteConfig.MaxSlotWalKeepSize + a.MaxStandbyArchiveDelay = remoteConfig.MaxStandbyArchiveDelay + a.MaxStandbyStreamingDelay = remoteConfig.MaxStandbyStreamingDelay + a.MaxWalSenders = cast.IntToUintPtr(remoteConfig.MaxWalSenders) + a.MaxWalSize = remoteConfig.MaxWalSize + a.MaxWorkerProcesses = cast.IntToUintPtr(remoteConfig.MaxWorkerProcesses) + a.SessionReplicationRole = (*SessionReplicationRole)(remoteConfig.SessionReplicationRole) + a.SharedBuffers = remoteConfig.SharedBuffers + a.StatementTimeout = remoteConfig.StatementTimeout + a.TrackActivityQuerySize = remoteConfig.TrackActivityQuerySize + a.TrackCommitTimestamp = remoteConfig.TrackCommitTimestamp + a.WalKeepSize = remoteConfig.WalKeepSize + a.WalSenderTimeout = remoteConfig.WalSenderTimeout + a.WorkMem = remoteConfig.WorkMem +} + +const pgConfHeader = "\n# supabase [db.settings] configuration\n" + +// create a valid string to append to /etc/postgresql/postgresql.conf +func (a *settings) ToPostgresConfig() string { + // Assuming postgres settings is always a flat struct, we can serialise + // using toml, then replace double quotes with single. 
+ data, _ := ToTomlBytes(*a) + body := bytes.ReplaceAll(data, []byte{'"'}, []byte{'\''}) + return pgConfHeader + string(body) +} + +func (a *settings) DiffWithRemote(remoteConfig v1API.PostgresConfigResponse) ([]byte, error) { + copy := *a + // Convert the config values into easily comparable remoteConfig values + currentValue, err := ToTomlBytes(copy) + if err != nil { + return nil, err + } + copy.FromRemotePostgresConfig(remoteConfig) + remoteCompare, err := ToTomlBytes(copy) + if err != nil { + return nil, err + } + return diff.Diff("remote[db.settings]", remoteCompare, "local[db.settings]", currentValue), nil +} diff --git a/pkg/config/db_test.go b/pkg/config/db_test.go new file mode 100644 index 000000000..575fd202d --- /dev/null +++ b/pkg/config/db_test.go @@ -0,0 +1,184 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + v1API "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" +) + +func TestDbSettingsToUpdatePostgresConfigBody(t *testing.T) { + t.Run("converts all fields correctly", func(t *testing.T) { + db := &db{ + Settings: settings{ + EffectiveCacheSize: cast.Ptr("4GB"), + MaxConnections: cast.Ptr(uint(100)), + SharedBuffers: cast.Ptr("1GB"), + StatementTimeout: cast.Ptr("30s"), + SessionReplicationRole: cast.Ptr(SessionReplicationRoleReplica), + }, + } + + body := db.Settings.ToUpdatePostgresConfigBody() + + assert.Equal(t, "4GB", *body.EffectiveCacheSize) + assert.Equal(t, 100, *body.MaxConnections) + assert.Equal(t, "1GB", *body.SharedBuffers) + assert.Equal(t, "30s", *body.StatementTimeout) + assert.Equal(t, v1API.UpdatePostgresConfigBodySessionReplicationRoleReplica, *body.SessionReplicationRole) + }) + + t.Run("handles empty fields", func(t *testing.T) { + db := &db{} + + body := db.Settings.ToUpdatePostgresConfigBody() + + assert.Nil(t, body.EffectiveCacheSize) + assert.Nil(t, body.MaxConnections) + assert.Nil(t, body.SharedBuffers) + assert.Nil(t, body.StatementTimeout) + assert.Nil(t, 
body.SessionReplicationRole) + }) +} + +func TestDbSettingsDiff(t *testing.T) { + t.Run("detects differences", func(t *testing.T) { + db := &db{ + Settings: settings{ + EffectiveCacheSize: cast.Ptr("4GB"), + MaxConnections: cast.Ptr(uint(100)), + SharedBuffers: cast.Ptr("1GB"), + }, + } + + remoteConfig := v1API.PostgresConfigResponse{ + EffectiveCacheSize: cast.Ptr("8GB"), + MaxConnections: cast.Ptr(200), + SharedBuffers: cast.Ptr("2GB"), + } + + diff, err := db.Settings.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assertSnapshotEqual(t, diff) + }) + + t.Run("handles no differences", func(t *testing.T) { + db := &db{ + Settings: settings{ + EffectiveCacheSize: cast.Ptr("4GB"), + MaxConnections: cast.Ptr(uint(100)), + SharedBuffers: cast.Ptr("1GB"), + }, + } + + remoteConfig := v1API.PostgresConfigResponse{ + EffectiveCacheSize: cast.Ptr("4GB"), + MaxConnections: cast.Ptr(100), + SharedBuffers: cast.Ptr("1GB"), + } + + diff, err := db.Settings.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assert.Empty(t, diff) + }) + + t.Run("handles multiple schemas and search paths with spaces", func(t *testing.T) { + db := &db{ + Settings: settings{ + EffectiveCacheSize: cast.Ptr("4GB"), + MaxConnections: cast.Ptr(uint(100)), + SharedBuffers: cast.Ptr("1GB"), + }, + } + + remoteConfig := v1API.PostgresConfigResponse{ + EffectiveCacheSize: cast.Ptr("4GB"), + MaxConnections: cast.Ptr(100), + SharedBuffers: cast.Ptr("1GB"), + } + + diff, err := db.Settings.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assert.Empty(t, diff) + }) + + t.Run("handles api disabled on remote side", func(t *testing.T) { + db := &db{ + Settings: settings{ + EffectiveCacheSize: cast.Ptr("4GB"), + MaxConnections: cast.Ptr(uint(100)), + SharedBuffers: cast.Ptr("1GB"), + }, + } + + remoteConfig := v1API.PostgresConfigResponse{ + // All fields are nil to simulate disabled API + } + + diff, err := db.Settings.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + 
assertSnapshotEqual(t, diff) + }) + + t.Run("handles api disabled on local side", func(t *testing.T) { + db := &db{ + Settings: settings{ + // All fields are nil to simulate disabled API + }, + } + + remoteConfig := v1API.PostgresConfigResponse{ + EffectiveCacheSize: cast.Ptr("4GB"), + MaxConnections: cast.Ptr(100), + SharedBuffers: cast.Ptr("1GB"), + } + + diff, err := db.Settings.DiffWithRemote(remoteConfig) + assert.NoError(t, err) + + assertSnapshotEqual(t, diff) + }) +} + +func TestSettingsToPostgresConfig(t *testing.T) { + t.Run("Only set values should appear", func(t *testing.T) { + settings := settings{ + MaxConnections: cast.Ptr(uint(100)), + MaxLocksPerTransaction: cast.Ptr(uint(64)), + SharedBuffers: cast.Ptr("128MB"), + WorkMem: cast.Ptr("4MB"), + } + got := settings.ToPostgresConfig() + + assert.Contains(t, got, "max_connections = 100") + assert.Contains(t, got, "max_locks_per_transaction = 64") + assert.Contains(t, got, "shared_buffers = '128MB'") + assert.Contains(t, got, "work_mem = '4MB'") + + assert.NotContains(t, got, "effective_cache_size") + assert.NotContains(t, got, "maintenance_work_mem") + assert.NotContains(t, got, "max_parallel_workers") + }) + + t.Run("SessionReplicationRole should be handled correctly", func(t *testing.T) { + settings := settings{ + SessionReplicationRole: cast.Ptr(SessionReplicationRoleOrigin), + } + got := settings.ToPostgresConfig() + + assert.Contains(t, got, "session_replication_role = 'origin'") + }) + + t.Run("Empty settings should result in empty string", func(t *testing.T) { + settings := settings{} + got := settings.ToPostgresConfig() + + assert.Equal(t, got, "\n# supabase [db.settings] configuration\n") + assert.NotContains(t, got, "=") + }) +} diff --git a/pkg/config/secret.go b/pkg/config/secret.go new file mode 100644 index 000000000..eab7f3f00 --- /dev/null +++ b/pkg/config/secret.go @@ -0,0 +1,95 @@ +package config + +import ( + "encoding/base64" + "os" + "reflect" + "strings" + + ecies 
"github.com/ecies/go/v2" + "github.com/go-errors/errors" + "github.com/mitchellh/mapstructure" +) + +type Secret struct { + Value string + SHA256 string +} + +const HASHED_PREFIX = "hash:" + +func (s Secret) MarshalText() (text []byte, err error) { + if len(s.SHA256) == 0 { + return []byte{}, nil + } + return []byte(HASHED_PREFIX + s.SHA256), nil +} + +const ENCRYPTED_PREFIX = "encrypted:" + +// Decrypt secret values following dotenvx convention: +// https://github.com/dotenvx/dotenvx/blob/main/src/lib/helpers/decryptKeyValue.js +func decrypt(key, value string) (string, error) { + if !strings.HasPrefix(value, ENCRYPTED_PREFIX) { + return value, nil + } + if len(key) == 0 { + return value, errors.New("missing private key") + } + // Verify private key exists + privateKey, err := ecies.NewPrivateKeyFromHex(key) + if err != nil { + return value, errors.Errorf("failed to hex decode private key: %w", err) + } + // Verify ciphertext is base64 encoded + encoded := value[len(ENCRYPTED_PREFIX):] + ciphertext, err := base64.StdEncoding.DecodeString(encoded) + if err != nil { + return value, errors.Errorf("failed to base64 decode secret: %w", err) + } + // Return decrypted value + plaintext, err := ecies.Decrypt(privateKey, ciphertext) + if err != nil { + return value, errors.Errorf("failed to decrypt secret: %w", err) + } + return string(plaintext), nil +} + +func DecryptSecretHookFunc(hashKey string) mapstructure.DecodeHookFunc { + return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + var result Secret + if t != reflect.TypeOf(result) { + return data, nil + } + // Get all env vars and filter for DOTENV_PRIVATE_KEY + var privateKeys []string + for _, env := range os.Environ() { + key := strings.Split(env, "=")[0] + if key == "DOTENV_PRIVATE_KEY" || strings.HasPrefix(key, "DOTENV_PRIVATE_KEY_") { + if value := os.Getenv(key); value != "" { + privateKeys = append(privateKeys, value) + } + 
} + } + + // Try each private key + var err error + privKey := strings.Join(privateKeys, ",") + for _, k := range strings.Split(privKey, ",") { + // Use the first private key that successfully decrypts the secret + if result.Value, err = decrypt(k, data.(string)); err == nil { + // Unloaded env() references may be returned verbatim. + // Don't hash those values as they are meaningless. + if !envPattern.MatchString(result.Value) { + result.SHA256 = sha256Hmac(hashKey, result.Value) + } + break + } + } + // If we get here, none of the keys worked + return result, err + } +} diff --git a/pkg/config/secret_test.go b/pkg/config/secret_test.go new file mode 100644 index 000000000..c4bf74e5b --- /dev/null +++ b/pkg/config/secret_test.go @@ -0,0 +1,52 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDecryptSecret(t *testing.T) { + key := "7fd7210cef8f331ee8c55897996aaaafd853a2b20a4dc73d6d75759f65d2a7eb" + value := "encrypted:BKiXH15AyRzeohGyUrmB6cGjSklCrrBjdesQlX1VcXo/Xp20Bi2gGZ3AlIqxPQDmjVAALnhZamKnuY73l8Dz1P+BYiZUgxTSLzdCvdYUyVbNekj2UudbdUizBViERtZkuQwZHIv/" + + t.Run("decrypts secret value", func(t *testing.T) { + // Run test + plaintext, err := decrypt(key, value) + // Check error + assert.NoError(t, err) + assert.Equal(t, "value", plaintext) + }) + + t.Run("throws error on missing key", func(t *testing.T) { + // Run test + plaintext, err := decrypt("", value) + // Check error + assert.ErrorContains(t, err, "missing private key") + assert.Equal(t, value, plaintext) + }) + + t.Run("throws error on non-hex key", func(t *testing.T) { + // Run test + plaintext, err := decrypt("invalid", value) + // Check error + assert.ErrorContains(t, err, "failed to hex decode private key: cannot decode hex string") + assert.Equal(t, value, plaintext) + }) + + t.Run("throws error on non-base64 value", func(t *testing.T) { + // Run test + plaintext, err := decrypt(key, "encrypted:invalid") + // Check error + assert.ErrorContains(t, err, 
"failed to base64 decode secret: illegal base64 data at input byte 4") + assert.Equal(t, "encrypted:invalid", plaintext) + }) + + t.Run("throws error on empty ciphertext", func(t *testing.T) { + // Run test + plaintext, err := decrypt(key, "encrypted:") + // Check error + assert.ErrorContains(t, err, "failed to decrypt secret: invalid length of message") + assert.Equal(t, "encrypted:", plaintext) + }) +} diff --git a/pkg/config/storage.go b/pkg/config/storage.go new file mode 100644 index 000000000..b026d0cf6 --- /dev/null +++ b/pkg/config/storage.go @@ -0,0 +1,80 @@ +package config + +import ( + v1API "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" + "github.com/supabase/cli/pkg/diff" +) + +type ( + storage struct { + Enabled bool `toml:"enabled"` + Image string `toml:"-"` + ImgProxyImage string `toml:"-"` + FileSizeLimit sizeInBytes `toml:"file_size_limit"` + ImageTransformation *imageTransformation `toml:"image_transformation"` + S3Credentials storageS3Credentials `toml:"-"` + Buckets BucketConfig `toml:"buckets"` + } + + imageTransformation struct { + Enabled bool `toml:"enabled"` + } + + storageS3Credentials struct { + AccessKeyId string `toml:"-"` + SecretAccessKey string `toml:"-"` + Region string `toml:"-"` + } + + BucketConfig map[string]bucket + + bucket struct { + Public *bool `toml:"public"` + FileSizeLimit sizeInBytes `toml:"file_size_limit"` + AllowedMimeTypes []string `toml:"allowed_mime_types"` + ObjectsPath string `toml:"objects_path"` + } +) + +func (s *storage) ToUpdateStorageConfigBody() v1API.UpdateStorageConfigBody { + body := v1API.UpdateStorageConfigBody{ + FileSizeLimit: cast.Ptr(int64(s.FileSizeLimit)), + } + // When local config is not set, we assume platform defaults should not change + if s.ImageTransformation != nil { + body.Features = &v1API.StorageFeatures{ + ImageTransformation: v1API.StorageFeatureImageTransformation{ + Enabled: s.ImageTransformation.Enabled, + }, + } + } + return body +} + +func (s *storage) 
FromRemoteStorageConfig(remoteConfig v1API.StorageConfigResponse) { + s.FileSizeLimit = sizeInBytes(remoteConfig.FileSizeLimit) + // When local config is not set, we assume platform defaults should not change + if s.ImageTransformation != nil { + s.ImageTransformation.Enabled = remoteConfig.Features.ImageTransformation.Enabled + } +} + +func (s *storage) DiffWithRemote(remoteConfig v1API.StorageConfigResponse) ([]byte, error) { + copy := *s + if s.ImageTransformation != nil { + img := *s.ImageTransformation + copy.ImageTransformation = &img + } + // Convert the config values into easily comparable remoteConfig values + currentValue, err := ToTomlBytes(copy) + if err != nil { + return nil, err + } + copy.FromRemoteStorageConfig(remoteConfig) + remoteCompare, err := ToTomlBytes(copy) + if err != nil { + return nil, err + } + return diff.Diff("remote[storage]", remoteCompare, "local[storage]", currentValue), nil +} diff --git a/pkg/config/templates/config.toml b/pkg/config/templates/config.toml index 37646aa63..c91ec4b31 100644 --- a/pkg/config/templates/config.toml +++ b/pkg/config/templates/config.toml @@ -1,3 +1,5 @@ +# For detailed configuration reference documentation, visit: +# https://supabase.com/docs/guides/local-development/cli/config # A string used to distinguish different Supabase projects on the same host. Defaults to the # working directory name when running `supabase init`. project_id = "{{ .ProjectId }}" @@ -7,15 +9,16 @@ enabled = true # Port to use for the API URL. port = 54321 # Schemas to expose in your API. Tables, views and stored procedures in this schema will get API -# endpoints. `public` is always included. +# endpoints. `public` and `graphql_public` schemas are included by default. schemas = ["public", "graphql_public"] -# Extra schemas to add to the search_path of every request. `public` is always included. +# Extra schemas to add to the search_path of every request. 
extra_search_path = ["public", "extensions"] # The maximum number of rows returns from a view, table, or stored procedure. Limits payload size # for accidental or malicious requests. max_rows = 1000 [api.tls] +# Enable HTTPS endpoints locally using a self-signed certificate. enabled = false [db] @@ -39,6 +42,16 @@ default_pool_size = 20 # Maximum number of client connections allowed. max_client_conn = 100 +# [db.vault] +# secret_key = "env(SECRET_VALUE)" + +[db.seed] +# If enabled, seeds the database after migrations during a db reset. +enabled = true +# Specifies an ordered list of seed files to load during db reset. +# Supports glob patterns relative to supabase directory: "./seeds/*.sql" +sql_paths = ["./seed.sql"] + [realtime] enabled = true # Bind realtime via either IPv4 or IPv6. (default: IPv4) @@ -64,14 +77,17 @@ port = 54324 # Uncomment to expose additional ports for testing user applications that send emails. # smtp_port = 54325 # pop3_port = 54326 +# admin_email = "admin@email.com" +# sender_name = "Admin" [storage] enabled = true # The maximum file size allowed (e.g. "5MB", "500KB"). file_size_limit = "50MiB" -[storage.image_transformation] -enabled = true +# Image transformation API is available to Supabase Pro plan. +# [storage.image_transformation] +# enabled = true # Uncomment to configure local storage buckets # [storage.buckets.images] @@ -100,6 +116,11 @@ enable_signup = true enable_anonymous_sign_ins = false # Allow/disallow testing manual linking of accounts enable_manual_linking = false +# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more. +minimum_password_length = 6 +# Passwords that do not meet the following requirements will be rejected as weak. Supported values +# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols` +password_requirements = "" [auth.email] # Allow/disallow new user signups via email to your project. 
@@ -113,9 +134,14 @@ enable_confirmations = false secure_password_change = false # Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email. max_frequency = "1s" +# Number of characters used in the email OTP. +otp_length = 6 +# Number of seconds before the email OTP expires (defaults to 1 hour). +otp_expiry = 3600 # Use a production-ready SMTP server # [auth.email.smtp] +# enabled = true # host = "smtp.sendgrid.net" # port = 587 # user = "apikey" @@ -130,11 +156,11 @@ max_frequency = "1s" [auth.sms] # Allow/disallow new user signups via SMS to your project. -enable_signup = true +enable_signup = false # If enabled, users need to confirm their phone number before signing in. enable_confirmations = false # Template for sending OTP to users -template = "Your code is {{ `{{ .Code }}` }} ." +template = "Your code is {{ `{{ .Code }}` }}" # Controls the minimum amount of time that must pass before sending another sms otp. max_frequency = "5s" @@ -162,22 +188,28 @@ message_service_sid = "" # DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" +# Multi-factor-authentication is available to Supabase Pro plan. [auth.mfa] # Control how many MFA factors can be enrolled at once per user. 
max_enrolled_factors = 10 -# Control use of MFA via App Authenticator (TOTP) +# Control MFA via App Authenticator (TOTP) [auth.mfa.totp] -enroll_enabled = true -verify_enabled = true +enroll_enabled = false +verify_enabled = false + +# Configure MFA via Phone Messaging +[auth.mfa.phone] +enroll_enabled = false +verify_enabled = false +otp_length = 6 +template = "Your code is {{ `{{ .Code }}` }}" +max_frequency = "5s" -# Configure Multi-factor-authentication via Phone Messaging -# [auth.mfa.phone] +# Configure MFA via WebAuthn +# [auth.mfa.web_authn] # enroll_enabled = true # verify_enabled = true -# otp_length = 6 -# template = "Your code is {{ `{{ .Code }}` }} ." -# max_frequency = "10s" # Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, # `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, @@ -217,8 +249,21 @@ enabled = true # Configure one of the supported request policies: `oneshot`, `per_worker`. # Use `oneshot` for hot reload, or `per_worker` for load testing. policy = "oneshot" +# Port to attach the Chrome inspector for debugging edge functions. inspector_port = 8083 +# Use these configurations to customize your Edge Function. +# [functions.MY_FUNCTION_NAME] +# enabled = true +# verify_jwt = true +# import_map = "./functions/MY_FUNCTION_NAME/deno.json" +# Uncomment to specify a custom file path to the entrypoint. +# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx +# entrypoint = "./functions/MY_FUNCTION_NAME/index.ts" +# Specifies static files to be bundled with the function. Supports glob patterns. 
+# For example, if you want to serve static HTML pages in your function: +# static_files = [ "./functions/MY_FUNCTION_NAME/*.html" ] + [analytics] enabled = true port = 54327 diff --git a/pkg/config/testdata/TestApiDiff/detects_differences.diff b/pkg/config/testdata/TestApiDiff/detects_differences.diff new file mode 100644 index 000000000..5d0e9603e --- /dev/null +++ b/pkg/config/testdata/TestApiDiff/detects_differences.diff @@ -0,0 +1,14 @@ +diff remote[api] local[api] +--- remote[api] ++++ local[api] +@@ -1,7 +1,7 @@ + enabled = true +-schemas = ["public"] +-extra_search_path = ["public"] +-max_rows = 500 ++schemas = ["public", "private"] ++extra_search_path = ["extensions", "public"] ++max_rows = 1000 + port = 0 + external_url = "" + diff --git a/pkg/config/testdata/TestApiDiff/handles_api_disabled_on_local_side.diff b/pkg/config/testdata/TestApiDiff/handles_api_disabled_on_local_side.diff new file mode 100644 index 000000000..67a5ccd5c --- /dev/null +++ b/pkg/config/testdata/TestApiDiff/handles_api_disabled_on_local_side.diff @@ -0,0 +1,9 @@ +diff remote[api] local[api] +--- remote[api] ++++ local[api] +@@ -1,4 +1,4 @@ +-enabled = true ++enabled = false + schemas = ["public"] + extra_search_path = ["public"] + max_rows = 500 diff --git a/pkg/config/testdata/TestApiDiff/handles_api_disabled_on_remote_side.diff b/pkg/config/testdata/TestApiDiff/handles_api_disabled_on_remote_side.diff new file mode 100644 index 000000000..200d36c40 --- /dev/null +++ b/pkg/config/testdata/TestApiDiff/handles_api_disabled_on_remote_side.diff @@ -0,0 +1,9 @@ +diff remote[api] local[api] +--- remote[api] ++++ local[api] +@@ -1,4 +1,4 @@ +-enabled = false ++enabled = true + schemas = ["public", "private"] + extra_search_path = ["extensions", "public"] + max_rows = 500 diff --git a/pkg/config/testdata/TestAuthDiff/local_enabled_and_disabled.diff b/pkg/config/testdata/TestAuthDiff/local_enabled_and_disabled.diff new file mode 100644 index 000000000..b4022569c --- /dev/null +++ 
b/pkg/config/testdata/TestAuthDiff/local_enabled_and_disabled.diff @@ -0,0 +1,28 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -1,14 +1,14 @@ + enabled = false +-site_url = "" +-additional_redirect_urls = ["https://127.0.0.1:3000", "https://ref.supabase.co"] +-jwt_expiry = 0 +-enable_refresh_token_rotation = true +-refresh_token_reuse_interval = 0 +-enable_manual_linking = true +-enable_signup = true +-enable_anonymous_sign_ins = true +-minimum_password_length = 8 +-password_requirements = "letters_digits" ++site_url = "http://127.0.0.1:3000" ++additional_redirect_urls = ["https://127.0.0.1:3000"] ++jwt_expiry = 3600 ++enable_refresh_token_rotation = false ++refresh_token_reuse_interval = 10 ++enable_manual_linking = false ++enable_signup = false ++enable_anonymous_sign_ins = false ++minimum_password_length = 6 ++password_requirements = "lower_upper_letters_digits_symbols" + + [hook] + diff --git a/pkg/config/testdata/TestDbSettingsDiff/detects_differences.diff b/pkg/config/testdata/TestDbSettingsDiff/detects_differences.diff new file mode 100644 index 000000000..12aeb50ae --- /dev/null +++ b/pkg/config/testdata/TestDbSettingsDiff/detects_differences.diff @@ -0,0 +1,10 @@ +diff remote[db.settings] local[db.settings] +--- remote[db.settings] ++++ local[db.settings] +@@ -1,3 +1,3 @@ +-effective_cache_size = "8GB" +-max_connections = 200 +-shared_buffers = "2GB" ++effective_cache_size = "4GB" ++max_connections = 100 ++shared_buffers = "1GB" diff --git a/pkg/config/testdata/TestDbSettingsDiff/handles_api_disabled_on_local_side.diff b/pkg/config/testdata/TestDbSettingsDiff/handles_api_disabled_on_local_side.diff new file mode 100644 index 000000000..392143fd1 --- /dev/null +++ b/pkg/config/testdata/TestDbSettingsDiff/handles_api_disabled_on_local_side.diff @@ -0,0 +1,7 @@ +diff remote[db.settings] local[db.settings] +--- remote[db.settings] ++++ local[db.settings] +@@ -1,3 +0,0 @@ +-effective_cache_size = "4GB" +-max_connections = 100 
+-shared_buffers = "1GB" diff --git a/pkg/config/testdata/TestDbSettingsDiff/handles_api_disabled_on_remote_side.diff b/pkg/config/testdata/TestDbSettingsDiff/handles_api_disabled_on_remote_side.diff new file mode 100644 index 000000000..938242538 --- /dev/null +++ b/pkg/config/testdata/TestDbSettingsDiff/handles_api_disabled_on_remote_side.diff @@ -0,0 +1,7 @@ +diff remote[db.settings] local[db.settings] +--- remote[db.settings] ++++ local[db.settings] +@@ -0,0 +1,3 @@ ++effective_cache_size = "4GB" ++max_connections = 100 ++shared_buffers = "1GB" diff --git a/pkg/config/testdata/TestEmailDiff/local_disabled_remote_enabled.diff b/pkg/config/testdata/TestEmailDiff/local_disabled_remote_enabled.diff new file mode 100644 index 000000000..99a50071f --- /dev/null +++ b/pkg/config/testdata/TestEmailDiff/local_disabled_remote_enabled.diff @@ -0,0 +1,24 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -31,13 +31,13 @@ + inactivity_timeout = "0s" + + [email] +-enable_signup = true +-double_confirm_changes = true +-enable_confirmations = true +-secure_password_change = true +-max_frequency = "1s" +-otp_length = 6 +-otp_expiry = 3600 ++enable_signup = false ++double_confirm_changes = false ++enable_confirmations = false ++secure_password_change = false ++max_frequency = "1m0s" ++otp_length = 8 ++otp_expiry = 86400 + [email.template] + [email.template.confirmation] + content_path = "" diff --git a/pkg/config/testdata/TestEmailDiff/local_enabled_remote_disabled.diff b/pkg/config/testdata/TestEmailDiff/local_enabled_remote_disabled.diff new file mode 100644 index 000000000..ad89a5d1e --- /dev/null +++ b/pkg/config/testdata/TestEmailDiff/local_enabled_remote_disabled.diff @@ -0,0 +1,60 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -31,35 +31,43 @@ + inactivity_timeout = "0s" + + [email] +-enable_signup = false +-double_confirm_changes = false +-enable_confirmations = false +-secure_password_change = false +-max_frequency = 
"1m0s" +-otp_length = 6 +-otp_expiry = 3600 ++enable_signup = true ++double_confirm_changes = true ++enable_confirmations = true ++secure_password_change = true ++max_frequency = "1s" ++otp_length = 8 ++otp_expiry = 86400 + [email.template] + [email.template.confirmation] ++subject = "confirmation-subject" + content_path = "" + [email.template.email_change] ++subject = "email-change-subject" + content = "email-change-content" + content_path = "" + [email.template.invite] ++subject = "invite-subject" ++content = "invite-content" + content_path = "" + [email.template.magic_link] + subject = "magic-link-subject" ++content = "magic-link-content" + content_path = "" + [email.template.reauthentication] ++subject = "" ++content = "" + content_path = "" + [email.template.recovery] ++content = "recovery-content" + content_path = "" + [email.smtp] +-host = "" +-port = 0 +-user = "" +-pass = "" +-admin_email = "" +-sender_name = "" ++host = "smtp.sendgrid.net" ++port = 587 ++user = "apikey" ++pass = "hash:ed64b7695a606bc6ab4fcb41fe815b5ddf1063ccbc87afe1fa89756635db520e" ++admin_email = "admin@email.com" ++sender_name = "Admin" + + [sms] + enable_signup = false diff --git a/pkg/config/testdata/TestExternalDiff/local_enabled_and_disabled.diff b/pkg/config/testdata/TestExternalDiff/local_enabled_and_disabled.diff new file mode 100644 index 000000000..8b78fb240 --- /dev/null +++ b/pkg/config/testdata/TestExternalDiff/local_enabled_and_disabled.diff @@ -0,0 +1,34 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -71,7 +71,7 @@ + + [external] + [external.apple] +-enabled = false ++enabled = true + client_id = "test-client-1,test-client-2" + secret = "hash:ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252" + url = "" +@@ -78,9 +78,9 @@ + redirect_uri = "" + skip_nonce_check = false + [external.azure] +-enabled = false +-client_id = "" +-secret = "" ++enabled = true ++client_id = "test-client-1" ++secret = 
"hash:ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252" + url = "" + redirect_uri = "" + skip_nonce_check = false +@@ -127,7 +127,7 @@ + redirect_uri = "" + skip_nonce_check = false + [external.google] +-enabled = true ++enabled = false + client_id = "test-client-2" + secret = "" + url = "" diff --git a/pkg/config/testdata/TestHookDiff/local_disabled_remote_enabled.diff b/pkg/config/testdata/TestHookDiff/local_disabled_remote_enabled.diff new file mode 100644 index 000000000..247618f0c --- /dev/null +++ b/pkg/config/testdata/TestHookDiff/local_disabled_remote_enabled.diff @@ -0,0 +1,27 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -11,19 +11,19 @@ + + [hook] + [hook.mfa_verification_attempt] +-enabled = true ++enabled = false + uri = "pg-functions://postgres/public/verifyMFA" + secrets = "" + [hook.custom_access_token] +-enabled = true ++enabled = false + uri = "" + secrets = "" + [hook.send_sms] +-enabled = true ++enabled = false + uri = "https://example.com" + secrets = "" + [hook.send_email] +-enabled = true ++enabled = false + uri = "" + secrets = "" + diff --git a/pkg/config/testdata/TestHookDiff/local_enabled_remote_disabled.diff b/pkg/config/testdata/TestHookDiff/local_enabled_remote_disabled.diff new file mode 100644 index 000000000..556b13ff5 --- /dev/null +++ b/pkg/config/testdata/TestHookDiff/local_enabled_remote_disabled.diff @@ -0,0 +1,31 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -11,20 +11,20 @@ + + [hook] + [hook.mfa_verification_attempt] +-enabled = false ++enabled = true + uri = "pg-functions://postgres/public/verifyMFA" + secrets = "" + [hook.custom_access_token] +-enabled = false +-uri = "pg-functions://postgres/public/customToken" +-secrets = "" ++enabled = true ++uri = "http://example.com" ++secrets = "hash:ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252" + [hook.send_sms] +-enabled = false ++enabled = true + uri = "https://example.com" + secrets = 
"hash:ce62bb9bcced294fd4afe668f8ab3b50a89cf433093c526fffa3d0e46bf55252" + [hook.send_email] +-enabled = false +-uri = "https://example.com" ++enabled = true ++uri = "pg-functions://postgres/public/sendEmail" + secrets = "" + + [mfa] diff --git a/pkg/config/testdata/TestMfaDiff/local_enabled_and_disabled.diff b/pkg/config/testdata/TestMfaDiff/local_enabled_and_disabled.diff new file mode 100644 index 000000000..e5fa7c476 --- /dev/null +++ b/pkg/config/testdata/TestMfaDiff/local_enabled_and_disabled.diff @@ -0,0 +1,26 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -12,16 +12,16 @@ + [hook] + + [mfa] +-max_enrolled_factors = 10 ++max_enrolled_factors = 0 + [mfa.totp] + enroll_enabled = false + verify_enabled = false + [mfa.phone] +-enroll_enabled = false +-verify_enabled = false +-otp_length = 6 +-template = "Your code is {{ .Code }}" +-max_frequency = "5s" ++enroll_enabled = true ++verify_enabled = true ++otp_length = 0 ++template = "" ++max_frequency = "0s" + [mfa.web_authn] + enroll_enabled = false + verify_enabled = false diff --git a/pkg/config/testdata/TestSmsDiff/enable_sign_up_without_provider.diff b/pkg/config/testdata/TestSmsDiff/enable_sign_up_without_provider.diff new file mode 100644 index 000000000..a7c312c91 --- /dev/null +++ b/pkg/config/testdata/TestSmsDiff/enable_sign_up_without_provider.diff @@ -0,0 +1,12 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -40,7 +40,7 @@ + otp_expiry = 0 + + [sms] +-enable_signup = false ++enable_signup = true + enable_confirmations = false + template = "" + max_frequency = "0s" diff --git a/pkg/config/testdata/TestSmsDiff/local_disabled_remote_enabled.diff b/pkg/config/testdata/TestSmsDiff/local_disabled_remote_enabled.diff new file mode 100644 index 000000000..1dd938cd7 --- /dev/null +++ b/pkg/config/testdata/TestSmsDiff/local_disabled_remote_enabled.diff @@ -0,0 +1,30 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -40,12 +40,12 @@ + 
otp_expiry = 0 + + [sms] +-enable_signup = true +-enable_confirmations = true +-template = "Your code is {{ .Code }}" +-max_frequency = "1m0s" ++enable_signup = false ++enable_confirmations = false ++template = "" ++max_frequency = "0s" + [sms.twilio] +-enabled = true ++enabled = false + account_sid = "" + message_service_sid = "" + auth_token = "" +@@ -68,8 +68,6 @@ + api_key = "" + api_secret = "" + [sms.test_otp] +-123 = "456" +-456 = "123" + + [third_party] + [third_party.firebase] diff --git a/pkg/config/testdata/TestSmsDiff/local_enabled_remote_disabled.diff b/pkg/config/testdata/TestSmsDiff/local_enabled_remote_disabled.diff new file mode 100644 index 000000000..02f0b76cd --- /dev/null +++ b/pkg/config/testdata/TestSmsDiff/local_enabled_remote_disabled.diff @@ -0,0 +1,42 @@ +diff remote[auth] local[auth] +--- remote[auth] ++++ local[auth] +@@ -40,12 +40,12 @@ + otp_expiry = 0 + + [sms] +-enable_signup = false +-enable_confirmations = false +-template = "" +-max_frequency = "0s" ++enable_signup = true ++enable_confirmations = true ++template = "Your code is {{ .Code }}" ++max_frequency = "1m0s" + [sms.twilio] +-enabled = true ++enabled = false + account_sid = "" + message_service_sid = "" + auth_token = "" +@@ -55,9 +55,9 @@ + message_service_sid = "" + auth_token = "" + [sms.messagebird] +-enabled = false +-originator = "" +-access_key = "" ++enabled = true ++originator = "test-originator" ++access_key = "hash:ab60d03fc809fb02dae838582f3ddc13d1d6cb32ffba77c4b969dd3caa496f13" + [sms.textlocal] + enabled = false + sender = "" +@@ -68,6 +68,7 @@ + api_key = "" + api_secret = "" + [sms.test_otp] ++123 = "456" + + [third_party] + [third_party.firebase] diff --git a/pkg/config/testdata/config.toml b/pkg/config/testdata/config.toml index f7061c1e7..8fcffa41d 100644 --- a/pkg/config/testdata/config.toml +++ b/pkg/config/testdata/config.toml @@ -1,3 +1,5 @@ +# For detailed configuration reference documentation, visit: +# 
https://supabase.com/docs/guides/local-development/cli/config # A string used to distinguish different Supabase projects on the same host. Defaults to the # working directory name when running `supabase init`. project_id = "test" @@ -7,7 +9,7 @@ enabled = true # Port to use for the API URL. port = 54321 # Schemas to expose in your API. Tables, views and stored procedures in this schema will get API -# endpoints. public and storage are always included. +# endpoints. `public` and `graphql_public` schemas are included by default. schemas = ["public", "graphql_public"] # Extra schemas to add to the search_path of every request. public is always included. extra_search_path = ["public", "extensions"] @@ -16,6 +18,7 @@ extra_search_path = ["public", "extensions"] max_rows = 1000 [api.tls] +# Enable HTTPS endpoints locally using a self-signed certificate. enabled = true [db] @@ -39,6 +42,16 @@ default_pool_size = 20 # Maximum number of client connections allowed. max_client_conn = 100 +[db.vault] +test_key = "test_value" + +[db.seed] +# If enabled, seeds the database after migrations during a db reset. +enabled = true +# Specifies an ordered list of seed files to load during db reset. +# Supports glob patterns relative to supabase directory: "./seeds/*.sql" +sql_paths = ["./seed.sql"] + [realtime] enabled = true # Bind realtime via either IPv4 or IPv6. (default: IPv6) @@ -64,14 +77,17 @@ port = 54324 # Uncomment to expose additional ports for testing user applications that send emails. # smtp_port = 54325 # pop3_port = 54326 +# admin_email = "admin@email.com" +# sender_name = "Admin" [storage] enabled = true # The maximum file size allowed (e.g. "5MB", "500KB"). file_size_limit = "50MiB" +# Image transformation API is available to Supabase Pro plan. [storage.image_transformation] -enabled = false +enabled = true # Uncomment to configure local storage buckets [storage.buckets.images] @@ -86,7 +102,7 @@ enabled = true # in emails. 
site_url = "http://127.0.0.1:3000" # A list of *exact* URLs that auth providers are permitted to redirect to post authentication. -additional_redirect_urls = ["https://127.0.0.1:3000"] +additional_redirect_urls = ["https://127.0.0.1:3000", "env(AUTH_CALLBACK_URL)"] # How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). jwt_expiry = 3600 # If disabled, the refresh token will never expire. @@ -96,8 +112,15 @@ enable_refresh_token_rotation = true refresh_token_reuse_interval = 10 # Allow/disallow new user signups to your project. enable_signup = true +# Allow/disallow anonymous sign-ins to your project. +enable_anonymous_sign_ins = true # Allow/disallow testing manual linking of accounts enable_manual_linking = true +# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more. +minimum_password_length = 6 +# Passwords that do not meet the following requirements will be rejected as weak. Supported values +# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols` +password_requirements = "" [auth.email] # Allow/disallow new user signups via email to your project. @@ -107,11 +130,18 @@ enable_signup = true double_confirm_changes = true # If enabled, users need to confirm their email address before signing in. enable_confirmations = false +# If enabled, users will need to reauthenticate or have logged in recently to change their password. +secure_password_change = true # Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email. max_frequency = "1s" +# Number of characters used in the email OTP. +otp_length = 6 +# Number of seconds before the email OTP expires (defaults to 1 hour). 
+otp_expiry = 3600 # Use a production-ready SMTP server [auth.email.smtp] +enabled = true host = "smtp.sendgrid.net" port = 587 user = "apikey" @@ -130,7 +160,7 @@ enable_signup = true # If enabled, users need to confirm their phone number before signing in. enable_confirmations = false # Template for sending OTP to users -template = "Your code is {{ `{{ .Code }}` }} ." +template = "Your code is {{ `{{ .Code }}` }}" # Controls the minimum amount of time that must pass before sending another sms otp. max_frequency = "5s" @@ -145,6 +175,7 @@ timebox = "24h" # Force log out if the user has been inactive longer than the specified duration. inactivity_timeout = "8h" +# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used. [auth.hook.custom_access_token] enabled = true uri = "pg-functions://postgres/auth/custom-access-token-hook" @@ -154,7 +185,6 @@ enabled = true uri = "http://host.docker.internal/functions/v1/send_sms" secrets = "env(AUTH_SEND_SMS_SECRETS)" - # Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. [auth.sms.twilio] enabled = true @@ -163,6 +193,7 @@ message_service_sid = "message_service_sid" # DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: auth_token = "env(TWILIO_AUTH_TOKEN)" +# Multi-factor-authentication is available to Supabase Pro plan. [auth.mfa] max_enrolled_factors = 10 @@ -171,13 +202,18 @@ max_enrolled_factors = 10 enroll_enabled = true verify_enabled = true -# Configure Multi-factor-authentication via Phone Messaging +# Configure MFA via Phone Messaging [auth.mfa.phone] enroll_enabled = true verify_enabled = true otp_length = 6 -template = "Your code is {{ `{{ .Code }}` }} ." 
-max_frequency = "10s" +template = "Your code is {{ `{{ .Code }}` }}" +max_frequency = "5s" + +# Configure MFA via Phone Messaging +[auth.mfa.web_authn] +enroll_enabled = true +verify_enabled = true # Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, # `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, @@ -217,3 +253,23 @@ s3_region = "ap-southeast-1" s3_access_key = "" # Configures AWS_SECRET_ACCESS_KEY for S3 bucket s3_secret_key = "" + +[remotes.production] +project_id = "vpefcjyosynxeiebfscx" + +[remotes.production.auth] +site_url = "http://feature-auth-branch.com/" +enable_signup = false + +[remotes.production.auth.external.azure] +enabled = false +client_id = "nope" + +[remotes.staging] +project_id = "bvikqvbczudanvggcord" + +[remotes.staging.db.seed] +enabled = true + +[remotes.staging.storage.buckets.images] +allowed_mime_types = ["image/png"] diff --git a/pkg/config/updater.go b/pkg/config/updater.go new file mode 100644 index 000000000..e5f42aceb --- /dev/null +++ b/pkg/config/updater.go @@ -0,0 +1,180 @@ +package config + +import ( + "context" + "fmt" + "os" + + "github.com/go-errors/errors" + v1API "github.com/supabase/cli/pkg/api" +) + +type ConfigUpdater struct { + client v1API.ClientWithResponses +} + +func NewConfigUpdater(client v1API.ClientWithResponses) ConfigUpdater { + return ConfigUpdater{client: client} +} + +func (u *ConfigUpdater) UpdateRemoteConfig(ctx context.Context, remote baseConfig, filter ...func(string) bool) error { + if err := u.UpdateApiConfig(ctx, remote.ProjectId, remote.Api, filter...); err != nil { + return err + } + if err := u.UpdateDbConfig(ctx, remote.ProjectId, remote.Db, filter...); err != nil { + return err + } + if err := u.UpdateAuthConfig(ctx, remote.ProjectId, remote.Auth, filter...); err != nil { + return err + } + if err := u.UpdateStorageConfig(ctx, remote.ProjectId, remote.Storage, filter...); err != nil { + return 
err + } + if err := u.UpdateExperimentalConfig(ctx, remote.ProjectId, remote.Experimental, filter...); err != nil { + return err + } + return nil +} + +func (u *ConfigUpdater) UpdateApiConfig(ctx context.Context, projectRef string, c api, filter ...func(string) bool) error { + apiConfig, err := u.client.V1GetPostgrestServiceConfigWithResponse(ctx, projectRef) + if err != nil { + return errors.Errorf("failed to read API config: %w", err) + } else if apiConfig.JSON200 == nil { + return errors.Errorf("unexpected status %d: %s", apiConfig.StatusCode(), string(apiConfig.Body)) + } + apiDiff, err := c.DiffWithRemote(*apiConfig.JSON200) + if err != nil { + return err + } else if len(apiDiff) == 0 { + fmt.Fprintln(os.Stderr, "Remote API config is up to date.") + return nil + } + fmt.Fprintln(os.Stderr, "Updating API service with config:", string(apiDiff)) + for _, keep := range filter { + if !keep("api") { + return nil + } + } + if resp, err := u.client.V1UpdatePostgrestServiceConfigWithResponse(ctx, projectRef, c.ToUpdatePostgrestConfigBody()); err != nil { + return errors.Errorf("failed to update API config: %w", err) + } else if resp.JSON200 == nil { + return errors.Errorf("unexpected status %d: %s", resp.StatusCode(), string(resp.Body)) + } + return nil +} + +func (u *ConfigUpdater) UpdateDbSettingsConfig(ctx context.Context, projectRef string, s settings, filter ...func(string) bool) error { + dbConfig, err := u.client.V1GetPostgresConfigWithResponse(ctx, projectRef) + if err != nil { + return errors.Errorf("failed to read DB config: %w", err) + } else if dbConfig.JSON200 == nil { + return errors.Errorf("unexpected status %d: %s", dbConfig.StatusCode(), string(dbConfig.Body)) + } + dbDiff, err := s.DiffWithRemote(*dbConfig.JSON200) + if err != nil { + return err + } else if len(dbDiff) == 0 { + fmt.Fprintln(os.Stderr, "Remote DB config is up to date.") + return nil + } + fmt.Fprintln(os.Stderr, "Updating DB service with config:", string(dbDiff)) + for _, keep := range 
filter { + if !keep("db") { + return nil + } + } + updateBody := s.ToUpdatePostgresConfigBody() + if resp, err := u.client.V1UpdatePostgresConfigWithResponse(ctx, projectRef, updateBody); err != nil { + return errors.Errorf("failed to update DB config: %w", err) + } else if resp.JSON200 == nil { + return errors.Errorf("unexpected status %d: %s", resp.StatusCode(), string(resp.Body)) + } + return nil +} + +func (u *ConfigUpdater) UpdateDbConfig(ctx context.Context, projectRef string, c db, filter ...func(string) bool) error { + if err := u.UpdateDbSettingsConfig(ctx, projectRef, c.Settings, filter...); err != nil { + return err + } + return nil +} + +func (u *ConfigUpdater) UpdateAuthConfig(ctx context.Context, projectRef string, c auth, filter ...func(string) bool) error { + if !c.Enabled { + return nil + } + authConfig, err := u.client.V1GetAuthServiceConfigWithResponse(ctx, projectRef) + if err != nil { + return errors.Errorf("failed to read Auth config: %w", err) + } else if authConfig.JSON200 == nil { + return errors.Errorf("unexpected status %d: %s", authConfig.StatusCode(), string(authConfig.Body)) + } + authDiff, err := c.DiffWithRemote(*authConfig.JSON200) + if err != nil { + return err + } else if len(authDiff) == 0 { + fmt.Fprintln(os.Stderr, "Remote Auth config is up to date.") + return nil + } + fmt.Fprintln(os.Stderr, "Updating Auth service with config:", string(authDiff)) + for _, keep := range filter { + if !keep("auth") { + return nil + } + } + if resp, err := u.client.V1UpdateAuthServiceConfigWithResponse(ctx, projectRef, c.ToUpdateAuthConfigBody()); err != nil { + return errors.Errorf("failed to update Auth config: %w", err) + } else if status := resp.StatusCode(); status < 200 || status >= 300 { + return errors.Errorf("unexpected status %d: %s", status, string(resp.Body)) + } + return nil +} + +func (u *ConfigUpdater) UpdateStorageConfig(ctx context.Context, projectRef string, c storage, filter ...func(string) bool) error { + if !c.Enabled { + 
return nil + } + storageConfig, err := u.client.V1GetStorageConfigWithResponse(ctx, projectRef) + if err != nil { + return errors.Errorf("failed to read Storage config: %w", err) + } else if storageConfig.JSON200 == nil { + return errors.Errorf("unexpected status %d: %s", storageConfig.StatusCode(), string(storageConfig.Body)) + } + storageDiff, err := c.DiffWithRemote(*storageConfig.JSON200) + if err != nil { + return err + } else if len(storageDiff) == 0 { + fmt.Fprintln(os.Stderr, "Remote Storage config is up to date.") + return nil + } + fmt.Fprintln(os.Stderr, "Updating Storage service with config:", string(storageDiff)) + for _, keep := range filter { + if !keep("storage") { + return nil + } + } + if resp, err := u.client.V1UpdateStorageConfigWithResponse(ctx, projectRef, c.ToUpdateStorageConfigBody()); err != nil { + return errors.Errorf("failed to update Storage config: %w", err) + } else if status := resp.StatusCode(); status < 200 || status >= 300 { + return errors.Errorf("unexpected status %d: %s", status, string(resp.Body)) + } + return nil +} + +func (u *ConfigUpdater) UpdateExperimentalConfig(ctx context.Context, projectRef string, exp experimental, filter ...func(string) bool) error { + if exp.Webhooks != nil && exp.Webhooks.Enabled { + fmt.Fprintln(os.Stderr, "Enabling webhooks for project:", projectRef) + for _, keep := range filter { + if !keep("webhooks") { + return nil + } + } + if resp, err := u.client.V1EnableDatabaseWebhookWithResponse(ctx, projectRef); err != nil { + return errors.Errorf("failed to enable webhooks: %w", err) + } else if status := resp.StatusCode(); status < 200 || status >= 300 { + return errors.Errorf("unexpected enable webhook status %d: %s", status, string(resp.Body)) + } + } + return nil +} diff --git a/pkg/config/updater_test.go b/pkg/config/updater_test.go new file mode 100644 index 000000000..f3eb0bcb4 --- /dev/null +++ b/pkg/config/updater_test.go @@ -0,0 +1,353 @@ +package config + +import ( + "context" + "net/http" 
+ "testing" + + "github.com/h2non/gock" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + v1API "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" +) + +func TestUpdateApi(t *testing.T) { + server := "http://localhost" + client, err := v1API.NewClientWithResponses(server) + require.NoError(t, err) + + t.Run("updates remote config", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Get("/v1/projects/test-project/postgrest"). + Reply(http.StatusOK). + JSON(v1API.PostgrestConfigWithJWTSecretResponse{}) + gock.New(server). + Patch("/v1/projects/test-project/postgrest"). + Reply(http.StatusOK). + JSON(v1API.PostgrestConfigWithJWTSecretResponse{ + DbSchema: "public,graphql_public", + DbExtraSearchPath: "public,extensions", + MaxRows: 1000, + }) + // Run test + err := updater.UpdateApiConfig(context.Background(), "test-project", api{ + Enabled: true, + Schemas: []string{"public", "graphql_public"}, + ExtraSearchPath: []string{"public", "extensions"}, + MaxRows: 1000, + }) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) + + t.Run("skips update if no diff", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Get("/v1/projects/test-project/postgrest"). + Reply(http.StatusOK). 
+ JSON(v1API.PostgrestConfigWithJWTSecretResponse{ + DbSchema: "", + DbExtraSearchPath: "public,extensions", + MaxRows: 1000, + }) + // Run test + err := updater.UpdateApiConfig(context.Background(), "test-project", api{}) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) +} + +func TestUpdateDbConfig(t *testing.T) { + server := "http://localhost" + client, err := v1API.NewClientWithResponses(server) + require.NoError(t, err) + + t.Run("updates remote DB config", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Get("/v1/projects/test-project/config/database"). + Reply(http.StatusOK). + JSON(v1API.PostgresConfigResponse{}) + gock.New(server). + Put("/v1/projects/test-project/config/database"). + Reply(http.StatusOK). + JSON(v1API.PostgresConfigResponse{ + MaxConnections: cast.Ptr(cast.UintToInt(100)), + }) + // Run test + err := updater.UpdateDbConfig(context.Background(), "test-project", db{ + Settings: settings{ + MaxConnections: cast.Ptr(cast.IntToUint(100)), + }, + }) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) + + t.Run("skips update if no diff in DB config", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Get("/v1/projects/test-project/config/database"). + Reply(http.StatusOK). 
+ JSON(v1API.PostgresConfigResponse{ + MaxConnections: cast.Ptr(cast.UintToInt(100)), + }) + // Run test + err := updater.UpdateDbConfig(context.Background(), "test-project", db{ + Settings: settings{ + MaxConnections: cast.Ptr(cast.IntToUint(100)), + }, + }) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) +} + +func TestUpdateExperimentalConfig(t *testing.T) { + server := "http://localhost" + client, err := v1API.NewClientWithResponses(server) + require.NoError(t, err) + + t.Run("enables webhooks", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Post("/v1/projects/test-project/database/webhooks/enable"). + Reply(http.StatusOK). + JSON(map[string]interface{}{}) + // Run test + err := updater.UpdateExperimentalConfig(context.Background(), "test-project", experimental{ + Webhooks: &webhooks{ + Enabled: true, + }, + }) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) + + t.Run("skips update if webhooks not enabled", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Run test + err := updater.UpdateExperimentalConfig(context.Background(), "test-project", experimental{ + Webhooks: &webhooks{ + Enabled: false, + }, + }) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) +} + +func TestUpdateAuthConfig(t *testing.T) { + server := "http://localhost" + client, err := v1API.NewClientWithResponses(server) + require.NoError(t, err) + + t.Run("updates remote Auth config", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Get("/v1/projects/test-project/config/auth"). + Reply(http.StatusOK). + JSON(v1API.AuthConfigResponse{ + SiteUrl: cast.Ptr("http://localhost:3000"), + }) + gock.New(server). + Patch("/v1/projects/test-project/config/auth"). 
+ Reply(http.StatusOK) + // Run test + err := updater.UpdateAuthConfig(context.Background(), "test-project", auth{Enabled: true}) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) + + t.Run("skips update if no diff in Auth config", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Get("/v1/projects/test-project/config/auth"). + Reply(http.StatusOK). + JSON(v1API.AuthConfigResponse{}) + // Run test + err := updater.UpdateAuthConfig(context.Background(), "test-project", auth{ + Enabled: true, + EnableSignup: true, + Email: email{EnableConfirmations: true}, + Sms: sms{TestOTP: map[string]string{}}, + }) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) + + t.Run("skips update if disabled locally", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Run test + err := updater.UpdateAuthConfig(context.Background(), "test-project", auth{}) + // Check result + assert.NoError(t, err) + }) +} + +func TestUpdateStorageConfig(t *testing.T) { + server := "http://localhost" + client, err := v1API.NewClientWithResponses(server) + require.NoError(t, err) + + t.Run("updates remote Storage config", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Get("/v1/projects/test-project/config/storage"). + Reply(http.StatusOK). + JSON(v1API.StorageConfigResponse{ + FileSizeLimit: 100, + Features: v1API.StorageFeatures{ + ImageTransformation: v1API.StorageFeatureImageTransformation{ + Enabled: true, + }, + }, + }) + gock.New(server). + Patch("/v1/projects/test-project/config/storage"). 
+ Reply(http.StatusOK) + // Run test + err := updater.UpdateStorageConfig(context.Background(), "test-project", storage{Enabled: true}) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) + + t.Run("skips update if no diff in Storage config", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + gock.New(server). + Get("/v1/projects/test-project/config/storage"). + Reply(http.StatusOK). + JSON(v1API.StorageConfigResponse{}) + // Run test + err := updater.UpdateStorageConfig(context.Background(), "test-project", storage{Enabled: true}) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) + + t.Run("skips update if disabled locally", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Run test + err := updater.UpdateStorageConfig(context.Background(), "test-project", storage{}) + // Check result + assert.NoError(t, err) + }) +} + +func TestUpdateRemoteConfig(t *testing.T) { + server := "http://localhost" + client, err := v1API.NewClientWithResponses(server) + require.NoError(t, err) + + t.Run("updates all configs", func(t *testing.T) { + updater := NewConfigUpdater(*client) + // Setup mock server + defer gock.Off() + // API config + gock.New(server). + Get("/v1/projects/test-project/postgrest"). + Reply(http.StatusOK). + JSON(v1API.PostgrestConfigWithJWTSecretResponse{}) + gock.New(server). + Patch("/v1/projects/test-project/postgrest"). + Reply(http.StatusOK). + JSON(v1API.PostgrestConfigWithJWTSecretResponse{ + DbSchema: "public", + MaxRows: 1000, + }) + // DB config + gock.New(server). + Get("/v1/projects/test-project/config/database"). + Reply(http.StatusOK). + JSON(v1API.PostgresConfigResponse{}) + gock.New(server). + Put("/v1/projects/test-project/config/database"). + Reply(http.StatusOK). + JSON(v1API.PostgresConfigResponse{ + MaxConnections: cast.Ptr(cast.UintToInt(100)), + }) + // Auth config + gock.New(server). 
+ Get("/v1/projects/test-project/config/auth"). + Reply(http.StatusOK). + JSON(v1API.AuthConfigResponse{}) + gock.New(server). + Patch("/v1/projects/test-project/config/auth"). + Reply(http.StatusOK) + // Storage config + gock.New(server). + Get("/v1/projects/test-project/config/storage"). + Reply(http.StatusOK). + JSON(v1API.StorageConfigResponse{}) + gock.New(server). + Patch("/v1/projects/test-project/config/storage"). + Reply(http.StatusOK) + // Experimental config + gock.New(server). + Post("/v1/projects/test-project/database/webhooks/enable"). + Reply(http.StatusOK). + JSON(map[string]interface{}{}) + // Run test + err := updater.UpdateRemoteConfig(context.Background(), baseConfig{ + ProjectId: "test-project", + Api: api{ + Enabled: true, + Schemas: []string{"public", "private"}, + MaxRows: 1000, + }, + Db: db{ + Settings: settings{ + MaxConnections: cast.Ptr(cast.IntToUint(100)), + }, + }, + Auth: auth{ + Enabled: true, + SiteUrl: "http://localhost:3000", + }, + Storage: storage{ + Enabled: true, + FileSizeLimit: 100, + ImageTransformation: &imageTransformation{ + Enabled: true, + }, + }, + Experimental: experimental{ + Webhooks: &webhooks{ + Enabled: true, + }, + }, + }) + // Check result + assert.NoError(t, err) + assert.True(t, gock.IsDone()) + }) +} diff --git a/pkg/config/utils.go b/pkg/config/utils.go index b2318b5a9..9a46d94a9 100644 --- a/pkg/config/utils.go +++ b/pkg/config/utils.go @@ -1,6 +1,10 @@ package config import ( + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "fmt" "path/filepath" "strings" ) @@ -29,7 +33,6 @@ type pathBuilder struct { FallbackImportMapPath string FallbackEnvFilePath string DbTestsDir string - SeedDataPath string CustomRolesPath string } @@ -63,7 +66,6 @@ func NewPathBuilder(configPath string) pathBuilder { FallbackImportMapPath: filepath.Join(base, "functions", "import_map.json"), FallbackEnvFilePath: filepath.Join(base, "functions", ".env"), DbTestsDir: filepath.Join(base, "tests"), - SeedDataPath: 
filepath.Join(base, "seed.sql"), CustomRolesPath: filepath.Join(base, "roles.sql"), } } @@ -81,3 +83,37 @@ func replaceImageTag(image string, tag string) string { index := strings.IndexByte(image, ':') return image[:index+1] + strings.TrimSpace(tag) } + +func strToArr(v string) []string { + // Avoid returning [""] if v is empty + if len(v) == 0 { + return nil + } + return strings.Split(v, ",") +} + +func mapToEnv(input map[string]string) string { + var result []string + for k, v := range input { + kv := fmt.Sprintf("%s=%s", k, v) + result = append(result, kv) + } + return strings.Join(result, ",") +} + +func envToMap(input string) map[string]string { + env := strToArr(input) + result := make(map[string]string, len(env)) + for _, kv := range env { + if parts := strings.Split(kv, "="); len(parts) > 1 { + result[parts[0]] = parts[1] + } + } + return result +} + +func sha256Hmac(key, value string) string { + h := hmac.New(sha256.New, []byte(key)) + h.Write([]byte(value)) + return hex.EncodeToString(h.Sum(nil)) +} diff --git a/internal/link/diff.go b/pkg/diff/diff.go similarity index 99% rename from internal/link/diff.go rename to pkg/diff/diff.go index 84f2e3494..6a40b23fc 100644 --- a/internal/link/diff.go +++ b/pkg/diff/diff.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package link +package diff import ( "bytes" diff --git a/pkg/function/api.go b/pkg/function/api.go index e3f641dee..9fdf52b51 100644 --- a/pkg/function/api.go +++ b/pkg/function/api.go @@ -14,7 +14,7 @@ type EdgeRuntimeAPI struct { } type EszipBundler interface { - Bundle(ctx context.Context, entrypoint string, importMap string, output io.Writer) error + Bundle(ctx context.Context, entrypoint string, importMap string, staticFiles []string, output io.Writer) error } func NewEdgeRuntimeAPI(project string, client api.ClientWithResponses, bundler EszipBundler) EdgeRuntimeAPI { diff --git a/pkg/function/batch.go b/pkg/function/batch.go index 424a8a8cd..d06e5bf04 100644 --- a/pkg/function/batch.go +++ b/pkg/function/batch.go @@ -35,13 +35,17 @@ func (s *EdgeRuntimeAPI) UpsertFunctions(ctx context.Context, functionConfig con exists[f.Slug] = struct{}{} } for slug, function := range functionConfig { + if !function.IsEnabled() { + fmt.Fprintln(os.Stderr, "Skipped deploying Function:", slug) + continue + } for _, keep := range filter { if !keep(slug) { continue } } var body bytes.Buffer - if err := s.eszip.Bundle(ctx, function.Entrypoint, function.ImportMap, &body); err != nil { + if err := s.eszip.Bundle(ctx, function.Entrypoint, function.ImportMap, function.StaticFiles, &body); err != nil { return err } // Update if function already exists @@ -51,19 +55,19 @@ func (s *EdgeRuntimeAPI) UpsertFunctions(ctx context.Context, functionConfig con VerifyJwt: function.VerifyJWT, ImportMapPath: toFileURL(function.ImportMap), EntrypointPath: toFileURL(function.Entrypoint), - }, eszipContentType, &body); err != nil { + }, eszipContentType, bytes.NewReader(body.Bytes())); err != nil { return errors.Errorf("failed to update function: %w", err) } else if resp.JSON200 == nil { return errors.Errorf("unexpected status %d: %s", resp.StatusCode(), string(resp.Body)) } } else { - if resp, err := s.client.CreateFunctionWithBodyWithResponse(ctx, s.project, &api.CreateFunctionParams{ + if resp, 
err := s.client.V1CreateAFunctionWithBodyWithResponse(ctx, s.project, &api.V1CreateAFunctionParams{ Slug: &slug, Name: &slug, VerifyJwt: function.VerifyJWT, ImportMapPath: toFileURL(function.ImportMap), EntrypointPath: toFileURL(function.Entrypoint), - }, eszipContentType, &body); err != nil { + }, eszipContentType, bytes.NewReader(body.Bytes())); err != nil { return errors.Errorf("failed to create function: %w", err) } else if resp.JSON201 == nil { return errors.Errorf("unexpected status %d: %s", resp.StatusCode(), string(resp.Body)) diff --git a/pkg/function/batch_test.go b/pkg/function/batch_test.go index 1489cd329..101cabb79 100644 --- a/pkg/function/batch_test.go +++ b/pkg/function/batch_test.go @@ -17,7 +17,7 @@ import ( type MockBundler struct { } -func (b *MockBundler) Bundle(ctx context.Context, entrypoint string, importMap string, output io.Writer) error { +func (b *MockBundler) Bundle(ctx context.Context, entrypoint string, importMap string, staticFiles []string, output io.Writer) error { return nil } diff --git a/pkg/function/bundle.go b/pkg/function/bundle.go index ed12806e8..d015624ba 100644 --- a/pkg/function/bundle.go +++ b/pkg/function/bundle.go @@ -28,7 +28,7 @@ func NewNativeBundler(tempDir string, fsys fs.FS) EszipBundler { // Use a package private variable to allow testing without gosec complaining about G204 var edgeRuntimeBin = "edge-runtime" -func (b *nativeBundler) Bundle(ctx context.Context, entrypoint string, importMap string, output io.Writer) error { +func (b *nativeBundler) Bundle(ctx context.Context, entrypoint string, importMap string, staticFiles []string, output io.Writer) error { slug := filepath.Base(filepath.Dir(entrypoint)) outputPath := filepath.Join(b.tempDir, slug+".eszip") // TODO: make edge runtime write to stdout @@ -36,6 +36,9 @@ func (b *nativeBundler) Bundle(ctx context.Context, entrypoint string, importMap if len(importMap) > 0 { args = append(args, "--import-map", importMap) } + for _, staticFile := range staticFiles 
{ + args = append(args, "--static", staticFile) + } cmd := exec.CommandContext(ctx, edgeRuntimeBin, args...) cmd.Stderr = os.Stderr cmd.Stdout = os.Stdout diff --git a/pkg/function/bundle_test.go b/pkg/function/bundle_test.go index 1aa63f212..b29d3bc87 100644 --- a/pkg/function/bundle_test.go +++ b/pkg/function/bundle_test.go @@ -36,7 +36,7 @@ func TestBundleFunction(t *testing.T) { // Setup mock bundler bundler := nativeBundler{fsys: fsys} // Run test - err := bundler.Bundle(context.Background(), "hello/index.ts", "", &body) + err := bundler.Bundle(context.Background(), "hello/index.ts", "", nil, &body) // Check error assert.NoError(t, err) assert.Equal(t, compressedEszipMagicID+";", body.String()) diff --git a/pkg/migration/drop.go b/pkg/migration/drop.go index 362f2a9d2..4fcdd805c 100644 --- a/pkg/migration/drop.go +++ b/pkg/migration/drop.go @@ -24,6 +24,7 @@ var ( `\_realtime`, `\_supavisor`, "pgbouncer", + "pgmq", "pgsodium", "pgtle", `supabase\_migrations`, diff --git a/pkg/migration/file.go b/pkg/migration/file.go index 04a945565..79da1c86a 100644 --- a/pkg/migration/file.go +++ b/pkg/migration/file.go @@ -2,6 +2,8 @@ package migration import ( "context" + "crypto/sha256" + "encoding/hex" "io" "io/fs" "path/filepath" @@ -24,6 +26,22 @@ type MigrationFile struct { var migrateFilePattern = regexp.MustCompile(`^([0-9]+)_(.*)\.sql$`) func NewMigrationFromFile(path string, fsys fs.FS) (*MigrationFile, error) { + lines, err := parseFile(path, fsys) + if err != nil { + return nil, err + } + file := MigrationFile{Statements: lines} + // Parse version from file name + filename := filepath.Base(path) + matches := migrateFilePattern.FindStringSubmatch(filename) + if len(matches) > 2 { + file.Version = matches[1] + file.Name = matches[2] + } + return &file, nil +} + +func parseFile(path string, fsys fs.FS) ([]string, error) { sql, err := fsys.Open(path) if err != nil { return nil, errors.Errorf("failed to open migration file: %w", err) @@ -37,17 +55,7 @@ func 
NewMigrationFromFile(path string, fsys fs.FS) (*MigrationFile, error) { } } } - file, err := NewMigrationFromReader(sql) - if err == nil { - // Parse version from file name - filename := filepath.Base(path) - matches := migrateFilePattern.FindStringSubmatch(filename) - if len(matches) > 2 { - file.Version = matches[1] - file.Name = matches[2] - } - } - return file, err + return parser.SplitAndTrim(sql) } func NewMigrationFromReader(sql io.Reader) (*MigrationFile, error) { @@ -112,12 +120,40 @@ func (m *MigrationFile) insertVersionSQL(conn *pgx.Conn, batch *pgconn.Batch) er return nil } -func (m *MigrationFile) ExecBatchWithCache(ctx context.Context, conn *pgx.Conn) error { +type SeedFile struct { + Path string + Hash string + Dirty bool `db:"-"` +} + +func NewSeedFile(path string, fsys fs.FS) (*SeedFile, error) { + sql, err := fsys.Open(path) + if err != nil { + return nil, errors.Errorf("failed to open seed file: %w", err) + } + defer sql.Close() + hash := sha256.New() + if _, err := io.Copy(hash, sql); err != nil { + return nil, errors.Errorf("failed to hash file: %w", err) + } + digest := hex.EncodeToString(hash.Sum(nil)) + return &SeedFile{Path: path, Hash: digest}, nil +} + +func (m *SeedFile) ExecBatchWithCache(ctx context.Context, conn *pgx.Conn, fsys fs.FS) error { + // Parse each file individually to reduce memory usage + lines, err := parseFile(m.Path, fsys) + if err != nil { + return err + } // Data statements don't mutate schemas, safe to use statement cache batch := pgx.Batch{} - for _, line := range m.Statements { - batch.Queue(line) + if !m.Dirty { + for _, line := range lines { + batch.Queue(line) + } } + batch.Queue(UPSERT_SEED_FILE, m.Path, m.Hash) // No need to track version here because there are no schema changes if err := conn.SendBatch(ctx, &batch).Close(); err != nil { return errors.Errorf("failed to send batch: %w", err) diff --git a/pkg/migration/history.go b/pkg/migration/history.go index 6dc8b5fc8..ca35d2da2 100644 --- 
a/pkg/migration/history.go +++ b/pkg/migration/history.go @@ -19,8 +19,11 @@ const ( DELETE_MIGRATION_VERSION = "DELETE FROM supabase_migrations.schema_migrations WHERE version = ANY($1)" DELETE_MIGRATION_BEFORE = "DELETE FROM supabase_migrations.schema_migrations WHERE version <= $1" TRUNCATE_VERSION_TABLE = "TRUNCATE supabase_migrations.schema_migrations" - SELECT_VERSION_TABLE = "SELECT * FROM supabase_migrations.schema_migrations" + SELECT_VERSION_TABLE = "SELECT version, coalesce(name, '') as name, statements FROM supabase_migrations.schema_migrations" LIST_MIGRATION_VERSION = "SELECT version FROM supabase_migrations.schema_migrations ORDER BY version" + CREATE_SEED_TABLE = "CREATE TABLE IF NOT EXISTS supabase_migrations.seed_files (path text NOT NULL PRIMARY KEY, hash text NOT NULL)" + UPSERT_SEED_FILE = "INSERT INTO supabase_migrations.seed_files(path, hash) VALUES($1, $2) ON CONFLICT (path) DO UPDATE SET hash = EXCLUDED.hash" + SELECT_SEED_TABLE = "SELECT path, hash FROM supabase_migrations.seed_files" ) // TODO: support overriding `supabase_migrations.schema_migrations` with user defined . @@ -46,3 +49,24 @@ func ReadMigrationTable(ctx context.Context, conn *pgx.Conn) ([]MigrationFile, e } return pgxv5.CollectRows[MigrationFile](rows) } + +func CreateSeedTable(ctx context.Context, conn *pgx.Conn) error { + // This must be run without prepared statements because each statement in the batch depends on + // the previous schema change. The lock timeout will be reset when implicit transaction ends. 
+ batch := pgconn.Batch{} + batch.ExecParams(SET_LOCK_TIMEOUT, nil, nil, nil, nil) + batch.ExecParams(CREATE_VERSION_SCHEMA, nil, nil, nil, nil) + batch.ExecParams(CREATE_SEED_TABLE, nil, nil, nil, nil) + if _, err := conn.PgConn().ExecBatch(ctx, &batch).ReadAll(); err != nil { + return errors.Errorf("failed to create seed table: %w", err) + } + return nil +} + +func ReadSeedTable(ctx context.Context, conn *pgx.Conn) ([]SeedFile, error) { + rows, err := conn.Query(ctx, SELECT_SEED_TABLE) + if err != nil { + return nil, errors.Errorf("failed to read seed table: %w", err) + } + return pgxv5.CollectRows[SeedFile](rows) +} diff --git a/pkg/migration/queries/drop.sql b/pkg/migration/queries/drop.sql index 368ee7228..71564659c 100644 --- a/pkg/migration/queries/drop.sql +++ b/pkg/migration/queries/drop.sql @@ -33,12 +33,14 @@ begin execute format('drop table if exists %I.%I cascade', rec.relnamespace::regnamespace::name, rec.relname); end loop; - -- truncate tables in auth and migrations schema + -- truncate tables in auth, storage, webhooks, and migrations schema for rec in select * from pg_class c where (c.relnamespace::regnamespace::name = 'auth' and c.relname != 'schema_migrations' + or c.relnamespace::regnamespace::name = 'storage' and c.relname != 'migrations' + or c.relnamespace::regnamespace::name = 'supabase_functions' and c.relname != 'migrations' or c.relnamespace::regnamespace::name = 'supabase_migrations') and c.relkind = 'r' loop @@ -74,4 +76,14 @@ begin loop execute format('drop policy if exists %I on %I.%I cascade', rec.policyname, rec.schemaname, rec.tablename); end loop; + + -- publications + for rec in + select * + from pg_publication p + where + p.pubname not like 'supabase_realtime%' and p.pubname not like 'realtime_messages%' + loop + execute format('drop publication if exists %I', rec.pubname); + end loop; end $$; diff --git a/pkg/migration/seed.go b/pkg/migration/seed.go index 79b2dbc22..988e8d4bc 100644 --- a/pkg/migration/seed.go +++ 
b/pkg/migration/seed.go @@ -7,17 +7,70 @@ import ( "os" "path/filepath" + "github.com/go-errors/errors" + "github.com/jackc/pgconn" + "github.com/jackc/pgerrcode" "github.com/jackc/pgx/v4" ) -func SeedData(ctx context.Context, pending []string, conn *pgx.Conn, fsys fs.FS) error { - for _, path := range pending { - filename := filepath.Base(path) - fmt.Fprintf(os.Stderr, "Seeding data from %s...\n", filename) - // Batch seed commands, safe to use statement cache - if seed, err := NewMigrationFromFile(path, fsys); err != nil { +func getRemoteSeeds(ctx context.Context, conn *pgx.Conn) (map[string]string, error) { + remotes, err := ReadSeedTable(ctx, conn) + if err != nil { + var pgErr *pgconn.PgError + if errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UndefinedTable { + // If seed table is undefined, the remote project has no migrations + return nil, nil + } + return nil, err + } + applied := make(map[string]string, len(remotes)) + for _, seed := range remotes { + applied[seed.Path] = seed.Hash + } + return applied, nil +} + +func GetPendingSeeds(ctx context.Context, locals []string, conn *pgx.Conn, fsys fs.FS) ([]SeedFile, error) { + if len(locals) == 0 { + return nil, nil + } + applied, err := getRemoteSeeds(ctx, conn) + if err != nil { + return nil, err + } + var pending []SeedFile + for _, path := range locals { + seed, err := NewSeedFile(path, fsys) + if err != nil { + return nil, err + } + if hash, exists := applied[seed.Path]; exists { + // Skip seed files that already exist + if hash == seed.Hash { + continue + } + // Mark seed file as dirty + seed.Dirty = true + } + pending = append(pending, *seed) + } + return pending, nil +} + +func SeedData(ctx context.Context, pending []SeedFile, conn *pgx.Conn, fsys fs.FS) error { + if len(pending) > 0 { + if err := CreateSeedTable(ctx, conn); err != nil { return err - } else if err := seed.ExecBatchWithCache(ctx, conn); err != nil { + } + } + for _, seed := range pending { + if seed.Dirty { + fmt.Fprintf(os.Stderr, 
"Updating seed hash to %s...\n", seed.Path) + } else { + fmt.Fprintf(os.Stderr, "Seeding data from %s...\n", seed.Path) + } + // Batch seed commands, safe to use statement cache + if err := seed.ExecBatchWithCache(ctx, conn, fsys); err != nil { return err } } @@ -28,9 +81,13 @@ func SeedGlobals(ctx context.Context, pending []string, conn *pgx.Conn, fsys fs. for _, path := range pending { filename := filepath.Base(path) fmt.Fprintf(os.Stderr, "Seeding globals from %s...\n", filename) - if globals, err := NewMigrationFromFile(path, fsys); err != nil { + globals, err := NewMigrationFromFile(path, fsys) + if err != nil { return err - } else if err := globals.ExecBatch(ctx, conn); err != nil { + } + // Skip inserting to migration history + globals.Version = "" + if err := globals.ExecBatch(ctx, conn); err != nil { return err } } diff --git a/pkg/migration/seed_test.go b/pkg/migration/seed_test.go index cf88a818f..7e9cd4aca 100644 --- a/pkg/migration/seed_test.go +++ b/pkg/migration/seed_test.go @@ -8,55 +8,143 @@ import ( fs "testing/fstest" "github.com/jackc/pgerrcode" + "github.com/jackc/pgx/v4" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/supabase/cli/pkg/pgtest" ) //go:embed testdata/seed.sql var testSeed string -func TestSeedData(t *testing.T) { +func TestPendingSeeds(t *testing.T) { pending := []string{"testdata/seed.sql"} - t.Run("seeds from file", func(t *testing.T) { + t.Run("finds new seeds", func(t *testing.T) { // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query(testSeed). - Reply("INSERT 0 1") + conn.Query(SELECT_SEED_TABLE). 
+ Reply("SELECT 0") + // Run test + seeds, err := GetPendingSeeds(context.Background(), pending, conn.MockClient(t), testMigrations) + // Check error + assert.NoError(t, err) + require.Len(t, seeds, 1) + assert.Equal(t, seeds[0].Path, pending[0]) + assert.Equal(t, seeds[0].Hash, "61868484fc0ddca2a2022217629a9fd9a4cf1ca479432046290797d6d40ffcc3") + assert.False(t, seeds[0].Dirty) + }) + + t.Run("finds dirty seeds", func(t *testing.T) { + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(SELECT_SEED_TABLE). + Reply("SELECT 1", SeedFile{Path: pending[0], Hash: "outdated"}) + // Run test + seeds, err := GetPendingSeeds(context.Background(), pending, conn.MockClient(t), testMigrations) + // Check error + assert.NoError(t, err) + require.Len(t, seeds, 1) + assert.Equal(t, seeds[0].Path, pending[0]) + assert.Equal(t, seeds[0].Hash, "61868484fc0ddca2a2022217629a9fd9a4cf1ca479432046290797d6d40ffcc3") + assert.True(t, seeds[0].Dirty) + }) + + t.Run("skips applied seed", func(t *testing.T) { + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(SELECT_SEED_TABLE). + Reply("SELECT 1", SeedFile{Path: pending[0], Hash: "61868484fc0ddca2a2022217629a9fd9a4cf1ca479432046290797d6d40ffcc3"}) + // Run test + seeds, err := GetPendingSeeds(context.Background(), pending, conn.MockClient(t), testMigrations) + // Check error + assert.NoError(t, err) + require.Empty(t, seeds) + }) + + t.Run("ignores missing seed table", func(t *testing.T) { + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(SELECT_SEED_TABLE). 
+ ReplyError(pgerrcode.UndefinedTable, `relation "seed_files" does not exist`) // Run test - err := SeedData(context.Background(), pending, conn.MockClient(t), testMigrations) + _, err := GetPendingSeeds(context.Background(), pending, conn.MockClient(t), testMigrations) // Check error assert.NoError(t, err) }) t.Run("throws error on missing file", func(t *testing.T) { + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(SELECT_SEED_TABLE). + Reply("SELECT 0") // Setup in-memory fs fsys := fs.MapFS{} // Run test - err := SeedData(context.Background(), pending, nil, fsys) + _, err := GetPendingSeeds(context.Background(), pending, conn.MockClient(t), fsys) // Check error assert.ErrorIs(t, err, os.ErrNotExist) }) +} + +func TestSeedData(t *testing.T) { + t.Run("seeds from file", func(t *testing.T) { + seed := SeedFile{ + Path: "testdata/seed.sql", + Hash: "61868484fc0ddca2a2022217629a9fd9a4cf1ca479432046290797d6d40ffcc3", + Dirty: true, + } + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + mockSeedHistory(conn). + Query(UPSERT_SEED_FILE, seed.Path, seed.Hash). + Reply("INSERT 0 1") + // Run test + err := SeedData(context.Background(), []SeedFile{seed}, conn.MockClient(t), testMigrations) + // Check error + assert.NoError(t, err) + }) - t.Run("throws error on insert failure", func(t *testing.T) { + t.Run("throws error on upsert failure", func(t *testing.T) { + seed := SeedFile{ + Path: "testdata/seed.sql", + Hash: "61868484fc0ddca2a2022217629a9fd9a4cf1ca479432046290797d6d40ffcc3", + } // Setup mock postgres conn := pgtest.NewConn() defer conn.Close(t) - conn.Query(testSeed). + mockSeedHistory(conn). + Query(testSeed+`;INSERT INTO supabase_migrations.seed_files(path, hash) VALUES( 'testdata/seed.sql' , '61868484fc0ddca2a2022217629a9fd9a4cf1ca479432046290797d6d40ffcc3' ) ON CONFLICT (path) DO UPDATE SET hash = EXCLUDED.hash`). 
ReplyError(pgerrcode.NotNullViolation, `null value in column "age" of relation "employees"`) // Run test - err := SeedData(context.Background(), pending, conn.MockClient(t), testMigrations) + err := SeedData(context.Background(), []SeedFile{seed}, conn.MockClient(t, func(cc *pgx.ConnConfig) { + cc.PreferSimpleProtocol = true + }), testMigrations) // Check error assert.ErrorContains(t, err, `ERROR: null value in column "age" of relation "employees" (SQLSTATE 23502)`) }) } -//go:embed testdata/globals.sql +func mockSeedHistory(conn *pgtest.MockConn) *pgtest.MockConn { + conn.Query(SET_LOCK_TIMEOUT). + Query(CREATE_VERSION_SCHEMA). + Reply("CREATE SCHEMA"). + Query(CREATE_SEED_TABLE). + Reply("CREATE TABLE") + return conn +} + +//go:embed testdata/1_globals.sql var testGlobals string func TestSeedGlobals(t *testing.T) { - pending := []string{"testdata/globals.sql"} + pending := []string{"testdata/1_globals.sql"} t.Run("seeds from file", func(t *testing.T) { // Setup mock postgres diff --git a/pkg/migration/testdata/globals.sql b/pkg/migration/testdata/1_globals.sql similarity index 100% rename from pkg/migration/testdata/globals.sql rename to pkg/migration/testdata/1_globals.sql diff --git a/pkg/parser/state.go b/pkg/parser/state.go index 38556f376..f32a67131 100644 --- a/pkg/parser/state.go +++ b/pkg/parser/state.go @@ -47,7 +47,7 @@ func (s *ReadyState) Next(r rune, data []byte) State { fallthrough case 'C': offset := len(data) - len(BEGIN_ATOMIC) - if offset >= 0 && strings.ToUpper(string(data[offset:])) == BEGIN_ATOMIC { + if offset >= 0 && strings.EqualFold(string(data[offset:]), BEGIN_ATOMIC) { return &AtomicState{prev: s, delimiter: []byte(END_ATOMIC)} } } @@ -176,7 +176,7 @@ func (s *AtomicState) Next(r rune, data []byte) State { if _, ok := s.prev.(*ReadyState); ok { window := data[len(data)-len(s.delimiter):] // Treat delimiter as case insensitive - if strings.ToUpper(string(window)) == string(s.delimiter) { + if strings.EqualFold(string(window), 
string(s.delimiter)) { return &ReadyState{} } } diff --git a/pkg/parser/token.go b/pkg/parser/token.go index 018a6f5d4..db0084342 100644 --- a/pkg/parser/token.go +++ b/pkg/parser/token.go @@ -8,6 +8,7 @@ import ( "github.com/go-errors/errors" "github.com/spf13/viper" + "github.com/supabase/cli/pkg/cast" ) // Equal to `startBufSize` from `bufio/scan.go` @@ -83,7 +84,7 @@ func Split(sql io.Reader, transform ...func(string) string) (stats []string, err // Increase scanner capacity to support very long lines containing e.g. geodata buf := make([]byte, startBufSize) - maxbuf := int(viper.GetSizeInBytes("SCANNER_BUFFER_SIZE")) + maxbuf := cast.UintToInt(viper.GetSizeInBytes("SCANNER_BUFFER_SIZE")) if maxbuf == 0 { maxbuf = MaxScannerCapacity } diff --git a/pkg/pgtest/mock.go b/pkg/pgtest/mock.go index b73ad255a..0f5e9bb84 100644 --- a/pkg/pgtest/mock.go +++ b/pkg/pgtest/mock.go @@ -153,7 +153,10 @@ func (r *MockConn) Reply(tag string, rows ...interface{}) *MockConn { } else if t := reflect.TypeOf(rows[0]); t.Kind() == reflect.Struct { s := reflect.ValueOf(rows[0]) for i := 0; i < s.NumField(); i++ { - name := t.Field(i).Name + name := pgxv5.GetColumnName(t.Field(i)) + if len(name) == 0 { + continue + } v := s.Field(i).Interface() if fd := toFieldDescription(v); fd != nil { fd.Name = []byte(name) @@ -182,6 +185,9 @@ func (r *MockConn) Reply(tag string, rows ...interface{}) *MockConn { } else if t := reflect.TypeOf(data); t.Kind() == reflect.Struct { s := reflect.ValueOf(rows[0]) for i := 0; i < s.NumField(); i++ { + if name := pgxv5.GetColumnName(t.Field(i)); len(name) == 0 { + continue + } v := s.Field(i).Interface() if value, oid := r.encodeValueArg(v); oid > 0 { dr.Values = append(dr.Values, value) diff --git a/pkg/pgxv5/rows.go b/pkg/pgxv5/rows.go index 299ce868c..96aa16f5a 100644 --- a/pkg/pgxv5/rows.go +++ b/pkg/pgxv5/rows.go @@ -104,18 +104,11 @@ func appendScanTargets(dstElemValue reflect.Value, scanTargets []any, fldDescs [ return nil, err } } else { - dbTag, 
dbTagPresent := sf.Tag.Lookup(structTagKey) - if dbTagPresent { - dbTag = strings.Split(dbTag, ",")[0] - } - if dbTag == "-" { + colName := GetColumnName(sf) + if len(colName) == 0 { // Field is ignored, skip it. continue } - colName := dbTag - if !dbTagPresent { - colName = sf.Name - } fpos := fieldPosByName(fldDescs, colName) if fpos == -1 || fpos >= len(scanTargets) { return nil, errors.Errorf("cannot find field %s in returned row", colName) @@ -126,3 +119,14 @@ func appendScanTargets(dstElemValue reflect.Value, scanTargets []any, fldDescs [ return scanTargets, err } + +func GetColumnName(sf reflect.StructField) string { + dbTag, dbTagPresent := sf.Tag.Lookup(structTagKey) + if !dbTagPresent { + return sf.Name + } + if dbTag = strings.Split(dbTag, ",")[0]; dbTag != "-" { + return dbTag + } + return "" +} diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index 8a681242d..63638e9a9 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -91,17 +91,9 @@ func (s *StorageAPI) UpsertObjects(ctx context.Context, bucketConfig config.Buck } fmt.Fprintln(os.Stderr, "Uploading:", filePath, "=>", dstPath) job := func() error { - f, err := fsys.Open(filePath) - if err != nil { - return errors.Errorf("failed to open file: %w", err) - } - defer f.Close() - fo, err := ParseFileOptions(f) - if err != nil { - return err - } - fo.Overwrite = true - return s.UploadObjectStream(ctx, dstPath, f, *fo) + return s.UploadObject(ctx, dstPath, filePath, fsys, func(fo *FileOptions) { + fo.Overwrite = true + }) } return jq.Put(job) } diff --git a/pkg/storage/objects.go b/pkg/storage/objects.go index f048cecf6..f338a63b2 100644 --- a/pkg/storage/objects.go +++ b/pkg/storage/objects.go @@ -4,9 +4,11 @@ import ( "context" "io" "io/fs" + "mime" "net/http" "os" "path" + "path/filepath" "strings" "github.com/go-errors/errors" @@ -88,7 +90,7 @@ func ParseFileOptions(f fs.File, opts ...func(*FileOptions)) (*FileOptions, erro return fo, nil } -func (s *StorageAPI) UploadObject(ctx 
context.Context, remotePath, localPath string, fsys afero.Fs, opts ...func(*FileOptions)) error { +func (s *StorageAPI) UploadObject(ctx context.Context, remotePath, localPath string, fsys fs.FS, opts ...func(*FileOptions)) error { f, err := fsys.Open(localPath) if err != nil { return errors.Errorf("failed to open file: %w", err) @@ -98,6 +100,13 @@ func (s *StorageAPI) UploadObject(ctx context.Context, remotePath, localPath str if err != nil { return err } + // For text/plain content types, we try to determine a more specific type + // based on the file extension, as the initial detection might be too generic + if strings.Contains(fo.ContentType, "text/plain") { + if extensionType := mime.TypeByExtension(filepath.Ext(localPath)); extensionType != "" { + fo.ContentType = extensionType + } + } return s.UploadObjectStream(ctx, remotePath, f, *fo) } diff --git a/pkg/storage/objects_test.go b/pkg/storage/objects_test.go new file mode 100644 index 000000000..a2e132a9b --- /dev/null +++ b/pkg/storage/objects_test.go @@ -0,0 +1,92 @@ +package storage + +import ( + "context" + "mime" + "net/http" + "testing" + fs "testing/fstest" + + "github.com/h2non/gock" + "github.com/stretchr/testify/assert" + "github.com/supabase/cli/internal/testing/apitest" + "github.com/supabase/cli/pkg/fetcher" +) + +var mockApi = StorageAPI{Fetcher: fetcher.NewFetcher( + "http://127.0.0.1", +)} + +func TestParseFileOptionsContentTypeDetection(t *testing.T) { + tests := []struct { + name string + content []byte + filename string + opts []func(*FileOptions) + wantMimeType string + wantCacheCtrl string + }{ + { + name: "detects PNG image", + content: []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A}, // PNG header + filename: "test.image", + wantMimeType: "image/png", + wantCacheCtrl: "max-age=3600", + }, + { + name: "detects JavaScript file", + content: []byte("const hello = () => console.log('Hello, World!');"), + filename: "script.js", + wantMimeType: mime.TypeByExtension(".js"), + 
wantCacheCtrl: "max-age=3600", + }, + { + name: "detects CSS file", + content: []byte(".header { color: #333; font-size: 16px; }"), + filename: "styles.css", + wantMimeType: mime.TypeByExtension(".css"), + wantCacheCtrl: "max-age=3600", + }, + { + name: "detects SQL file", + content: []byte("SELECT * FROM users WHERE id = 1;"), + filename: "query.sql", + wantMimeType: mime.TypeByExtension(".sql"), + wantCacheCtrl: "max-age=3600", + }, + { + name: "use text/plain as fallback for unrecognized extensions", + content: []byte("const hello = () => console.log('Hello, World!');"), + filename: "main.nonexistent", + wantMimeType: "text/plain; charset=utf-8", + wantCacheCtrl: "max-age=3600", + }, + { + name: "respects custom content type", + content: []byte("const hello = () => console.log('Hello, World!');"), + filename: "custom.js", + wantMimeType: "application/custom", + wantCacheCtrl: "max-age=3600", + opts: []func(*FileOptions){func(fo *FileOptions) { fo.ContentType = "application/custom" }}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create a temporary file with test content + fsys := fs.MapFS{tt.filename: &fs.MapFile{Data: tt.content}} + // Setup mock api + defer gock.OffAll() + gock.New("http://127.0.0.1"). + Post("/storage/v1/object/"+tt.filename). + MatchHeader("Content-Type", tt.wantMimeType). + MatchHeader("Cache-Control", tt.wantCacheCtrl). + Reply(http.StatusOK) + // Parse options + err := mockApi.UploadObject(context.Background(), tt.filename, tt.filename, fsys, tt.opts...) 
+ // Assert results + assert.NoError(t, err) + assert.Empty(t, apitest.ListUnmatchedRequests()) + }) + } +} diff --git a/pkg/vault/batch.go b/pkg/vault/batch.go new file mode 100644 index 000000000..8f44bb797 --- /dev/null +++ b/pkg/vault/batch.go @@ -0,0 +1,60 @@ +package vault + +import ( + "context" + "fmt" + "os" + + "github.com/go-errors/errors" + "github.com/jackc/pgx/v4" + "github.com/supabase/cli/pkg/config" + "github.com/supabase/cli/pkg/pgxv5" +) + +const ( + CREATE_VAULT_KV = "SELECT vault.create_secret($1, $2)" + READ_VAULT_KV = "SELECT id, name FROM vault.secrets WHERE name = ANY($1)" + UPDATE_VAULT_KV = "SELECT vault.update_secret($1, $2)" +) + +type VaultTable struct { + Id string + Name string +} + +func UpsertVaultSecrets(ctx context.Context, secrets map[string]config.Secret, conn *pgx.Conn) error { + var keys []string + toInsert := map[string]string{} + for k, v := range secrets { + if len(v.SHA256) > 0 { + keys = append(keys, k) + toInsert[k] = v.Value + } + } + if len(keys) == 0 { + return nil + } + fmt.Fprintln(os.Stderr, "Updating vault secrets...") + rows, err := conn.Query(ctx, READ_VAULT_KV, keys) + if err != nil { + return errors.Errorf("failed to read vault: %w", err) + } + toUpdate, err := pgxv5.CollectRows[VaultTable](rows) + if err != nil { + return err + } + batch := pgx.Batch{} + for _, r := range toUpdate { + secret := secrets[r.Name] + batch.Queue(UPDATE_VAULT_KV, r.Id, secret.Value) + delete(toInsert, r.Name) + } + // Remaining secrets should be created + for k, v := range toInsert { + batch.Queue(CREATE_VAULT_KV, v, k) + } + if err := conn.SendBatch(ctx, &batch).Close(); err != nil { + return errors.Errorf("failed to update vault: %w", err) + } + return nil +} diff --git a/scripts/postinstall.js b/scripts/postinstall.js index cc28b12fe..517617ffb 100755 --- a/scripts/postinstall.js +++ b/scripts/postinstall.js @@ -1,7 +1,7 @@ #!/usr/bin/env node // Ref 1: https://github.com/sanathkr/go-npm -// Ref 2: 
https://blog.xendit.engineer/how-we-repurposed-npm-to-publish-and-distribute-our-go-binaries-for-internal-cli-23981b80911b +// Ref 2: https://medium.com/xendit-engineering/how-we-repurposed-npm-to-publish-and-distribute-our-go-binaries-for-internal-cli-23981b80911b "use strict"; import binLinks from "bin-links"; @@ -98,45 +98,53 @@ async function main() { throw errUnsupported; } + // Read from package.json and prepare for the installation. const pkg = await readPackageJson(); if (platform === "windows") { // Update bin path in package.json pkg.bin[pkg.name] += ".exe"; } + // Prepare the installation path by creating the directory if it doesn't exist. const binPath = pkg.bin[pkg.name]; const binDir = path.dirname(binPath); await fs.promises.mkdir(binDir, { recursive: true }); - // First we will Un-GZip, then we will untar. - const ungz = zlib.createGunzip(); - const binName = path.basename(binPath); - const untar = extract({ cwd: binDir }, [binName]); - - const url = getDownloadUrl(pkg); - console.info("Downloading", url); + // Create the agent that will be used for all the fetch requests later. const proxyUrl = process.env.npm_config_https_proxy || process.env.npm_config_http_proxy || process.env.npm_config_proxy; - // Keeps the TCP connection alive when sending multiple requests // Ref: https://github.com/node-fetch/node-fetch/issues/1735 const agent = proxyUrl ? new HttpsProxyAgent(proxyUrl, { keepAlive: true }) : new Agent({ keepAlive: true }); - const resp = await fetch(url, { agent }); + // First, fetch the checksum map. + const checksumMap = await fetchAndParseCheckSumFile(pkg, agent); + + // Then, download the binary. 
+ const url = getDownloadUrl(pkg); + console.info("Downloading", url); + const resp = await fetch(url, { agent }); const hash = createHash("sha256"); const pkgNameWithPlatform = `${pkg.name}_${platform}_${arch}.tar.gz`; - const checksumMap = await fetchAndParseCheckSumFile(pkg, agent); + // Then, decompress the binary -- we will first Un-GZip, then we will untar. + const ungz = zlib.createGunzip(); + const binName = path.basename(binPath); + const untar = extract({ cwd: binDir }, [binName]); + + // Update the hash with the binary data as it's being downloaded. resp.body .on("data", (chunk) => { hash.update(chunk); }) + // Pipe the data to the ungz stream. .pipe(ungz); + // After the ungz stream has ended, verify the checksum. ungz .on("end", () => { const expectedChecksum = checksumMap?.[pkgNameWithPlatform]; @@ -151,8 +159,10 @@ async function main() { } console.info("Checksum verified."); }) + // Pipe the data to the untar stream. .pipe(untar); + // Wait for the untar stream to finish. 
await new Promise((resolve, reject) => { untar.on("error", reject); untar.on("end", () => resolve()); diff --git a/test/branch_test.go b/test/branch_test.go deleted file mode 100644 index a3d869024..000000000 --- a/test/branch_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package integration - -import ( - "context" - "encoding/json" - "os" - - "github.com/docker/docker/api/types" - "github.com/stretchr/testify/require" -) - -// this is the part of Database test suite - DBTestSuite -// test functions -func (suite *DBTestSuite) TestBranchCreate() { - suite.T().Skip("Local branching is deprecated") - // create branch - branch := "test-branch" - create, args, err := suite.cmd.Traverse([]string{"db", "branch", "create", branch}) - require.NoError(suite.T(), err) - create.SetContext(context.Background()) - err = create.RunE(create, args) - require.NoError(suite.T(), err) - - // check if branch dir exists - _, err = os.Stat("supabase/.branches/" + branch) - require.NoError(suite.T(), err) - - // check if all exec calls were made to docker api - ids := suite.constructParams() - require.ElementsMatch(suite.T(), suite.params, ids) - - // check commands in exec calls - require.Equal(suite.T(), 2, len(suite.bodies)) - var execBody types.ExecConfig - require.NoError(suite.T(), json.Unmarshal([]byte(suite.bodies[0]), &execBody)) - var startBody types.ExecStartCheck - require.NoError(suite.T(), json.Unmarshal([]byte(suite.bodies[1]), &startBody)) -} diff --git a/test/db_test.go b/test/db_test.go deleted file mode 100644 index 45de0ef2d..000000000 --- a/test/db_test.go +++ /dev/null @@ -1,144 +0,0 @@ -package integration - -// Basic imports -import ( - "context" - "io" - "net/http" - "os" - "path/filepath" - "sync" - "testing" - - "github.com/gin-gonic/gin" - gonanoid "github.com/matoous/go-nanoid/v2" - "github.com/spf13/cobra" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - clicmd "github.com/supabase/cli/cmd" - 
"github.com/supabase/cli/test/mocks/docker" -) - -type DBTestSuite struct { - suite.Suite - cmd *cobra.Command - tempDir string - ids []string - bodies []string - params []gin.Params - mtx sync.RWMutex -} - -// test functions -// add tests here <- - -// hooks -func (suite *DBTestSuite) SetupTest() { - suite.tempDir = NewTempDir(Logger, TempDir) - suite.mtx.Lock() - suite.ids = []string{} - suite.bodies = []string{} - suite.params = []gin.Params{} - suite.mtx.Unlock() - - // add docker mock handlers - DockerMock.ExecCreateHandler = func(c *gin.Context) { - suite.addParams(c.Copy()) - body, err := io.ReadAll(c.Request.Body) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{ - "message": "error reading body", - }) - return - } - suite.addBody(body) - - id := gonanoid.MustGenerate(docker.IDAlphabet, docker.IDLength) - c.JSON(http.StatusCreated, gin.H{ - "Id": id, - }) - suite.addID(id) - } - - DockerMock.ExecStartHandler = func(c *gin.Context) { - suite.addParams(c.Copy()) - body, err := io.ReadAll(c.Request.Body) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{ - "message": "error reading body", - }) - return - } - suite.addBody(body) - - docker.HijackedResponse(c, "0") - } - - DockerMock.ContainerInspectHandler = func(c *gin.Context) { - c.JSON(http.StatusOK, gin.H{}) - } - - // create supabase dir - suite.cmd = clicmd.GetRootCmd() - init, _, err := suite.cmd.Find([]string{"init"}) - require.NoError(suite.T(), err) - init.SetContext(context.Background()) - err = init.RunE(init, []string{}) - require.NoError(suite.T(), err) - - err = os.Mkdir("supabase/.branches", os.FileMode(0755)) - require.NoError(suite.T(), err) -} - -func (suite *DBTestSuite) TeardownTest() { - require.NoError(suite.T(), os.Chdir(TempDir)) -} - -// In order for 'go test' to run this suite, we need to create -// a normal test function and pass our suite to suite.Run -func TestDBTestSuite(t *testing.T) { - suite.Run(t, new(DBTestSuite)) -} - -// helper functions -func (suite 
*DBTestSuite) addParams(c *gin.Context) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.params = append(suite.params, c.Params) -} - -func (suite *DBTestSuite) addBody(body []byte) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.bodies = append(suite.bodies, string(body)) -} - -func (suite *DBTestSuite) addID(id string) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.ids = append(suite.ids, id) -} - -func (suite *DBTestSuite) constructParams() []gin.Params { - ids := []gin.Params{} - // for each exec docker call we have to calls to docker api: - // one to create exec, one to start exec - for _, id := range suite.ids { - // this one represents call to create exec - ids = append(ids, gin.Params{ - gin.Param{ - Key: "id", - Value: "supabase_db_" + filepath.Base(suite.tempDir), - }, - }) - - // this one represents call to start exec - ids = append(ids, gin.Params{ - gin.Param{ - Key: "id", - Value: id, - }, - }) - } - return ids -} diff --git a/test/init_test.go b/test/init_test.go deleted file mode 100644 index bdc8b7b7d..000000000 --- a/test/init_test.go +++ /dev/null @@ -1,50 +0,0 @@ -package integration - -// Basic imports -import ( - "context" - "os" - "testing" - - "github.com/spf13/cobra" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - clicmd "github.com/supabase/cli/cmd" - "github.com/supabase/cli/internal/utils" -) - -type InitTestSuite struct { - suite.Suite - tempDir string - cmd *cobra.Command -} - -// test functions -func (suite *InitTestSuite) TestInit() { - // init supabase - init, _, err := suite.cmd.Find([]string{"init"}) - require.NoError(suite.T(), err) - init.SetContext(context.Background()) - require.NoError(suite.T(), init.RunE(init, []string{})) - - // check if init dir exists - _, err = os.Stat(utils.ConfigPath) - require.NoError(suite.T(), err) -} - -// hooks -func (suite *InitTestSuite) SetupTest() { - // init cli - suite.cmd = clicmd.GetRootCmd() - suite.tempDir = NewTempDir(Logger, 
TempDir) -} - -func (suite *InitTestSuite) TeardownTest() { - require.NoError(suite.T(), os.Chdir(TempDir)) -} - -// In order for 'go test' to run this suite, we need to create -// a normal test function and pass our suite to suite.Run -func TestInitTestSuite(t *testing.T) { - suite.Run(t, new(InitTestSuite)) -} diff --git a/test/link_test.go b/test/link_test.go deleted file mode 100644 index afef6c6f8..000000000 --- a/test/link_test.go +++ /dev/null @@ -1,105 +0,0 @@ -package integration - -// Basic imports -import ( - "context" - "fmt" - "net/http" - "os" - "sync" - "testing" - - "github.com/gin-gonic/gin" - gonanoid "github.com/matoous/go-nanoid/v2" - "github.com/spf13/cobra" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - clicmd "github.com/supabase/cli/cmd" - "github.com/supabase/cli/internal/utils" - "github.com/supabase/cli/pkg/api" - "github.com/supabase/cli/test/mocks/supabase" -) - -type LinkTestSuite struct { - suite.Suite - tempDir string - cmd *cobra.Command - - ids []string - headers []http.Header - - mtx sync.RWMutex -} - -// test functions -func (suite *LinkTestSuite) TestLink() { - // run command - link, _, err := suite.cmd.Find([]string{"link"}) - link.SetContext(context.Background()) - require.NoError(suite.T(), err) - - id := gonanoid.MustGenerate(supabase.IDAlphabet, supabase.IDLength) - require.NoError(suite.T(), link.Flags().Set("project-ref", id)) - require.NoError(suite.T(), link.Flags().Set("password", "postgres")) - - require.NoError(suite.T(), link.RunE(link, []string{})) - - // check request details - suite.mtx.RLock() - defer suite.mtx.RUnlock() - require.Contains(suite.T(), suite.ids, id) - require.Contains(suite.T(), suite.headers, http.Header{ - "Authorization": []string{fmt.Sprintf("Bearer %s", supabase.AccessToken)}, - "Accept-Encoding": []string{"gzip"}, - "User-Agent": []string{"Go-http-client/1.1"}, - }) - _, err = os.Stat(utils.ProjectRefPath) - require.NoError(suite.T(), err) - ref, err := 
os.ReadFile(utils.ProjectRefPath) - require.NoError(suite.T(), err) - require.Equal(suite.T(), id, string(ref)) -} - -// hooks -func (suite *LinkTestSuite) SetupTest() { - // init cli - suite.cmd = clicmd.GetRootCmd() - suite.tempDir = NewTempDir(Logger, TempDir) - - // init supabase - init, _, err := suite.cmd.Find([]string{"init"}) - init.SetContext(context.Background()) - require.NoError(suite.T(), err) - require.NoError(suite.T(), init.RunE(init, []string{})) - - // implement mocks - SupaMock.FunctionsHandler = func(c *gin.Context) { - suite.addHeaders(c.Request.Header) - suite.addID(c.Params.ByName("id")) - - c.JSON(http.StatusOK, []api.FunctionResponse{}) - } -} - -func (suite *LinkTestSuite) TeardownTest() { - require.NoError(suite.T(), os.Chdir(TempDir)) -} - -// In order for 'go test' to run this suite, we need to create -// a normal test function and pass our suite to suite.Run -func TestLinkTestSuite(t *testing.T) { - // suite.Run(t, new(LinkTestSuite)) -} - -// helper functions -func (suite *LinkTestSuite) addID(id string) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.ids = append(suite.ids, id) -} - -func (suite *LinkTestSuite) addHeaders(headers http.Header) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.headers = append(suite.headers, headers) -} diff --git a/test/login_test.go b/test/login_test.go deleted file mode 100644 index 81f9cb895..000000000 --- a/test/login_test.go +++ /dev/null @@ -1,111 +0,0 @@ -package integration - -// Basic imports -import ( - "context" - "net/http" - "os" - "path/filepath" - "sync" - "testing" - - "github.com/gin-gonic/gin" - gonanoid "github.com/matoous/go-nanoid/v2" - "github.com/spf13/cobra" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - clicmd "github.com/supabase/cli/cmd" - "github.com/supabase/cli/test/mocks/supabase" -) - -type LoginTestSuite struct { - suite.Suite - tempDir string - cmd *cobra.Command - - ids []string - headers []http.Header - - mtx 
sync.RWMutex -} - -// test functions -func (suite *LoginTestSuite) TestLink() { - // run command - login, _, err := suite.cmd.Find([]string{"login"}) - require.NoError(suite.T(), err) - login.SetContext(context.Background()) - key := "sbp_" + gonanoid.MustGenerate(supabase.KeyAlphabet, supabase.KeyLength) - - // change stdin to read from a file - content := []byte(key) - tmpfile, err := os.CreateTemp(suite.tempDir, "key") - require.NoError(suite.T(), err) - defer os.Remove(tmpfile.Name()) // clean up - - _, err = tmpfile.Write(content) - require.NoError(suite.T(), err) - _, err = tmpfile.Seek(0, 0) - require.NoError(suite.T(), err) - - oldStdin := os.Stdin - defer func() { os.Stdin = oldStdin }() - os.Stdin = tmpfile - - err = login.Flags().Set("token", key) - require.NoError(suite.T(), err) - require.NoError(suite.T(), login.RunE(login, []string{})) - - // check token is saved - home, err := os.UserHomeDir() - require.NoError(suite.T(), err) - _, err = os.Stat(filepath.Join(home, ".supabase/access-token")) - require.NoError(suite.T(), err) - token, err := os.ReadFile(filepath.Join(home, ".supabase/access-token")) - require.NoError(suite.T(), err) - require.Equal(suite.T(), key, string(token)) -} - -// hooks -func (suite *LoginTestSuite) SetupTest() { - // init cli - suite.cmd = clicmd.GetRootCmd() - suite.tempDir = NewTempDir(Logger, TempDir) - - // init supabase - init, _, err := suite.cmd.Find([]string{"init"}) - require.NoError(suite.T(), err) - init.SetContext(context.Background()) - require.NoError(suite.T(), init.RunE(init, []string{})) - - // implement mocks - SupaMock.FunctionsHandler = func(c *gin.Context) { - suite.addHeaders(c.Request.Header) - suite.addID(c.Params.ByName("id")) - - c.JSON(http.StatusOK, gin.H{}) - } -} - -func (suite *LoginTestSuite) TeardownTest() { - require.NoError(suite.T(), os.Chdir(TempDir)) -} - -// In order for 'go test' to run this suite, we need to create -// a normal test function and pass our suite to suite.Run -func 
TestLoginTestSuite(t *testing.T) { - suite.Run(t, new(LoginTestSuite)) -} - -// helper functions -func (suite *LoginTestSuite) addID(id string) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.ids = append(suite.ids, id) -} - -func (suite *LoginTestSuite) addHeaders(headers http.Header) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.headers = append(suite.headers, headers) -} diff --git a/test/main_test.go b/test/main_test.go deleted file mode 100644 index 6d694e709..000000000 --- a/test/main_test.go +++ /dev/null @@ -1,103 +0,0 @@ -package integration - -import ( - "log" - "os" - "testing" - - "github.com/docker/docker/client" - "github.com/spf13/viper" - "github.com/supabase/cli/internal/utils" - "github.com/supabase/cli/test/mocks/docker" - "github.com/supabase/cli/test/mocks/supabase" -) - -const ( - DockerPort = ":2375" - SupabasePort = ":2376" -) - -var ( - TempDir string -) - -var ( - Logger *log.Logger - DockerMock *docker.Server - SupaMock *supabase.Server -) - -func TestMain(m *testing.M) { - Logger := log.New(os.Stdout, "", 0) - - Logger.Println("Global tests setup") - - DockerMock = newDockerMock(Logger) - SupaMock = newSupabaseMock(Logger) - TempDir = NewTempDir(Logger, "") - - // redirect clients to mock servers - os.Setenv("DOCKER_HOST", "tcp://127.0.0.1"+DockerPort) - utils.Docker = utils.NewDocker() - if err := client.WithVersion(docker.APIVersion)(utils.Docker); err != nil { - Logger.Fatal(err) - } - viper.Set("INTERNAL_API_HOST", "http://127.0.0.1"+SupabasePort) - os.Setenv("SUPABASE_ACCESS_TOKEN", supabase.AccessToken) - os.Setenv("HOME", TempDir) - - // run tests - exitVal := m.Run() - - Logger.Println("Global teardown") - os.RemoveAll(TempDir) - - // exit process with tests exit code - os.Exit(exitVal) -} - -func newDockerMock(Logger *log.Logger) *docker.Server { - dockerMock := docker.NewServer() - dockerRouter := dockerMock.NewRouter() - go func() { - err := dockerRouter.Run(DockerPort) - if err != nil { - Logger.Fatal(err) - 
} - }() - - return dockerMock -} - -func newSupabaseMock(Logger *log.Logger) *supabase.Server { - supaMock := supabase.NewServer() - supaRouter := supaMock.NewRouter() - go func() { - err := supaRouter.Run(SupabasePort) - if err != nil { - Logger.Fatal(err) - } - }() - - return supaMock -} - -func NewTempDir(Logger *log.Logger, baseDir string) string { - wd := baseDir - var err error - if baseDir == "" { - wd, err = os.Getwd() - } - if err != nil { - Logger.Fatal(err) - } - tempDir, err := os.MkdirTemp(wd, "cli-test-") - if err != nil { - Logger.Fatal(err) - } - err = os.Chdir(tempDir) - if err != nil { - Logger.Fatal(err) - } - return tempDir -} diff --git a/test/migration_test.go b/test/migration_test.go deleted file mode 100644 index 2321735d0..000000000 --- a/test/migration_test.go +++ /dev/null @@ -1,56 +0,0 @@ -package integration - -// Basic imports -import ( - "os" - "testing" - - gonanoid "github.com/matoous/go-nanoid/v2" - "github.com/spf13/cobra" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - clicmd "github.com/supabase/cli/cmd" - "github.com/supabase/cli/test/mocks/supabase" -) - -type MigrationTestSuite struct { - suite.Suite - tempDir string - cmd *cobra.Command -} - -// test functions -func (suite *MigrationTestSuite) TestNewMigration() { - // run command - migration, _, err := suite.cmd.Find([]string{"migration", "new"}) - require.NoError(suite.T(), err) - name := gonanoid.MustGenerate(supabase.IDAlphabet, 10) - require.NoError(suite.T(), migration.RunE(migration, []string{name})) - - // check migrations file created - subs, err := os.ReadDir("supabase/migrations") - require.NoError(suite.T(), err) - require.Regexp(suite.T(), `[0-9]{14}_`+name+".sql", subs[0].Name()) -} - -// hooks -func (suite *MigrationTestSuite) SetupTest() { - // init cli - suite.cmd = clicmd.GetRootCmd() - suite.tempDir = NewTempDir(Logger, TempDir) - - // init supabase - init, _, err := suite.cmd.Find([]string{"init"}) - 
require.NoError(suite.T(), err) - require.NoError(suite.T(), init.RunE(init, []string{})) -} - -func (suite *MigrationTestSuite) TeardownTest() { - require.NoError(suite.T(), os.Chdir(TempDir)) -} - -// In order for 'go test' to run this suite, we need to create -// a normal test function and pass our suite to suite.Run -func TestMigrationTestSuite(t *testing.T) { - suite.Run(t, new(MigrationTestSuite)) -} diff --git a/test/mocks/docker/httputils.go b/test/mocks/docker/httputils.go deleted file mode 100644 index c8a4496aa..000000000 --- a/test/mocks/docker/httputils.go +++ /dev/null @@ -1,67 +0,0 @@ -package docker - -import ( - "fmt" - "io" - "net/http" - - "github.com/docker/docker/pkg/ioutils" - "github.com/docker/docker/pkg/stdcopy" - "github.com/gin-gonic/gin" -) - -func CloseStreams(streams ...interface{}) { - for _, stream := range streams { - if tcpc, ok := stream.(interface { - CloseWrite() error - }); ok { - _ = tcpc.CloseWrite() - } else if closer, ok := stream.(io.Closer); ok { - _ = closer.Close() - } - } -} - -func HijackedResponse(c *gin.Context, exitCode string, output ...string) { - // hijack the connection - hijacker, ok := c.Writer.(http.Hijacker) - if !ok { - c.JSON(http.StatusBadRequest, gin.H{ - "message": "error hijacking connection", - }) - return - } - conn, _, err := hijacker.Hijack() - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{ - "message": "error hijacking connection", - }) - return - } - _, err = conn.Write([]byte{}) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{ - "message": "error hijacking connection", - }) - return - } - - // write success code signalizing that the connection is established and ready to stream data - fmt.Fprintf(conn, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n") - - // setup closer - closer := func() error { - CloseStreams(conn) - return nil - } - - // write some output if command suppose to write to stdout - outStream := stdcopy.NewStdWriter(conn, 
stdcopy.Stdout) - if len(output) > 0 { - fmt.Fprint(outStream, output) - } - // finish with exit code and close stream and connection as the command is done - fmt.Fprintf(outStream, "exit code %s", exitCode) - rc := ioutils.NewReadCloserWrapper(conn, closer) - rc.Close() -} diff --git a/test/mocks/docker/server.go b/test/mocks/docker/server.go deleted file mode 100644 index 705c7b4e1..000000000 --- a/test/mocks/docker/server.go +++ /dev/null @@ -1,99 +0,0 @@ -package docker - -import ( - "net/http" - - "github.com/gin-gonic/gin" -) - -const ( - APIVersion = "1.44" - IDAlphabet = "abcdef0123456789" - IDLength = 12 -) - -// Server struct with route handlers -type Server struct { - PingHandler func(c *gin.Context) - ContainerInspectHandler func(c *gin.Context) - ExecCreateHandler func(c *gin.Context) - ExecStartHandler func(c *gin.Context) -} - -var defaultHandler = func(c *gin.Context) { - c.JSON(http.StatusNotImplemented, gin.H{ - "message": "Not implemented", - }) -} - -// NewServer creates a new server with default handlers -func NewServer() *Server { - s := Server{ - ExecCreateHandler: defaultHandler, - ExecStartHandler: defaultHandler, - ContainerInspectHandler: defaultHandler, - } - return &s -} - -// NewRouter creating a new router and setting the routes for the server. 
-func (s *Server) NewRouter() *gin.Engine { - root := gin.Default() - root.HEAD("/_ping", s.ping) - root.GET("/_ping", s.ping) - - router := root.Group("/v" + APIVersion) - - containers := router.Group("/containers") - containers.GET("/:id/json", s.inspectContainer) - containers.POST("/:id/exec", s.createExec) - - exec := router.Group("/exec") - exec.POST("/:id/start", s.startExec) - exec.GET("/:id/json", s.inspectContainer) - - return root -} - -// ping -func (s *Server) ping(c *gin.Context) { - if s.PingHandler == nil { - c.Header("API-Version", APIVersion) - c.Header("OSType", "linux") - c.Status(http.StatusOK) - } else { - s.PingHandler(c) - } -} - -// container -func (s *Server) inspectContainer(c *gin.Context) { - if s.ContainerInspectHandler == nil { - c.JSON(http.StatusInternalServerError, gin.H{ - "message": "handler is nil", - }) - } else { - s.ContainerInspectHandler(c) - } -} - -// exec -func (s *Server) createExec(c *gin.Context) { - if s.ExecCreateHandler == nil { - c.JSON(http.StatusInternalServerError, gin.H{ - "message": "handler is nil", - }) - } else { - s.ExecCreateHandler(c) - } -} - -func (s *Server) startExec(c *gin.Context) { - if s.ExecStartHandler == nil { - c.JSON(http.StatusInternalServerError, gin.H{ - "message": "handler is nil", - }) - } else { - s.ExecStartHandler(c) - } -} diff --git a/test/mocks/supabase/server.go b/test/mocks/supabase/server.go deleted file mode 100644 index 41eb8c542..000000000 --- a/test/mocks/supabase/server.go +++ /dev/null @@ -1,74 +0,0 @@ -package supabase - -import ( - "net/http" - - "github.com/gin-gonic/gin" - gonanoid "github.com/matoous/go-nanoid/v2" -) - -const ( - IDAlphabet = "abcdefghijklmnopqrstuvwxyz" - IDLength = 20 - KeyAlphabet = "abcdef0123456789" - KeyLength = 40 -) - -var AccessToken = "sbp_" + gonanoid.MustGenerate(KeyAlphabet, KeyLength) - -// Server struct with route handlers -type Server struct { - FunctionsHandler func(c *gin.Context) - SecretsHandler func(c *gin.Context) -} - -var 
defaultHandler = func(c *gin.Context) { - c.JSON(http.StatusNotImplemented, gin.H{ - "message": "Not implemented", - }) -} - -// NewServer creates a new server with default handlers -func NewServer() *Server { - s := Server{ - FunctionsHandler: defaultHandler, - SecretsHandler: defaultHandler, - } - return &s -} - -// NewRouter creating a new router and setting the routes for the server. -func (s *Server) NewRouter() *gin.Engine { - root := gin.Default() - router := root.Group("/v1") - - projects := router.Group("/projects") - projects.GET("/:id/functions", s.functions) - projects.GET("/:id/secrets", s.secrets) - projects.GET("/:id/api-keys", func(c *gin.Context) { - c.JSON(http.StatusOK, []gin.H{}) - }) - - return root -} - -// project routes -func (s *Server) functions(c *gin.Context) { - if s.FunctionsHandler == nil { - c.JSON(http.StatusInternalServerError, gin.H{ - "message": "handler is nil", - }) - } else { - s.FunctionsHandler(c) - } -} - -func (s *Server) secrets(c *gin.Context) { - if s.SecretsHandler == nil { - c.JSON(http.StatusInternalServerError, gin.H{ - "message": "handler is nil", - }) - } else { - s.SecretsHandler(c) - } -} diff --git a/test/secrets_test.go b/test/secrets_test.go deleted file mode 100644 index b195e23ea..000000000 --- a/test/secrets_test.go +++ /dev/null @@ -1,121 +0,0 @@ -package integration - -// Basic imports -import ( - "context" - "fmt" - "net/http" - "os" - "sync" - "testing" - - "github.com/gin-gonic/gin" - gonanoid "github.com/matoous/go-nanoid/v2" - "github.com/spf13/cobra" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - clicmd "github.com/supabase/cli/cmd" - "github.com/supabase/cli/internal/utils/flags" - "github.com/supabase/cli/test/mocks/supabase" -) - -type SecretsTestSuite struct { - suite.Suite - tempDir string - cmd *cobra.Command - - ids []string - headers []http.Header - - mtx sync.RWMutex -} - -// test functions -func (suite *SecretsTestSuite) TestList() { - // run command - 
list, _, err := suite.cmd.Find([]string{"secrets", "list"}) - list.SetContext(context.Background()) - require.NoError(suite.T(), err) - - // set stdout to write into file so we can capture cmd output - tmpfile, err := os.CreateTemp(suite.tempDir, "output") - require.NoError(suite.T(), err) - defer os.Remove(tmpfile.Name()) // clean up - oldStdout := os.Stdout - defer func() { os.Stdout = oldStdout }() - os.Stdout = tmpfile - - flags.ProjectRef = gonanoid.MustGenerate(supabase.IDAlphabet, supabase.IDLength) - require.NoError(suite.T(), list.RunE(list, []string{})) - - // check request details - suite.mtx.RLock() - defer suite.mtx.RUnlock() - require.Contains(suite.T(), suite.ids, flags.ProjectRef) - require.Contains(suite.T(), suite.headers, http.Header{ - "Authorization": []string{fmt.Sprintf("Bearer %s", supabase.AccessToken)}, - "Accept-Encoding": []string{"gzip"}, - "User-Agent": []string{"SupabaseCLI/"}, - }) - - contents, err := os.ReadFile(tmpfile.Name()) - require.NoError(suite.T(), err) - require.Contains(suite.T(), string(contents), "some-key") - require.Contains(suite.T(), string(contents), "another") -} - -// hooks -func (suite *SecretsTestSuite) SetupTest() { - // init cli - suite.cmd = clicmd.GetRootCmd() - suite.tempDir = NewTempDir(Logger, TempDir) - - // init supabase - init, _, err := suite.cmd.Find([]string{"init"}) - require.NoError(suite.T(), err) - init.SetContext(context.Background()) - require.NoError(suite.T(), init.RunE(init, []string{})) - - // add `link` dir - require.NoError(suite.T(), os.MkdirAll("supabase/.temp", os.FileMode(0755))) - - // implement mocks - SupaMock.SecretsHandler = func(c *gin.Context) { - suite.addHeaders(c.Request.Header) - suite.addID(c.Params.ByName("id")) - - c.JSON(http.StatusOK, []gin.H{ - { - "Name": "some-key", - "Value": gonanoid.Must(), - }, - { - "Name": "another", - "Value": gonanoid.Must(), - }, - }) - } -} - -func (suite *SecretsTestSuite) TeardownTest() { - require.NoError(suite.T(), os.Chdir(TempDir)) 
-} - -// In order for 'go test' to run this suite, we need to create -// a normal test function and pass our suite to suite.Run -func TestSecretsTestSuite(t *testing.T) { - suite.Run(t, new(SecretsTestSuite)) -} - -// helper functions -func (suite *SecretsTestSuite) addID(id string) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.ids = append(suite.ids, id) -} - -func (suite *SecretsTestSuite) addHeaders(headers http.Header) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.headers = append(suite.headers, headers) -} diff --git a/test/status_test.go b/test/status_test.go deleted file mode 100644 index 509345d18..000000000 --- a/test/status_test.go +++ /dev/null @@ -1,108 +0,0 @@ -package integration - -// Basic imports -import ( - "context" - "net/http" - "os" - "path/filepath" - "sync" - "testing" - - "github.com/docker/docker/api/types" - "github.com/gin-gonic/gin" - "github.com/spf13/cobra" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - clicmd "github.com/supabase/cli/cmd" -) - -type StatusTestSuite struct { - suite.Suite - tempDir string - cmd *cobra.Command - - params []gin.Params - mtx sync.RWMutex -} - -// test functions -func (suite *StatusTestSuite) TestStatus() { - suite.T().Skip("Status command is no longer mocked") - // run command - status, _, err := suite.cmd.Find([]string{"status"}) - status.SetContext(context.Background()) - require.NoError(suite.T(), err) - - // set stdout to write into file so we can capture cmd output - tmpfile, err := os.CreateTemp(suite.tempDir, "output") - require.NoError(suite.T(), err) - defer os.Remove(tmpfile.Name()) // clean up - oldStdout := os.Stdout - defer func() { os.Stdout = oldStdout }() - os.Stdout = tmpfile - - // run command - require.NoError(suite.T(), status.RunE(status, []string{})) - - // check request details - suite.mtx.RLock() - defer suite.mtx.RUnlock() - require.Contains(suite.T(), suite.params, gin.Params{ - gin.Param{ - Key: "id", - Value: "supabase_db_" 
+ filepath.Base(suite.tempDir), - }, - }) - - contents, err := os.ReadFile(tmpfile.Name()) - require.NoError(suite.T(), err) - require.Contains(suite.T(), string(contents), "API URL: http://127.0.0.1:54321") - require.Contains(suite.T(), string(contents), "GraphQL URL: http://127.0.0.1:54321/graphql/v1") - require.Contains(suite.T(), string(contents), "DB URL: postgresql://postgres:postgres@127.0.0.1:54322/postgres") - require.Contains(suite.T(), string(contents), "Studio URL: http://127.0.0.1:54323") - require.Contains(suite.T(), string(contents), "Inbucket URL: http://127.0.0.1:54324") - require.Contains(suite.T(), string(contents), "JWT secret: super-secret-jwt-token-with-at-least-32-characters-long") - require.Contains(suite.T(), string(contents), "anon key: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9") - require.Contains(suite.T(), string(contents), "service_role key: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9") -} - -// hooks -func (suite *StatusTestSuite) SetupTest() { - // init cli - suite.cmd = clicmd.GetRootCmd() - suite.tempDir = NewTempDir(Logger, TempDir) - - // init supabase - init, _, err := suite.cmd.Find([]string{"init"}) - require.NoError(suite.T(), err) - init.SetContext(context.Background()) - require.NoError(suite.T(), init.RunE(init, []string{})) - - // implement mocks - DockerMock.ContainerInspectHandler = func(c *gin.Context) { - suite.addParams(c.Copy()) - c.JSON(http.StatusOK, types.ContainerJSON{ - ContainerJSONBase: &types.ContainerJSONBase{ - State: &types.ContainerState{Running: true}, - }, - }) - } -} - -func (suite *StatusTestSuite) TeardownTest() { - require.NoError(suite.T(), os.Chdir(TempDir)) -} - -// In order for 'go test' to run this suite, we need to create -// a normal test function and pass our suite to suite.Run -func TestStatusTestSuite(t *testing.T) { - suite.Run(t, new(StatusTestSuite)) -} - -// helper functions -func (suite *StatusTestSuite) addParams(c *gin.Context) { - suite.mtx.Lock() - defer suite.mtx.Unlock() - suite.params = 
append(suite.params, c.Params) -}