diff --git a/.changeset/rotten-glasses-smile.md b/.changeset/rotten-glasses-smile.md new file mode 100644 index 000000000000..ddeb7bdd69eb --- /dev/null +++ b/.changeset/rotten-glasses-smile.md @@ -0,0 +1,10 @@ +--- +"wrangler": patch +--- + +Improves the Wrangler auto-provisioning feature (gated behind the experimental flag `--x-provision`) by: + +- Writing provisioned resource IDs back to the user's config file (not strictly necessary, but it makes future deploys resilient to binding name changes) +- Fixing `--dry-run`, which previously threw an error when your config file contained auto-provisioned resources +- Improving the R2 bindings display to include the `bucket_name` from the config file on upload +- Fixing the bindings view for specific versions so that it no longer displays TOML diff --git a/packages/wrangler/e2e/provision.test.ts b/packages/wrangler/e2e/provision.test.ts index b18ba07f01ce..07885c55adcd 100644 --- a/packages/wrangler/e2e/provision.test.ts +++ b/packages/wrangler/e2e/provision.test.ts @@ -1,7 +1,7 @@ import assert from "node:assert"; import dedent from "ts-dedent"; import { fetch } from "undici"; -import { afterAll, beforeAll, describe, expect, it } from "vitest"; +import { afterAll, beforeEach, describe, expect, it } from "vitest"; import { CLOUDFLARE_ACCOUNT_ID } from "./helpers/account-id"; import { WranglerE2ETestHelper } from "./helpers/e2e-wrangler-test"; import { fetchText } from "./helpers/fetch-text"; @@ -38,7 +38,7 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( expect(text).toMatchInlineSnapshot(`"Hello World!"`); }); - beforeAll(async () => { + beforeEach(async () => { await helper.seed({ "wrangler.toml": dedent` name = "${workerName}" @@ -46,15 +46,19 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( compatibility_date = "2023-01-01" workers_dev = true - [[kv_namespaces]] - binding = "KV" + [[kv_namespaces]] + binding = "KV" - [[r2_buckets]] - binding = "R2" + [[r2_buckets]] + binding = "R2" - [[d1_databases]] - binding = "D1" - `, + [[r2_buckets]] + binding = "R2_WITH_NAME" + bucket_name = "does-not-exist" + + [[d1_databases]] + binding = "D1" + `, "src/index.ts": dedent` export default { fetch(request) { @@ -76,32 +80,33 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( await worker.exitCode; const output = await worker.output; expect(normalize(output)).toMatchInlineSnapshot(` - "Total Upload: xx KiB / gzip: xx KiB - The following bindings need to be provisioned: - Binding Resource - env.KV KV Namespace - env.D1 D1 Database - env.R2 R2 Bucket - Provisioning KV (KV Namespace)... - 🌀 Creating new KV Namespace "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-kv"... - ✨ KV provisioned 🎉 - Provisioning D1 (D1 Database)... - 🌀 Creating new D1 Database "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-d1"... - ✨ D1 provisioned 🎉 - Provisioning R2 (R2 Bucket)... - 🌀 Creating new R2 Bucket "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2"... - ✨ R2 provisioned 🎉 - 🎉 All resources provisioned, continuing with deployment...
- Your Worker has access to the following bindings: - Binding Resource - env.KV (00000000000000000000000000000000) KV Namespace - env.D1 (00000000-0000-0000-0000-000000000000) D1 Database - env.R2 (tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2) R2 Bucket - Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) - Deployed tmp-e2e-worker-00000000-0000-0000-0000-000000000000 triggers (TIMINGS) - https://tmp-e2e-worker-00000000-0000-0000-0000-000000000000.SUBDOMAIN.workers.dev - Current Version ID: 00000000-0000-0000-0000-000000000000" - `); + "Total Upload: xx KiB / gzip: xx KiB + The following bindings need to be provisioned: + Binding Resource + env.KV KV Namespace + env.D1 D1 Database + env.R2 R2 Bucket + Provisioning KV (KV Namespace)... + 🌀 Creating new KV Namespace "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-kv"... + ✨ KV provisioned 🎉 + Provisioning D1 (D1 Database)... + 🌀 Creating new D1 Database "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-d1"... + ✨ D1 provisioned 🎉 + Provisioning R2 (R2 Bucket)... + 🌀 Creating new R2 Bucket "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2"... + ✨ R2 provisioned 🎉 + 🎉 All resources provisioned, continuing with deployment... + Your Worker has access to the following bindings: + Binding Resource + env.KV (00000000000000000000000000000000) KV Namespace + env.D1 (00000000-0000-0000-0000-000000000000) D1 Database + env.R2 (tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2) R2 Bucket + env.R2_WITH_NAME (does-not-exist) R2 Bucket + Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) + Deployed tmp-e2e-worker-00000000-0000-0000-0000-000000000000 triggers (TIMINGS) + https://tmp-e2e-worker-00000000-0000-0000-0000-000000000000.SUBDOMAIN.workers.dev + Current Version ID: 00000000-0000-0000-0000-000000000000" + `); const urlMatch = output.match( /(?https:\/\/tmp-e2e-.+?\..+?\.workers\.dev)/ ); @@ -130,17 +135,18 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( await worker.exitCode; const output = await worker.output; expect(normalize(output)).toMatchInlineSnapshot(` - "Total Upload: xx KiB / gzip: xx KiB - Your Worker has access to the following bindings: - Binding Resource - env.KV (inherited) KV Namespace - env.D1 (inherited) D1 Database - env.R2 (inherited) R2 Bucket - Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) - Deployed tmp-e2e-worker-00000000-0000-0000-0000-000000000000 triggers (TIMINGS) - https://tmp-e2e-worker-00000000-0000-0000-0000-000000000000.SUBDOMAIN.workers.dev - Current Version ID: 00000000-0000-0000-0000-000000000000" - `); + "Total Upload: xx KiB / gzip: xx KiB + Your Worker has access to the following bindings: + Binding Resource + env.KV (inherited) KV Namespace + env.D1 (inherited) D1 Database + env.R2 (inherited) R2 Bucket + env.R2_WITH_NAME (does-not-exist) R2 Bucket + Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) + Deployed tmp-e2e-worker-00000000-0000-0000-0000-000000000000 triggers (TIMINGS) + https://tmp-e2e-worker-00000000-0000-0000-0000-000000000000.SUBDOMAIN.workers.dev + Current Version ID: 00000000-0000-0000-0000-000000000000" + `); const response = await retry( (resp) => !resp.ok, @@ -148,7 +154,36 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( ); await expect(response.text()).resolves.toEqual("Hello World!"); }); + it("can inspect current bindings", async () => { + const versionsRaw = await helper.run( + `wrangler versions list --json --x-provision` + ); + const versions = JSON.parse(versionsRaw.stdout) as 
unknown[]; + + const latest = versions.at(-1) as { id: string }; + + const versionView = await helper.run( + `wrangler versions view ${latest.id} --x-provision` + ); + + expect(normalizeOutput(versionView.output)).toMatchInlineSnapshot(` + "Version ID: 00000000-0000-0000-0000-000000000000 + Created: TIMESTAMP + Author: + Source: Unknown (version_upload) + Tag: - + Message: - + Handlers: fetch + Compatibility Date: 2023-01-01 + Your Worker has access to the following bindings: + Binding Resource + env.KV (00000000000000000000000000000000) KV Namespace + env.D1 (00000000-0000-0000-0000-000000000000) D1 Database + env.R2 (tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2) R2 Bucket + env.R2_WITH_NAME (does-not-exist) R2 Bucket" + `); + }); it("can inherit and provision resources on version upload", async () => { await helper.seed({ "wrangler.toml": dedent` diff --git a/packages/wrangler/e2e/versions.test.ts b/packages/wrangler/e2e/versions.test.ts index ee0f37467fe0..e52cd1cf0196 100644 --- a/packages/wrangler/e2e/versions.test.ts +++ b/packages/wrangler/e2e/versions.test.ts @@ -643,7 +643,6 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( Source: Unknown (version_upload) Tag: e2e-upload-assets Message: Upload via e2e test - ------------------------------------------------------------ Compatibility Date: 2023-01-01" `); }); diff --git a/packages/wrangler/src/__tests__/config/configuration.test.ts b/packages/wrangler/src/__tests__/config/configuration.test.ts index 6605b3585e07..711182351ede 100644 --- a/packages/wrangler/src/__tests__/config/configuration.test.ts +++ b/packages/wrangler/src/__tests__/config/configuration.test.ts @@ -2820,6 +2820,7 @@ describe("normalizeAndValidateConfig()", () => { MULTIWORKER: false, REMOTE_BINDINGS: false, DEPLOY_REMOTE_DIFF_CHECK: false, + AUTOCREATE_RESOURCES: false, }, () => normalizeAndValidateConfig( @@ -2977,6 +2978,7 @@ describe("normalizeAndValidateConfig()", () => { MULTIWORKER: false, REMOTE_BINDINGS: false, DEPLOY_REMOTE_DIFF_CHECK: false, + AUTOCREATE_RESOURCES: false, }, () => normalizeAndValidateConfig( @@ -3316,6 +3318,7 @@ describe("normalizeAndValidateConfig()", () => { MULTIWORKER: false, REMOTE_BINDINGS: false, DEPLOY_REMOTE_DIFF_CHECK: false, + AUTOCREATE_RESOURCES: false, }, () => normalizeAndValidateConfig( diff --git a/packages/wrangler/src/__tests__/dev/remote-bindings.test.ts b/packages/wrangler/src/__tests__/dev/remote-bindings.test.ts index d02217c0b1a3..a856e676b86b 100644 --- a/packages/wrangler/src/__tests__/dev/remote-bindings.test.ts +++ b/packages/wrangler/src/__tests__/dev/remote-bindings.test.ts @@ -472,7 +472,6 @@ describe("dev with remote bindings", { sequential: true }, () => { name: "EMAIL", remote: true, remoteProxyConnectionString, - type: "send_email", }, ], }, diff --git a/packages/wrangler/src/__tests__/experimental-commands-api.test.ts b/packages/wrangler/src/__tests__/experimental-commands-api.test.ts index 6be37284ac20..63a44635d3a4 100644 --- a/packages/wrangler/src/__tests__/experimental-commands-api.test.ts +++ b/packages/wrangler/src/__tests__/experimental-commands-api.test.ts @@ -30,6 +30,13 @@ describe("experimental_getWranglerCommands", () => { "requiresArg": true, "type": "string", }, + "experimental-auto-create": Object { + "alias": "x-auto-create", + "default": true, + "describe": "Automatically provision draft bindings with new resources", + "hidden": true, + "type": "boolean", + }, "experimental-provision": Object { "alias": Array [ "x-provision", diff --git 
a/packages/wrangler/src/__tests__/metrics.test.ts b/packages/wrangler/src/__tests__/metrics.test.ts index 9103f13b6c32..351efb59eb02 100644 --- a/packages/wrangler/src/__tests__/metrics.test.ts +++ b/packages/wrangler/src/__tests__/metrics.test.ts @@ -198,6 +198,7 @@ describe("metrics", () => { command: "wrangler docs", args: { xRemoteBindings: true, + xAutoCreate: true, search: [""], }, }; diff --git a/packages/wrangler/src/__tests__/provision.test.ts b/packages/wrangler/src/__tests__/provision.test.ts index fb2e78e193d5..5d2c71932e73 100644 --- a/packages/wrangler/src/__tests__/provision.test.ts +++ b/packages/wrangler/src/__tests__/provision.test.ts @@ -1,3 +1,5 @@ +import { rmSync } from "node:fs"; +import { readFile } from "node:fs/promises"; import { http, HttpResponse } from "msw"; import { mockAccountId, mockApiToken } from "./helpers/mock-account-id"; import { mockConsoleMethods } from "./helpers/mock-console"; @@ -18,7 +20,10 @@ import { mswListNewDeploymentsLatestFull } from "./helpers/msw/handlers/versions import { runInTempDir } from "./helpers/run-in-tmp"; import { runWrangler } from "./helpers/run-wrangler"; import { writeWorkerSource } from "./helpers/write-worker-source"; -import { writeWranglerConfig } from "./helpers/write-wrangler-config"; +import { + writeRedirectedWranglerConfig, + writeWranglerConfig, +} from "./helpers/write-wrangler-config"; import type { DatabaseInfo } from "../d1/types"; import type { Settings } from "../deployment-bundle/bindings"; @@ -197,6 +202,7 @@ describe("--x-provision", () => { Provisioning R2 (R2 Bucket)... ✨ R2 provisioned 🎉 + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work. 🎉 All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -317,6 +323,7 @@ describe("--x-provision", () => { Provisioning R2 (R2 Bucket)... ✨ R2 provisioned 🎉 + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work. 🎉 All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -450,6 +457,7 @@ describe("--x-provision", () => { 🌀 Creating new R2 Bucket \\"new-r2\\"... ✨ R2 provisioned 🎉 + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work. 🎉 All resources provisioned, continuing with deployment... 
Worker Startup Time: 100 ms @@ -466,6 +474,189 @@ describe("--x-provision", () => { `); expect(std.err).toMatchInlineSnapshot(`""`); expect(std.warn).toMatchInlineSnapshot(`""`); + + // IDs should be written back to the config file + expect(await readFile("wrangler.toml", "utf-8")).toMatchInlineSnapshot(` + "compatibility_date = \\"2022-01-12\\" + name = \\"test-name\\" + main = \\"index.js\\" + + [[kv_namespaces]] + binding = \\"KV\\" + id = \\"new-kv-id\\" + + [[r2_buckets]] + binding = \\"R2\\" + bucket_name = \\"new-r2\\" + + [[d1_databases]] + binding = \\"D1\\" + database_id = \\"new-d1-id\\" + " + `); + }); + + it("can provision KV, R2 and D1 bindings with new resources w/ redirected config", async () => { + writeRedirectedWranglerConfig({ + main: "../index.js", + compatibility_flags: ["nodejs_compat"], + kv_namespaces: [{ binding: "KV" }], + r2_buckets: [{ binding: "R2" }], + d1_databases: [{ binding: "D1" }], + }); + mockGetSettings(); + mockListKVNamespacesRequest({ + title: "test-kv", + id: "existing-kv-id", + }); + msw.use( + http.get("*/accounts/:accountId/d1/database", async () => { + return HttpResponse.json( + createFetchResult([ + { + name: "db-name", + uuid: "existing-d1-id", + }, + ]) + ); + }), + http.get("*/accounts/:accountId/r2/buckets", async () => { + return HttpResponse.json( + createFetchResult({ + buckets: [ + { + name: "existing-bucket-name", + }, + ], + }) + ); + }) + ); + + mockSelect({ + text: "Would you like to connect an existing KV Namespace or create a new one?", + result: "__WRANGLER_INTERNAL_NEW", + }); + mockPrompt({ + text: "Enter a name for your new KV Namespace", + result: "new-kv", + }); + mockCreateKVNamespace({ + assertTitle: "new-kv", + resultId: "new-kv-id", + }); + + mockSelect({ + text: "Would you like to connect an existing D1 Database or create a new one?", + result: "__WRANGLER_INTERNAL_NEW", + }); + mockPrompt({ + text: "Enter a name for your new D1 Database", + result: "new-d1", + }); + mockCreateD1Database({ + assertName: "new-d1", + resultId: "new-d1-id", + }); + + mockSelect({ + text: "Would you like to connect an existing R2 Bucket or create a new one?", + result: "__WRANGLER_INTERNAL_NEW", + }); + mockPrompt({ + text: "Enter a name for your new R2 Bucket", + result: "new-r2", + }); + mockCreateR2Bucket({ + assertBucketName: "new-r2", + }); + + mockUploadWorkerRequest({ + expectedBindings: [ + { + name: "KV", + type: "kv_namespace", + namespace_id: "new-kv-id", + }, + { + name: "R2", + type: "r2_bucket", + bucket_name: "new-r2", + }, + { + name: "D1", + type: "d1", + id: "new-d1-id", + }, + ], + }); + + await runWrangler("deploy --x-provision --x-auto-create=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Total Upload: xx KiB / gzip: xx KiB + + The following bindings need to be provisioned: + Binding Resource + env.KV KV Namespace + env.D1 D1 Database + env.R2 R2 Bucket + + + Provisioning KV (KV Namespace)... + 🌀 Creating new KV Namespace \\"new-kv\\"... + ✨ KV provisioned 🎉 + + Provisioning D1 (D1 Database)... + 🌀 Creating new D1 Database \\"new-d1\\"... + ✨ D1 provisioned 🎉 + + Provisioning R2 (R2 Bucket)... + 🌀 Creating new R2 Bucket \\"new-r2\\"... + ✨ R2 provisioned 🎉 + + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work. + 🎉 All resources provisioned, continuing with deployment... 
+ + Worker Startup Time: 100 ms + Your Worker has access to the following bindings: + Binding Resource + env.KV (new-kv-id) KV Namespace + env.D1 (new-d1-id) D1 Database + env.R2 (new-r2) R2 Bucket + + Uploaded test-name (TIMINGS) + Deployed test-name triggers (TIMINGS) + https://test-name.test-sub-domain.workers.dev + Current Version ID: Galaxy-Class" + `); + expect(std.err).toMatchInlineSnapshot(`""`); + expect(std.warn).toMatchInlineSnapshot(`""`); + + // IDs should be written back to the user config file + expect(await readFile("wrangler.toml", "utf-8")).toMatchInlineSnapshot(` + "compatibility_date = \\"2022-01-12\\" + name = \\"test-name\\" + main = \\"index.js\\" + + [[kv_namespaces]] + binding = \\"KV\\" + id = \\"new-kv-id\\" + + [[r2_buckets]] + binding = \\"R2\\" + bucket_name = \\"new-r2\\" + + [[d1_databases]] + binding = \\"D1\\" + database_id = \\"new-d1-id\\" + " + `); + + rmSync(".wrangler/deploy/config.json"); }); it("can prefill d1 database name from config file if provided", async () => { @@ -522,6 +713,7 @@ describe("--x-provision", () => { 🌀 Creating new D1 Database \\"prefilled-d1-name\\"... ✨ D1 provisioned 🎉 + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work. 🎉 All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -650,6 +842,7 @@ describe("--x-provision", () => { 🌀 Creating new D1 Database \\"new-d1-name\\"... ✨ D1 provisioned 🎉 + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work. 🎉 All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -728,6 +921,7 @@ describe("--x-provision", () => { 🌀 Creating new R2 Bucket \\"prefilled-r2-name\\"... ✨ BUCKET provisioned 🎉 + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work. 🎉 All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -923,6 +1117,7 @@ describe("--x-provision", () => { 🌀 Creating new R2 Bucket \\"existing-bucket-name\\"... ✨ BUCKET provisioned 🎉 + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work. 🎉 All resources provisioned, continuing with deployment... 
Worker Startup Time: 100 ms diff --git a/packages/wrangler/src/__tests__/versions/versions.view.test.ts b/packages/wrangler/src/__tests__/versions/versions.view.test.ts index 015cc11ad0e6..8cd0caf2ce3d 100644 --- a/packages/wrangler/src/__tests__/versions/versions.view.test.ts +++ b/packages/wrangler/src/__tests__/versions/versions.view.test.ts @@ -69,23 +69,23 @@ describe("versions view", () => { Source: Upload Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, scheduled Compatibility Date: 2020-01-01 Compatibility Flags: test, flag - ------------------------- bindings ------------------------- - [[analytics_engine_datasets]] - binding = ANALYTICS - dataset = analytics_dataset - - [[kv_namespaces]] - binding = \\"KV\\" - id = \\"kv-namespace-id\\" - " `); - expect(cnsl.out).toMatch(/⛅️ wrangler/); + expect(cnsl.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Your Worker has access to the following bindings: + Binding Resource + env.KV (kv-namespace-id) KV Namespace + env.ANALYTICS (analytics_dataset) Analytics Engine Dataset + " + `); expect(normalizeOutput(std.err)).toMatchInlineSnapshot(`""`); }); @@ -184,19 +184,20 @@ describe("versions view", () => { Source: Upload Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, scheduled Compatibility Date: 2020-01-01 Compatibility Flags: test, flag - ------------------------- bindings ------------------------- - [[analytics_engine_datasets]] - binding = ANALYTICS - dataset = analytics_dataset - - [[kv_namespaces]] - binding = \\"KV\\" - id = \\"kv-namespace-id\\" - + " + `); + expect(cnsl.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Your Worker has access to the following bindings: + Binding Resource + env.KV (kv-namespace-id) KV Namespace + env.ANALYTICS (analytics_dataset) Analytics Engine Dataset " `); @@ -318,7 +319,7 @@ describe("versions view", () => { Source: API 📡 Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue " `); @@ -365,7 +366,7 @@ describe("versions view", () => { Source: API 📡 Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 " @@ -414,7 +415,7 @@ describe("versions view", () => { Source: API 📡 Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 Compatibility Flags: flag_1, flag_2 @@ -467,11 +468,11 @@ describe("versions view", () => { Source: API 📡 Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 Compatibility Flags: flag_1, flag_2 - ------------------------- secrets ------------------------- + Secrets: Secret Name: SECRET_ONE Secret Name: SECRET_TWO " @@ -523,14 +524,10 @@ describe("versions view", () => { Source: API 📡 Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 Compatibility Flags: flag_1, flag_2 - ------------------------- bindings ------------------------- - [vars] - VAR_ONE = \\"var-one\\" - VAR_TWO = \\"var-one\\" " `); }); @@ -656,107 +653,41 @@ describe("versions view", () => { Source: API 📡 Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 Compatibility Flags: flag_1, flag_2 - 
------------------------- bindings ------------------------- - [ai] - binding = AI - - [[analytics_engine_datasets]] - binding = AE - dataset = datset - - [browser] - binding = \\"BROWSER\\" - - [[d1_databases]] - binding = \\"D1\\" - database_id = \\"d1-id\\" - - [[dispatch_namespaces]] - binding = \\"WFP\\" - namespce = \\"wfp-namespace\\" - - [[dispatch_namespaces]] - binding = \\"WFP_2\\" - namespce = \\"wfp-namespace\\" - outbound = { service = \\"outbound-worker\\" } - - [[dispatch_namespaces]] - binding = \\"WFP_3\\" - namespce = \\"wfp-namespace\\" - outbound = { service = \\"outbound-worker\\", parameters = [paramOne, paramTwo] } - - [[durable_objects.bindings]] - name = \\"DO\\" - class_name = \\"DurableObject\\" - - [[durable_objects.bindings]] - name = \\"DO_2\\" - class_name = \\"DurableObject\\" - script_name = \\"other-worker\\" - - [[hyperdrive]] - binding = \\"HYPERDRIVE\\" - id = \\"hyperdrive-id\\" - - [[kv_namespaces]] - binding = \\"KV\\" - id = \\"kv-id\\" - - [[mtls_certificates]] - binding = \\"MTLS\\" - certificate_id = \\"mtls-id\\" - - [[queues.producers]] - binding = \\"QUEUE\\" - queue = \\"queue\\" - - [[queues.producers]] - binding = \\"QUEUE_2\\" - queue = \\"queue\\" - delivery_delay = 60 - - [[r2_buckets]] - binding = \\"R2\\" - bucket_name = \\"r2-bucket\\" - - [[r2_buckets]] - binding = \\"R2_2\\" - bucket_name = \\"r2-bucket\\" - jurisdiction = \\"eu\\" - - [[send_email]] - name = \\"MAIL\\" - - [[send_email]] - name = \\"MAIL_2\\" - destination_address = \\"dest@example.com\\" - - [[send_email]] - name = \\"MAIL_3\\" - destination_address = \\"dest@example.com\\" - allowed_destination_addresses = [\\"1@a.com\\", \\"2@a.com\\"] - allowed_sender_addresses = [\\"3@a.com\\", \\"4@a.com\\"] - - [[services]] - binding = \\"SERVICE\\" - service = \\"SERVICE\\" - - [[services]] - binding = \\"SERVICE_2\\" - service = \\"SERVICE_2\\" - entrypoint = \\"Enterypoint\\" - - [[vectorize]] - binding = \\"VECTORIZE\\" - index_name = \\"index\\" - - [version_metadata] - binding = \\"VERSION_METADATA\\" - + " + `); + expect(cnsl.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Your Worker has access to the following bindings: + Binding Resource + env.DO (DurableObject) Durable Object + env.DO_2 (DurableObject, defined in other-worker) Durable Object + env.KV (kv-id) KV Namespace + env.MAIL (unrestricted) Send Email + env.MAIL_2 (dest@example.com) Send Email + env.MAIL_3 (dest@example.com - senders: 3@a.com, 4@a.com) Send Email + env.QUEUE (queue) Queue + env.QUEUE_2 (queue) Queue + env.D1 (d1-id) D1 Database + env.VECTORIZE (index) Vectorize Index + env.HYPERDRIVE (hyperdrive-id) Hyperdrive Config + env.R2 (r2-bucket) R2 Bucket + env.R2_2 (r2-bucket (eu)) R2 Bucket + env.SERVICE (worker) Worker + env.SERVICE_2 (worker#Enterypoint) Worker + env.AE (datset) Analytics Engine Dataset + env.BROWSER Browser + env.AI AI + env.VERSION_METADATA Worker Version Metadata + env.WFP (wfp-namespace) Dispatch Namespace + env.WFP_2 (wfp-namespace (outbound -> outbound-worker)) Dispatch Namespace + env.WFP_3 (wfp-namespace (outbound -> outbound-worker)) Dispatch Namespace + env.MTLS (mtls-id) mTLS Certificate " `); }); diff --git a/packages/wrangler/src/api/dev.ts b/packages/wrangler/src/api/dev.ts index ae11921c019c..15de728c8005 100644 --- a/packages/wrangler/src/api/dev.ts +++ b/packages/wrangler/src/api/dev.ts @@ -222,6 +222,7 @@ export async function unstable_dev( logLevel: options?.logLevel ?? defaultLogLevel, port: options?.port ?? 
0, experimentalProvision: undefined, + experimentalAutoCreate: false, experimentalRemoteBindings: true, experimentalVectorizeBindToProd: vectorizeBindToProd ?? false, experimentalImagesLocalMode: imagesLocalMode ?? false, @@ -240,6 +241,7 @@ export async function unstable_dev( RESOURCES_PROVISION: false, REMOTE_BINDINGS: false, DEPLOY_REMOTE_DIFF_CHECK: false, + AUTOCREATE_RESOURCES: false, }, () => startDev(devOptions) ); diff --git a/packages/wrangler/src/api/startDevWorker/utils.ts b/packages/wrangler/src/api/startDevWorker/utils.ts index 94db0720deb0..c804f2bae407 100644 --- a/packages/wrangler/src/api/startDevWorker/utils.ts +++ b/packages/wrangler/src/api/startDevWorker/utils.ts @@ -2,7 +2,11 @@ import assert from "node:assert"; import { readFile } from "node:fs/promises"; import { assertNever } from "../../utils/assert-never"; import type { ConfigBindingOptions } from "../../config"; -import type { CfWorkerInit } from "../../deployment-bundle/worker"; +import type { WorkerMetadataBinding } from "../../deployment-bundle/create-worker-upload-form"; +import type { + CfDispatchNamespace, + CfWorkerInit, +} from "../../deployment-bundle/worker"; import type { Binding, File, @@ -308,8 +312,22 @@ export function convertCfWorkerInitBindingsToBindings( return output; } +/** + * Convert either StartDevWorkerOptions["bindings"] or WorkerMetadataBinding[] to CfWorkerInit["bindings"] + * This function is by design temporary, but has lived longer than originally expected. + * For some context, CfWorkerInit is the in-memory representation of a Worker that Wrangler uses, + * WorkerMetadataBinding is the representation of bindings that comes from the API, and StartDevWorkerOptions + * is the "new" in-memory representation of a Worker that's used in Wrangler's dev flow. Over + * time, all uses of CfWorkerInit should transition to StartDevWorkerOptions, but that's a pretty big refactor. + * As such, in the meantime we have conversion functions so that different code paths can deal with the format they + * expect and were written for. + * + * WARNING: Using this with WorkerMetadataBinding[] will lose information about certain + * binding types (i.e. WASM modules, text blobs, and data blobs). These binding types are deprecated + * but may still be used by some Workers in the wild. + */ export async function convertBindingsToCfWorkerInitBindings( - inputBindings: StartDevWorkerOptions["bindings"] + inputBindings: StartDevWorkerOptions["bindings"] | WorkerMetadataBinding[] ): Promise<{ bindings: CfWorkerInit["bindings"]; fetchers: Record; @@ -349,23 +367,38 @@ export async function convertBindingsToCfWorkerInitBindings( const fetchers: Record = {}; - for (const [name, binding] of Object.entries(inputBindings ?? {})) { + const bindingEntries: [string, WorkerMetadataBinding | Binding][] = + Array.isArray(inputBindings) + ? inputBindings.map((b) => [b.name, b]) + : Object.entries(inputBindings ?? {}); + + for (const [name, binding] of bindingEntries) { if (binding.type === "plain_text") { bindings.vars ??= {}; - bindings.vars[name] = binding.value; + bindings.vars[name] = "value" in binding ? binding.value : binding.text; } else if (binding.type === "json") { bindings.vars ??= {}; - bindings.vars[name] = binding.value; + bindings.vars[name] = "value" in binding ? 
binding.value : binding.json; } else if (binding.type === "kv_namespace") { bindings.kv_namespaces ??= []; - bindings.kv_namespaces.push({ ...binding, binding: name }); + bindings.kv_namespaces.push({ + ...omitType(binding), + binding: name, + id: "namespace_id" in binding ? binding.namespace_id : binding.id, + }); } else if (binding.type === "send_email") { bindings.send_email ??= []; - bindings.send_email.push({ ...binding, name: name }); + bindings.send_email.push({ ...omitType(binding), name: name }); } else if (binding.type === "wasm_module") { + if (!("source" in binding)) { + continue; + } bindings.wasm_modules ??= {}; bindings.wasm_modules[name] = await getBinaryFileContents(binding.source); } else if (binding.type === "text_blob") { + if (!("source" in binding)) { + continue; + } bindings.text_blobs ??= {}; if (typeof binding.source.path === "string") { @@ -377,72 +410,100 @@ export async function convertBindingsToCfWorkerInitBindings( ); } } else if (binding.type === "data_blob") { + if (!("source" in binding)) { + continue; + } bindings.data_blobs ??= {}; bindings.data_blobs[name] = await getBinaryFileContents(binding.source); } else if (binding.type === "browser") { - bindings.browser = { ...binding, binding: name }; + bindings.browser = { ...omitType(binding), binding: name }; } else if (binding.type === "ai") { - bindings.ai = { ...binding, binding: name }; + bindings.ai = { ...omitType(binding), binding: name }; } else if (binding.type === "images") { - bindings.images = { ...binding, binding: name }; + bindings.images = { ...omitType(binding), binding: name }; } else if (binding.type === "version_metadata") { bindings.version_metadata = { binding: name }; } else if (binding.type === "durable_object_namespace") { bindings.durable_objects ??= { bindings: [] }; - bindings.durable_objects.bindings.push({ ...binding, name: name }); + bindings.durable_objects.bindings.push({ + ...omitType(binding), + name: name, + }); } else if (binding.type === "queue") { bindings.queues ??= []; - bindings.queues.push({ ...binding, binding: name }); + bindings.queues.push({ ...omitType(binding), binding: name }); } else if (binding.type === "r2_bucket") { bindings.r2_buckets ??= []; - bindings.r2_buckets.push({ ...binding, binding: name }); + bindings.r2_buckets.push({ ...omitType(binding), binding: name }); } else if (binding.type === "d1") { bindings.d1_databases ??= []; - bindings.d1_databases.push({ ...binding, binding: name }); + bindings.d1_databases.push({ + ...omitType(binding), + binding: name, + database_id: "id" in binding ? 
binding.id : binding.database_id, + }); } else if (binding.type === "vectorize") { bindings.vectorize ??= []; - bindings.vectorize.push({ ...binding, binding: name }); + bindings.vectorize.push({ ...omitType(binding), binding: name }); } else if (binding.type === "hyperdrive") { bindings.hyperdrive ??= []; - bindings.hyperdrive.push({ ...binding, binding: name }); + bindings.hyperdrive.push({ ...omitType(binding), binding: name }); } else if (binding.type === "service") { bindings.services ??= []; - bindings.services.push({ ...binding, binding: name }); + bindings.services.push({ ...omitType(binding), binding: name }); } else if (binding.type === "fetcher") { fetchers[name] = binding.fetcher; } else if (binding.type === "analytics_engine") { bindings.analytics_engine_datasets ??= []; - bindings.analytics_engine_datasets.push({ ...binding, binding: name }); + bindings.analytics_engine_datasets.push({ + ...omitType(binding), + binding: name, + }); } else if (binding.type === "dispatch_namespace") { bindings.dispatch_namespaces ??= []; - bindings.dispatch_namespaces.push({ ...binding, binding: name }); + const outbound: CfDispatchNamespace["outbound"] = + binding.outbound && "worker" in binding.outbound + ? { + service: binding.outbound.worker.service, + environment: binding.outbound.worker.environment, + parameters: binding.outbound.params?.map((p) => p.name), + } + : binding.outbound; + bindings.dispatch_namespaces.push({ + ...omitType(binding), + binding: name, + outbound, + }); } else if (binding.type === "mtls_certificate") { bindings.mtls_certificates ??= []; - bindings.mtls_certificates.push({ ...binding, binding: name }); + bindings.mtls_certificates.push({ ...omitType(binding), binding: name }); } else if (binding.type === "pipeline") { bindings.pipelines ??= []; - bindings.pipelines.push({ ...binding, binding: name }); + bindings.pipelines.push({ ...omitType(binding), binding: name }); } else if (binding.type === "logfwdr") { bindings.logfwdr ??= { bindings: [] }; - bindings.logfwdr.bindings.push({ ...binding, name: name }); + bindings.logfwdr.bindings.push({ ...omitType(binding), name: name }); } else if (binding.type === "workflow") { bindings.workflows ??= []; - bindings.workflows.push({ ...binding, binding: name }); + bindings.workflows.push({ ...omitType(binding), binding: name }); } else if (binding.type === "secrets_store_secret") { bindings.secrets_store_secrets ??= []; - bindings.secrets_store_secrets.push({ ...binding, binding: name }); + bindings.secrets_store_secrets.push({ + ...omitType(binding), + binding: name, + }); } else if (binding.type === "unsafe_hello_world") { bindings.unsafe_hello_world ??= []; - bindings.unsafe_hello_world.push({ ...binding, binding: name }); + bindings.unsafe_hello_world.push({ ...omitType(binding), binding: name }); } else if (binding.type === "ratelimit") { bindings.ratelimits ??= []; - bindings.ratelimits.push({ ...binding, name: name }); + bindings.ratelimits.push({ ...omitType(binding), name: name }); } else if (binding.type === "worker_loader") { bindings.worker_loaders ??= []; - bindings.worker_loaders.push({ ...binding, binding: name }); + bindings.worker_loaders.push({ ...omitType(binding), binding: name }); } else if (binding.type === "vpc_service") { bindings.vpc_services ??= []; - bindings.vpc_services.push({ ...binding, binding: name }); + bindings.vpc_services.push({ ...omitType(binding), binding: name }); } else if (isUnsafeBindingType(binding.type)) { bindings.unsafe ??= { bindings: [], @@ -466,6 +527,13 @@ function 
isUnsafeBindingType(type: string): type is `unsafe_${string}` { return type.startsWith("unsafe_"); } +function omitType>({ + type: _, + ...value +}: T): Omit { + return value; +} + export function extractBindingsOfType< Type extends NonNullable[string]["type"], >( diff --git a/packages/wrangler/src/core/register-yargs-command.ts b/packages/wrangler/src/core/register-yargs-command.ts index f5d69b38853c..1bde3649a733 100644 --- a/packages/wrangler/src/core/register-yargs-command.ts +++ b/packages/wrangler/src/core/register-yargs-command.ts @@ -159,6 +159,7 @@ function createHandler(def: CommandDefinition, commandName: string) { RESOURCES_PROVISION: args.experimentalProvision ?? false, REMOTE_BINDINGS: args.experimentalRemoteBindings ?? false, DEPLOY_REMOTE_DIFF_CHECK: false, + AUTOCREATE_RESOURCES: args.experimentalAutoCreate, }; await run(experimentalFlags, () => { diff --git a/packages/wrangler/src/deploy/deploy.ts b/packages/wrangler/src/deploy/deploy.ts index 3048a55d5ec3..5453d7e8a6ef 100644 --- a/packages/wrangler/src/deploy/deploy.ts +++ b/packages/wrangler/src/deploy/deploy.ts @@ -859,7 +859,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m } } - workerBundle = createWorkerUploadForm(worker); + workerBundle = createWorkerUploadForm(worker, { dryRun: true }); printBindings( { ...withoutStaticAssets, vars: maskedVars }, config.tail_consumers, diff --git a/packages/wrangler/src/deploy/index.ts b/packages/wrangler/src/deploy/index.ts index ef6b0a6fcf66..a295eb294294 100644 --- a/packages/wrangler/src/deploy/index.ts +++ b/packages/wrangler/src/deploy/index.ts @@ -219,13 +219,6 @@ export const deployCommand = createCommand({ "Name of a dispatch namespace to deploy the Worker to (Workers for Platforms)", type: "string", }, - "experimental-auto-create": { - describe: "Automatically provision draft bindings with new resources", - type: "boolean", - default: true, - hidden: true, - alias: "x-auto-create", - }, "containers-rollout": { describe: "Rollout strategy for Containers changes. If set to immediate, it will override `rollout_percentage_steps` if configured and roll out to 100% of instances in one step. ", @@ -251,6 +244,7 @@ export const deployCommand = createCommand({ RESOURCES_PROVISION: args.experimentalProvision ?? false, REMOTE_BINDINGS: args.experimentalRemoteBindings ?? true, DEPLOY_REMOTE_DIFF_CHECK: args.experimentalDeployRemoteDiffCheck ?? 
false, + AUTOCREATE_RESOURCES: args.experimentalAutoCreate, }), warnIfMultipleEnvsConfiguredButNoneSpecified: true, }, diff --git a/packages/wrangler/src/deployment-bundle/bindings.ts b/packages/wrangler/src/deployment-bundle/bindings.ts index fe10223533c3..b468fce35ccb 100644 --- a/packages/wrangler/src/deployment-bundle/bindings.ts +++ b/packages/wrangler/src/deployment-bundle/bindings.ts @@ -1,10 +1,15 @@ import assert from "node:assert"; import { fetchResult } from "../cfetch"; +import { + experimental_patchConfig, + PatchConfigError, +} from "../config/patch-config"; import { createD1Database } from "../d1/create"; import { listDatabases } from "../d1/list"; import { getDatabaseInfoFromIdOrName } from "../d1/utils"; import { prompt, select } from "../dialogs"; import { UserError } from "../errors"; +import { isNonInteractiveOrCI } from "../is-interactive"; import { createKVNamespace, listKVNamespaces } from "../kv/helpers"; import { logger } from "../logger"; import * as metrics from "../metrics"; @@ -12,7 +17,7 @@ import { APIError } from "../parse"; import { createR2Bucket, getR2Bucket, listR2Buckets } from "../r2/helpers"; import { isLegacyEnv } from "../utils/isLegacyEnv"; import { printBindings } from "../utils/print-bindings"; -import type { Config } from "../config"; +import type { Config, RawConfig } from "../config"; import type { ComplianceConfig } from "../environment-variables/misc-variables"; import type { WorkerMetadataBinding } from "./create-worker-upload-form"; import type { @@ -164,6 +169,7 @@ class R2Handler extends ProvisionResourceHandler<"r2_bucket", CfR2Bucket> { get name(): string | undefined { return this.binding.bucket_name as string; } + async create(name: string) { await createR2Bucket( this.complianceConfig, @@ -181,12 +187,30 @@ class R2Handler extends ProvisionResourceHandler<"r2_bucket", CfR2Bucket> { ) { super("r2_bucket", binding, "bucket_name", complianceConfig, accountId); } + + /** + * Inheriting an R2 binding replaces the id property (bucket_name for R2) with the inheritance symbol. + * This works when deploying (and is appropriate for all other binding types), but it means that the + * bucket_name for an R2 bucket is not displayed when deploying. As such, only use the inheritance symbol + * if the R2 binding has no `bucket_name`. + */ + override inherit(): void { + this.binding.bucket_name ??= INHERIT_SYMBOL; + } + + /** + * R2 bindings can be inherited if the binding name and jurisdiction match. + * Additionally, if the user has specified a bucket_name in config, make sure that matches + */ canInherit(settings: Settings | undefined): boolean { return !!settings?.bindings.find( (existing) => existing.type === this.type && existing.name === this.binding.binding && - existing.jurisdiction === this.binding.jurisdiction + existing.jurisdiction === this.binding.jurisdiction && + (this.binding.bucket_name + ? this.binding.bucket_name === existing.bucket_name + : true) ); } async isConnectedToExistingResource(): Promise { @@ -385,7 +409,8 @@ async function collectPendingResources( complianceConfig: ComplianceConfig, accountId: string, scriptName: string, - bindings: CfWorkerInit["bindings"] + bindings: CfWorkerInit["bindings"], + requireRemote: boolean ): Promise { let settings: Settings | undefined; @@ -406,6 +431,9 @@ async function collectPendingResources( HANDLERS ) as (keyof typeof HANDLERS)[]) { for (const resource of bindings[resourceType] ?? 
[]) { + if (requireRemote && !resource.remote) { + continue; + } const h = new HANDLERS[resourceType].Handler( resource, complianceConfig, @@ -426,21 +454,30 @@ async function collectPendingResources( (a, b) => HANDLERS[a.resourceType].sort - HANDLERS[b.resourceType].sort ); } + export async function provisionBindings( bindings: CfWorkerInit["bindings"], accountId: string, scriptName: string, autoCreate: boolean, - config: Config + config: Config, + requireRemote = false ): Promise { + const configPath = config.userConfigPath ?? config.configPath; const pendingResources = await collectPendingResources( config, accountId, scriptName, - bindings + bindings, + requireRemote ); if (pendingResources.length > 0) { + assert( + configPath, + "Provisioning resources is not possible without a config file" + ); + if (!isLegacyEnv(config)) { throw new UserError( "Provisioning resources is not supported with a service environment" @@ -452,6 +489,7 @@ export async function provisionBindings( printable[resource.resourceType] ??= []; printable[resource.resourceType].push({ binding: resource.binding }); } + printBindings(printable, config.tail_consumers, { provisioning: true }); logger.log(); @@ -471,6 +509,56 @@ export async function provisionBindings( ); } + const patch: RawConfig = {}; + + const allChanges: Map = + new Map(); + + for (const resource of pendingResources) { + allChanges.set(resource.binding, resource.handler.binding); + } + + for (const resourceType of Object.keys( + HANDLERS + ) as (keyof typeof HANDLERS)[]) { + for (const binding of bindings[resourceType] ?? []) { + patch[resourceType] ??= []; + + const bindingToWrite = allChanges.has(binding.binding) + ? // Gated by Map.has() + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + allChanges.get(binding.binding)! + : binding; + + patch[resourceType].push( + Object.fromEntries( + Object.entries(bindingToWrite).filter( + // Make sure all the values are JSON serialisable. + // Otherwise we end up with "undefined" in the config + ([_, value]) => typeof value === "string" + ) + ) as NonNullable<(typeof patch)[typeof resourceType]>[number] + ); + } + } + + // If the user is performing an interactive deploy, write the provisioned IDs back to the config file. + // This is not necessary, as future deploys can use inherited resources, but it can help with + // portability of the config file, and adds robustness to bindings being renamed. + if (!isNonInteractiveOrCI()) { + try { + await experimental_patchConfig(configPath, patch, false); + logger.log( + "Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discard. Either way future deploys will continue to work." + ); + } catch (e) { + // no-op — if the user is using TOML config we can't update it. 
+ if (!(e instanceof PatchConfigError)) { + throw e; + } + } + } + const resourceCount = pendingResources.reduce( (acc, resource) => { acc[resource.resourceType] ??= 0; @@ -480,13 +568,14 @@ export async function provisionBindings( {} as Record ); logger.log(`🎉 All resources provisioned, continuing with deployment...\n`); + metrics.sendMetricsEvent("provision resources", resourceCount, { sendMetrics: config.send_metrics, }); } } -function getSettings( +export function getSettings( complianceConfig: ComplianceConfig, accountId: string, scriptName: string diff --git a/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts b/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts index 9106fc66e27d..5e974b586106 100644 --- a/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts +++ b/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts @@ -219,7 +219,10 @@ export type WorkerMetadata = WorkerMetadataPut | WorkerMetadataVersionsPost; /** * Creates a `FormData` upload from a `CfWorkerInit`. */ -export function createWorkerUploadForm(worker: CfWorkerInit): FormData { +export function createWorkerUploadForm( + worker: CfWorkerInit, + options?: { dryRun: true } +): FormData { const formData = new FormData(); const { main, @@ -279,6 +282,14 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData { }); bindings.kv_namespaces?.forEach(({ id, binding, raw }) => { + // If we're doing a dry run there's no way to know whether or not a KV namespace + // is inheritable or requires provisioning (since that would require hitting the API). + // As such, _assume_ any undefined IDs are inheritable when doing a dry run. + // When this Worker is actually deployed, some may be provisioned at the point of deploy + if (options?.dryRun) { + id ??= INHERIT_SYMBOL; + } + if (id === undefined) { throw new UserError(`${binding} bindings must have an "id" field`); } @@ -357,6 +368,9 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData { bindings.r2_buckets?.forEach( ({ binding, bucket_name, jurisdiction, raw }) => { + if (options?.dryRun) { + bucket_name ??= INHERIT_SYMBOL; + } if (bucket_name === undefined) { throw new UserError( `${binding} bindings must have a "bucket_name" field` @@ -382,6 +396,9 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData { bindings.d1_databases?.forEach( ({ binding, database_id, database_internal_env, raw }) => { + if (options?.dryRun) { + database_id ??= INHERIT_SYMBOL; + } if (database_id === undefined) { throw new UserError( `${binding} bindings must have a "database_id" field` diff --git a/packages/wrangler/src/dev.ts b/packages/wrangler/src/dev.ts index 14911a7f5bfa..df187eb278ff 100644 --- a/packages/wrangler/src/dev.ts +++ b/packages/wrangler/src/dev.ts @@ -42,6 +42,7 @@ export const dev = createCommand({ ? false : args.experimentalRemoteBindings ?? 
true, DEPLOY_REMOTE_DIFF_CHECK: false, + AUTOCREATE_RESOURCES: args.experimentalAutoCreate, }), }, metadata: { diff --git a/packages/wrangler/src/experimental-flags.ts b/packages/wrangler/src/experimental-flags.ts index fdb197572129..35644041b966 100644 --- a/packages/wrangler/src/experimental-flags.ts +++ b/packages/wrangler/src/experimental-flags.ts @@ -6,6 +6,7 @@ export type ExperimentalFlags = { RESOURCES_PROVISION: boolean; REMOTE_BINDINGS: boolean; DEPLOY_REMOTE_DIFF_CHECK: boolean; + AUTOCREATE_RESOURCES: boolean; }; const flags = new AsyncLocalStorage(); diff --git a/packages/wrangler/src/index.ts b/packages/wrangler/src/index.ts index a2657ffc787b..c7b1e5f9c7aa 100644 --- a/packages/wrangler/src/index.ts +++ b/packages/wrangler/src/index.ts @@ -407,6 +407,13 @@ export function createCLIParser(argv: string[]) { hidden: true, alias: ["x-provision"], }, + "experimental-auto-create": { + describe: "Automatically provision draft bindings with new resources", + type: "boolean", + default: true, + hidden: true, + alias: "x-auto-create", + }, } as const; // Type check result against CommonYargsOptions to make sure we've included // all common options diff --git a/packages/wrangler/src/kv/helpers.ts b/packages/wrangler/src/kv/helpers.ts index cf8a008eed2f..258585c75531 100644 --- a/packages/wrangler/src/kv/helpers.ts +++ b/packages/wrangler/src/kv/helpers.ts @@ -1,13 +1,17 @@ +import assert from "node:assert"; import { Blob } from "node:buffer"; import { URLSearchParams } from "node:url"; import { type KVNamespace } from "@cloudflare/workers-types/experimental"; import { Miniflare } from "miniflare"; import { FormData } from "undici"; import { fetchKVGetValue, fetchListResult, fetchResult } from "../cfetch"; +import { getSettings } from "../deployment-bundle/bindings"; import { getLocalPersistencePath } from "../dev/get-local-persistence-path"; import { getDefaultPersistRoot } from "../dev/miniflare"; import { UserError } from "../errors"; +import { getFlag } from "../experimental-flags"; import { logger } from "../logger"; +import { requireAuth } from "../user"; import type { Config } from "../config"; import type { ComplianceConfig } from "../environment-variables/misc-variables"; import type { ReplaceWorkersTypes } from "miniflare"; @@ -420,10 +424,38 @@ export async function deleteKVBulkKeyValue( } } -export function getKVNamespaceId( +async function getIdFromSettings( + config: Config, + binding: string, + isLocal: boolean +) { + // Don't do any network stuff when local, instead respect what + // Wrangler dev does, which is to use the binding name as a fallback + // for the namespace ID + if (isLocal) { + return binding; + } + const accountId = await requireAuth(config); + if (!config.name) { + throw new UserError("No Worker name found in config"); + } + const settings = await getSettings(config, accountId, config.name); + const existingKV = settings?.bindings.find( + (existing) => existing.type === "kv_namespace" && existing.name === binding + ); + if (!existingKV || !("namespace_id" in existingKV)) { + throw new UserError( + `No namespace ID found for binding "${binding}". 
Add one to your wrangler config file or pass it via \`--namespace-id\`.` + ); + } + return existingKV.namespace_id as string; +} + +export async function getKVNamespaceId( { preview, binding, "namespace-id": namespaceId }: KvArgs, - config: Config -): string { + config: Config, + isLocal: boolean +): Promise { // nice if (namespaceId) { return namespaceId; @@ -483,8 +515,12 @@ export function getKVNamespaceId( // We don't want to execute code below if preview is set to true, so we just return. Otherwise we can get error! return namespaceId; } else if (previewIsDefined) { + if (getFlag("RESOURCES_PROVISION")) { + assert(binding); + return getIdFromSettings(config, binding, isLocal); + } throw new UserError( - `No namespace ID found for ${binding}. Add one to your wrangler config file to use a separate namespace for previewing your worker.` + `No namespace ID found for ${binding}. Add one to your wrangler config file or pass it via \`--namespace-id\`.` ); } @@ -494,6 +530,13 @@ export function getKVNamespaceId( (!namespace.id && namespace.preview_id); if (bindingHasOnlyOneId) { namespaceId = namespace.id || namespace.preview_id; + } else if ( + getFlag("RESOURCES_PROVISION") && + !namespace.id && + !namespace.preview_id + ) { + assert(binding); + return getIdFromSettings(config, binding, isLocal); } else { throw new UserError( `${binding} has both a namespace ID and a preview ID. Specify "--preview" or "--preview false" to avoid writing data to the wrong namespace.` diff --git a/packages/wrangler/src/kv/index.ts b/packages/wrangler/src/kv/index.ts index 7db24f90e798..162b62eea28f 100644 --- a/packages/wrangler/src/kv/index.ts +++ b/packages/wrangler/src/kv/index.ts @@ -188,7 +188,7 @@ export const kvNamespaceDeleteCommand = createCommand({ printResourceLocation("remote"); let id; try { - id = getKVNamespaceId(args, config); + id = await getKVNamespaceId(args, config, false); } catch (e) { throw new CommandLineArgsError( "Not able to delete namespace.\n" + ((e as Error).message ?? e) @@ -383,7 +383,7 @@ export const kvKeyPutCommand = createCommand({ async handler({ key, ttl, expiration, metadata, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); // One of `args.path` and `args.value` must be defined const value = args.path ? 
readFileSyncToBuffer(args.path) @@ -495,7 +495,7 @@ export const kvKeyListCommand = createCommand({ const localMode = isLocal(args); // TODO: support for limit+cursor (pagination) const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); let result: NamespaceKeyInfo[]; let metricEvent: EventNames; @@ -586,7 +586,7 @@ export const kvKeyGetCommand = createCommand({ async handler({ key, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); let bufferKVValue; let metricEvent: EventNames; @@ -678,7 +678,7 @@ export const kvKeyDeleteCommand = createCommand({ async handler({ key, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); logger.log(`Deleting the key "${key}" on namespace ${namespaceId}.`); @@ -753,7 +753,7 @@ export const kvBulkGetCommand = createCommand({ async handler({ filename, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); const content = parseJSON(readFileSync(filename), filename) as ( | string @@ -895,7 +895,7 @@ export const kvBulkPutCommand = createCommand({ // but we'll do that in the future if needed. const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); const content = parseJSON(readFileSync(filename), filename); if (!Array.isArray(content)) { @@ -1045,7 +1045,7 @@ export const kvBulkDeleteCommand = createCommand({ async handler({ filename, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); if (!args.force) { const result = await confirm( diff --git a/packages/wrangler/src/pages/dev.ts b/packages/wrangler/src/pages/dev.ts index 4b4fdacc7e25..b3b7cfa3de43 100644 --- a/packages/wrangler/src/pages/dev.ts +++ b/packages/wrangler/src/pages/dev.ts @@ -886,6 +886,7 @@ export const pagesDevCommand = createCommand({ RESOURCES_PROVISION: false, REMOTE_BINDINGS: false, DEPLOY_REMOTE_DIFF_CHECK: false, + AUTOCREATE_RESOURCES: false, }, () => startDev({ @@ -955,6 +956,7 @@ export const pagesDevCommand = createCommand({ experimentalRemoteBindings: true, experimentalVectorizeBindToProd: false, experimentalImagesLocalMode: false, + experimentalAutoCreate: false, enableIpc: true, config: Array.isArray(args.config) ? 
args.config : undefined, site: undefined, diff --git a/packages/wrangler/src/utils/isLegacyEnv.ts b/packages/wrangler/src/utils/isLegacyEnv.ts index fa578a9cc3f2..c1497f1b825e 100644 --- a/packages/wrangler/src/utils/isLegacyEnv.ts +++ b/packages/wrangler/src/utils/isLegacyEnv.ts @@ -1,7 +1,10 @@ +import type { StartDevWorkerOptions } from "../api"; import type { Config } from "../config"; -export function isLegacyEnv(config: Config): boolean { +export function isLegacyEnv(config: Config | StartDevWorkerOptions): boolean { // We only read from config here, because we've already accounted for // args["legacy-env"] in https://github.com/cloudflare/workers-sdk/blob/b24aeb5722370c2e04bce97a84a1fa1e55725d79/packages/wrangler/src/config/validation.ts#L94-L98 - return config.legacy_env; + return "legacy_env" in config + ? config.legacy_env + : !config.legacy.enableServiceEnvironments; } diff --git a/packages/wrangler/src/versions/upload.ts b/packages/wrangler/src/versions/upload.ts index 85a2df2db3ed..badba4bcea83 100644 --- a/packages/wrangler/src/versions/upload.ts +++ b/packages/wrangler/src/versions/upload.ts @@ -272,6 +272,7 @@ export const versionsUploadCommand = createCommand({ RESOURCES_PROVISION: args.experimentalProvision ?? false, REMOTE_BINDINGS: args.experimentalRemoteBindings ?? true, DEPLOY_REMOTE_DIFF_CHECK: false, + AUTOCREATE_RESOURCES: args.experimentalAutoCreate, }), warnIfMultipleEnvsConfiguredButNoneSpecified: true, }, diff --git a/packages/wrangler/src/versions/view.ts b/packages/wrangler/src/versions/view.ts index 1a0621a25213..2a77c3611fdf 100644 --- a/packages/wrangler/src/versions/view.ts +++ b/packages/wrangler/src/versions/view.ts @@ -1,12 +1,13 @@ import { logRaw } from "@cloudflare/cli"; +import { convertBindingsToCfWorkerInitBindings } from "../api/startDevWorker/utils"; import { createCommand } from "../core/create-command"; import { UserError } from "../errors"; import * as metrics from "../metrics"; import { requireAuth } from "../user"; +import { printBindings } from "../utils/print-bindings"; import formatLabelledValues from "../utils/render-labelled-values"; import { fetchVersion } from "./api"; import { getVersionSource } from "./list"; -import type { WorkerMetadataBinding } from "../deployment-bundle/create-worker-upload-form"; const BLANK_INPUT = "-"; // To be used where optional user-input is displayed and the value is nullish @@ -38,7 +39,7 @@ export const versionsViewCommand = createCommand({ }, }, positionalArgs: ["version-id"], - handler: async function versionsViewHandler(args, { config }) { + async handler(args, { config }) { metrics.sendMetricsEvent( "view worker version", {}, @@ -92,7 +93,7 @@ export const versionsViewCommand = createCommand({ version.resources.script_runtime.compatibility_flags.join(", "); } if (Object.keys(scriptInfo).length > 0) { - logRaw("------------------------------------------------------------"); + logRaw(""); logRaw(formatLabelledValues(scriptInfo)); } @@ -100,7 +101,7 @@ export const versionsViewCommand = createCommand({ (binding) => binding.type === "secret_text" ); if (secrets.length > 0) { - logRaw("------------------------- secrets -------------------------"); + logRaw("Secrets:"); for (const secret of secrets) { logRaw( formatLabelledValues({ @@ -113,33 +114,11 @@ export const versionsViewCommand = createCommand({ const bindings = version.resources.bindings.filter( (binding) => binding.type !== "secret_text" ); - if (bindings.length > 0) { - logRaw("------------------------- bindings -------------------------"); - 
// env vars are done differently so target them first - const envVars = bindings.filter( - (binding) => binding.type === "plain_text" - ); - if (envVars.length > 0) { - logRaw( - `[vars]\n` + - // ts is having issues typing from the filter - (envVars as { type: "plain_text"; name: string; text: string }[]) - .map((envVar) => `${envVar.name} = "${envVar.text}"`) - .join("\n") - ); - } - // Filter out env vars since they got handled above - const restOfBindings = bindings.filter( - (binding) => binding.type !== "plain_text" + if (bindings.length > 0) { + printBindings( + (await convertBindingsToCfWorkerInitBindings(bindings)).bindings ); - for (const binding of restOfBindings) { - const output = printBindingAsToml(binding); - if (output !== null) { - logRaw(output); - logRaw(""); - } - } } }, }); @@ -149,126 +128,3 @@ type ScriptInfoLog = { "Compatibility Date"?: string; "Compatibility Flags"?: string; }; - -function printBindingAsToml(binding: WorkerMetadataBinding) { - switch (binding.type) { - case "ai": - return "[ai]" + `\nbinding = ${binding.name}`; - - case "analytics_engine": - return ( - "[[analytics_engine_datasets]]" + - `\nbinding = ${binding.name}` + - (binding.dataset ? `\ndataset = ${binding.dataset}` : "") - ); - - case "browser": - return "[browser]" + `\nbinding = "${binding.name}"`; - - case "d1": - return ( - "[[d1_databases]]" + - `\nbinding = "${binding.name}"` + - `\ndatabase_id = "${binding.id}"` - ); - - case "dispatch_namespace": - return ( - "[[dispatch_namespaces]]" + - `\nbinding = "${binding.name}"` + - `\nnamespce = "${binding.namespace}"` + - (binding.outbound - ? `\noutbound = { service = "${binding.outbound.worker.service}"` + - (binding.outbound.params - ? `, parameters = [${binding.outbound.params.map((param) => param.name).join(", ")}]` - : "") + - " }" - : "") - ); - - case "durable_object_namespace": - return ( - "[[durable_objects.bindings]]" + - `\nname = "${binding.name}"` + - `\nclass_name = "${binding.class_name}"` + - (binding.script_name ? `\nscript_name = "${binding.script_name}"` : "") - ); - - case "hyperdrive": - return ( - "[[hyperdrive]]" + - `\nbinding = "${binding.name}"` + - `\nid = "${binding.id}"` - ); - - case "kv_namespace": - return ( - "[[kv_namespaces]]" + - `\nbinding = "${binding.name}"` + - `\nid = "${binding.namespace_id}"` - ); - - case "mtls_certificate": - return ( - "[[mtls_certificates]]" + - `\nbinding = "${binding.name}"` + - `\ncertificate_id = "${binding.certificate_id}"` - ); - - case "queue": - return ( - "[[queues.producers]]" + - `\nbinding = "${binding.name}"` + - `\nqueue = "${binding.queue_name}"` + - (binding.delivery_delay - ? `\ndelivery_delay = ${binding.delivery_delay}` - : "") - ); - - case "r2_bucket": - return ( - "[[r2_buckets]]" + - `\nbinding = "${binding.name}"` + - `\nbucket_name = "${binding.bucket_name}"` + - (binding.jurisdiction - ? `\njurisdiction = "${binding.jurisdiction}"` - : "") - ); - - case "send_email": - return ( - "[[send_email]]" + - `\nname = "${binding.name}"` + - (binding.destination_address - ? `\ndestination_address = "${binding.destination_address}"` - : "") + - (binding.allowed_destination_addresses - ? `\nallowed_destination_addresses = [${binding.allowed_destination_addresses.map((addr) => `"${addr}"`).join(", ")}]` - : "") + - (binding.allowed_sender_addresses - ? 
`\nallowed_sender_addresses = [${binding.allowed_sender_addresses.map((addr) => `"${addr}"`).join(", ")}]` - : "") - ); - - case "service": - return ( - "[[services]]" + - `\nbinding = "${binding.name}"` + - `\nservice = "${binding.name}"` + - (binding.entrypoint ? `\nentrypoint = "${binding.entrypoint}"` : "") - ); - - case "vectorize": - return ( - "[[vectorize]]" + - `\nbinding = "${binding.name}"` + - `\nindex_name = "${binding.index_name}"` - ); - - case "version_metadata": - return "[version_metadata]" + `\nbinding = "${binding.name}"`; - - default: - return null; - } -} diff --git a/packages/wrangler/src/yargs-types.ts b/packages/wrangler/src/yargs-types.ts index 5a7c9174c720..dbc66b68efe6 100644 --- a/packages/wrangler/src/yargs-types.ts +++ b/packages/wrangler/src/yargs-types.ts @@ -12,6 +12,7 @@ export interface CommonYargsOptions { "env-file": string[] | undefined; "experimental-provision": boolean | undefined; "experimental-remote-bindings": boolean | undefined; + "experimental-auto-create": boolean; } export type CommonYargsArgvSanitized

= OnlyCamelCase<