diff --git a/packages/wrangler/e2e/provision.test.ts b/packages/wrangler/e2e/provision.test.ts index 820dcc0a65dc..4fd9b722b688 100644 --- a/packages/wrangler/e2e/provision.test.ts +++ b/packages/wrangler/e2e/provision.test.ts @@ -1,7 +1,7 @@ import assert from "node:assert"; import dedent from "ts-dedent"; import { fetch } from "undici"; -import { afterAll, beforeAll, describe, expect, it, vi } from "vitest"; +import { afterAll, beforeEach, describe, expect, it, vi } from "vitest"; import { CLOUDFLARE_ACCOUNT_ID } from "./helpers/account-id"; import { WranglerE2ETestHelper } from "./helpers/e2e-wrangler-test"; import { fetchText } from "./helpers/fetch-text"; @@ -38,22 +38,26 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( expect(text).toMatchInlineSnapshot(`"Hello World!"`); }); - beforeAll(async () => { + beforeEach(async () => { await helper.seed({ "wrangler.toml": dedent` - name = "${workerName}" - main = "src/index.ts" - compatibility_date = "2023-01-01" + name = "${workerName}" + main = "src/index.ts" + compatibility_date = "2023-01-01" - [[kv_namespaces]] - binding = "KV" + [[kv_namespaces]] + binding = "KV" - [[r2_buckets]] - binding = "R2" + [[r2_buckets]] + binding = "R2" - [[d1_databases]] - binding = "D1" - `, + [[r2_buckets]] + binding = "R2_WITH_NAME" + bucket_name = "does-not-exist" + + [[d1_databases]] + binding = "D1" + `, "src/index.ts": dedent` export default { fetch(request) { @@ -75,32 +79,33 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( await worker.exitCode; const output = await worker.output; expect(normalize(output)).toMatchInlineSnapshot(` - "Total Upload: xx KiB / gzip: xx KiB - The following bindings need to be provisioned: - Binding Resource - env.KV KV Namespace - env.D1 D1 Database - env.R2 R2 Bucket - Provisioning KV (KV Namespace)... - πŸŒ€ Creating new KV Namespace "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-kv"... - ✨ KV provisioned πŸŽ‰ - Provisioning D1 (D1 Database)... - πŸŒ€ Creating new D1 Database "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-d1"... - ✨ D1 provisioned πŸŽ‰ - Provisioning R2 (R2 Bucket)... - πŸŒ€ Creating new R2 Bucket "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2"... - ✨ R2 provisioned πŸŽ‰ - πŸŽ‰ All resources provisioned, continuing with deployment... - Your Worker has access to the following bindings: - Binding Resource - env.KV (00000000000000000000000000000000) KV Namespace - env.D1 (00000000-0000-0000-0000-000000000000) D1 Database - env.R2 (tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2) R2 Bucket - Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) - Deployed tmp-e2e-worker-00000000-0000-0000-0000-000000000000 triggers (TIMINGS) - https://tmp-e2e-worker-00000000-0000-0000-0000-000000000000.SUBDOMAIN.workers.dev - Current Version ID: 00000000-0000-0000-0000-000000000000" - `); + "Total Upload: xx KiB / gzip: xx KiB + The following bindings need to be provisioned: + Binding Resource + env.KV KV Namespace + env.D1 D1 Database + env.R2 R2 Bucket + Provisioning KV (KV Namespace)... + πŸŒ€ Creating new KV Namespace "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-kv"... + ✨ KV provisioned πŸŽ‰ + Provisioning D1 (D1 Database)... + πŸŒ€ Creating new D1 Database "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-d1"... + ✨ D1 provisioned πŸŽ‰ + Provisioning R2 (R2 Bucket)... + πŸŒ€ Creating new R2 Bucket "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2"... + ✨ R2 provisioned πŸŽ‰ + πŸŽ‰ All resources provisioned, continuing with deployment... 
+ Your Worker has access to the following bindings: + Binding Resource + env.KV (00000000000000000000000000000000) KV Namespace + env.D1 (00000000-0000-0000-0000-000000000000) D1 Database + env.R2 (tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2) R2 Bucket + env.R2_WITH_NAME (does-not-exist) R2 Bucket + Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) + Deployed tmp-e2e-worker-00000000-0000-0000-0000-000000000000 triggers (TIMINGS) + https://tmp-e2e-worker-00000000-0000-0000-0000-000000000000.SUBDOMAIN.workers.dev + Current Version ID: 00000000-0000-0000-0000-000000000000" + `); const urlMatch = output.match( /(?https:\/\/tmp-e2e-.+?\..+?\.workers\.dev)/ ); @@ -129,17 +134,18 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( await worker.exitCode; const output = await worker.output; expect(normalize(output)).toMatchInlineSnapshot(` - "Total Upload: xx KiB / gzip: xx KiB - Your Worker has access to the following bindings: - Binding Resource - env.KV (inherited) KV Namespace - env.D1 (inherited) D1 Database - env.R2 (inherited) R2 Bucket - Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) - Deployed tmp-e2e-worker-00000000-0000-0000-0000-000000000000 triggers (TIMINGS) - https://tmp-e2e-worker-00000000-0000-0000-0000-000000000000.SUBDOMAIN.workers.dev - Current Version ID: 00000000-0000-0000-0000-000000000000" - `); + "Total Upload: xx KiB / gzip: xx KiB + Your Worker has access to the following bindings: + Binding Resource + env.KV (inherited) KV Namespace + env.D1 (inherited) D1 Database + env.R2 (inherited) R2 Bucket + env.R2_WITH_NAME (does-not-exist) R2 Bucket + Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) + Deployed tmp-e2e-worker-00000000-0000-0000-0000-000000000000 triggers (TIMINGS) + https://tmp-e2e-worker-00000000-0000-0000-0000-000000000000.SUBDOMAIN.workers.dev + Current Version ID: 00000000-0000-0000-0000-000000000000" + `); const response = await retry( (resp) => !resp.ok, @@ -147,7 +153,59 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( ); await expect(response.text()).resolves.toEqual("Hello World!"); }); + it("can inspect current bindings", async () => { + const versionsRaw = await helper.run( + `wrangler versions list --json --x-provision` + ); + + const versions = JSON.parse(versionsRaw.stdout) as unknown[]; + const latest = versions.at(-1) as { id: string }; + + const versionView = await helper.run( + `wrangler versions view ${latest.id} --x-provision` + ); + + expect(normalizeOutput(versionView.output)).toMatchInlineSnapshot(` + "Version ID: 00000000-0000-0000-0000-000000000000 + Created: TIMESTAMP + Author: + Source: Unknown (version_upload) + Tag: - + Message: - + Handlers: fetch + Compatibility Date: 2023-01-01 + [ + { + "database_id": "00000000-0000-0000-0000-000000000000", + "id": "00000000-0000-0000-0000-000000000000", + "name": "D1", + "type": "d1" + }, + { + "name": "KV", + "namespace_id": "00000000000000000000000000000000", + "type": "kv_namespace" + }, + { + "bucket_name": "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2", + "name": "R2", + "type": "r2_bucket" + }, + { + "bucket_name": "does-not-exist", + "name": "R2_WITH_NAME", + "type": "r2_bucket" + } + ] + Your Worker has access to the following bindings: + Binding Resource + env.KV (00000000000000000000000000000000) KV Namespace + env.D1 (00000000-0000-0000-0000-000000000000) D1 Database + env.R2 (tmp-e2e-worker-00000000-0000-0000-0000-000000000000-r2) R2 Bucket + env.R2_WITH_NAME (does-not-exist) R2 Bucket" + `); + }); it("can 
inherit and provision resources on version upload", async () => { await helper.seed({ "wrangler.toml": dedent` @@ -167,27 +225,29 @@ describe.skipIf(!CLOUDFLARE_ACCOUNT_ID)( ); await worker.exitCode; const output = await worker.output; - expect(normalize(output)).toMatchInlineSnapshot(` - "Total Upload: xx KiB / gzip: xx KiB - The following bindings need to be provisioned: - Binding Resource - env.KV2 KV Namespace - Provisioning KV2 (KV Namespace)... - πŸŒ€ Creating new KV Namespace "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-kv2"... - ✨ KV2 provisioned πŸŽ‰ - πŸŽ‰ All resources provisioned, continuing with deployment... - Worker Startup Time: (TIMINGS) - Your Worker has access to the following bindings: - Binding Resource - env.KV2 (00000000000000000000000000000000) KV Namespace - env.R2 (inherited) R2 Bucket - Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) - Worker Version ID: 00000000-0000-0000-0000-000000000000 - Version Preview URL: https://tmp-e2e-worker-PREVIEW-URL.SUBDOMAIN.workers.dev - To deploy this version to production traffic use the command wrangler versions deploy - Changes to non-versioned settings (config properties 'logpush' or 'tail_consumers') take effect after your next deployment using the command wrangler versions deploy - Changes to triggers (routes, custom domains, cron schedules, etc) must be applied with the command wrangler triggers deploy" - `); + expect(normalize(output)).toMatchInlineSnapshot( + ` + "Total Upload: xx KiB / gzip: xx KiB + The following bindings need to be provisioned: + Binding Resource + env.KV2 KV Namespace + Provisioning KV2 (KV Namespace)... + πŸŒ€ Creating new KV Namespace "tmp-e2e-worker-00000000-0000-0000-0000-000000000000-kv2"... + ✨ KV2 provisioned πŸŽ‰ + πŸŽ‰ All resources provisioned, continuing with deployment... + Worker Startup Time: (TIMINGS) + Your Worker has access to the following bindings: + Binding Resource + env.KV2 (00000000000000000000000000000000) KV Namespace + env.R2 (inherited) R2 Bucket + Uploaded tmp-e2e-worker-00000000-0000-0000-0000-000000000000 (TIMINGS) + Worker Version ID: 00000000-0000-0000-0000-000000000000 + Version Preview URL: https://tmp-e2e-worker-PREVIEW-URL.SUBDOMAIN.workers.dev + To deploy this version to production traffic use the command wrangler versions deploy + Changes to non-versioned settings (config properties 'logpush' or 'tail_consumers') take effect after your next deployment using the command wrangler versions deploy + Changes to triggers (routes, custom domains, cron schedules, etc) must be applied with the command wrangler triggers deploy" + ` + ); const kvMatch = output.match(/env.KV2 \((?[0-9a-f]{32})/); assert(kvMatch?.groups); kvId2 = kvMatch.groups.kv; diff --git a/packages/wrangler/src/__tests__/provision.test.ts b/packages/wrangler/src/__tests__/provision.test.ts index 1feb858295bd..d1628f368390 100644 --- a/packages/wrangler/src/__tests__/provision.test.ts +++ b/packages/wrangler/src/__tests__/provision.test.ts @@ -1,3 +1,4 @@ +import { readFile } from "node:fs/promises"; import { http, HttpResponse } from "msw"; import { mockAccountId, mockApiToken } from "./helpers/mock-account-id"; import { mockConsoleMethods } from "./helpers/mock-console"; @@ -191,6 +192,7 @@ describe("--x-provision", () => { Provisioning R2 (R2 Bucket)... ✨ R2 provisioned πŸŽ‰ + Your Worker was deployed with provisioned resources. 
We've written the IDs of these resources to your config file, which you can choose to save or discardβ€”either way future deploys will continue to work. πŸŽ‰ All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -308,6 +310,7 @@ describe("--x-provision", () => { Provisioning R2 (R2 Bucket)... ✨ R2 provisioned πŸŽ‰ + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discardβ€”either way future deploys will continue to work. πŸŽ‰ All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -438,6 +441,7 @@ describe("--x-provision", () => { πŸŒ€ Creating new R2 Bucket \\"new-r2\\"... ✨ R2 provisioned πŸŽ‰ + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discardβ€”either way future deploys will continue to work. πŸŽ‰ All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -454,6 +458,26 @@ describe("--x-provision", () => { `); expect(std.err).toMatchInlineSnapshot(`""`); expect(std.warn).toMatchInlineSnapshot(`""`); + + // IDs should be written back to the config file + expect(await readFile("wrangler.toml", "utf-8")).toMatchInlineSnapshot(` + "compatibility_date = \\"2022-01-12\\" + name = \\"test-name\\" + main = \\"index.js\\" + + [[kv_namespaces]] + binding = \\"KV\\" + id = \\"new-kv-id\\" + + [[r2_buckets]] + binding = \\"R2\\" + bucket_name = \\"new-r2\\" + + [[d1_databases]] + binding = \\"D1\\" + database_id = \\"new-d1-id\\" + " + `); }); it("can prefill d1 database name from config file if provided", async () => { @@ -507,6 +531,7 @@ describe("--x-provision", () => { πŸŒ€ Creating new D1 Database \\"prefilled-d1-name\\"... ✨ D1 provisioned πŸŽ‰ + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discardβ€”either way future deploys will continue to work. πŸŽ‰ All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -629,6 +654,7 @@ describe("--x-provision", () => { πŸŒ€ Creating new D1 Database \\"new-d1-name\\"... ✨ D1 provisioned πŸŽ‰ + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discardβ€”either way future deploys will continue to work. πŸŽ‰ All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -704,6 +730,7 @@ describe("--x-provision", () => { πŸŒ€ Creating new R2 Bucket \\"prefilled-r2-name\\"... ✨ BUCKET provisioned πŸŽ‰ + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discardβ€”either way future deploys will continue to work. πŸŽ‰ All resources provisioned, continuing with deployment... Worker Startup Time: 100 ms @@ -890,6 +917,7 @@ describe("--x-provision", () => { πŸŒ€ Creating new R2 Bucket \\"existing-bucket-name\\"... ✨ BUCKET provisioned πŸŽ‰ + Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discardβ€”either way future deploys will continue to work. πŸŽ‰ All resources provisioned, continuing with deployment... 
Worker Startup Time: 100 ms diff --git a/packages/wrangler/src/__tests__/versions/versions.view.test.ts b/packages/wrangler/src/__tests__/versions/versions.view.test.ts index 145145185dbf..add68833405d 100644 --- a/packages/wrangler/src/__tests__/versions/versions.view.test.ts +++ b/packages/wrangler/src/__tests__/versions/versions.view.test.ts @@ -71,23 +71,23 @@ describe("versions view", () => { Source: Upload Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, scheduled Compatibility Date: 2020-01-01 Compatibility Flags: test, flag - ------------------------- bindings ------------------------- - [[analytics_engine_datasets]] - binding = ANALYTICS - dataset = analytics_dataset - - [[kv_namespaces]] - binding = \\"KV\\" - id = \\"kv-namespace-id\\" - " `); - expect(cnsl.out).toMatch(/⛅️ wrangler/); + expect(cnsl.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Your Worker has access to the following bindings: + Binding Resource + env.KV (kv-namespace-id) KV Namespace + env.ANALYTICS (analytics_dataset) Analytics Engine Dataset + " + `); expect(normalizeOutput(std.err)).toMatchInlineSnapshot(`""`); }); @@ -186,19 +186,20 @@ describe("versions view", () => { Source: Upload Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, scheduled Compatibility Date: 2020-01-01 Compatibility Flags: test, flag - ------------------------- bindings ------------------------- - [[analytics_engine_datasets]] - binding = ANALYTICS - dataset = analytics_dataset - - [[kv_namespaces]] - binding = \\"KV\\" - id = \\"kv-namespace-id\\" - + " + `); + expect(cnsl.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Your Worker has access to the following bindings: + Binding Resource + env.KV (kv-namespace-id) KV Namespace + env.ANALYTICS (analytics_dataset) Analytics Engine Dataset " `); @@ -320,7 +321,7 @@ describe("versions view", () => { Source: API πŸ“‘ Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue " `); @@ -367,7 +368,7 @@ describe("versions view", () => { Source: API πŸ“‘ Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 " @@ -416,7 +417,7 @@ describe("versions view", () => { Source: API πŸ“‘ Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 Compatibility Flags: flag_1, flag_2 @@ -469,11 +470,11 @@ describe("versions view", () => { Source: API πŸ“‘ Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 Compatibility Flags: flag_1, flag_2 - ------------------------- secrets ------------------------- + Secrets: Secret Name: SECRET_ONE Secret Name: SECRET_TWO " @@ -525,14 +526,10 @@ describe("versions view", () => { Source: API πŸ“‘ Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 Compatibility Flags: flag_1, flag_2 - ------------------------- bindings ------------------------- - [vars] - VAR_ONE = \\"var-one\\" - VAR_TWO = \\"var-one\\" " `); }); @@ -657,106 +654,41 @@ describe("versions view", () => { Source: API πŸ“‘ Tag: - Message: - - ------------------------------------------------------------ + Handlers: fetch, queue Compatibility Date: 2000-00-00 Compatibility Flags: 
flag_1, flag_2 - ------------------------- bindings ------------------------- - [ai] - binding = AI - - [[analytics_engine_datasets]] - binding = AE - dataset = datset - - [browser] - binding = \\"BROWSER\\" - - [[d1_databases]] - binding = \\"D1\\" - database_id = \\"d1-id\\" - - [[dispatch_namespaces]] - binding = \\"WFP\\" - namespce = \\"wfp-namespace\\" - - [[dispatch_namespaces]] - binding = \\"WFP_2\\" - namespce = \\"wfp-namespace\\" - outbound = { service = \\"outbound-worker\\" } - - [[dispatch_namespaces]] - binding = \\"WFP_3\\" - namespce = \\"wfp-namespace\\" - outbound = { service = \\"outbound-worker\\", parameters = [paramOne, paramTwo] } - - [[durable_objects.bindings]] - name = \\"DO\\" - class_name = \\"DurableObject\\" - - [[durable_objects.bindings]] - name = \\"DO_2\\" - class_name = \\"DurableObject\\" - script_name = \\"other-worker\\" - - [[hyperdrive]] - binding = \\"HYPERDRIVE\\" - id = \\"hyperdrive-id\\" - - [[kv_namespaces]] - binding = \\"KV\\" - id = \\"kv-id\\" - - [[mtls_certificates]] - binding = \\"MTLS\\" - certificate_id = \\"mtls-id\\" - - [[queues.producers]] - binding = \\"QUEUE\\" - queue = \\"queue\\" - - [[queues.producers]] - binding = \\"QUEUE_2\\" - queue = \\"queue\\" - delivery_delay = 60 - - [[r2_buckets]] - binding = \\"R2\\" - bucket_name = \\"r2-bucket\\" - - [[r2_buckets]] - binding = \\"R2_2\\" - bucket_name = \\"r2-bucket\\" - jurisdiction = \\"eu\\" - - [[send_email]] - name = \\"MAIL\\" - - [[send_email]] - name = \\"MAIL_2\\" - destination_address = \\"dest@example.com\\" - - [[send_email]] - name = \\"MAIL_3\\" - destination_address = \\"dest@example.com\\" - allowed_destination_addresses = [\\"1@a.com\\", \\"2@a.com\\"] - - [[services]] - binding = \\"SERVICE\\" - service = \\"SERVICE\\" - - [[services]] - binding = \\"SERVICE_2\\" - service = \\"SERVICE_2\\" - entrypoint = \\"Enterypoint\\" - - [[vectorize]] - binding = \\"VECTORIZE\\" - index_name = \\"index\\" - - [version_metadata] - binding = \\"VERSION_METADATA\\" - + " + `); + expect(cnsl.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Your Worker has access to the following bindings: + Binding Resource + env.DO (DurableObject) Durable Object + env.DO_2 (DurableObject, defined in other-worker) Durable Object + env.KV (kv-id) KV Namespace + env.MAIL (unrestricted) Send Email + env.MAIL_2 (dest@example.com) Send Email + env.MAIL_3 (dest@example.com) Send Email + env.QUEUE (queue) Queue + env.QUEUE_2 (queue) Queue + env.D1 (d1-id) D1 Database + env.VECTORIZE (index) Vectorize Index + env.HYPERDRIVE (hyperdrive-id) Hyperdrive Config + env.R2 (r2-bucket) R2 Bucket + env.R2_2 (r2-bucket (eu)) R2 Bucket + env.SERVICE (worker) Worker + env.SERVICE_2 (worker#Enterypoint) Worker + env.AE (datset) Analytics Engine Dataset + env.BROWSER Browser + env.AI AI + env.VERSION_METADATA Worker Version Metadata + env.WFP (wfp-namespace) Dispatch Namespace + env.WFP_2 (wfp-namespace) Dispatch Namespace + env.WFP_3 (wfp-namespace) Dispatch Namespace + env.MTLS (mtls-id) mTLS Certificate " `); }); diff --git a/packages/wrangler/src/api/startDevWorker/utils.ts b/packages/wrangler/src/api/startDevWorker/utils.ts index 2e46c87f2f60..c5dd789a2a10 100644 --- a/packages/wrangler/src/api/startDevWorker/utils.ts +++ b/packages/wrangler/src/api/startDevWorker/utils.ts @@ -1,6 +1,7 @@ import assert from "node:assert"; import { readFile } from "node:fs/promises"; import type { ConfigBindingOptions } from "../../config"; +import type { WorkerMetadataBinding } from 
"../../deployment-bundle/create-worker-upload-form"; import type { CfWorkerInit } from "../../deployment-bundle/worker"; import type { Binding, @@ -279,8 +280,22 @@ export function convertCfWorkerInitBindingsToBindings( return output; } +/** + * Convert either StartDevWorkerOptions["bindings"] or WorkerMetadataBinding[] to CfWorkerInit["bindings"] + * This function is by design temporary, but has lived longer than originally expected. + * For some context, CfWorkerInit is the in-memory representation of a Worker that Wrangler uses, + * WorkerMetadataBinding is the representation of bindings that comes from the API, and StartDevWorkerOptions + * is the "new" in-memory representation of a Worker that's used in Wrangler's dev flow. Over + * time, all uses of CfWorkerInit should transition to StartDevWorkerOptions, but that's a pretty big refactor. + * As such, in the meantime we have conversion functions so that different code paths can deal with the format they + * expect and were written for. + * + * WARNING: Using this with WorkerMetadataBinding[] will lose information about certain + * binding types (i.e. WASM modules, text blobs, and data blobs). These binding types are deprecated + * but may still be used by some Workers in the wild. + */ export async function convertBindingsToCfWorkerInitBindings( - inputBindings: StartDevWorkerOptions["bindings"] + inputBindings: StartDevWorkerOptions["bindings"] | WorkerMetadataBinding[] ): Promise<{ bindings: CfWorkerInit["bindings"]; fetchers: Record; @@ -317,23 +332,39 @@ export async function convertBindingsToCfWorkerInitBindings( const fetchers: Record = {}; - for (const [name, binding] of Object.entries(inputBindings ?? {})) { + const iterator: [string, WorkerMetadataBinding | Binding][] = Array.isArray( + inputBindings + ) + ? inputBindings.map((b) => [b.name, b]) + : Object.entries(inputBindings ?? {}); + + for (const [name, binding] of iterator) { if (binding.type === "plain_text") { bindings.vars ??= {}; - bindings.vars[name] = binding.value; + bindings.vars[name] = "value" in binding ? binding.value : binding.text; } else if (binding.type === "json") { bindings.vars ??= {}; - bindings.vars[name] = binding.value; + bindings.vars[name] = "value" in binding ? binding.value : binding.json; } else if (binding.type === "kv_namespace") { bindings.kv_namespaces ??= []; - bindings.kv_namespaces.push({ ...binding, binding: name }); + bindings.kv_namespaces.push({ + ...binding, + binding: name, + id: "namespace_id" in binding ? 
binding.namespace_id : binding.id, + }); } else if (binding.type === "send_email") { bindings.send_email ??= []; bindings.send_email.push({ ...binding, name: name }); } else if (binding.type === "wasm_module") { + if (!("source" in binding)) { + continue; + } bindings.wasm_modules ??= {}; bindings.wasm_modules[name] = await getBinaryFileContents(binding.source); } else if (binding.type === "text_blob") { + if (!("source" in binding)) { + continue; + } bindings.text_blobs ??= {}; if (typeof binding.source.path === "string") { @@ -345,6 +376,9 @@ export async function convertBindingsToCfWorkerInitBindings( ); } } else if (binding.type === "data_blob") { + if (!("source" in binding)) { + continue; + } bindings.data_blobs ??= {}; bindings.data_blobs[name] = await getBinaryFileContents(binding.source); } else if (binding.type === "browser") { @@ -366,7 +400,11 @@ export async function convertBindingsToCfWorkerInitBindings( bindings.r2_buckets.push({ ...binding, binding: name }); } else if (binding.type === "d1") { bindings.d1_databases ??= []; - bindings.d1_databases.push({ ...binding, binding: name }); + bindings.d1_databases.push({ + ...binding, + binding: name, + database_id: "id" in binding ? binding.id : binding.database_id, + }); } else if (binding.type === "vectorize") { bindings.vectorize ??= []; bindings.vectorize.push({ ...binding, binding: name }); @@ -383,7 +421,14 @@ export async function convertBindingsToCfWorkerInitBindings( bindings.analytics_engine_datasets.push({ ...binding, binding: name }); } else if (binding.type === "dispatch_namespace") { bindings.dispatch_namespaces ??= []; - bindings.dispatch_namespaces.push({ ...binding, binding: name }); + bindings.dispatch_namespaces.push({ + ...binding, + binding: name, + outbound: + binding.outbound && "worker" in binding.outbound + ? 
undefined + : binding.outbound, + }); } else if (binding.type === "mtls_certificate") { bindings.mtls_certificates ??= []; bindings.mtls_certificates.push({ ...binding, binding: name }); diff --git a/packages/wrangler/src/deploy/deploy.ts b/packages/wrangler/src/deploy/deploy.ts index 979f116cb8ee..1497fdfb35d1 100644 --- a/packages/wrangler/src/deploy/deploy.ts +++ b/packages/wrangler/src/deploy/deploy.ts @@ -847,7 +847,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m } } - workerBundle = createWorkerUploadForm(worker); + workerBundle = createWorkerUploadForm(worker, { dryRun: true }); printBindings( { ...withoutStaticAssets, vars: maskedVars }, config.tail_consumers, diff --git a/packages/wrangler/src/deployment-bundle/bindings.ts b/packages/wrangler/src/deployment-bundle/bindings.ts index a51c4aa7290b..a7e971d7049e 100644 --- a/packages/wrangler/src/deployment-bundle/bindings.ts +++ b/packages/wrangler/src/deployment-bundle/bindings.ts @@ -1,10 +1,15 @@ import assert from "node:assert"; import { fetchResult } from "../cfetch"; +import { + experimental_patchConfig, + PatchConfigError, +} from "../config/patch-config"; import { createD1Database } from "../d1/create"; import { listDatabases } from "../d1/list"; import { getDatabaseInfoFromIdOrName } from "../d1/utils"; import { prompt, select } from "../dialogs"; import { UserError } from "../errors"; +import { isNonInteractiveOrCI } from "../is-interactive"; import { createKVNamespace, listKVNamespaces } from "../kv/helpers"; import { logger } from "../logger"; import * as metrics from "../metrics"; @@ -12,7 +17,7 @@ import { APIError } from "../parse"; import { createR2Bucket, getR2Bucket, listR2Buckets } from "../r2/helpers"; import { isLegacyEnv } from "../utils/isLegacyEnv"; import { printBindings } from "../utils/print-bindings"; -import type { Config } from "../config"; +import type { Config, RawConfig } from "../config"; import type { ComplianceConfig } from "../environment-variables/misc-variables"; import type { WorkerMetadataBinding } from "./create-worker-upload-form"; import type { @@ -161,6 +166,13 @@ class R2Handler extends ProvisionResourceHandler<"r2_bucket", CfR2Bucket> { get name(): string | undefined { return this.binding.bucket_name as string; } + + override inherit(): void { + if (!this.binding.bucket_name) { + this.binding.bucket_name = INHERIT_SYMBOL; + } + } + async create(name: string) { await createR2Bucket( this.complianceConfig, @@ -183,7 +195,10 @@ class R2Handler extends ProvisionResourceHandler<"r2_bucket", CfR2Bucket> { (existing) => existing.type === this.type && existing.name === this.binding.binding && - existing.jurisdiction === this.binding.jurisdiction + existing.jurisdiction === this.binding.jurisdiction && + (this.binding.bucket_name + ? 
this.binding.bucket_name === existing.bucket_name + : true) ); } async isConnectedToExistingResource(): Promise { @@ -423,6 +438,7 @@ async function collectPendingResources( (a, b) => HANDLERS[a.resourceType].sort - HANDLERS[b.resourceType].sort ); } + export async function provisionBindings( bindings: CfWorkerInit["bindings"], accountId: string, @@ -438,6 +454,11 @@ export async function provisionBindings( ); if (pendingResources.length > 0) { + if (!config.configPath) { + throw new UserError( + "Provisioning resources is not supported without a config file" + ); + } if (!isLegacyEnv(config)) { throw new UserError( "Provisioning resources is not supported with a service environment" @@ -468,6 +489,45 @@ export async function provisionBindings( ); } + const patch: RawConfig = {}; + + for (const resource of pendingResources) { + patch[resource.resourceType] = config[resource.resourceType].map( + (binding) => { + if (binding.binding === resource.binding) { + // Using an early return here would be nicer but makes TS blow up + binding = resource.handler.binding; + } + + return Object.fromEntries( + Object.entries(binding).filter( + // Make sure all the values are JSON serialisable. + // Otherwise we end up with "undefined" in the config + ([_, value]) => typeof value === "string" + ) + ) as typeof binding; + } + ); + } + + // If the user is performing an interactive deploy, write the provisioned IDs back to the config file. + // This is not necessary, as future deploys can use inherited resources, but it can help with + // portability of the config file, and adds robustness to bindings being renamed. + if (!isNonInteractiveOrCI()) { + try { + await experimental_patchConfig(config.configPath, patch, false); + logger.log( + "Your Worker was deployed with provisioned resources. We've written the IDs of these resources to your config file, which you can choose to save or discardβ€”either way future deploys will continue to work." + ); + } catch (e) { + if (e instanceof PatchConfigError) { + // no-op β€” if the user is using TOML config we can't update it. + } else { + throw e; + } + } + } + const resourceCount = pendingResources.reduce( (acc, resource) => { acc[resource.resourceType] ??= 0; @@ -483,7 +543,7 @@ export async function provisionBindings( } } -function getSettings( +export function getSettings( complianceConfig: ComplianceConfig, accountId: string, scriptName: string diff --git a/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts b/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts index 8ee2afda8037..abea1b55e2ca 100644 --- a/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts +++ b/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts @@ -207,7 +207,10 @@ export type WorkerMetadata = WorkerMetadataPut | WorkerMetadataVersionsPost; /** * Creates a `FormData` upload from a `CfWorkerInit`. */ -export function createWorkerUploadForm(worker: CfWorkerInit): FormData { +export function createWorkerUploadForm( + worker: CfWorkerInit, + options?: { dryRun: true } +): FormData { const formData = new FormData(); const { main, @@ -267,6 +270,14 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData { }); bindings.kv_namespaces?.forEach(({ id, binding, raw }) => { + // If we're doing a dry run there's no way to know whether or not a KV namespace + // is inheritable or requires provisioning (since that would require hitting the API). + // As such, _assume_ any undefined IDs are inheritable when doing a dry run. 
+ // When this Worker is actually deployed, some may be provisioned at the point of deploy + if (options?.dryRun && id === undefined) { + id = INHERIT_SYMBOL; + } + if (id === undefined) { throw new UserError(`${binding} bindings must have an "id" field`); } @@ -340,6 +351,9 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData { bindings.r2_buckets?.forEach( ({ binding, bucket_name, jurisdiction, raw }) => { + if (options?.dryRun && bucket_name === undefined) { + bucket_name = INHERIT_SYMBOL; + } if (bucket_name === undefined) { throw new UserError( `${binding} bindings must have a "bucket_name" field` @@ -365,6 +379,9 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData { bindings.d1_databases?.forEach( ({ binding, database_id, database_internal_env, raw }) => { + if (options?.dryRun && database_id === undefined) { + database_id = INHERIT_SYMBOL; + } if (database_id === undefined) { throw new UserError( `${binding} bindings must have a "database_id" field` diff --git a/packages/wrangler/src/kv/helpers.ts b/packages/wrangler/src/kv/helpers.ts index cf8a008eed2f..258585c75531 100644 --- a/packages/wrangler/src/kv/helpers.ts +++ b/packages/wrangler/src/kv/helpers.ts @@ -1,13 +1,17 @@ +import assert from "node:assert"; import { Blob } from "node:buffer"; import { URLSearchParams } from "node:url"; import { type KVNamespace } from "@cloudflare/workers-types/experimental"; import { Miniflare } from "miniflare"; import { FormData } from "undici"; import { fetchKVGetValue, fetchListResult, fetchResult } from "../cfetch"; +import { getSettings } from "../deployment-bundle/bindings"; import { getLocalPersistencePath } from "../dev/get-local-persistence-path"; import { getDefaultPersistRoot } from "../dev/miniflare"; import { UserError } from "../errors"; +import { getFlag } from "../experimental-flags"; import { logger } from "../logger"; +import { requireAuth } from "../user"; import type { Config } from "../config"; import type { ComplianceConfig } from "../environment-variables/misc-variables"; import type { ReplaceWorkersTypes } from "miniflare"; @@ -420,10 +424,38 @@ export async function deleteKVBulkKeyValue( } } -export function getKVNamespaceId( +async function getIdFromSettings( + config: Config, + binding: string, + isLocal: boolean +) { + // Don't do any network stuff when local, instead respect what + // Wrangler dev does, which is to use the binding name as a fallback + // for the namespace ID + if (isLocal) { + return binding; + } + const accountId = await requireAuth(config); + if (!config.name) { + throw new UserError("No Worker name found in config"); + } + const settings = await getSettings(config, accountId, config.name); + const existingKV = settings?.bindings.find( + (existing) => existing.type === "kv_namespace" && existing.name === binding + ); + if (!existingKV || !("namespace_id" in existingKV)) { + throw new UserError( + `No namespace ID found for binding "${binding}". Add one to your wrangler config file or pass it via \`--namespace-id\`.` + ); + } + return existingKV.namespace_id as string; +} + +export async function getKVNamespaceId( { preview, binding, "namespace-id": namespaceId }: KvArgs, - config: Config -): string { + config: Config, + isLocal: boolean +): Promise { // nice if (namespaceId) { return namespaceId; @@ -483,8 +515,12 @@ export function getKVNamespaceId( // We don't want to execute code below if preview is set to true, so we just return. Otherwise we can get error! 
return namespaceId; } else if (previewIsDefined) { + if (getFlag("RESOURCES_PROVISION")) { + assert(binding); + return getIdFromSettings(config, binding, isLocal); + } throw new UserError( - `No namespace ID found for ${binding}. Add one to your wrangler config file to use a separate namespace for previewing your worker.` + `No namespace ID found for ${binding}. Add one to your wrangler config file or pass it via \`--namespace-id\`.` ); } @@ -494,6 +530,13 @@ export function getKVNamespaceId( (!namespace.id && namespace.preview_id); if (bindingHasOnlyOneId) { namespaceId = namespace.id || namespace.preview_id; + } else if ( + getFlag("RESOURCES_PROVISION") && + !namespace.id && + !namespace.preview_id + ) { + assert(binding); + return getIdFromSettings(config, binding, isLocal); } else { throw new UserError( `${binding} has both a namespace ID and a preview ID. Specify "--preview" or "--preview false" to avoid writing data to the wrong namespace.` diff --git a/packages/wrangler/src/kv/index.ts b/packages/wrangler/src/kv/index.ts index 5ecc79f781b3..b3cc3f0eca00 100644 --- a/packages/wrangler/src/kv/index.ts +++ b/packages/wrangler/src/kv/index.ts @@ -179,7 +179,7 @@ export const kvNamespaceDeleteCommand = createCommand({ printResourceLocation("remote"); let id; try { - id = getKVNamespaceId(args, config); + id = await getKVNamespaceId(args, config, false); } catch (e) { throw new CommandLineArgsError( "Not able to delete namespace.\n" + ((e as Error).message ?? e) @@ -374,7 +374,7 @@ export const kvKeyPutCommand = createCommand({ async handler({ key, ttl, expiration, metadata, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); // One of `args.path` and `args.value` must be defined const value = args.path ? readFileSyncToBuffer(args.path) @@ -486,7 +486,7 @@ export const kvKeyListCommand = createCommand({ const localMode = isLocal(args); // TODO: support for limit+cursor (pagination) const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); let result: NamespaceKeyInfo[]; let metricEvent: EventNames; @@ -577,7 +577,7 @@ export const kvKeyGetCommand = createCommand({ async handler({ key, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); let bufferKVValue; let metricEvent: EventNames; @@ -669,7 +669,7 @@ export const kvKeyDeleteCommand = createCommand({ async handler({ key, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); logger.log(`Deleting the key "${key}" on namespace ${namespaceId}.`); @@ -744,7 +744,7 @@ export const kvBulkGetCommand = createCommand({ async handler({ filename, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); const content = parseJSON(readFileSync(filename), filename) as ( | string @@ -886,7 +886,7 @@ export const kvBulkPutCommand = createCommand({ // but we'll do that in the future if needed. 
const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); const content = parseJSON(readFileSync(filename), filename); if (!Array.isArray(content)) { @@ -1036,7 +1036,7 @@ export const kvBulkDeleteCommand = createCommand({ async handler({ filename, ...args }) { const localMode = isLocal(args); const config = readConfig(args); - const namespaceId = getKVNamespaceId(args, config); + const namespaceId = await getKVNamespaceId(args, config, localMode); if (!args.force) { const result = await confirm( diff --git a/packages/wrangler/src/versions/view.ts b/packages/wrangler/src/versions/view.ts index eb3679a4a063..2a77c3611fdf 100644 --- a/packages/wrangler/src/versions/view.ts +++ b/packages/wrangler/src/versions/view.ts @@ -1,12 +1,13 @@ import { logRaw } from "@cloudflare/cli"; +import { convertBindingsToCfWorkerInitBindings } from "../api/startDevWorker/utils"; import { createCommand } from "../core/create-command"; import { UserError } from "../errors"; import * as metrics from "../metrics"; import { requireAuth } from "../user"; +import { printBindings } from "../utils/print-bindings"; import formatLabelledValues from "../utils/render-labelled-values"; import { fetchVersion } from "./api"; import { getVersionSource } from "./list"; -import type { WorkerMetadataBinding } from "../deployment-bundle/create-worker-upload-form"; const BLANK_INPUT = "-"; // To be used where optional user-input is displayed and the value is nullish @@ -38,7 +39,7 @@ export const versionsViewCommand = createCommand({ }, }, positionalArgs: ["version-id"], - handler: async function versionsViewHandler(args, { config }) { + async handler(args, { config }) { metrics.sendMetricsEvent( "view worker version", {}, @@ -92,7 +93,7 @@ export const versionsViewCommand = createCommand({ version.resources.script_runtime.compatibility_flags.join(", "); } if (Object.keys(scriptInfo).length > 0) { - logRaw("------------------------------------------------------------"); + logRaw(""); logRaw(formatLabelledValues(scriptInfo)); } @@ -100,7 +101,7 @@ export const versionsViewCommand = createCommand({ (binding) => binding.type === "secret_text" ); if (secrets.length > 0) { - logRaw("------------------------- secrets -------------------------"); + logRaw("Secrets:"); for (const secret of secrets) { logRaw( formatLabelledValues({ @@ -113,33 +114,11 @@ export const versionsViewCommand = createCommand({ const bindings = version.resources.bindings.filter( (binding) => binding.type !== "secret_text" ); - if (bindings.length > 0) { - logRaw("------------------------- bindings -------------------------"); - // env vars are done differently so target them first - const envVars = bindings.filter( - (binding) => binding.type === "plain_text" - ); - if (envVars.length > 0) { - logRaw( - `[vars]\n` + - // ts is having issues typing from the filter - (envVars as { type: "plain_text"; name: string; text: string }[]) - .map((envVar) => `${envVar.name} = "${envVar.text}"`) - .join("\n") - ); - } - // Filter out env vars since they got handled above - const restOfBindings = bindings.filter( - (binding) => binding.type !== "plain_text" + if (bindings.length > 0) { + printBindings( + (await convertBindingsToCfWorkerInitBindings(bindings)).bindings ); - for (const binding of restOfBindings) { - const output = printBindingAsToml(binding); - if (output !== null) { - logRaw(output); - logRaw(""); - } - } } }, }); @@ -149,123 +128,3 @@ type ScriptInfoLog = { 
"Compatibility Date"?: string; "Compatibility Flags"?: string; }; - -function printBindingAsToml(binding: WorkerMetadataBinding) { - switch (binding.type) { - case "ai": - return "[ai]" + `\nbinding = ${binding.name}`; - - case "analytics_engine": - return ( - "[[analytics_engine_datasets]]" + - `\nbinding = ${binding.name}` + - (binding.dataset ? `\ndataset = ${binding.dataset}` : "") - ); - - case "browser": - return "[browser]" + `\nbinding = "${binding.name}"`; - - case "d1": - return ( - "[[d1_databases]]" + - `\nbinding = "${binding.name}"` + - `\ndatabase_id = "${binding.id}"` - ); - - case "dispatch_namespace": - return ( - "[[dispatch_namespaces]]" + - `\nbinding = "${binding.name}"` + - `\nnamespce = "${binding.namespace}"` + - (binding.outbound - ? `\noutbound = { service = "${binding.outbound.worker.service}"` + - (binding.outbound.params - ? `, parameters = [${binding.outbound.params.map((param) => param.name).join(", ")}]` - : "") + - " }" - : "") - ); - - case "durable_object_namespace": - return ( - "[[durable_objects.bindings]]" + - `\nname = "${binding.name}"` + - `\nclass_name = "${binding.class_name}"` + - (binding.script_name ? `\nscript_name = "${binding.script_name}"` : "") - ); - - case "hyperdrive": - return ( - "[[hyperdrive]]" + - `\nbinding = "${binding.name}"` + - `\nid = "${binding.id}"` - ); - - case "kv_namespace": - return ( - "[[kv_namespaces]]" + - `\nbinding = "${binding.name}"` + - `\nid = "${binding.namespace_id}"` - ); - - case "mtls_certificate": - return ( - "[[mtls_certificates]]" + - `\nbinding = "${binding.name}"` + - `\ncertificate_id = "${binding.certificate_id}"` - ); - - case "queue": - return ( - "[[queues.producers]]" + - `\nbinding = "${binding.name}"` + - `\nqueue = "${binding.queue_name}"` + - (binding.delivery_delay - ? `\ndelivery_delay = ${binding.delivery_delay}` - : "") - ); - - case "r2_bucket": - return ( - "[[r2_buckets]]" + - `\nbinding = "${binding.name}"` + - `\nbucket_name = "${binding.bucket_name}"` + - (binding.jurisdiction - ? `\njurisdiction = "${binding.jurisdiction}"` - : "") - ); - - case "send_email": - return ( - "[[send_email]]" + - `\nname = "${binding.name}"` + - (binding.destination_address - ? `\ndestination_address = "${binding.destination_address}"` - : "") + - (binding.allowed_destination_addresses - ? `\nallowed_destination_addresses = [${binding.allowed_destination_addresses.map((addr) => `"${addr}"`).join(", ")}]` - : "") - ); - - case "service": - return ( - "[[services]]" + - `\nbinding = "${binding.name}"` + - `\nservice = "${binding.name}"` + - (binding.entrypoint ? `\nentrypoint = "${binding.entrypoint}"` : "") - ); - - case "vectorize": - return ( - "[[vectorize]]" + - `\nbinding = "${binding.name}"` + - `\nindex_name = "${binding.index_name}"` - ); - - case "version_metadata": - return "[version_metadata]" + `\nbinding = "${binding.name}"`; - - default: - return null; - } -}