From 6d98091c349493e3341ede9602eec4bcd7be3d4e Mon Sep 17 00:00:00 2001 From: Jun Lee Date: Fri, 7 Feb 2025 15:06:37 +0000 Subject: [PATCH 1/6] Renaming event context to request context Clarifying request context extends at least 60s after last client disconnect. --- .../docs/durable-objects/api/state.mdx | 4 +- .../deploy-an-analog-site.mdx | 2 +- ...oice-notes-app-with-auto-transcription.mdx | 1216 +++++++++-------- src/content/glossary/durable-objects.yaml | 4 +- .../workers/durable_objects_pricing.mdx | 45 +- 5 files changed, 648 insertions(+), 623 deletions(-) diff --git a/src/content/docs/durable-objects/api/state.mdx b/src/content/docs/durable-objects/api/state.mdx index 2dfae8fd0eca3a..89b67e72a5d0d3 100644 --- a/src/content/docs/durable-objects/api/state.mdx +++ b/src/content/docs/durable-objects/api/state.mdx @@ -53,11 +53,11 @@ export class MyDurableObject extends DurableObject { ### `waitUntil` -`waitUntil` waits until the promise which is passed as a parameter resolves and can extends an event context up to 30 seconds after the last client disconnects. +`waitUntil` waits until the promise which is passed as a parameter resolves and can extend a request context up to 30 seconds after the last client disconnects. :::note[`waitUntil` is not necessary] -The event context for a Durable Objects extends at least 60 seconds after the last client disconnects. So `waitUntil` is not necessary. It remains part of the `DurableObjectState` interface to remain compatible with [Workers Runtime APIs](/workers/runtime-apis/context/#waituntil). +The request context for a Durable Objects extends at least 60 seconds after the last client disconnects. So `waitUntil` is not necessary. It remains part of the `DurableObjectState` interface to remain compatible with [Workers Runtime APIs](/workers/runtime-apis/context/#waituntil). ::: diff --git a/src/content/docs/pages/framework-guides/deploy-an-analog-site.mdx b/src/content/docs/pages/framework-guides/deploy-an-analog-site.mdx index a87a9547ba4373..3336165a25f0d3 100644 --- a/src/content/docs/pages/framework-guides/deploy-an-analog-site.mdx +++ b/src/content/docs/pages/framework-guides/deploy-an-analog-site.mdx @@ -79,7 +79,7 @@ export default defineConfig({ }); ``` -This module in turn loads a plugin which adds bindings to the event context in dev: +This module in turn loads a plugin which adds bindings to the request context in dev: ```typescript import { NitroApp } from "nitropack"; diff --git a/src/content/docs/workers-ai/tutorials/build-a-voice-notes-app-with-auto-transcription.mdx b/src/content/docs/workers-ai/tutorials/build-a-voice-notes-app-with-auto-transcription.mdx index 3b028a57979d24..b177397e102fb2 100644 --- a/src/content/docs/workers-ai/tutorials/build-a-voice-notes-app-with-auto-transcription.mdx +++ b/src/content/docs/workers-ai/tutorials/build-a-voice-notes-app-with-auto-transcription.mdx @@ -42,7 +42,11 @@ To continue, you will need: Create a new Worker project using the `c3` CLI with the `nuxt` framework preset. - + ### Install additional dependencies @@ -60,12 +64,12 @@ Then add the `@nuxt/ui` module to the `nuxt.config.ts` file: ```ts title="nuxt.config.ts" export default defineNuxtConfig({ - //.. + //.. - modules: ['nitro-cloudflare-dev', '@nuxt/ui'], + modules: ["nitro-cloudflare-dev", "@nuxt/ui"], - //.. -}) + //.. 
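+	// `nitro-cloudflare-dev` makes the Cloudflare bindings declared in the Wrangler
+	// file available on the request context while running `nuxt dev`; `@nuxt/ui`
+	// provides the UI components used throughout this tutorial's pages.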
+}); ``` ### [Optional] Move to Nuxt 4 compatibility mode @@ -76,14 +80,14 @@ Create a new `app` folder in the project's root directory and move the `app.vue` ```ts title="nuxt.config.ts" export default defineNuxtConfig({ - //.. + //.. - future: { - compatibilityVersion: 4, - }, + future: { + compatibilityVersion: 4, + }, - //.. -}) + //.. +}); ``` :::note @@ -111,7 +115,7 @@ Add the `AI` binding to the Wrangler file. binding = "AI" ``` -Once the `AI` binding has been configured, run the `cf-typegen` command to generate the necessary Cloudflare type definitions. This makes the types definitions available in the server event contexts. +Once the `AI` binding has been configured, run the `cf-typegen` command to generate the necessary Cloudflare type definitions. This makes the types definitions available in the server request contexts. @@ -119,30 +123,30 @@ Create a transcribe `POST` endpoint by creating `transcribe.post.ts` file inside ```ts title="server/api/transcribe.post.ts" export default defineEventHandler(async (event) => { - const { cloudflare } = event.context; - - const form = await readFormData(event); - const blob = form.get('audio') as Blob; - if (!blob) { - throw createError({ - statusCode: 400, - message: 'Missing audio blob to transcribe', - }); - } - - try { - const response = await cloudflare.env.AI.run('@cf/openai/whisper', { - audio: [...new Uint8Array(await blob.arrayBuffer())], - }); - - return response.text; - } catch (err) { - console.error('Error transcribing audio:', err); - throw createError({ - statusCode: 500, - message: 'Failed to transcribe audio. Please try again.', - }); - } + const { cloudflare } = event.context; + + const form = await readFormData(event); + const blob = form.get("audio") as Blob; + if (!blob) { + throw createError({ + statusCode: 400, + message: "Missing audio blob to transcribe", + }); + } + + try { + const response = await cloudflare.env.AI.run("@cf/openai/whisper", { + audio: [...new Uint8Array(await blob.arrayBuffer())], + }); + + return response.text; + } catch (err) { + console.error("Error transcribing audio:", err); + throw createError({ + statusCode: 500, + message: "Failed to transcribe audio. Please try again.", + }); + } }); ``` @@ -195,23 +199,23 @@ Now you are ready to create the upload endpoint. Create a new `upload.put.ts` fi ```ts title="server/api/upload.put.ts" export default defineEventHandler(async (event) => { - const { cloudflare } = event.context; - - const form = await readFormData(event); - const files = form.getAll('files') as File[]; - if (!files) { - throw createError({ statusCode: 400, message: 'Missing files' }); - } - - const uploadKeys: string[] = []; - for (const file of files) { - const obj = await cloudflare.env.R2.put(`recordings/${file.name}`, file); - if (obj) { - uploadKeys.push(obj.key); - } - } - - return uploadKeys; + const { cloudflare } = event.context; + + const form = await readFormData(event); + const files = form.getAll("files") as File[]; + if (!files) { + throw createError({ statusCode: 400, message: "Missing files" }); + } + + const uploadKeys: string[] = []; + for (const file of files) { + const obj = await cloudflare.env.R2.put(`recordings/${file.name}`, file); + if (obj) { + uploadKeys.push(obj.key); + } + } + + return uploadKeys; }); ``` @@ -337,31 +341,31 @@ Now you can create the API endpoint. 
Create a new file `index.post.ts` in the `s ```ts title="server/api/notes/index.post.ts" export default defineEventHandler(async (event) => { - const { cloudflare } = event.context; - - const { text, audioUrls } = await readBody(event); - if (!text) { - throw createError({ - statusCode: 400, - message: 'Missing note text', - }); - } - - try { - await cloudflare.env.DB.prepare( - 'INSERT INTO notes (text, audio_urls) VALUES (?1, ?2)' - ) - .bind(text, audioUrls ? JSON.stringify(audioUrls) : null) - .run(); - - return setResponseStatus(event, 201); - } catch (err) { - console.error('Error creating note:', err); - throw createError({ - statusCode: 500, - message: 'Failed to create note. Please try again.', - }); - } + const { cloudflare } = event.context; + + const { text, audioUrls } = await readBody(event); + if (!text) { + throw createError({ + statusCode: 400, + message: "Missing note text", + }); + } + + try { + await cloudflare.env.DB.prepare( + "INSERT INTO notes (text, audio_urls) VALUES (?1, ?2)", + ) + .bind(text, audioUrls ? JSON.stringify(audioUrls) : null) + .run(); + + return setResponseStatus(event, 201); + } catch (err) { + console.error("Error creating note:", err); + throw createError({ + statusCode: 500, + message: "Failed to create note. Please try again.", + }); + } }); ``` @@ -382,110 +386,110 @@ Create a new file `useMediaRecorder.ts` in the `app/composables` folder, and add ```ts title="app/composables/useMediaRecorder.ts" interface MediaRecorderState { - isRecording: boolean; - recordingDuration: number; - audioData: Uint8Array | null; - updateTrigger: number; + isRecording: boolean; + recordingDuration: number; + audioData: Uint8Array | null; + updateTrigger: number; } export function useMediaRecorder() { - const state = ref({ - isRecording: false, - recordingDuration: 0, - audioData: null, - updateTrigger: 0, - }); - - let mediaRecorder: MediaRecorder | null = null; - let audioContext: AudioContext | null = null; - let analyser: AnalyserNode | null = null; - let animationFrame: number | null = null; - let audioChunks: Blob[] | undefined = undefined; - - const updateAudioData = () => { - if (!analyser || !state.value.isRecording || !state.value.audioData) { - if (animationFrame) { - cancelAnimationFrame(animationFrame); - animationFrame = null; - } - - return; - } - - analyser.getByteTimeDomainData(state.value.audioData); - state.value.updateTrigger += 1; - animationFrame = requestAnimationFrame(updateAudioData); - }; - - const startRecording = async () => { - try { - const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); - - audioContext = new AudioContext(); - analyser = audioContext.createAnalyser(); - - const source = audioContext.createMediaStreamSource(stream); - source.connect(analyser); - - mediaRecorder = new MediaRecorder(stream); - audioChunks = []; - - mediaRecorder.ondataavailable = (e: BlobEvent) => { - audioChunks?.push(e.data); - state.value.recordingDuration += 1; - }; - - state.value.audioData = new Uint8Array(analyser.frequencyBinCount); - state.value.isRecording = true; - state.value.recordingDuration = 0; - state.value.updateTrigger = 0; - mediaRecorder.start(1000); - - updateAudioData(); - } catch (err) { - console.error('Error accessing microphone:', err); - throw err; - } - }; - - const stopRecording = async () => { - return await new Promise((resolve) => { - if (mediaRecorder && state.value.isRecording) { - mediaRecorder.onstop = () => { - const blob = new Blob(audioChunks, { type: 'audio/webm' }); - audioChunks = 
undefined; - - state.value.recordingDuration = 0; - state.value.updateTrigger = 0; - state.value.audioData = null; - - resolve(blob); - }; - - state.value.isRecording = false; - mediaRecorder.stop(); - mediaRecorder.stream.getTracks().forEach((track) => track.stop()); - - if (animationFrame) { - cancelAnimationFrame(animationFrame); - animationFrame = null; - } - - audioContext?.close(); - audioContext = null; - } - }); - }; - - onUnmounted(() => { - stopRecording(); - }); - - return { - state: readonly(state), - startRecording, - stopRecording, - }; + const state = ref({ + isRecording: false, + recordingDuration: 0, + audioData: null, + updateTrigger: 0, + }); + + let mediaRecorder: MediaRecorder | null = null; + let audioContext: AudioContext | null = null; + let analyser: AnalyserNode | null = null; + let animationFrame: number | null = null; + let audioChunks: Blob[] | undefined = undefined; + + const updateAudioData = () => { + if (!analyser || !state.value.isRecording || !state.value.audioData) { + if (animationFrame) { + cancelAnimationFrame(animationFrame); + animationFrame = null; + } + + return; + } + + analyser.getByteTimeDomainData(state.value.audioData); + state.value.updateTrigger += 1; + animationFrame = requestAnimationFrame(updateAudioData); + }; + + const startRecording = async () => { + try { + const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); + + audioContext = new AudioContext(); + analyser = audioContext.createAnalyser(); + + const source = audioContext.createMediaStreamSource(stream); + source.connect(analyser); + + mediaRecorder = new MediaRecorder(stream); + audioChunks = []; + + mediaRecorder.ondataavailable = (e: BlobEvent) => { + audioChunks?.push(e.data); + state.value.recordingDuration += 1; + }; + + state.value.audioData = new Uint8Array(analyser.frequencyBinCount); + state.value.isRecording = true; + state.value.recordingDuration = 0; + state.value.updateTrigger = 0; + mediaRecorder.start(1000); + + updateAudioData(); + } catch (err) { + console.error("Error accessing microphone:", err); + throw err; + } + }; + + const stopRecording = async () => { + return await new Promise((resolve) => { + if (mediaRecorder && state.value.isRecording) { + mediaRecorder.onstop = () => { + const blob = new Blob(audioChunks, { type: "audio/webm" }); + audioChunks = undefined; + + state.value.recordingDuration = 0; + state.value.updateTrigger = 0; + state.value.audioData = null; + + resolve(blob); + }; + + state.value.isRecording = false; + mediaRecorder.stop(); + mediaRecorder.stream.getTracks().forEach((track) => track.stop()); + + if (animationFrame) { + cancelAnimationFrame(animationFrame); + animationFrame = null; + } + + audioContext?.close(); + audioContext = null; + } + }); + }; + + onUnmounted(() => { + stopRecording(); + }); + + return { + state: readonly(state), + startRecording, + stopRecording, + }; } ``` @@ -508,116 +512,116 @@ Create a new file named `CreateNote.vue` inside the `app/components` folder. Add ```vue title="app/components/CreateNote.vue" ``` @@ -631,155 +635,155 @@ Now, add the following code below the template code in the same file: ```vue title="app/components/CreateNote.vue" ``` @@ -796,65 +800,65 @@ You can use this component in a Nuxt page to show it to the user. 
But before tha ```vue title="/app/app.vue" ``` @@ -865,29 +869,29 @@ Next, add a new file named `new.vue` inside the `app/pages` folder, add the foll ```vue title="app/pages/new.vue" ``` @@ -920,13 +924,13 @@ To show the notes from the database on the client side, create an API endpoint f Create a new file named `index.get.ts` inside the `server/api/notes` directory, and add the following code to it: ```ts title="server/api/index.get.ts" -import type { Note } from '~~/types'; +import type { Note } from "~~/types"; export default defineEventHandler(async (event) => { - const { cloudflare } = event.context; + const { cloudflare } = event.context; - const res = await cloudflare.env.DB.prepare( - `SELECT + const res = await cloudflare.env.DB.prepare( + `SELECT id, text, audio_urls AS audioUrls, @@ -934,13 +938,13 @@ export default defineEventHandler(async (event) => { updated_at AS updatedAt FROM notes ORDER BY created_at DESC - LIMIT 50;` - ).all & { audioUrls: string | null }>(); + LIMIT 50;`, + ).all & { audioUrls: string | null }>(); - return res.results.map((note) => ({ - ...note, - audioUrls: note.audioUrls ? JSON.parse(note.audioUrls) : undefined, - })); + return res.results.map((note) => ({ + ...note, + audioUrls: note.audioUrls ? JSON.parse(note.audioUrls) : undefined, + })); }); ``` @@ -950,24 +954,24 @@ Next, create a page named `index.vue` inside the `app/pages` directory. This wil ```vue title="app/pages/index.vue" ``` @@ -985,11 +989,11 @@ The `...` prefix in the file name makes it a catch all route. This allows it to ```ts title="server/routes/recordings/[...pathname].get.ts" export default defineEventHandler(async (event) => { - const { cloudflare, params } = event.context; + const { cloudflare, params } = event.context; - const { pathname } = params || {}; + const { pathname } = params || {}; - return cloudflare.env.R2.get(`recordings/${pathname}`); + return cloudflare.env.R2.get(`recordings/${pathname}`); }); ``` @@ -1005,52 +1009,52 @@ Create a new file named `settings.vue` in the `app/pages` folder, and add the fo ```vue title="app/pages/settings.vue" ``` @@ -1080,35 +1084,35 @@ Modify the `CreateNote` component to send the post processing prompt along with ```vue title="app/components/CreateNote.vue" ins={2, 6-9, 17-22} ``` @@ -800,65 +796,65 @@ You can use this component in a Nuxt page to show it to the user. But before tha ```vue title="/app/app.vue" ``` @@ -869,29 +865,29 @@ Next, add a new file named `new.vue` inside the `app/pages` folder, add the foll ```vue title="app/pages/new.vue" ``` @@ -924,13 +920,13 @@ To show the notes from the database on the client side, create an API endpoint f Create a new file named `index.get.ts` inside the `server/api/notes` directory, and add the following code to it: ```ts title="server/api/index.get.ts" -import type { Note } from "~~/types"; +import type { Note } from '~~/types'; export default defineEventHandler(async (event) => { - const { cloudflare } = event.context; + const { cloudflare } = event.context; - const res = await cloudflare.env.DB.prepare( - `SELECT + const res = await cloudflare.env.DB.prepare( + `SELECT id, text, audio_urls AS audioUrls, @@ -938,13 +934,13 @@ export default defineEventHandler(async (event) => { updated_at AS updatedAt FROM notes ORDER BY created_at DESC - LIMIT 50;`, - ).all & { audioUrls: string | null }>(); + LIMIT 50;` + ).all & { audioUrls: string | null }>(); - return res.results.map((note) => ({ - ...note, - audioUrls: note.audioUrls ? 
JSON.parse(note.audioUrls) : undefined, - })); + return res.results.map((note) => ({ + ...note, + audioUrls: note.audioUrls ? JSON.parse(note.audioUrls) : undefined, + })); }); ``` @@ -954,24 +950,24 @@ Next, create a page named `index.vue` inside the `app/pages` directory. This wil ```vue title="app/pages/index.vue" ``` @@ -989,11 +985,11 @@ The `...` prefix in the file name makes it a catch all route. This allows it to ```ts title="server/routes/recordings/[...pathname].get.ts" export default defineEventHandler(async (event) => { - const { cloudflare, params } = event.context; + const { cloudflare, params } = event.context; - const { pathname } = params || {}; + const { pathname } = params || {}; - return cloudflare.env.R2.get(`recordings/${pathname}`); + return cloudflare.env.R2.get(`recordings/${pathname}`); }); ``` @@ -1009,52 +1005,52 @@ Create a new file named `settings.vue` in the `app/pages` folder, and add the fo ```vue title="app/pages/settings.vue" ``` @@ -1084,35 +1080,35 @@ Modify the `CreateNote` component to send the post processing prompt along with ```vue title="app/components/CreateNote.vue" ins={2, 6-9, 17-22}