diff --git a/src/content/docs/workers-ai/function-calling/embedded/get-started.mdx b/src/content/docs/workers-ai/function-calling/embedded/get-started.mdx
index 02f1d651c9daf2e..3d3d654817866c9 100644
--- a/src/content/docs/workers-ai/function-calling/embedded/get-started.mdx
+++ b/src/content/docs/workers-ai/function-calling/embedded/get-started.mdx
@@ -7,6 +7,7 @@ head:
   - tag: title
     content: Get Started
 ---
+import { TypeScriptExample } from "~/components";
 
 This guide will instruct you through setting up and deploying your first Workers AI project with embedded function calling. You will use Workers, a Workers AI binding, the [`ai-utils package`](https://github.com/cloudflare/ai-utils), and a large language model (LLM) to deploy your first AI-powered application on the Cloudflare global network with embedded function calling.
 
@@ -26,7 +27,10 @@ npm install @cloudflare/ai-utils --save
 
 Update the `index.ts` file in your application directory with the following code:
 
-```ts title="Embedded function calling example"
+
+<TypeScriptExample>
+
+```ts
 import { runWithTools } from "@cloudflare/ai-utils";
 
 type Env = {
@@ -76,6 +80,7 @@ export default {
 	},
 } satisfies ExportedHandler;
 ```
+</TypeScriptExample>
 
 This example imports the utils with `import { runWithTools} from "@cloudflare/ai-utils"` and follows the API reference below.
 
diff --git a/src/content/docs/workers-ai/get-started/workers-wrangler.mdx b/src/content/docs/workers-ai/get-started/workers-wrangler.mdx
index ab0281f4ec2bd07..d24f6a58b36184d 100644
--- a/src/content/docs/workers-ai/get-started/workers-wrangler.mdx
+++ b/src/content/docs/workers-ai/get-started/workers-wrangler.mdx
@@ -9,7 +9,7 @@ head:
 description: Deploy your first Cloudflare Workers AI project using the CLI.
 ---
 
-import { Render, PackageManagers, WranglerConfig } from "~/components";
+import { Render, PackageManagers, WranglerConfig, TypeScriptExample } from "~/components";
 
 This guide will instruct you through setting up and deploying your first Workers AI project. You will use [Workers](/workers/), a Workers AI binding, and a large language model (LLM) to deploy your first AI-powered application on the Cloudflare global network.
 
@@ -73,7 +73,9 @@ You are now ready to run an inference task in your Worker. In this case, you wil
 
 Update the `index.ts` file in your `hello-ai` application directory with the following code:
 
-```typescript title="src/index.ts"
+<TypeScriptExample>
+
+```ts
 export interface Env {
 	// If you set another name in the Wrangler config file as the value for 'binding',
 	// replace "AI" with the variable name you defined.
@@ -90,6 +92,7 @@ export default {
 	},
 } satisfies ExportedHandler;
 ```
+</TypeScriptExample>
 
 Up to this point, you have created an AI binding for your Worker and configured your Worker to be able to execute the Llama 3.1 model. You can now test your project locally before you deploy globally.
 