1 parent ac4e9a3 commit cb9c4de
src/content/docs/workers-ai/features/async-batch-api.mdx
@@ -218,7 +218,7 @@ export default {
     // Send the batch request to the AI model via the AI binding
     // Replace "@cf/meta/llama-3.3-70b-instruct-batch" with your desired batch-enabled model if needed.
     const batchResponse = await env.AI.run(
-      "@cf/meta/llama-3.3-70b-instruct-batch",
+      "@cf/meta/ray-llama-3.3-70b-instruct-fp8-fast",
       {
         requests: data.requests,
       },
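For context, here is a minimal sketch of how the updated call might sit inside a Worker fetch handler. The binding name `AI`, the request-body shape (`{ "requests": [...] }`), and the surrounding handler are assumptions drawn from the async-batch-api.mdx example this commit edits, not from the commit itself.

```js
// Sketch only: assumes a Workers AI binding named "AI" and a JSON body
// of the form { "requests": [{ "prompt": "..." }, ...] }.
export default {
  async fetch(request, env) {
    // Parse the incoming batch payload (assumed shape, see note above)
    const data = await request.json();

    // Send the batch request to the AI model via the AI binding
    const batchResponse = await env.AI.run(
      "@cf/meta/ray-llama-3.3-70b-instruct-fp8-fast",
      {
        requests: data.requests,
      },
    );

    // Return the batch response to the caller
    return Response.json(batchResponse);
  },
};
```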