-
-
Notifications
You must be signed in to change notification settings - Fork 1.1k
feat(sdk): AI SDK chat transport — run useChat as tasks #3065
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 12 commits
741c983
8354e2a
b6448fb
68bd584
4a828fe
97811bb
a17cad9
c3656a5
0ca459d
6dd87fd
d9ef611
e4c30b0
eb2ccc0
3f8bc26
2329a10
7badb14
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,42 @@ | ||
| --- | ||
| "@trigger.dev/sdk": minor | ||
| --- | ||
|
|
||
| Add AI SDK chat transport integration via two new subpath exports: | ||
|
|
||
| **`@trigger.dev/sdk/chat`** (frontend, browser-safe): | ||
| - `TriggerChatTransport` — custom `ChatTransport` for the AI SDK's `useChat` hook that runs chat completions as durable Trigger.dev tasks | ||
| - `createChatTransport()` — factory function | ||
|
|
||
| ```tsx | ||
| import { useChat } from "@ai-sdk/react"; | ||
| import { TriggerChatTransport } from "@trigger.dev/sdk/chat"; | ||
|
|
||
| const { messages, sendMessage } = useChat({ | ||
| transport: new TriggerChatTransport({ | ||
| task: "my-chat-task", | ||
| accessToken, | ||
| }), | ||
| }); | ||
| ``` | ||
|
|
||
| **`@trigger.dev/sdk/ai`** (backend, extends existing `ai.tool`/`ai.currentToolOptions`): | ||
| - `chatTask()` — pre-typed task wrapper with auto-pipe support | ||
| - `pipeChat()` — pipe a `StreamTextResult` or stream to the frontend | ||
| - `CHAT_STREAM_KEY` — the default stream key constant | ||
| - `ChatTaskPayload` type | ||
|
|
||
| ```ts | ||
| import { chatTask } from "@trigger.dev/sdk/ai"; | ||
| import { streamText, convertToModelMessages } from "ai"; | ||
|
|
||
| export const myChatTask = chatTask({ | ||
| id: "my-chat-task", | ||
| run: async ({ messages }) => { | ||
| return streamText({ | ||
| model: openai("gpt-4o"), | ||
| messages: convertToModelMessages(messages), | ||
| }); | ||
| }, | ||
| }); | ||
| ``` |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -3,11 +3,16 @@ import { | |
| isSchemaZodEsque, | ||
| Task, | ||
| type inferSchemaIn, | ||
| type PipeStreamOptions, | ||
| type TaskOptions, | ||
| type TaskSchema, | ||
| type TaskWithSchema, | ||
| } from "@trigger.dev/core/v3"; | ||
| import type { UIMessage } from "ai"; | ||
| import { dynamicTool, jsonSchema, JSONSchema7, Schema, Tool, ToolCallOptions, zodSchema } from "ai"; | ||
| import { metadata } from "./metadata.js"; | ||
| import { streams } from "./streams.js"; | ||
| import { createTask } from "./shared.js"; | ||
|
|
||
| const METADATA_KEY = "tool.execute.options"; | ||
|
|
||
|
|
@@ -116,3 +121,240 @@ export const ai = { | |
| tool: toolFromTask, | ||
| currentToolOptions: getToolOptionsFromMetadata, | ||
| }; | ||
|
|
||
// ---------------------------------------------------------------------------
// Chat transport helpers — backend side
// ---------------------------------------------------------------------------

/**
 * The default stream key used for chat transport communication.
 *
 * Both `TriggerChatTransport` (frontend) and `pipeChat`/`chatTask` (backend)
 * use this key by default. If you override it, override it on both sides
 * together (see `PipeChatOptions.streamKey`).
 */
export const CHAT_STREAM_KEY = "chat";
|
|
||
/**
 * The payload shape that the chat transport sends to the triggered task.
 *
 * When using `chatTask()`, the payload is automatically typed — you don't need
 * to import this type. Use this type only if you're using `task()` directly
 * with `pipeChat()`.
 */
export type ChatTaskPayload<TMessage extends UIMessage = UIMessage> = {
  /** The conversation messages */
  messages: TMessage[];

  /** The unique identifier for the chat session */
  chatId: string;

  /**
   * The trigger type:
   * - `"submit-message"`: A new user message
   * - `"regenerate-message"`: Regenerate the last assistant response
   */
  trigger: "submit-message" | "regenerate-message";

  /** The ID of the message to regenerate (only for `"regenerate-message"`) */
  messageId?: string;

  /** Custom metadata from the frontend — shape is caller-defined, hence `unknown` */
  metadata?: unknown;
};
|
|
||
/**
 * Options for `pipeChat`.
 */
export type PipeChatOptions = {
  /**
   * Override the stream key. Must match the `streamKey` on `TriggerChatTransport`,
   * otherwise the frontend will not find the stream.
   * @default "chat"
   */
  streamKey?: string;

  /** An AbortSignal to cancel the stream. */
  signal?: AbortSignal;

  /**
   * The target run ID to pipe to.
   * @default "self" (current run)
   */
  target?: string;
};
|
|
||
| /** | ||
| * An object with a `toUIMessageStream()` method (e.g. `StreamTextResult` from `streamText()`). | ||
| */ | ||
| type UIMessageStreamable = { | ||
| toUIMessageStream: (...args: any[]) => AsyncIterable<unknown> | ReadableStream<unknown>; | ||
| }; | ||
|
|
||
| function isUIMessageStreamable(value: unknown): value is UIMessageStreamable { | ||
| return ( | ||
| typeof value === "object" && | ||
| value !== null && | ||
| "toUIMessageStream" in value && | ||
| typeof (value as any).toUIMessageStream === "function" | ||
| ); | ||
| } | ||
|
|
||
| function isAsyncIterable(value: unknown): value is AsyncIterable<unknown> { | ||
| return typeof value === "object" && value !== null && Symbol.asyncIterator in value; | ||
| } | ||
|
|
||
| function isReadableStream(value: unknown): value is ReadableStream<unknown> { | ||
| return typeof value === "object" && value !== null && typeof (value as any).getReader === "function"; | ||
| } | ||
|
|
||
| /** | ||
| * Pipes a chat stream to the realtime stream, making it available to the | ||
| * `TriggerChatTransport` on the frontend. | ||
| * | ||
| * Accepts: | ||
| * - A `StreamTextResult` from `streamText()` (has `.toUIMessageStream()`) | ||
| * - An `AsyncIterable` of `UIMessageChunk`s | ||
| * - A `ReadableStream` of `UIMessageChunk`s | ||
| * | ||
| * Must be called from inside a Trigger.dev task's `run` function. | ||
| * | ||
| * @example | ||
| * ```ts | ||
| * import { task } from "@trigger.dev/sdk"; | ||
| * import { pipeChat, type ChatTaskPayload } from "@trigger.dev/sdk/ai"; | ||
| * import { streamText, convertToModelMessages } from "ai"; | ||
| * | ||
| * export const myChatTask = task({ | ||
| * id: "my-chat-task", | ||
| * run: async (payload: ChatTaskPayload) => { | ||
| * const result = streamText({ | ||
| * model: openai("gpt-4o"), | ||
| * messages: convertToModelMessages(payload.messages), | ||
| * }); | ||
| * | ||
| * await pipeChat(result); | ||
| * }, | ||
| * }); | ||
| * ``` | ||
| * | ||
| * @example | ||
| * ```ts | ||
| * // Works from anywhere inside a task — even deep in your agent code | ||
| * async function runAgentLoop(messages: CoreMessage[]) { | ||
| * const result = streamText({ model, messages }); | ||
| * await pipeChat(result); | ||
| * } | ||
| * ``` | ||
| */ | ||
| export async function pipeChat( | ||
| source: UIMessageStreamable | AsyncIterable<unknown> | ReadableStream<unknown>, | ||
| options?: PipeChatOptions | ||
| ): Promise<void> { | ||
| const streamKey = options?.streamKey ?? CHAT_STREAM_KEY; | ||
|
|
||
| let stream: AsyncIterable<unknown> | ReadableStream<unknown>; | ||
|
|
||
| if (isUIMessageStreamable(source)) { | ||
| stream = source.toUIMessageStream(); | ||
| } else if (isAsyncIterable(source) || isReadableStream(source)) { | ||
| stream = source; | ||
| } else { | ||
| throw new Error( | ||
| "pipeChat: source must be a StreamTextResult (with .toUIMessageStream()), " + | ||
| "an AsyncIterable, or a ReadableStream" | ||
| ); | ||
| } | ||
|
|
||
| const pipeOptions: PipeStreamOptions = {}; | ||
| if (options?.signal) { | ||
| pipeOptions.signal = options.signal; | ||
| } | ||
| if (options?.target) { | ||
| pipeOptions.target = options.target; | ||
| } | ||
|
|
||
| const { waitUntilComplete } = streams.pipe(streamKey, stream, pipeOptions); | ||
| await waitUntilComplete(); | ||
| } | ||
|
|
||
/**
 * Options for defining a chat task.
 *
 * Extends the standard `TaskOptions` but pre-types the payload as `ChatTaskPayload`
 * and overrides `run` to accept `ChatTaskPayload` directly.
 *
 * **Auto-piping:** If the `run` function returns a value with `.toUIMessageStream()`
 * (like a `StreamTextResult`), the stream is automatically piped to the frontend.
 * For complex flows, use `pipeChat()` manually from anywhere in your code.
 */
export type ChatTaskOptions<TIdentifier extends string> = Omit<
  TaskOptions<TIdentifier, ChatTaskPayload, unknown>,
  "run"
> & {
  /**
   * The run function for the chat task.
   *
   * Receives a `ChatTaskPayload` with the conversation messages, chat session ID,
   * and trigger type.
   *
   * **Auto-piping:** If this function returns a value with `.toUIMessageStream()`,
   * the stream is automatically piped to the frontend.
   */
  run: (payload: ChatTaskPayload) => Promise<unknown>;
};
|
|
||
| /** | ||
| * Creates a Trigger.dev task pre-configured for AI SDK chat. | ||
| * | ||
| * - **Pre-types the payload** as `ChatTaskPayload` — no manual typing needed | ||
| * - **Auto-pipes the stream** if `run` returns a `StreamTextResult` | ||
| * - For complex flows, use `pipeChat()` from anywhere inside your task code | ||
| * | ||
| * @example | ||
| * ```ts | ||
| * import { chatTask } from "@trigger.dev/sdk/ai"; | ||
| * import { streamText, convertToModelMessages } from "ai"; | ||
| * import { openai } from "@ai-sdk/openai"; | ||
| * | ||
| * // Simple: return streamText result — auto-piped to the frontend | ||
| * export const myChatTask = chatTask({ | ||
| * id: "my-chat-task", | ||
| * run: async ({ messages }) => { | ||
| * return streamText({ | ||
| * model: openai("gpt-4o"), | ||
| * messages: convertToModelMessages(messages), | ||
| * }); | ||
| * }, | ||
| * }); | ||
| * ``` | ||
| * | ||
| * @example | ||
| * ```ts | ||
| * import { chatTask, pipeChat } from "@trigger.dev/sdk/ai"; | ||
| * | ||
| * // Complex: pipeChat() from deep in your agent code | ||
| * export const myAgentTask = chatTask({ | ||
| * id: "my-agent-task", | ||
| * run: async ({ messages }) => { | ||
| * await runComplexAgentLoop(messages); | ||
| * }, | ||
| * }); | ||
| * ``` | ||
| */ | ||
| export function chatTask<TIdentifier extends string>( | ||
| options: ChatTaskOptions<TIdentifier> | ||
| ): Task<TIdentifier, ChatTaskPayload, unknown> { | ||
| const { run: userRun, ...restOptions } = options; | ||
|
|
||
| return createTask<TIdentifier, ChatTaskPayload, unknown>({ | ||
| ...restOptions, | ||
| run: async (payload: ChatTaskPayload) => { | ||
| const result = await userRun(payload); | ||
|
|
||
| // Auto-pipe if the run function returned a StreamTextResult or similar | ||
| if (isUIMessageStreamable(result)) { | ||
| await pipeChat(result); | ||
| } | ||
|
|
||
| return result; | ||
| }, | ||
| }); | ||
| } | ||
|
Comment on lines
+342
to
+360
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Potential double-pipe if the user calls `pipeChat()` manually and also returns the stream: if a user both calls `pipeChat()` and returns the `StreamTextResult` from `run`, the auto-pipe in `chatTask` will pipe the same stream a second time. 🤖 Prompt for AI Agents |
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
🧩 Analysis chain
🏁 Script executed:
Repository: triggerdotdev/trigger.dev
Length of output: 104
🌐 Web query:
Vercel AI SDK v6 streamText toUIMessageStream method documentation💡 Result:
In AI SDK v6,
`toUIMessageStream` is a method on the `streamText()` result (not on `streamText` itself): `streamText()` → `toUIMessageStream(options?: UIMessageStreamOptions): AsyncIterableStream<UIMessageChunk>`. Options include `onFinish`, `messageMetadata`, `sendReasoning`, `sendSources`, `sendStart`, `sendFinish`, `onError`, and `consumeSseStream` (for abort handling). [1] Related helpers: `toUIMessageStreamResponse()` and `pipeUIMessageStreamToResponse()` (for returning/piping an HTTP response using the UI message stream protocol). [1] Examples: `reply.send(result.toUIMessageStream())` and `writer.merge(result.toUIMessageStream({ sendStart: false }))`. [2] A `streamText()` UI stream can be merged into a custom `createUIMessageStream` via `writer.merge(result.toUIMessageStream())`. [3]
Sources: [1] `streamText()` reference, [2] Fastify cookbook, [3] Streaming Custom Data docs.
Vercel AI SDK method names toDataStream toAIStream history versions💡 Result:
`ai` (Vercel AI SDK) streaming-helper method name history: early versions exposed `AIStream`, `StreamingTextResponse`, and `streamToResponse`; later versions use `streamText(...).toDataStream()` / `.toDataStreamResponse()` instead. [1] The v3 `streamText(...)` result had AI-stream helpers: `toAIStream()`, `pipeAIStreamToResponse(...)`, `toAIStreamResponse()`. The v4 `streamText(...)` result uses data-stream helpers: `toDataStream()`, `pipeDataStreamToResponse(...)`, `toDataStreamResponse()`; the AI-stream helpers (`toAIStream`, etc.) were removed in favor of the data-stream methods above. [1] `LangChainAdapter.toAIStream(...)` became `LangChainAdapter.toDataStream(...)`. [1] `streamToResponse(...)` was replaced (by `pipeDataStreamToResponse`) rather than retained as `streamToResponse`. [2]
Sources: AI SDK 3.4 → 4.0 migration guide (breaking changes) [1], Vercel GitHub discussion on `streamToResponse` removal [2].
Repository: triggerdotdev/trigger.dev
Length of output: 377
🏁 Script executed:
Repository: triggerdotdev/trigger.dev
Length of output: 2084
🏁 Script executed:
rg "isUIMessageStreamable|toUIMessageStream|toDataStream" packages/trigger-sdk/src/v3/ai.tsRepository: triggerdotdev/trigger.dev
Length of output: 881
Update `isUIMessageStreamable` to support AI SDK v4/v5, or declare a v6+ requirement in the peer dependency.

The code only checks for `toUIMessageStream()` (a v6 method), but the peer dependency explicitly includes `^4.2.0` and `^5.0.0`, which use `toDataStream()` instead. Users on v4 or v5 will experience silent failures when `pipeChat()` or auto-piping encounters a `StreamTextResult` — the type guard returns false, causing the stream to be rejected. Either:
- also check for `toDataStream()` and abstract the method call, or
- narrow the peer dependency to `^6.0.0` if v6 is the minimum supported version.
🤖 Prompt for AI Agents