From 918ebb9c9ac4d02961ee2f0f7ee70b32cfda3f74 Mon Sep 17 00:00:00 2001 From: Jack Herrington Date: Fri, 5 Dec 2025 13:15:02 -0800 Subject: [PATCH 1/7] first pass at ag-ui --- docs/protocol/chunk-definitions.md | 628 ++++++++------ docs/protocol/http-stream-protocol.md | 57 +- docs/protocol/sse-protocol.md | 76 +- .../tanstack-ai/src/StreamChunkConverter.php | 206 +++-- .../tanstack-ai/src/tanstack_ai/converter.py | 195 +++-- .../python/tanstack-ai/src/tanstack_ai/sse.py | 6 +- .../tanstack-ai/src/tanstack_ai/types.py | 203 +++-- .../ai-anthropic/src/anthropic-adapter.ts | 194 +++-- .../ai-devtools/src/store/ai-context.tsx | 4 +- .../ai-gemini/src/gemini-adapter.ts | 158 +++- .../ai-gemini/tests/gemini-adapter.test.ts | 21 +- .../ai-ollama/src/ollama-adapter.ts | 162 +++- .../ai-openai/src/openai-adapter.ts | 128 ++- packages/typescript/ai/src/core/chat.ts | 256 ++++-- .../typescript/ai/src/stream/processor.ts | 775 ++++++++++++++---- .../typescript/ai/src/tools/tool-calls.ts | 65 +- packages/typescript/ai/src/types.ts | 364 ++++++-- .../ai/src/utilities/stream-to-response.ts | 5 +- packages/typescript/ai/tests/ai-chat.test.ts | 65 +- .../ai/tests/stream-to-response.test.ts | 2 +- .../ai/tests/tool-call-manager.test.ts | 24 +- .../smoke-tests/adapters/src/harness.ts | 139 ++-- 22 files changed, 2734 insertions(+), 999 deletions(-) diff --git a/docs/protocol/chunk-definitions.md b/docs/protocol/chunk-definitions.md index b0d29033..4985bafa 100644 --- a/docs/protocol/chunk-definitions.md +++ b/docs/protocol/chunk-definitions.md @@ -1,173 +1,299 @@ --- -title: Chunk Definitions +title: AG-UI Event Definitions id: chunk-definitions --- -All streaming responses in TanStack AI consist of a series of **StreamChunks** - discrete JSON objects representing different events during the conversation. These chunks enable real-time updates for content generation, tool calls, errors, and completion signals. 
- -This document defines the data structures (chunks) that flow between the TanStack AI server and client during streaming chat operations. +TanStack AI implements the [AG-UI (Agent-User Interaction) Protocol](https://docs.ag-ui.com/introduction), an open, lightweight, event-based protocol that standardizes how AI agents connect to user-facing applications. +All streaming responses in TanStack AI consist of a series of **AG-UI Events** - discrete JSON objects representing different stages of the conversation lifecycle. These events enable real-time updates for content generation, tool calls, thinking/reasoning, and completion signals. ## Base Structure -All chunks share a common base structure: +All events share a common base structure: ```typescript -interface BaseStreamChunk { - type: StreamChunkType; - id: string; // Unique identifier for the message/response - model: string; // Model identifier (e.g., "gpt-4o", "claude-3-5-sonnet") - timestamp: number; // Unix timestamp in milliseconds +interface BaseEvent { + type: EventType; + timestamp: number; // Unix timestamp in milliseconds + model?: string; // Model identifier (TanStack AI addition) + rawEvent?: unknown; // Original provider event for debugging } ``` -### Chunk Types +### Event Types + +```typescript +type EventType = + | 'RUN_STARTED' // Run lifecycle begins + | 'RUN_FINISHED' // Run completed successfully + | 'RUN_ERROR' // Error occurred + | 'TEXT_MESSAGE_START' // Text message begins + | 'TEXT_MESSAGE_CONTENT' // Text content streaming + | 'TEXT_MESSAGE_END' // Text message completes + | 'TOOL_CALL_START' // Tool invocation begins + | 'TOOL_CALL_ARGS' // Tool arguments streaming + | 'TOOL_CALL_END' // Tool call completes (with result) + | 'STEP_STARTED' // Thinking/reasoning step begins + | 'STEP_FINISHED' // Thinking/reasoning step completes + | 'STATE_SNAPSHOT' // Full state synchronization + | 'STATE_DELTA' // Incremental state update + | 'CUSTOM'; // Custom extensibility events +``` + +## Event 
Definitions + +### RUN_STARTED + +Emitted when a run begins. This is the first event in any streaming response. ```typescript -type StreamChunkType = - | 'content' // Text content being generated - | 'thinking' // Model's reasoning process (when supported) - | 'tool_call' // Model calling a tool/function - | 'tool-input-available' // Tool inputs are ready for client execution - | 'approval-requested' // Tool requires user approval - | 'tool_result' // Result from tool execution - | 'done' // Stream completion - | 'error'; // Error occurred +interface RunStartedEvent extends BaseEvent { + type: 'RUN_STARTED'; + runId: string; // Unique identifier for this run + threadId?: string; // Optional thread/conversation ID +} +``` + +**Example:** +```json +{ + "type": "RUN_STARTED", + "runId": "run_abc123", + "model": "gpt-4o", + "timestamp": 1701234567890 +} ``` -## Chunk Definitions +--- -### ContentStreamChunk +### RUN_FINISHED -Emitted when the model generates text content. Sent incrementally as tokens are generated. +Emitted when a run completes successfully. 
```typescript -interface ContentStreamChunk extends BaseStreamChunk { - type: 'content'; - delta: string; // The incremental content token (new text since last chunk) - content: string; // Full accumulated content so far - role?: 'assistant'; +interface RunFinishedEvent extends BaseEvent { + type: 'RUN_FINISHED'; + runId: string; + finishReason: 'stop' | 'length' | 'content_filter' | 'tool_calls' | null; + usage?: { + promptTokens: number; + completionTokens: number; + totalTokens: number; + }; } ``` **Example:** ```json { - "type": "content", - "id": "chatcmpl-abc123", + "type": "RUN_FINISHED", + "runId": "run_abc123", + "model": "gpt-4o", + "timestamp": 1701234567892, + "finishReason": "stop", + "usage": { + "promptTokens": 150, + "completionTokens": 75, + "totalTokens": 225 + } +} +``` + +**Finish Reasons:** +- `stop` - Natural completion +- `length` - Reached max tokens +- `content_filter` - Stopped by content filtering +- `tool_calls` - Stopped to execute tools +- `null` - Unknown or not provided + +--- + +### RUN_ERROR + +Emitted when an error occurs during a run. + +```typescript +interface RunErrorEvent extends BaseEvent { + type: 'RUN_ERROR'; + runId?: string; + error: { + message: string; + code?: string; + }; +} +``` + +**Example:** +```json +{ + "type": "RUN_ERROR", + "runId": "run_abc123", + "model": "gpt-4o", + "timestamp": 1701234567893, + "error": { + "message": "Rate limit exceeded", + "code": "rate_limit_exceeded" + } +} +``` + +--- + +### TEXT_MESSAGE_START + +Emitted when a text message begins streaming. 
+ +```typescript +interface TextMessageStartEvent extends BaseEvent { + type: 'TEXT_MESSAGE_START'; + messageId: string; + role: 'assistant'; +} +``` + +**Example:** +```json +{ + "type": "TEXT_MESSAGE_START", + "messageId": "msg_xyz789", "model": "gpt-4o", "timestamp": 1701234567890, - "delta": "Hello", - "content": "Hello", "role": "assistant" } ``` -**Usage:** -- Display `delta` for smooth streaming effect -- Use `content` for the complete message so far -- Multiple content chunks will be sent for a single response - --- -### ThinkingStreamChunk +### TEXT_MESSAGE_CONTENT -Emitted when the model exposes its reasoning process (e.g., Claude with extended thinking, o1 models). +Emitted for each chunk of text content as it streams. ```typescript -interface ThinkingStreamChunk extends BaseStreamChunk { - type: 'thinking'; - delta?: string; // The incremental thinking token - content: string; // Full accumulated thinking content so far +interface TextMessageContentEvent extends BaseEvent { + type: 'TEXT_MESSAGE_CONTENT'; + messageId: string; + delta: string; // The incremental content token + content?: string; // Full accumulated content so far (TanStack AI addition) } ``` **Example:** ```json { - "type": "thinking", - "id": "chatcmpl-abc123", - "model": "claude-3-5-sonnet", + "type": "TEXT_MESSAGE_CONTENT", + "messageId": "msg_xyz789", + "model": "gpt-4o", "timestamp": 1701234567890, - "delta": "First, I need to", - "content": "First, I need to" + "delta": "Hello", + "content": "Hello" } ``` -**Usage:** -- Display in a separate "thinking" UI element -- Thinking is excluded from messages sent back to the model -- Not all models support thinking chunks +--- + +### TEXT_MESSAGE_END + +Emitted when a text message completes. 
+ +```typescript +interface TextMessageEndEvent extends BaseEvent { + type: 'TEXT_MESSAGE_END'; + messageId: string; +} +``` + +**Example:** +```json +{ + "type": "TEXT_MESSAGE_END", + "messageId": "msg_xyz789", + "model": "gpt-4o", + "timestamp": 1701234567891 +} +``` --- -### ToolCallStreamChunk +### TOOL_CALL_START -Emitted when the model decides to call a tool/function. +Emitted when a tool call begins. ```typescript -interface ToolCallStreamChunk extends BaseStreamChunk { - type: 'tool_call'; - toolCall: { +interface ToolCallStartEvent extends BaseEvent { + type: 'TOOL_CALL_START'; + toolCallId: string; + toolName: string; + index?: number; // Index for parallel tool calls + approval?: { // Present if tool requires approval id: string; - type: 'function'; - function: { - name: string; - arguments: string; // JSON string (may be partial/incremental) - }; + needsApproval: true; }; - index: number; // Index of this tool call (for parallel calls) } ``` **Example:** ```json { - "type": "tool_call", - "id": "chatcmpl-abc123", + "type": "TOOL_CALL_START", + "toolCallId": "call_abc123", + "toolName": "get_weather", "model": "gpt-4o", "timestamp": 1701234567890, - "toolCall": { - "id": "call_abc123", - "type": "function", - "function": { - "name": "get_weather", - "arguments": "{\"location\":\"San Francisco\"}" - } - }, "index": 0 } ``` -**Usage:** -- Multiple chunks may be sent for a single tool call (streaming arguments) -- `arguments` may be incomplete until all chunks for this tool call are received -- `index` allows multiple parallel tool calls - --- -### ToolInputAvailableStreamChunk +### TOOL_CALL_ARGS -Emitted when tool inputs are complete and ready for client-side execution. +Emitted as tool call arguments stream. 
```typescript -interface ToolInputAvailableStreamChunk extends BaseStreamChunk { - type: 'tool-input-available'; - toolCallId: string; // ID of the tool call - toolName: string; // Name of the tool to execute - input: any; // Parsed tool arguments (JSON object) +interface ToolCallArgsEvent extends BaseEvent { + type: 'TOOL_CALL_ARGS'; + toolCallId: string; + delta: string; // Incremental JSON arguments + args?: string; // Full accumulated arguments so far } ``` **Example:** ```json { - "type": "tool-input-available", - "id": "chatcmpl-abc123", + "type": "TOOL_CALL_ARGS", + "toolCallId": "call_abc123", "model": "gpt-4o", "timestamp": 1701234567890, + "delta": "{\"location\":", + "args": "{\"location\":" +} +``` + +--- + +### TOOL_CALL_END + +Emitted when a tool call completes. May include the result if the tool was executed server-side. + +```typescript +interface ToolCallEndEvent extends BaseEvent { + type: 'TOOL_CALL_END'; + toolCallId: string; + toolName: string; + input?: any; // Final parsed input arguments + result?: string; // Tool execution result (if executed) +} +``` + +**Example (client-side tool):** +```json +{ + "type": "TOOL_CALL_END", "toolCallId": "call_abc123", "toolName": "get_weather", + "model": "gpt-4o", + "timestamp": 1701234567890, "input": { "location": "San Francisco", "unit": "fahrenheit" @@ -175,215 +301,210 @@ interface ToolInputAvailableStreamChunk extends BaseStreamChunk { } ``` -**Usage:** -- Signals that the client should execute the tool -- Only sent for tools without a server-side `execute` function -- Client calls `onToolCall` callback with these parameters +**Example (server-side tool with result):** +```json +{ + "type": "TOOL_CALL_END", + "toolCallId": "call_abc123", + "toolName": "get_weather", + "model": "gpt-4o", + "timestamp": 1701234567891, + "input": { "location": "San Francisco" }, + "result": "{\"temperature\":72,\"conditions\":\"sunny\"}" +} +``` --- -### ApprovalRequestedStreamChunk +### STEP_STARTED -Emitted when a 
tool requires user approval before execution. +Emitted when a thinking/reasoning step begins (e.g., Claude's extended thinking, o1 models). ```typescript -interface ApprovalRequestedStreamChunk extends BaseStreamChunk { - type: 'approval-requested'; - toolCallId: string; // ID of the tool call - toolName: string; // Name of the tool requiring approval - input: any; // Tool arguments for review - approval: { - id: string; // Unique approval request ID - needsApproval: true; // Always true - }; +interface StepStartedEvent extends BaseEvent { + type: 'STEP_STARTED'; + stepId: string; + stepType: 'thinking' | 'reasoning' | 'planning'; } ``` **Example:** ```json { - "type": "approval-requested", - "id": "chatcmpl-abc123", - "model": "gpt-4o", - "timestamp": 1701234567890, - "toolCallId": "call_abc123", - "toolName": "send_email", - "input": { - "to": "user@example.com", - "subject": "Hello", - "body": "Test email" - }, - "approval": { - "id": "approval_xyz789", - "needsApproval": true - } + "type": "STEP_STARTED", + "stepId": "step_xyz123", + "stepType": "thinking", + "model": "claude-3-5-sonnet", + "timestamp": 1701234567890 } ``` -**Usage:** -- Display approval UI to user -- User responds with approval decision via `addToolApprovalResponse()` -- Tool execution pauses until approval is granted or denied - --- -### ToolResultStreamChunk +### STEP_FINISHED -Emitted when a tool execution completes (either server-side or client-side). +Emitted when thinking/reasoning content streams or completes. 
```typescript -interface ToolResultStreamChunk extends BaseStreamChunk { - type: 'tool_result'; - toolCallId: string; // ID of the tool call that was executed - content: string; // Result of the tool execution (JSON stringified) +interface StepFinishedEvent extends BaseEvent { + type: 'STEP_FINISHED'; + stepId: string; + delta?: string; // Incremental thinking token + content: string; // Full accumulated thinking content } ``` **Example:** ```json { - "type": "tool_result", - "id": "chatcmpl-abc123", - "model": "gpt-4o", - "timestamp": 1701234567891, - "toolCallId": "call_abc123", - "content": "{\"temperature\":72,\"conditions\":\"sunny\"}" + "type": "STEP_FINISHED", + "stepId": "step_xyz123", + "model": "claude-3-5-sonnet", + "timestamp": 1701234567890, + "delta": "Let me analyze", + "content": "Let me analyze" } ``` -**Usage:** -- Sent after tool execution completes -- Model uses this result to continue the conversation -- May trigger additional model responses - --- -### DoneStreamChunk +### STATE_SNAPSHOT -Emitted when the stream completes successfully. +Emitted for full state synchronization (shared state between agent and app). 
```typescript -interface DoneStreamChunk extends BaseStreamChunk { - type: 'done'; - finishReason: 'stop' | 'length' | 'content_filter' | 'tool_calls' | null; - usage?: { - promptTokens: number; - completionTokens: number; - totalTokens: number; - }; +interface StateSnapshotEvent extends BaseEvent { + type: 'STATE_SNAPSHOT'; + state: Record; } ``` **Example:** ```json { - "type": "done", - "id": "chatcmpl-abc123", - "model": "gpt-4o", - "timestamp": 1701234567892, - "finishReason": "stop", - "usage": { - "promptTokens": 150, - "completionTokens": 75, - "totalTokens": 225 + "type": "STATE_SNAPSHOT", + "timestamp": 1701234567890, + "state": { + "currentStep": 3, + "progress": 0.75, + "context": { "user": "John" } } } ``` -**Finish Reasons:** -- `stop` - Natural completion -- `length` - Reached max tokens -- `content_filter` - Stopped by content filtering -- `tool_calls` - Stopped to execute tools -- `null` - Unknown or not provided +--- -**Usage:** -- Marks the end of a successful stream -- Clean up streaming state -- Display token usage (if available) +### STATE_DELTA + +Emitted for incremental state updates using JSON Patch-like operations. + +```typescript +interface StateDeltaEvent extends BaseEvent { + type: 'STATE_DELTA'; + delta: Array<{ + op: 'add' | 'remove' | 'replace'; + path: string; + value?: unknown; + }>; +} +``` + +**Example:** +```json +{ + "type": "STATE_DELTA", + "timestamp": 1701234567890, + "delta": [ + { "op": "replace", "path": "/progress", "value": 0.80 }, + { "op": "add", "path": "/results/0", "value": "item1" } + ] +} +``` --- -### ErrorStreamChunk +### CUSTOM -Emitted when an error occurs during streaming. +Custom event for extensibility. Used for features not covered by standard AG-UI events. 
```typescript -interface ErrorStreamChunk extends BaseStreamChunk { - type: 'error'; - error: { - message: string; // Human-readable error message - code?: string; // Optional error code - }; +interface CustomEvent extends BaseEvent { + type: 'CUSTOM'; + name: string; + value: unknown; } ``` -**Example:** +**Example (approval request):** ```json { - "type": "error", - "id": "chatcmpl-abc123", + "type": "CUSTOM", + "name": "approval-requested", "model": "gpt-4o", - "timestamp": 1701234567893, - "error": { - "message": "Rate limit exceeded", - "code": "rate_limit_exceeded" + "timestamp": 1701234567890, + "value": { + "toolCallId": "call_abc123", + "toolName": "send_email", + "input": { "to": "user@example.com", "subject": "Hello" }, + "approval": { "id": "approval_xyz789" } } } ``` -**Common Error Codes:** -- `rate_limit_exceeded` - API rate limit hit -- `invalid_request` - Malformed request -- `authentication_error` - API key issues -- `timeout` - Request timed out -- `server_error` - Internal server error - -**Usage:** -- Display error to user -- Stream ends after error chunk -- Retry logic should be implemented client-side - --- -## Chunk Ordering and Relationships +## Event Ordering and Relationships ### Typical Flow -1. **Content Generation:** +1. **Simple Content Generation:** ``` - ContentStreamChunk (delta: "Hello") - ContentStreamChunk (delta: " world") - ContentStreamChunk (delta: "!") - DoneStreamChunk (finishReason: "stop") + RUN_STARTED + TEXT_MESSAGE_START + TEXT_MESSAGE_CONTENT (delta: "Hello") + TEXT_MESSAGE_CONTENT (delta: " world") + TEXT_MESSAGE_CONTENT (delta: "!") + TEXT_MESSAGE_END + RUN_FINISHED (finishReason: "stop") ``` 2. 
**With Thinking:** ``` - ThinkingStreamChunk (delta: "I need to...") - ThinkingStreamChunk (delta: " check the weather") - ContentStreamChunk (delta: "Let me check") - DoneStreamChunk (finishReason: "stop") + RUN_STARTED + STEP_STARTED (stepType: "thinking") + STEP_FINISHED (delta: "I need to...") + STEP_FINISHED (delta: " check the weather") + TEXT_MESSAGE_START + TEXT_MESSAGE_CONTENT (delta: "Let me check") + TEXT_MESSAGE_END + RUN_FINISHED (finishReason: "stop") ``` 3. **Tool Usage:** ``` - ToolCallStreamChunk (name: "get_weather") - ToolResultStreamChunk (content: "{...}") - ContentStreamChunk (delta: "The weather is...") - DoneStreamChunk (finishReason: "stop") + RUN_STARTED + TOOL_CALL_START (toolName: "get_weather") + TOOL_CALL_ARGS (delta: "{\"location\":\"SF\"}") + TOOL_CALL_END (input: {"location":"SF"}, result: "{...}") + TEXT_MESSAGE_START + TEXT_MESSAGE_CONTENT (delta: "The weather is...") + TEXT_MESSAGE_END + RUN_FINISHED (finishReason: "stop") ``` 4. **Client Tool with Approval:** ``` - ToolCallStreamChunk (name: "send_email") - ApprovalRequestedStreamChunk (toolName: "send_email") + RUN_STARTED + TOOL_CALL_START (toolName: "send_email", approval: {...}) + TOOL_CALL_ARGS (delta: "{...}") + CUSTOM (name: "approval-requested") [User approves] - ToolInputAvailableStreamChunk (toolName: "send_email") + TOOL_CALL_END (input: {...}) [Client executes] - ToolResultStreamChunk (content: "{\"sent\":true}") - ContentStreamChunk (delta: "Email sent successfully") - DoneStreamChunk (finishReason: "stop") + TEXT_MESSAGE_START + TEXT_MESSAGE_CONTENT (delta: "Email sent successfully") + TEXT_MESSAGE_END + RUN_FINISHED (finishReason: "stop") ``` ### Multiple Tool Calls @@ -391,45 +512,56 @@ interface ErrorStreamChunk extends BaseStreamChunk { When the model calls multiple tools in parallel: ``` -ToolCallStreamChunk (index: 0, name: "get_weather") -ToolCallStreamChunk (index: 1, name: "get_time") -ToolResultStreamChunk (toolCallId: "call_1") -ToolResultStreamChunk 
(toolCallId: "call_2") -ContentStreamChunk (delta: "Based on the data...") -DoneStreamChunk (finishReason: "stop") +RUN_STARTED +TOOL_CALL_START (index: 0, toolName: "get_weather") +TOOL_CALL_START (index: 1, toolName: "get_time") +TOOL_CALL_ARGS (toolCallId: "call_1", ...) +TOOL_CALL_ARGS (toolCallId: "call_2", ...) +TOOL_CALL_END (toolCallId: "call_1", ...) +TOOL_CALL_END (toolCallId: "call_2", ...) +TEXT_MESSAGE_START +TEXT_MESSAGE_CONTENT (delta: "Based on the data...") +TEXT_MESSAGE_END +RUN_FINISHED (finishReason: "stop") ``` --- ## TypeScript Union Type -All chunks are represented as a discriminated union: +All events are represented as a discriminated union: ```typescript type StreamChunk = - | ContentStreamChunk - | ThinkingStreamChunk - | ToolCallStreamChunk - | ToolInputAvailableStreamChunk - | ApprovalRequestedStreamChunk - | ToolResultStreamChunk - | DoneStreamChunk - | ErrorStreamChunk; + | RunStartedEvent + | RunFinishedEvent + | RunErrorEvent + | TextMessageStartEvent + | TextMessageContentEvent + | TextMessageEndEvent + | ToolCallStartEvent + | ToolCallArgsEvent + | ToolCallEndEvent + | StepStartedEvent + | StepFinishedEvent + | StateSnapshotEvent + | StateDeltaEvent + | CustomEvent; ``` This enables type-safe handling in TypeScript: ```typescript -function handleChunk(chunk: StreamChunk) { - switch (chunk.type) { - case 'content': - console.log(chunk.delta); // TypeScript knows this is ContentStreamChunk +function handleEvent(event: StreamChunk) { + switch (event.type) { + case 'TEXT_MESSAGE_CONTENT': + console.log(event.delta); // TypeScript knows this is TextMessageContentEvent break; - case 'thinking': - console.log(chunk.content); // TypeScript knows this is ThinkingStreamChunk + case 'TOOL_CALL_START': + console.log(event.toolName); // TypeScript knows structure break; - case 'tool_call': - console.log(chunk.toolCall.function.name); // TypeScript knows structure + case 'RUN_FINISHED': + console.log(event.usage); // TypeScript knows this is 
RunFinishedEvent break; // ... other cases } @@ -438,8 +570,24 @@ function handleChunk(chunk: StreamChunk) { --- +## AG-UI Compatibility + +TanStack AI's streaming protocol is fully compatible with the AG-UI specification. This means: + +1. **Ecosystem Interoperability**: TanStack AI can work with AG-UI-compatible tools and frameworks like LangGraph, CrewAI, and CopilotKit. + +2. **Standard Event Types**: All 14 AG-UI event types are supported. + +3. **TanStack AI Additions**: We add useful fields like `model` on every event and `content` accumulation on text events for convenience. + +4. **Extensibility**: The `CUSTOM` event type allows for any additional functionality not covered by the standard events. + +For more information about AG-UI, visit the [official documentation](https://docs.ag-ui.com/introduction). + +--- + ## See Also -- [SSE Protocol](../sse-protocol) - How chunks are transmitted via Server-Sent Events -- [HTTP Stream Protocol](../http-stream-protocol) - How chunks are transmitted via HTTP streaming +- [SSE Protocol](../sse-protocol) - How events are transmitted via Server-Sent Events +- [HTTP Stream Protocol](../http-stream-protocol) - How events are transmitted via HTTP streaming - [Connection Adapters Guide](../../guides/connection-adapters) - Client implementation diff --git a/docs/protocol/http-stream-protocol.md b/docs/protocol/http-stream-protocol.md index fead1d88..fdb21e0d 100644 --- a/docs/protocol/http-stream-protocol.md +++ b/docs/protocol/http-stream-protocol.md @@ -80,25 +80,29 @@ Each StreamChunk is transmitted as a single line of JSON followed by a newline ( ### Examples -#### Content Chunks +#### Content Streaming (AG-UI Events) ```json -{"type":"content","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567890,"delta":"Hello","content":"Hello","role":"assistant"} -{"type":"content","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567891,"delta":" world","content":"Hello world","role":"assistant"} 
-{"type":"content","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567892,"delta":"!","content":"Hello world!","role":"assistant"} +{"type":"RUN_STARTED","runId":"run_abc123","model":"gpt-4o","timestamp":1701234567890} +{"type":"TEXT_MESSAGE_START","messageId":"msg_xyz789","model":"gpt-4o","timestamp":1701234567890,"role":"assistant"} +{"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_xyz789","model":"gpt-4o","timestamp":1701234567890,"delta":"Hello","content":"Hello"} +{"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_xyz789","model":"gpt-4o","timestamp":1701234567891,"delta":" world","content":"Hello world"} +{"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_xyz789","model":"gpt-4o","timestamp":1701234567892,"delta":"!","content":"Hello world!"} +{"type":"TEXT_MESSAGE_END","messageId":"msg_xyz789","model":"gpt-4o","timestamp":1701234567892} ``` #### Tool Call ```json -{"type":"tool_call","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567893,"toolCall":{"id":"call_xyz","type":"function","function":{"name":"get_weather","arguments":"{\"location\":\"SF\"}"}},"index":0} -{"type":"tool_result","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567894,"toolCallId":"call_xyz","content":"{\"temperature\":72,\"conditions\":\"sunny\"}"} +{"type":"TOOL_CALL_START","toolCallId":"call_xyz","toolName":"get_weather","model":"gpt-4o","timestamp":1701234567893,"index":0} +{"type":"TOOL_CALL_ARGS","toolCallId":"call_xyz","model":"gpt-4o","timestamp":1701234567893,"delta":"{\"location\":\"SF\"}","args":"{\"location\":\"SF\"}"} +{"type":"TOOL_CALL_END","toolCallId":"call_xyz","toolName":"get_weather","model":"gpt-4o","timestamp":1701234567894,"input":{"location":"SF"},"result":"{\"temperature\":72,\"conditions\":\"sunny\"}"} ``` #### Stream Completion ```json -{"type":"done","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567895,"finishReason":"stop","usage":{"promptTokens":10,"completionTokens":15,"totalTokens":25}} 
+{"type":"RUN_FINISHED","runId":"run_abc123","model":"gpt-4o","timestamp":1701234567895,"finishReason":"stop","usage":{"promptTokens":10,"completionTokens":15,"totalTokens":25}} ``` --- @@ -127,14 +131,16 @@ Transfer-Encoding: chunked ### 3. Server Streams Chunks -The server sends newline-delimited JSON: +The server sends newline-delimited AG-UI events: ```json -{"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567890,"delta":"The","content":"The"} -{"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567891,"delta":" weather","content":"The weather"} -{"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567892,"delta":" is","content":"The weather is"} -{"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567893,"delta":" sunny","content":"The weather is sunny"} -{"type":"done","id":"msg_1","model":"gpt-4o","timestamp":1701234567894,"finishReason":"stop"} +{"type":"RUN_STARTED","runId":"run_1","model":"gpt-4o","timestamp":1701234567890} +{"type":"TEXT_MESSAGE_START","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567890,"role":"assistant"} +{"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567890,"delta":"The","content":"The"} +{"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567891,"delta":" weather","content":"The weather"} +{"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567892,"delta":" is sunny","content":"The weather is sunny"} +{"type":"TEXT_MESSAGE_END","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567893} +{"type":"RUN_FINISHED","runId":"run_1","model":"gpt-4o","timestamp":1701234567894,"finishReason":"stop"} ``` ### 4. Stream Completion @@ -147,10 +153,10 @@ Server closes the connection. No special marker needed (unlike SSE's `[DONE]`). 
### Server-Side Errors -If an error occurs during generation, send an error chunk: +If an error occurs during generation, send a RUN_ERROR event: ```json -{"type":"error","id":"msg_1","model":"gpt-4o","timestamp":1701234567895,"error":{"message":"Rate limit exceeded","code":"rate_limit_exceeded"}} +{"type":"RUN_ERROR","runId":"run_1","model":"gpt-4o","timestamp":1701234567895,"error":{"message":"Rate limit exceeded","code":"rate_limit_exceeded"}} ``` Then close the connection. @@ -196,14 +202,14 @@ export async function POST(request: Request) { } controller.close(); } catch (error: any) { - const errorChunk = { - type: 'error', + const errorEvent = { + type: 'RUN_ERROR', error: { message: error.message || 'Unknown error', code: error.code, }, }; - controller.enqueue(encoder.encode(JSON.stringify(errorChunk) + '\n')); + controller.enqueue(encoder.encode(JSON.stringify(errorEvent) + '\n')); controller.close(); } }, @@ -246,11 +252,11 @@ app.post('/api/chat', async (req, res) => { res.write(JSON.stringify(chunk) + '\n'); } } catch (error: any) { - const errorChunk = { - type: 'error', + const errorEvent = { + type: 'RUN_ERROR', error: { message: error.message }, }; - res.write(JSON.stringify(errorChunk) + '\n'); + res.write(JSON.stringify(errorEvent) + '\n'); } finally { res.end(); } @@ -362,9 +368,12 @@ The `-N` flag disables buffering to see real-time output. 
**Example Output:** ```json -{"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567890,"delta":"Hello","content":"Hello"} -{"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567891,"delta":" there","content":"Hello there"} -{"type":"done","id":"msg_1","model":"gpt-4o","timestamp":1701234567892,"finishReason":"stop"} +{"type":"RUN_STARTED","runId":"run_1","model":"gpt-4o","timestamp":1701234567889} +{"type":"TEXT_MESSAGE_START","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567890,"role":"assistant"} +{"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567890,"delta":"Hello","content":"Hello"} +{"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567891,"delta":" there","content":"Hello there"} +{"type":"TEXT_MESSAGE_END","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567892} +{"type":"RUN_FINISHED","runId":"run_1","model":"gpt-4o","timestamp":1701234567892,"finishReason":"stop"} ``` ### Validating NDJSON diff --git a/docs/protocol/sse-protocol.md b/docs/protocol/sse-protocol.md index 4beb8a79..2dd21e09 100644 --- a/docs/protocol/sse-protocol.md +++ b/docs/protocol/sse-protocol.md @@ -69,24 +69,48 @@ data: {JSON_ENCODED_CHUNK}\n\n 3. **Ends with double newline `\n\n`** 4. 
**No event names or IDs** (not required for our use case) -### Examples +### Examples (AG-UI Events) -#### Content Chunk +#### Run Started ``` -data: {"type":"content","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567890,"delta":"Hello","content":"Hello","role":"assistant"}\n\n +data: {"type":"RUN_STARTED","runId":"run_abc123","model":"gpt-4o","timestamp":1701234567890}\n\n ``` -#### Tool Call Chunk +#### Text Message Start ``` -data: {"type":"tool_call","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567891,"toolCall":{"id":"call_xyz","type":"function","function":{"name":"get_weather","arguments":"{\"location\":\"SF\"}"}},"index":0}\n\n +data: {"type":"TEXT_MESSAGE_START","messageId":"msg_xyz789","model":"gpt-4o","timestamp":1701234567890,"role":"assistant"}\n\n ``` -#### Done Chunk +#### Text Message Content ``` -data: {"type":"done","id":"chatcmpl-abc123","model":"gpt-4o","timestamp":1701234567892,"finishReason":"stop","usage":{"promptTokens":10,"completionTokens":5,"totalTokens":15}}\n\n +data: {"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_xyz789","model":"gpt-4o","timestamp":1701234567890,"delta":"Hello","content":"Hello"}\n\n +``` + +#### Tool Call Start + +``` +data: {"type":"TOOL_CALL_START","toolCallId":"call_xyz","toolName":"get_weather","model":"gpt-4o","timestamp":1701234567891,"index":0}\n\n +``` + +#### Tool Call Args + +``` +data: {"type":"TOOL_CALL_ARGS","toolCallId":"call_xyz","model":"gpt-4o","timestamp":1701234567891,"delta":"{\"location\":\"SF\"}","args":"{\"location\":\"SF\"}"}\n\n +``` + +#### Tool Call End + +``` +data: {"type":"TOOL_CALL_END","toolCallId":"call_xyz","toolName":"get_weather","model":"gpt-4o","timestamp":1701234567892,"input":{"location":"SF"}}\n\n +``` + +#### Run Finished + +``` +data: {"type":"RUN_FINISHED","runId":"run_abc123","model":"gpt-4o","timestamp":1701234567892,"finishReason":"stop","usage":{"promptTokens":10,"completionTokens":5,"totalTokens":15}}\n\n ``` --- @@ -115,16 +139,18 @@ 
Cache-Control: no-cache Connection: keep-alive ``` -### 3. Server Streams Chunks +### 3. Server Streams Events -The server sends multiple `data:` events as chunks are generated: +The server sends multiple AG-UI events as content is generated: ``` -data: {"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567890,"delta":"The","content":"The"}\n\n -data: {"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567891,"delta":" weather","content":"The weather"}\n\n -data: {"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567892,"delta":" is","content":"The weather is"}\n\n -data: {"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567893,"delta":" sunny","content":"The weather is sunny"}\n\n -data: {"type":"done","id":"msg_1","model":"gpt-4o","timestamp":1701234567894,"finishReason":"stop"}\n\n +data: {"type":"RUN_STARTED","runId":"run_1","model":"gpt-4o","timestamp":1701234567889}\n\n +data: {"type":"TEXT_MESSAGE_START","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567890,"role":"assistant"}\n\n +data: {"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567890,"delta":"The","content":"The"}\n\n +data: {"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567891,"delta":" weather","content":"The weather"}\n\n +data: {"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567892,"delta":" is sunny","content":"The weather is sunny"}\n\n +data: {"type":"TEXT_MESSAGE_END","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567893}\n\n +data: {"type":"RUN_FINISHED","runId":"run_1","model":"gpt-4o","timestamp":1701234567894,"finishReason":"stop"}\n\n ``` ### 4. Stream Completion @@ -143,10 +169,10 @@ Then closes the connection. 
### Server-Side Errors -If an error occurs during generation, send an error chunk: +If an error occurs during generation, send a RUN_ERROR event: ``` -data: {"type":"error","id":"msg_1","model":"gpt-4o","timestamp":1701234567895,"error":{"message":"Rate limit exceeded","code":"rate_limit_exceeded"}}\n\n +data: {"type":"RUN_ERROR","runId":"run_1","model":"gpt-4o","timestamp":1701234567895,"error":{"message":"Rate limit exceeded","code":"rate_limit_exceeded"}}\n\n ``` Then close the connection. @@ -231,11 +257,11 @@ export async function POST(request: Request) { controller.enqueue(encoder.encode('data: [DONE]\n\n')); controller.close(); } catch (error) { - const errorChunk = { - type: 'error', + const errorEvent = { + type: 'RUN_ERROR', error: { message: error.message } }; - controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`)); + controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorEvent)}\n\n`)); controller.close(); } } @@ -305,11 +331,17 @@ The `-N` flag disables buffering to see real-time output. 
**Example Output:** ``` -data: {"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567890,"delta":"Hello","content":"Hello"} +data: {"type":"RUN_STARTED","runId":"run_1","model":"gpt-4o","timestamp":1701234567889} + +data: {"type":"TEXT_MESSAGE_START","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567890,"role":"assistant"} + +data: {"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567890,"delta":"Hello","content":"Hello"} + +data: {"type":"TEXT_MESSAGE_CONTENT","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567891,"delta":" there","content":"Hello there"} -data: {"type":"content","id":"msg_1","model":"gpt-4o","timestamp":1701234567891,"delta":" there","content":"Hello there"} +data: {"type":"TEXT_MESSAGE_END","messageId":"msg_1","model":"gpt-4o","timestamp":1701234567891} -data: {"type":"done","id":"msg_1","model":"gpt-4o","timestamp":1701234567892,"finishReason":"stop"} +data: {"type":"RUN_FINISHED","runId":"run_1","model":"gpt-4o","timestamp":1701234567892,"finishReason":"stop"} data: [DONE] ``` diff --git a/packages/php/tanstack-ai/src/StreamChunkConverter.php b/packages/php/tanstack-ai/src/StreamChunkConverter.php index 39e1e4e7..2bdf4469 100644 --- a/packages/php/tanstack-ai/src/StreamChunkConverter.php +++ b/packages/php/tanstack-ai/src/StreamChunkConverter.php @@ -3,7 +3,7 @@ namespace TanStack\AI; /** - * Converts provider-specific streaming events to TanStack AI StreamChunk format. + * Converts provider-specific streaming events to TanStack AI AG-UI event format. 
* * Supports: * - Anthropic streaming events @@ -18,20 +18,28 @@ class StreamChunkConverter private array $toolCallsMap = []; private int $currentToolIndex = -1; private bool $doneEmitted = false; + + // AG-UI lifecycle tracking + private string $runId; + private string $messageId; + private bool $runStartedEmitted = false; + private bool $textMessageStarted = false; public function __construct(string $model, string $provider = 'anthropic') { $this->model = $model; $this->provider = strtolower($provider); $this->timestamp = (int)(microtime(true) * 1000); + $this->runId = $this->generateId(); + $this->messageId = $this->generateId(); } /** - * Generate a unique ID for the chunk + * Generate a unique ID for the event */ public function generateId(): string { - return 'chatcmpl-' . bin2hex(random_bytes(4)); + return 'evt-' . bin2hex(random_bytes(4)); } /** @@ -60,40 +68,86 @@ private function getAttr(mixed $obj, string $attr, mixed $default = null): mixed } /** - * Convert Anthropic streaming event to StreamChunk format + * Safely parse JSON string + */ + private function safeJsonParse(string $json): mixed + { + try { + return json_decode($json, true) ?? 
$json; + } catch (\Exception $e) { + return $json; + } + } + + /** + * Convert Anthropic streaming event to AG-UI event format */ public function convertAnthropicEvent(mixed $event): array { $chunks = []; $eventType = $this->getEventType($event); + // Emit RUN_STARTED on first event + if (!$this->runStartedEmitted) { + $this->runStartedEmitted = true; + $chunks[] = [ + 'type' => 'RUN_STARTED', + 'runId' => $this->runId, + 'model' => $this->model, + 'timestamp' => $this->timestamp, + ]; + } + if ($eventType === 'content_block_start') { // Tool call is starting $contentBlock = $this->getAttr($event, 'content_block'); if ($contentBlock && $this->getAttr($contentBlock, 'type') === 'tool_use') { $this->currentToolIndex++; + $toolCallId = $this->getAttr($contentBlock, 'id'); + $toolName = $this->getAttr($contentBlock, 'name'); $this->toolCallsMap[$this->currentToolIndex] = [ - 'id' => $this->getAttr($contentBlock, 'id'), - 'name' => $this->getAttr($contentBlock, 'name'), + 'id' => $toolCallId, + 'name' => $toolName, 'input' => '' ]; + + // Emit TOOL_CALL_START + $chunks[] = [ + 'type' => 'TOOL_CALL_START', + 'toolCallId' => $toolCallId, + 'toolName' => $toolName, + 'model' => $this->model, + 'timestamp' => $this->timestamp, + 'index' => $this->currentToolIndex, + ]; } } elseif ($eventType === 'content_block_delta') { $delta = $this->getAttr($event, 'delta'); if ($delta && $this->getAttr($delta, 'type') === 'text_delta') { + // Emit TEXT_MESSAGE_START on first text + if (!$this->textMessageStarted) { + $this->textMessageStarted = true; + $chunks[] = [ + 'type' => 'TEXT_MESSAGE_START', + 'messageId' => $this->messageId, + 'model' => $this->model, + 'timestamp' => $this->timestamp, + 'role' => 'assistant', + ]; + } + // Text content delta $deltaText = $this->getAttr($delta, 'text', ''); $this->accumulatedContent .= $deltaText; $chunks[] = [ - 'type' => 'content', - 'id' => $this->generateId(), + 'type' => 'TEXT_MESSAGE_CONTENT', + 'messageId' => $this->messageId, 'model' => 
$this->model, 'timestamp' => $this->timestamp, 'delta' => $deltaText, 'content' => $this->accumulatedContent, - 'role' => 'assistant' ]; } elseif ($delta && $this->getAttr($delta, 'type') === 'input_json_delta') { // Tool input is being streamed @@ -104,23 +158,40 @@ public function convertAnthropicEvent(mixed $event): array $toolCall['input'] .= $partialJson; $this->toolCallsMap[$this->currentToolIndex] = $toolCall; + // Emit TOOL_CALL_ARGS $chunks[] = [ - 'type' => 'tool_call', - 'id' => $this->generateId(), + 'type' => 'TOOL_CALL_ARGS', + 'toolCallId' => $toolCall['id'], 'model' => $this->model, 'timestamp' => $this->timestamp, - 'toolCall' => [ - 'id' => $toolCall['id'], - 'type' => 'function', - 'function' => [ - 'name' => $toolCall['name'], - 'arguments' => $partialJson // Incremental JSON - ] - ], - 'index' => $this->currentToolIndex + 'delta' => $partialJson, + 'args' => $toolCall['input'], ]; } } + } elseif ($eventType === 'content_block_stop') { + // Emit TEXT_MESSAGE_END if we had text content + if ($this->textMessageStarted && $this->accumulatedContent) { + $chunks[] = [ + 'type' => 'TEXT_MESSAGE_END', + 'messageId' => $this->messageId, + 'model' => $this->model, + 'timestamp' => $this->timestamp, + ]; + } + + // Emit TOOL_CALL_END for tool calls + $toolCall = $this->toolCallsMap[$this->currentToolIndex] ?? null; + if ($toolCall) { + $chunks[] = [ + 'type' => 'TOOL_CALL_END', + 'toolCallId' => $toolCall['id'], + 'toolName' => $toolCall['name'], + 'model' => $this->model, + 'timestamp' => $this->timestamp, + 'input' => $this->safeJsonParse($toolCall['input'] ?: '{}'), + ]; + } } elseif ($eventType === 'message_delta') { // Message metadata update (includes stop_reason and usage) $delta = $this->getAttr($event, 'delta'); @@ -128,7 +199,7 @@ public function convertAnthropicEvent(mixed $event): array $stopReason = $delta ? 
$this->getAttr($delta, 'stop_reason') : null; if ($stopReason) { - // Map Anthropic stop_reason to TanStack format + // Map Anthropic stop_reason to AG-UI format $finishReason = match ($stopReason) { 'tool_use' => 'tool_calls', 'end_turn' => 'stop', @@ -146,8 +217,8 @@ public function convertAnthropicEvent(mixed $event): array $this->doneEmitted = true; $chunks[] = [ - 'type' => 'done', - 'id' => $this->generateId(), + 'type' => 'RUN_FINISHED', + 'runId' => $this->runId, 'model' => $this->model, 'timestamp' => $this->timestamp, 'finishReason' => $finishReason, @@ -159,8 +230,8 @@ public function convertAnthropicEvent(mixed $event): array if (!$this->doneEmitted) { $this->doneEmitted = true; $chunks[] = [ - 'type' => 'done', - 'id' => $this->generateId(), + 'type' => 'RUN_FINISHED', + 'runId' => $this->runId, 'model' => $this->model, 'timestamp' => $this->timestamp, 'finishReason' => 'stop' @@ -172,12 +243,23 @@ public function convertAnthropicEvent(mixed $event): array } /** - * Convert OpenAI streaming event to StreamChunk format + * Convert OpenAI streaming event to AG-UI event format */ public function convertOpenAIEvent(mixed $event): array { $chunks = []; + // Emit RUN_STARTED on first event + if (!$this->runStartedEmitted) { + $this->runStartedEmitted = true; + $chunks[] = [ + 'type' => 'RUN_STARTED', + 'runId' => $this->runId, + 'model' => $this->model, + 'timestamp' => $this->timestamp, + ]; + } + // OpenAI events have chunk.choices[0].delta structure $choices = $this->getAttr($event, 'choices', []); $choice = !empty($choices) ? 
$choices[0] : $event; @@ -188,15 +270,26 @@ public function convertOpenAIEvent(mixed $event): array if ($delta) { $content = $this->getAttr($delta, 'content'); if ($content !== null) { + // Emit TEXT_MESSAGE_START on first text + if (!$this->textMessageStarted) { + $this->textMessageStarted = true; + $chunks[] = [ + 'type' => 'TEXT_MESSAGE_START', + 'messageId' => $this->messageId, + 'model' => $this->getAttr($event, 'model', $this->model), + 'timestamp' => $this->timestamp, + 'role' => 'assistant', + ]; + } + $this->accumulatedContent .= $content; $chunks[] = [ - 'type' => 'content', - 'id' => $this->getAttr($event, 'id', $this->generateId()), + 'type' => 'TEXT_MESSAGE_CONTENT', + 'messageId' => $this->messageId, 'model' => $this->getAttr($event, 'model', $this->model), 'timestamp' => $this->timestamp, 'delta' => $content, 'content' => $this->accumulatedContent, - 'role' => 'assistant' ]; } @@ -205,21 +298,32 @@ public function convertOpenAIEvent(mixed $event): array if ($toolCalls) { foreach ($toolCalls as $index => $toolCall) { $function = $this->getAttr($toolCall, 'function', []); + $toolCallId = $this->getAttr($toolCall, 'id', 'call_' . $this->timestamp); + $toolName = $this->getAttr($function, 'name', ''); + $args = $this->getAttr($function, 'arguments', ''); + $toolIndex = $this->getAttr($toolCall, 'index', $index); + + // Emit TOOL_CALL_START $chunks[] = [ - 'type' => 'tool_call', - 'id' => $this->getAttr($event, 'id', $this->generateId()), + 'type' => 'TOOL_CALL_START', + 'toolCallId' => $toolCallId, + 'toolName' => $toolName, 'model' => $this->getAttr($event, 'model', $this->model), 'timestamp' => $this->timestamp, - 'toolCall' => [ - 'id' => $this->getAttr($toolCall, 'id', 'call_' . 
$this->timestamp), - 'type' => 'function', - 'function' => [ - 'name' => $this->getAttr($function, 'name', ''), - 'arguments' => $this->getAttr($function, 'arguments', '') - ] - ], - 'index' => $this->getAttr($toolCall, 'index', $index) + 'index' => $toolIndex, ]; + + // Emit TOOL_CALL_ARGS if there are arguments + if ($args) { + $chunks[] = [ + 'type' => 'TOOL_CALL_ARGS', + 'toolCallId' => $toolCallId, + 'model' => $this->getAttr($event, 'model', $this->model), + 'timestamp' => $this->timestamp, + 'delta' => $args, + 'args' => $args, + ]; + } } } } @@ -227,6 +331,16 @@ public function convertOpenAIEvent(mixed $event): array // Handle completion $finishReason = $this->getAttr($choice, 'finish_reason'); if ($finishReason) { + // Emit TEXT_MESSAGE_END if we had text + if ($this->textMessageStarted) { + $chunks[] = [ + 'type' => 'TEXT_MESSAGE_END', + 'messageId' => $this->messageId, + 'model' => $this->getAttr($event, 'model', $this->model), + 'timestamp' => $this->timestamp, + ]; + } + $usage = $this->getAttr($event, 'usage'); $usageDict = null; if ($usage) { @@ -239,8 +353,8 @@ public function convertOpenAIEvent(mixed $event): array $this->doneEmitted = true; $chunks[] = [ - 'type' => 'done', - 'id' => $this->getAttr($event, 'id', $this->generateId()), + 'type' => 'RUN_FINISHED', + 'runId' => $this->runId, 'model' => $this->getAttr($event, 'model', $this->model), 'timestamp' => $this->timestamp, 'finishReason' => $finishReason, @@ -279,18 +393,18 @@ public function convertEvent(mixed $event): array } /** - * Convert an error to ErrorStreamChunk format + * Convert an error to RUN_ERROR event format */ public function convertError(\Throwable $error): array { return [ - 'type' => 'error', - 'id' => $this->generateId(), + 'type' => 'RUN_ERROR', + 'runId' => $this->runId, 'model' => $this->model, 'timestamp' => $this->timestamp, 'error' => [ 'message' => $error->getMessage(), - 'code' => $error->getCode() + 'code' => $error->getCode() ?: null ] ]; } diff --git 
a/packages/python/tanstack-ai/src/tanstack_ai/converter.py b/packages/python/tanstack-ai/src/tanstack_ai/converter.py index 2ee9e541..21fc504c 100644 --- a/packages/python/tanstack-ai/src/tanstack_ai/converter.py +++ b/packages/python/tanstack-ai/src/tanstack_ai/converter.py @@ -2,8 +2,9 @@ TanStack AI Stream Chunk Converter Converts streaming events from various AI providers (Anthropic, OpenAI) -into TanStack AI StreamChunk format. +into TanStack AI AG-UI event format. """ +import json import uuid from typing import List, Dict, Any, Optional from datetime import datetime @@ -11,7 +12,7 @@ class StreamChunkConverter: """ - Converts provider-specific streaming events to TanStack AI StreamChunk format. + Converts provider-specific streaming events to TanStack AI AG-UI event format. Supports: - Anthropic streaming events @@ -33,10 +34,16 @@ def __init__(self, model: str, provider: str = "anthropic"): self.tool_calls_map: Dict[int, Dict[str, Any]] = {} self.current_tool_index = -1 self.done_emitted = False + + # AG-UI lifecycle tracking + self.run_id = self.generate_id() + self.message_id = self.generate_id() + self.run_started_emitted = False + self.text_message_started = False def generate_id(self) -> str: - """Generate a unique ID for the chunk""" - return f"chatcmpl-{uuid.uuid4().hex[:8]}" + """Generate a unique ID for the event""" + return f"evt-{uuid.uuid4().hex[:8]}" def _get_event_type(self, event: Any) -> str: """Get event type from either dict or object""" @@ -50,38 +57,76 @@ def _get_attr(self, obj: Any, attr: str, default: Any = None) -> Any: return obj.get(attr, default) return getattr(obj, attr, default) + def _safe_json_parse(self, json_str: str) -> Any: + """Safely parse JSON string""" + try: + return json.loads(json_str) + except: + return json_str + async def convert_anthropic_event(self, event: Any) -> List[Dict[str, Any]]: - """Convert Anthropic streaming event to StreamChunk format""" + """Convert Anthropic streaming event to AG-UI event format""" 
chunks = [] event_type = self._get_event_type(event) + # Emit RUN_STARTED on first event + if not self.run_started_emitted: + self.run_started_emitted = True + chunks.append({ + "type": "RUN_STARTED", + "runId": self.run_id, + "model": self.model, + "timestamp": self.timestamp, + }) + if event_type == "content_block_start": # Tool call is starting content_block = self._get_attr(event, "content_block") if content_block and self._get_attr(content_block, "type") == "tool_use": self.current_tool_index += 1 + tool_call_id = self._get_attr(content_block, "id") + tool_name = self._get_attr(content_block, "name") self.tool_calls_map[self.current_tool_index] = { - "id": self._get_attr(content_block, "id"), - "name": self._get_attr(content_block, "name"), + "id": tool_call_id, + "name": tool_name, "input": "" } + # Emit TOOL_CALL_START + chunks.append({ + "type": "TOOL_CALL_START", + "toolCallId": tool_call_id, + "toolName": tool_name, + "model": self.model, + "timestamp": self.timestamp, + "index": self.current_tool_index, + }) elif event_type == "content_block_delta": delta = self._get_attr(event, "delta") if delta and self._get_attr(delta, "type") == "text_delta": + # Emit TEXT_MESSAGE_START on first text + if not self.text_message_started: + self.text_message_started = True + chunks.append({ + "type": "TEXT_MESSAGE_START", + "messageId": self.message_id, + "model": self.model, + "timestamp": self.timestamp, + "role": "assistant", + }) + # Text content delta delta_text = self._get_attr(delta, "text", "") self.accumulated_content += delta_text chunks.append({ - "type": "content", - "id": self.generate_id(), + "type": "TEXT_MESSAGE_CONTENT", + "messageId": self.message_id, "model": self.model, "timestamp": self.timestamp, "delta": delta_text, "content": self.accumulated_content, - "role": "assistant" }) elif delta and self._get_attr(delta, "type") == "input_json_delta": @@ -92,22 +137,38 @@ async def convert_anthropic_event(self, event: Any) -> List[Dict[str, Any]]: if 
tool_call: tool_call["input"] += partial_json + # Emit TOOL_CALL_ARGS chunks.append({ - "type": "tool_call", - "id": self.generate_id(), + "type": "TOOL_CALL_ARGS", + "toolCallId": tool_call["id"], "model": self.model, "timestamp": self.timestamp, - "toolCall": { - "id": tool_call["id"], - "type": "function", - "function": { - "name": tool_call["name"], - "arguments": partial_json # Incremental JSON - } - }, - "index": self.current_tool_index + "delta": partial_json, + "args": tool_call["input"], }) + elif event_type == "content_block_stop": + # Emit TEXT_MESSAGE_END if we had text content + if self.text_message_started and self.accumulated_content: + chunks.append({ + "type": "TEXT_MESSAGE_END", + "messageId": self.message_id, + "model": self.model, + "timestamp": self.timestamp, + }) + + # Emit TOOL_CALL_END for tool calls + tool_call = self.tool_calls_map.get(self.current_tool_index) + if tool_call: + chunks.append({ + "type": "TOOL_CALL_END", + "toolCallId": tool_call["id"], + "toolName": tool_call["name"], + "model": self.model, + "timestamp": self.timestamp, + "input": self._safe_json_parse(tool_call["input"] or "{}"), + }) + elif event_type == "message_delta": # Message metadata update (includes stop_reason and usage) delta = self._get_attr(event, "delta") @@ -115,7 +176,7 @@ async def convert_anthropic_event(self, event: Any) -> List[Dict[str, Any]]: stop_reason = self._get_attr(delta, "stop_reason") if delta else None if stop_reason: - # Map Anthropic stop_reason to TanStack format + # Map Anthropic stop_reason to AG-UI format if stop_reason == "tool_use": finish_reason = "tool_calls" elif stop_reason == "end_turn": @@ -133,8 +194,8 @@ async def convert_anthropic_event(self, event: Any) -> List[Dict[str, Any]]: self.done_emitted = True chunks.append({ - "type": "done", - "id": self.generate_id(), + "type": "RUN_FINISHED", + "runId": self.run_id, "model": self.model, "timestamp": self.timestamp, "finishReason": finish_reason, @@ -146,8 +207,8 @@ async def 
convert_anthropic_event(self, event: Any) -> List[Dict[str, Any]]: if not self.done_emitted: self.done_emitted = True chunks.append({ - "type": "done", - "id": self.generate_id(), + "type": "RUN_FINISHED", + "runId": self.run_id, "model": self.model, "timestamp": self.timestamp, "finishReason": "stop" @@ -156,9 +217,19 @@ async def convert_anthropic_event(self, event: Any) -> List[Dict[str, Any]]: return chunks async def convert_openai_event(self, event: Any) -> List[Dict[str, Any]]: - """Convert OpenAI streaming event to StreamChunk format""" + """Convert OpenAI streaming event to AG-UI event format""" chunks = [] + # Emit RUN_STARTED on first event + if not self.run_started_emitted: + self.run_started_emitted = True + chunks.append({ + "type": "RUN_STARTED", + "runId": self.run_id, + "model": self.model, + "timestamp": self.timestamp, + }) + # OpenAI events have chunk.choices[0].delta structure choice = self._get_attr(event, "choices", []) if choice and len(choice) > 0: @@ -173,40 +244,70 @@ async def convert_openai_event(self, event: Any) -> List[Dict[str, Any]]: if delta: content = self._get_attr(delta, "content") if content: + # Emit TEXT_MESSAGE_START on first text + if not self.text_message_started: + self.text_message_started = True + chunks.append({ + "type": "TEXT_MESSAGE_START", + "messageId": self.message_id, + "model": self._get_attr(event, "model", self.model), + "timestamp": self.timestamp, + "role": "assistant", + }) + self.accumulated_content += content chunks.append({ - "type": "content", - "id": self._get_attr(event, "id", self.generate_id()), + "type": "TEXT_MESSAGE_CONTENT", + "messageId": self.message_id, "model": self._get_attr(event, "model", self.model), "timestamp": self.timestamp, "delta": content, "content": self.accumulated_content, - "role": "assistant" }) # Handle tool calls tool_calls = self._get_attr(delta, "tool_calls") if tool_calls: for tool_call in tool_calls: + tool_call_id = self._get_attr(tool_call, "id", 
f"call_{self.timestamp}") + function = self._get_attr(tool_call, "function", {}) + tool_name = self._get_attr(function, "name", "") + args = self._get_attr(function, "arguments", "") + index = self._get_attr(tool_call, "index", 0) + + # Emit TOOL_CALL_START chunks.append({ - "type": "tool_call", - "id": self._get_attr(event, "id", self.generate_id()), + "type": "TOOL_CALL_START", + "toolCallId": tool_call_id, + "toolName": tool_name, "model": self._get_attr(event, "model", self.model), "timestamp": self.timestamp, - "toolCall": { - "id": self._get_attr(tool_call, "id", f"call_{self.timestamp}"), - "type": "function", - "function": { - "name": self._get_attr(self._get_attr(tool_call, "function", {}), "name", ""), - "arguments": self._get_attr(self._get_attr(tool_call, "function", {}), "arguments", "") - } - }, - "index": self._get_attr(tool_call, "index", 0) + "index": index, }) + + # Emit TOOL_CALL_ARGS if there are arguments + if args: + chunks.append({ + "type": "TOOL_CALL_ARGS", + "toolCallId": tool_call_id, + "model": self._get_attr(event, "model", self.model), + "timestamp": self.timestamp, + "delta": args, + "args": args, + }) # Handle completion finish_reason = self._get_attr(choice, "finish_reason") if finish_reason: + # Emit TEXT_MESSAGE_END if we had text + if self.text_message_started: + chunks.append({ + "type": "TEXT_MESSAGE_END", + "messageId": self.message_id, + "model": self._get_attr(event, "model", self.model), + "timestamp": self.timestamp, + }) + usage = self._get_attr(event, "usage") usage_dict = None if usage: @@ -218,8 +319,8 @@ async def convert_openai_event(self, event: Any) -> List[Dict[str, Any]]: self.done_emitted = True chunks.append({ - "type": "done", - "id": self._get_attr(event, "id", self.generate_id()), + "type": "RUN_FINISHED", + "runId": self.run_id, "model": self._get_attr(event, "model", self.model), "timestamp": self.timestamp, "finishReason": finish_reason, @@ -230,7 +331,7 @@ async def convert_openai_event(self, event: Any) 
-> List[Dict[str, Any]]: async def convert_event(self, event: Any) -> List[Dict[str, Any]]: """ - Convert provider streaming event to StreamChunk format. + Convert provider streaming event to AG-UI event format. Automatically detects provider based on event structure. """ if self.provider == "anthropic": @@ -252,10 +353,10 @@ async def convert_event(self, event: Any) -> List[Dict[str, Any]]: return await self.convert_anthropic_event(event) async def convert_error(self, error: Exception) -> Dict[str, Any]: - """Convert an error to ErrorStreamChunk format""" + """Convert an error to RUN_ERROR event format""" return { - "type": "error", - "id": self.generate_id(), + "type": "RUN_ERROR", + "runId": self.run_id, "model": self.model, "timestamp": self.timestamp, "error": { diff --git a/packages/python/tanstack-ai/src/tanstack_ai/sse.py b/packages/python/tanstack-ai/src/tanstack_ai/sse.py index 7d3e30ee..f6626014 100644 --- a/packages/python/tanstack-ai/src/tanstack_ai/sse.py +++ b/packages/python/tanstack-ai/src/tanstack_ai/sse.py @@ -33,7 +33,7 @@ def format_sse_done() -> str: def format_sse_error(error: Exception) -> str: """ - Format an error as an SSE error chunk. + Format an error as an SSE RUN_ERROR event (AG-UI protocol). 
Args: error: Exception to format @@ -42,9 +42,9 @@ def format_sse_error(error: Exception) -> str: SSE-formatted error chunk """ error_chunk = { - "type": "error", + "type": "RUN_ERROR", "error": { - "type": type(error).__name__, + "code": type(error).__name__, "message": str(error) } } diff --git a/packages/python/tanstack-ai/src/tanstack_ai/types.py b/packages/python/tanstack-ai/src/tanstack_ai/types.py index 21ccc701..1f82077a 100644 --- a/packages/python/tanstack-ai/src/tanstack_ai/types.py +++ b/packages/python/tanstack-ai/src/tanstack_ai/types.py @@ -84,115 +84,188 @@ class Tool: # ============================================================================ -# Stream Chunk Types +# AG-UI Protocol Event Types # ============================================================================ - -StreamChunkType = Literal[ - "content", - "thinking", - "tool_call", - "tool-input-available", - "approval-requested", - "tool_result", - "done", - "error", +EventType = Literal[ + "RUN_STARTED", + "RUN_FINISHED", + "RUN_ERROR", + "TEXT_MESSAGE_START", + "TEXT_MESSAGE_CONTENT", + "TEXT_MESSAGE_END", + "TOOL_CALL_START", + "TOOL_CALL_ARGS", + "TOOL_CALL_END", + "STEP_STARTED", + "STEP_FINISHED", + "STATE_SNAPSHOT", + "STATE_DELTA", + "CUSTOM", ] +# Legacy alias for backwards compatibility +StreamChunkType = EventType -class BaseStreamChunk(TypedDict): - """Base structure for all stream chunks.""" - type: StreamChunkType - id: str - model: str +class BaseEvent(TypedDict, total=False): + """Base structure for all AG-UI events.""" + + type: EventType timestamp: int # Unix timestamp in milliseconds + model: Optional[str] # TanStack AI addition + rawEvent: Optional[Any] # Original provider event + + +class UsageInfo(TypedDict, total=False): + """Token usage information.""" + + promptTokens: int + completionTokens: int + totalTokens: int + + +class ErrorInfo(TypedDict, total=False): + """Error information.""" + + message: str + code: Optional[str] + + +class 
RunStartedEvent(BaseEvent): + """Emitted when a run starts.""" + + runId: str + threadId: Optional[str] + + +class RunFinishedEvent(BaseEvent): + """Emitted when a run completes successfully.""" + + runId: str + finishReason: Optional[Literal["stop", "length", "content_filter", "tool_calls"]] + usage: Optional[UsageInfo] + +class RunErrorEvent(BaseEvent): + """Emitted when an error occurs during a run.""" -class ContentStreamChunk(BaseStreamChunk): - """Emitted when the model generates text content.""" + runId: Optional[str] + error: ErrorInfo + + +class TextMessageStartEvent(BaseEvent): + """Emitted when a text message starts.""" + + messageId: str + role: Literal["assistant"] - delta: str # The incremental content token - content: str # Full accumulated content so far - role: Optional[Literal["assistant"]] +class TextMessageContentEvent(BaseEvent): + """Emitted when text content is generated (streaming tokens).""" -class ThinkingStreamChunk(BaseStreamChunk): - """Emitted when the model exposes its reasoning process.""" + messageId: str + delta: str + content: Optional[str] # Full accumulated content so far - delta: Optional[str] # The incremental thinking token - content: str # Full accumulated thinking content so far +class TextMessageEndEvent(BaseEvent): + """Emitted when a text message completes.""" -class ToolCallStreamChunk(BaseStreamChunk): - """Emitted when the model decides to call a tool/function.""" + messageId: str - toolCall: ToolCall - index: int # Index of this tool call (for parallel calls) + +class ApprovalInfo(TypedDict, total=False): + """Approval metadata for tools requiring user approval.""" + + id: str + needsApproval: bool -class ToolInputAvailableStreamChunk(BaseStreamChunk): - """Emitted when tool inputs are complete and ready for client-side execution.""" +class ToolCallStartEvent(BaseEvent): + """Emitted when a tool call starts.""" toolCallId: str toolName: str - input: Any # Parsed tool arguments + index: Optional[int] + approval: 
Optional[ApprovalInfo] -class ApprovalRequestedStreamChunk(BaseStreamChunk): - """Emitted when a tool requires user approval before execution.""" +class ToolCallArgsEvent(BaseEvent): + """Emitted when tool call arguments are streaming.""" toolCallId: str - toolName: str - input: Any - approval: Dict[str, Any] # Contains 'id' and 'needsApproval' + delta: str # Incremental JSON arguments delta + args: Optional[str] # Full accumulated arguments -class ToolResultStreamChunk(BaseStreamChunk): - """Emitted when a tool execution completes.""" +class ToolCallEndEvent(BaseEvent): + """Emitted when a tool call completes (with optional result).""" toolCallId: str - content: str # Result of the tool execution (JSON stringified) + toolName: str + input: Optional[Any] # Final parsed input arguments + result: Optional[str] # Tool execution result -class UsageInfo(TypedDict, total=False): - """Token usage information.""" +class StepStartedEvent(BaseEvent): + """Emitted when a reasoning/thinking step starts.""" - promptTokens: int - completionTokens: int - totalTokens: int + stepId: str + stepType: Literal["thinking", "reasoning", "planning"] -class DoneStreamChunk(BaseStreamChunk): - """Emitted when the stream completes successfully.""" +class StepFinishedEvent(BaseEvent): + """Emitted when a reasoning/thinking step completes or streams content.""" - finishReason: Optional[Literal["stop", "length", "content_filter", "tool_calls"]] - usage: Optional[UsageInfo] + stepId: str + delta: Optional[str] # Incremental thinking token + content: str # Full accumulated thinking content -class ErrorInfo(TypedDict, total=False): - """Error information.""" +class StateDeltaOp(TypedDict): + """A single state delta operation.""" - message: str - code: Optional[str] + op: Literal["add", "remove", "replace"] + path: str + value: Optional[Any] -class ErrorStreamChunk(BaseStreamChunk): - """Emitted when an error occurs during streaming.""" +class StateSnapshotEvent(BaseEvent): + """Emitted for full 
state synchronization.""" - error: ErrorInfo + state: Dict[str, Any] + + +class StateDeltaEvent(BaseEvent): + """Emitted for incremental state updates.""" + + delta: List[StateDeltaOp] + + +class CustomEvent(BaseEvent): + """Custom event for extensibility.""" + + name: str + value: Any -# Union type for all stream chunks +# Union type for all AG-UI events StreamChunk = Union[ - ContentStreamChunk, - ThinkingStreamChunk, - ToolCallStreamChunk, - ToolInputAvailableStreamChunk, - ApprovalRequestedStreamChunk, - ToolResultStreamChunk, - DoneStreamChunk, - ErrorStreamChunk, + RunStartedEvent, + RunFinishedEvent, + RunErrorEvent, + TextMessageStartEvent, + TextMessageContentEvent, + TextMessageEndEvent, + ToolCallStartEvent, + ToolCallArgsEvent, + ToolCallEndEvent, + StepStartedEvent, + StepFinishedEvent, + StateSnapshotEvent, + StateDeltaEvent, + CustomEvent, ] diff --git a/packages/typescript/ai-anthropic/src/anthropic-adapter.ts b/packages/typescript/ai-anthropic/src/anthropic-adapter.ts index 205f9d0e..d12d1899 100644 --- a/packages/typescript/ai-anthropic/src/anthropic-adapter.ts +++ b/packages/typescript/ai-anthropic/src/anthropic-adapter.ts @@ -107,10 +107,9 @@ export class Anthropic extends BaseAdapter< stack: error?.stack, }) - // Emit an error chunk + // Emit an error chunk (AG-UI RUN_ERROR) yield { - type: 'error', - id: this.generateId(), + type: 'RUN_ERROR', model: options.model, timestamp: Date.now(), error: { @@ -416,12 +415,31 @@ export class Anthropic extends BaseAdapter< const timestamp = Date.now() const toolCallsMap = new Map< number, - { id: string; name: string; input: string } + { id: string; name: string; input: string; started: boolean } >() let currentToolIndex = -1 + // AG-UI lifecycle tracking + const runId = generateId() + const messageId = generateId() + const stepId = generateId() + let hasEmittedRunStarted = false + let hasEmittedTextMessageStart = false + let hasEmittedStepStarted = false + try { for await (const event of stream) { + // 
Emit RUN_STARTED on first event + if (!hasEmittedRunStarted) { + hasEmittedRunStarted = true + yield { + type: 'RUN_STARTED', + runId, + model, + timestamp, + } + } + if (event.type === 'content_block_start') { if (event.content_block.type === 'tool_use') { currentToolIndex++ @@ -429,32 +447,55 @@ export class Anthropic extends BaseAdapter< id: event.content_block.id, name: event.content_block.name, input: '', + started: false, }) } else if (event.content_block.type === 'thinking') { // Reset thinking content when a new thinking block starts accumulatedThinking = '' + // Emit STEP_STARTED for thinking + if (!hasEmittedStepStarted) { + hasEmittedStepStarted = true + yield { + type: 'STEP_STARTED', + stepId, + model, + timestamp, + stepType: 'thinking', + } + } } } else if (event.type === 'content_block_delta') { if (event.delta.type === 'text_delta') { + // Emit TEXT_MESSAGE_START on first text content + if (!hasEmittedTextMessageStart) { + hasEmittedTextMessageStart = true + yield { + type: 'TEXT_MESSAGE_START', + messageId, + model, + timestamp, + role: 'assistant', + } + } + const delta = event.delta.text accumulatedContent += delta yield { - type: 'content', - id: generateId(), - model: model, + type: 'TEXT_MESSAGE_CONTENT', + messageId, + model, timestamp, delta, content: accumulatedContent, - role: 'assistant', } } else if (event.delta.type === 'thinking_delta') { // Handle thinking content const delta = event.delta.thinking accumulatedThinking += delta yield { - type: 'thinking', - id: generateId(), - model: model, + type: 'STEP_FINISHED', + stepId, + model, timestamp, delta, content: accumulatedThinking, @@ -463,55 +504,75 @@ export class Anthropic extends BaseAdapter< // Tool input is being streamed const existing = toolCallsMap.get(currentToolIndex) if (existing) { + // Emit TOOL_CALL_START on first args + if (!existing.started) { + existing.started = true + yield { + type: 'TOOL_CALL_START', + toolCallId: existing.id, + toolName: existing.name, + model, + 
timestamp, + index: currentToolIndex, + } + } + // Accumulate the input for final processing existing.input += event.delta.partial_json - // Yield the DELTA (partial_json), not the full accumulated input - // The stream processor will concatenate these deltas + // Yield the DELTA (partial_json) as TOOL_CALL_ARGS yield { - type: 'tool_call', - id: generateId(), - model: model, + type: 'TOOL_CALL_ARGS', + toolCallId: existing.id, + model, timestamp, - toolCall: { - id: existing.id, - type: 'function', - function: { - name: existing.name, - arguments: event.delta.partial_json, - }, - }, - index: currentToolIndex, + delta: event.delta.partial_json, + args: existing.input, } } } } else if (event.type === 'content_block_stop') { - // If this is a tool call and we haven't received any input deltas, - // emit a tool_call chunk with empty arguments + // If this is a tool call, emit TOOL_CALL_END const existing = toolCallsMap.get(currentToolIndex) - if (existing && existing.input === '') { - // No input_json_delta events received, emit empty arguments + if (existing) { + // If we never started (no input deltas), emit start first + if (!existing.started) { + existing.started = true + yield { + type: 'TOOL_CALL_START', + toolCallId: existing.id, + toolName: existing.name, + model, + timestamp, + index: currentToolIndex, + } + } + + // Emit TOOL_CALL_END with final input yield { - type: 'tool_call', - id: generateId(), - model: model, + type: 'TOOL_CALL_END', + toolCallId: existing.id, + toolName: existing.name, + model, + timestamp, + input: this.safeJsonParse(existing.input || '{}'), + } + } + + // Emit TEXT_MESSAGE_END if we had text content + if (hasEmittedTextMessageStart && accumulatedContent) { + yield { + type: 'TEXT_MESSAGE_END', + messageId, + model, timestamp, - toolCall: { - id: existing.id, - type: 'function', - function: { - name: existing.name, - arguments: '{}', - }, - }, - index: currentToolIndex, } } } else if (event.type === 'message_stop') { yield { - type: 
'done', - id: generateId(), - model: model, + type: 'RUN_FINISHED', + runId, + model, timestamp, finishReason: 'stop', } @@ -520,12 +581,11 @@ export class Anthropic extends BaseAdapter< switch (event.delta.stop_reason) { case 'tool_use': { yield { - type: 'done', - id: generateId(), - model: model, + type: 'RUN_FINISHED', + runId, + model, timestamp, finishReason: 'tool_calls', - usage: { promptTokens: event.usage.input_tokens || 0, completionTokens: event.usage.output_tokens || 0, @@ -538,9 +598,9 @@ export class Anthropic extends BaseAdapter< } case 'max_tokens': { yield { - type: 'error', - id: generateId(), - model: model, + type: 'RUN_ERROR', + runId, + model, timestamp, error: { message: @@ -552,9 +612,9 @@ export class Anthropic extends BaseAdapter< } case 'model_context_window_exceeded': { yield { - type: 'error', - id: generateId(), - model: model, + type: 'RUN_ERROR', + runId, + model, timestamp, error: { message: @@ -566,9 +626,9 @@ export class Anthropic extends BaseAdapter< } case 'refusal': { yield { - type: 'error', - id: generateId(), - model: model, + type: 'RUN_ERROR', + runId, + model, timestamp, error: { message: 'The model refused to complete the request.', @@ -579,9 +639,9 @@ export class Anthropic extends BaseAdapter< } default: { yield { - type: 'done', - id: generateId(), - model: model, + type: 'RUN_FINISHED', + runId, + model, timestamp, finishReason: 'stop', usage: { @@ -609,9 +669,9 @@ export class Anthropic extends BaseAdapter< }) yield { - type: 'error', - id: generateId(), - model: model, + type: 'RUN_ERROR', + runId, + model, timestamp, error: { message: error?.message || 'Unknown error occurred', @@ -620,6 +680,14 @@ export class Anthropic extends BaseAdapter< } } } + + private safeJsonParse(jsonString: string): any { + try { + return JSON.parse(jsonString) + } catch { + return jsonString + } + } } /** * Creates an Anthropic adapter with simplified configuration diff --git a/packages/typescript/ai-devtools/src/store/ai-context.tsx 
b/packages/typescript/ai-devtools/src/store/ai-context.tsx index 08e1853c..c63a7c1e 100644 --- a/packages/typescript/ai-devtools/src/store/ai-context.tsx +++ b/packages/typescript/ai-devtools/src/store/ai-context.tsx @@ -1042,7 +1042,9 @@ export const AIProvider: ParentComponent = (props) => { type: 'tool_result', messageId: e.payload.messageId, toolCallId: e.payload.toolCallId, - content: e.payload.result, + content: typeof e.payload.result === 'string' + ? e.payload.result + : JSON.stringify(e.payload.result), timestamp: e.payload.timestamp, chunkCount: 1, } diff --git a/packages/typescript/ai-gemini/src/gemini-adapter.ts b/packages/typescript/ai-gemini/src/gemini-adapter.ts index 8a711b10..e6d9815c 100644 --- a/packages/typescript/ai-gemini/src/gemini-adapter.ts +++ b/packages/typescript/ai-gemini/src/gemini-adapter.ts @@ -80,8 +80,7 @@ export class GeminiAdapter extends BaseAdapter< } catch (error) { const timestamp = Date.now() yield { - type: 'error', - id: this.generateId(), + type: 'RUN_ERROR', model: options.model, timestamp, error: { @@ -219,11 +218,29 @@ export class GeminiAdapter extends BaseAdapter< let accumulatedContent = '' const toolCallMap = new Map< string, - { name: string; args: string; index: number } + { name: string; args: string; index: number; started: boolean } >() let nextToolIndex = 0 + + // AG-UI lifecycle tracking + const runId = this.generateId() + const messageId = this.generateId() + let hasEmittedRunStarted = false + let hasEmittedTextMessageStart = false + // Iterate over the stream result (it's already an AsyncGenerator) for await (const chunk of result) { + // Emit RUN_STARTED on first chunk + if (!hasEmittedRunStarted) { + hasEmittedRunStarted = true + yield { + type: 'RUN_STARTED', + runId, + model, + timestamp, + } + } + // Extract content from candidates[0].content.parts // Parts can contain text or functionCall if (chunk.candidates?.[0]?.content?.parts) { @@ -232,20 +249,30 @@ export class GeminiAdapter extends 
BaseAdapter< for (const part of parts) { // Handle text content if (part.text) { + // Emit TEXT_MESSAGE_START on first text + if (!hasEmittedTextMessageStart) { + hasEmittedTextMessageStart = true + yield { + type: 'TEXT_MESSAGE_START', + messageId, + model, + timestamp, + role: 'assistant', + } + } + accumulatedContent += part.text yield { - type: 'content', - id: this.generateId(), + type: 'TEXT_MESSAGE_CONTENT', + messageId, model, timestamp, delta: part.text, content: accumulatedContent, - role: 'assistant', } } // Handle function calls (tool calls) - // Check both camelCase (SDK) and snake_case (direct API) formats const functionCall = part.functionCall if (functionCall) { const toolCallId = @@ -262,11 +289,22 @@ export class GeminiAdapter extends BaseAdapter< ? functionArgs : JSON.stringify(functionArgs), index: nextToolIndex++, + started: false, } toolCallMap.set(toolCallId, toolCallData) + + // Emit TOOL_CALL_START + yield { + type: 'TOOL_CALL_START', + toolCallId, + toolName: toolCallData.name, + model, + timestamp, + index: toolCallData.index, + } + toolCallData.started = true } else { // Merge arguments if streaming - try { const existingArgs = JSON.parse(toolCallData.args) const newArgs = @@ -276,7 +314,6 @@ export class GeminiAdapter extends BaseAdapter< const mergedArgs = { ...existingArgs, ...newArgs } toolCallData.args = JSON.stringify(mergedArgs) } catch { - // If parsing fails, use new args toolCallData.args = typeof functionArgs === 'string' ? 
functionArgs @@ -284,34 +321,38 @@ export class GeminiAdapter extends BaseAdapter< } } + // Emit TOOL_CALL_ARGS with the arguments yield { - type: 'tool_call', - id: this.generateId(), + type: 'TOOL_CALL_ARGS', + toolCallId, model, timestamp, - toolCall: { - id: toolCallId, - type: 'function', - function: { - name: toolCallData.name, - arguments: toolCallData.args, - }, - }, - index: toolCallData.index, + delta: toolCallData.args, + args: toolCallData.args, } } } } else if (chunk.data) { // Fallback to chunk.data if available + if (!hasEmittedTextMessageStart) { + hasEmittedTextMessageStart = true + yield { + type: 'TEXT_MESSAGE_START', + messageId, + model, + timestamp, + role: 'assistant', + } + } + accumulatedContent += chunk.data yield { - type: 'content', - id: this.generateId(), + type: 'TEXT_MESSAGE_CONTENT', + messageId, model, timestamp, delta: chunk.data, content: accumulatedContent, - role: 'assistant', } } @@ -319,16 +360,34 @@ export class GeminiAdapter extends BaseAdapter< if (chunk.candidates?.[0]?.finishReason) { const finishReason = chunk.candidates[0].finishReason + // Emit TEXT_MESSAGE_END if we had text content + if (hasEmittedTextMessageStart) { + yield { + type: 'TEXT_MESSAGE_END', + messageId, + model, + timestamp, + } + } + + // Emit TOOL_CALL_END for all tool calls + for (const [toolCallId, toolData] of toolCallMap) { + yield { + type: 'TOOL_CALL_END', + toolCallId, + toolName: toolData.name, + model, + timestamp, + input: this.safeJsonParse(toolData.args), + } + } + // UNEXPECTED_TOOL_CALL means Gemini tried to call a function but it wasn't properly declared - // This typically means there's an issue with the tool declaration format - // We should map it to tool_calls to try to process it anyway if (finishReason === FinishReason.UNEXPECTED_TOOL_CALL) { - // Try to extract function call from content.parts if available if (chunk.candidates[0].content?.parts) { for (const part of chunk.candidates[0].content.parts) { const functionCall = 
part.functionCall if (functionCall) { - // We found a function call - process it const toolCallId = functionCall.name || `call_${Date.now()}_${nextToolIndex}` const functionArgs = functionCall.args || {} @@ -340,34 +399,37 @@ export class GeminiAdapter extends BaseAdapter< ? functionArgs : JSON.stringify(functionArgs), index: nextToolIndex++, + started: true, }) yield { - type: 'tool_call', - id: this.generateId(), + type: 'TOOL_CALL_START', + toolCallId, + toolName: functionCall.name || '', model, timestamp, - toolCall: { - id: toolCallId, - type: 'function', - function: { - name: functionCall.name || '', - arguments: - typeof functionArgs === 'string' - ? functionArgs - : JSON.stringify(functionArgs), - }, - }, index: nextToolIndex - 1, } + + yield { + type: 'TOOL_CALL_END', + toolCallId, + toolName: functionCall.name || '', + model, + timestamp, + input: + typeof functionArgs === 'string' + ? this.safeJsonParse(functionArgs) + : functionArgs, + } } } } } if (finishReason === FinishReason.MAX_TOKENS) { yield { - type: 'error', - id: this.generateId(), + type: 'RUN_ERROR', + runId, model, timestamp, error: { @@ -378,8 +440,8 @@ export class GeminiAdapter extends BaseAdapter< } yield { - type: 'done', - id: this.generateId(), + type: 'RUN_FINISHED', + runId, model, timestamp, finishReason: toolCallMap.size > 0 ? 
'tool_calls' : 'stop', @@ -395,6 +457,14 @@ export class GeminiAdapter extends BaseAdapter< } } + private safeJsonParse(jsonString: string): any { + try { + return JSON.parse(jsonString) + } catch { + return jsonString + } + } + private convertContentPartToGemini(part: ContentPart): Part { switch (part.type) { case 'text': diff --git a/packages/typescript/ai-gemini/tests/gemini-adapter.test.ts b/packages/typescript/ai-gemini/tests/gemini-adapter.test.ts index 6c7a08aa..8ce9d052 100644 --- a/packages/typescript/ai-gemini/tests/gemini-adapter.test.ts +++ b/packages/typescript/ai-gemini/tests/gemini-adapter.test.ts @@ -326,18 +326,27 @@ describe('GeminiAdapter through AI', () => { expect(mocks.generateContentStreamSpy).toHaveBeenCalledTimes(1) const [streamPayload] = mocks.generateContentStreamSpy.mock.calls[0] expect(streamPayload.config?.topK).toBe(3) + // First chunk is RUN_STARTED expect(received[0]).toMatchObject({ - type: 'content', - delta: 'Partly ', - content: 'Partly ', + type: 'RUN_STARTED', }) + // Then TEXT_MESSAGE_START before content expect(received[1]).toMatchObject({ - type: 'content', + type: 'TEXT_MESSAGE_START', + role: 'assistant', + }) + // Then TEXT_MESSAGE_CONTENT chunks + expect(received[2]).toMatchObject({ + type: 'TEXT_MESSAGE_CONTENT', + delta: 'Partly ', + }) + expect(received[3]).toMatchObject({ + type: 'TEXT_MESSAGE_CONTENT', delta: 'cloudy', - content: 'Partly cloudy', }) + // Last chunk is RUN_FINISHED expect(received.at(-1)).toMatchObject({ - type: 'done', + type: 'RUN_FINISHED', finishReason: 'stop', usage: { promptTokens: 4, diff --git a/packages/typescript/ai-ollama/src/ollama-adapter.ts b/packages/typescript/ai-ollama/src/ollama-adapter.ts index fc6080c0..2842730b 100644 --- a/packages/typescript/ai-ollama/src/ollama-adapter.ts +++ b/packages/typescript/ai-ollama/src/ollama-adapter.ts @@ -271,44 +271,75 @@ export class Ollama extends BaseAdapter< ): AsyncIterable { let accumulatedContent = '' const timestamp = Date.now() - const 
responseId: string = this.generateId() + const runId: string = this.generateId() + const messageId: string = this.generateId() + const stepId: string = this.generateId() let accumulatedReasoning = '' let hasEmittedToolCalls = false + let hasEmittedRunStarted = false + let hasEmittedTextMessageStart = false + let hasEmittedStepStarted = false + for await (const chunk of stream) { - function handleToolCall(toolCall: ToolCall): StreamChunk { - // we cast because the library types are missing id and index - const actualToolCall = toolCall as ToolCall & { - id: string - function: { index: number } - } - return { - type: 'tool_call', - id: responseId, + // Emit RUN_STARTED on first chunk + if (!hasEmittedRunStarted) { + hasEmittedRunStarted = true + yield { + type: 'RUN_STARTED', + runId, model: chunk.model, timestamp, - toolCall: { - type: 'function', - id: actualToolCall.id, - function: { - name: actualToolCall.function.name || '', - arguments: - typeof actualToolCall.function.arguments === 'string' - ? actualToolCall.function.arguments - : JSON.stringify(actualToolCall.function.arguments), - }, - }, - index: actualToolCall.function.index, } } + if (chunk.done) { + // Emit TEXT_MESSAGE_END if we had text + if (hasEmittedTextMessageStart) { + yield { + type: 'TEXT_MESSAGE_END', + messageId, + model: chunk.model, + timestamp, + } + } + if (chunk.message.tool_calls && chunk.message.tool_calls.length > 0) { - for (const toolCall of chunk.message.tool_calls) { - yield handleToolCall(toolCall) + for (let i = 0; i < chunk.message.tool_calls.length; i++) { + const toolCall = chunk.message.tool_calls[i] + const actualToolCall = toolCall as ToolCall & { + id: string + function: { index: number } + } + const toolCallId = actualToolCall.id || `call_${timestamp}_${i}` + const toolName = actualToolCall.function.name || '' + const args = + typeof actualToolCall.function.arguments === 'string' + ? 
actualToolCall.function.arguments + : JSON.stringify(actualToolCall.function.arguments) + + // Emit TOOL_CALL_START and TOOL_CALL_END together + yield { + type: 'TOOL_CALL_START', + toolCallId, + toolName, + model: chunk.model, + timestamp, + index: actualToolCall.function.index, + } + + yield { + type: 'TOOL_CALL_END', + toolCallId, + toolName, + model: chunk.model, + timestamp, + input: this.safeJsonParse(args), + } hasEmittedToolCalls = true } yield { - type: 'done', - id: responseId || this.generateId(), + type: 'RUN_FINISHED', + runId, model: chunk.model, timestamp, finishReason: 'tool_calls', @@ -316,38 +347,91 @@ export class Ollama extends BaseAdapter< continue } yield { - type: 'done', - id: responseId || this.generateId(), + type: 'RUN_FINISHED', + runId, model: chunk.model, timestamp, finishReason: hasEmittedToolCalls ? 'tool_calls' : 'stop', } continue } + if (chunk.message.content) { + // Emit TEXT_MESSAGE_START on first content + if (!hasEmittedTextMessageStart) { + hasEmittedTextMessageStart = true + yield { + type: 'TEXT_MESSAGE_START', + messageId, + model: chunk.model, + timestamp, + role: 'assistant', + } + } + accumulatedContent += chunk.message.content yield { - type: 'content', - id: responseId || this.generateId(), + type: 'TEXT_MESSAGE_CONTENT', + messageId, model: chunk.model, timestamp, delta: chunk.message.content, content: accumulatedContent, - role: 'assistant', } } if (chunk.message.tool_calls && chunk.message.tool_calls.length > 0) { - for (const toolCall of chunk.message.tool_calls) { - yield handleToolCall(toolCall) + for (let i = 0; i < chunk.message.tool_calls.length; i++) { + const toolCall = chunk.message.tool_calls[i] + const actualToolCall = toolCall as ToolCall & { + id: string + function: { index: number } + } + const toolCallId = actualToolCall.id || `call_${timestamp}_${i}` + const toolName = actualToolCall.function.name || '' + const args = + typeof actualToolCall.function.arguments === 'string' + ? 
actualToolCall.function.arguments + : JSON.stringify(actualToolCall.function.arguments) + + yield { + type: 'TOOL_CALL_START', + toolCallId, + toolName, + model: chunk.model, + timestamp, + index: actualToolCall.function.index, + } + + yield { + type: 'TOOL_CALL_ARGS', + toolCallId, + model: chunk.model, + timestamp, + delta: args, + args, + } hasEmittedToolCalls = true } } + if (chunk.message.thinking) { + // Emit STEP_STARTED on first thinking + if (!hasEmittedStepStarted) { + hasEmittedStepStarted = true + yield { + type: 'STEP_STARTED', + stepId, + model: chunk.model, + timestamp, + stepType: 'thinking', + } + } + accumulatedReasoning += chunk.message.thinking yield { - type: 'thinking', - id: responseId || this.generateId(), + type: 'STEP_FINISHED', + stepId, model: chunk.model, timestamp, content: accumulatedReasoning, @@ -357,6 +441,14 @@ export class Ollama extends BaseAdapter< } } + private safeJsonParse(jsonString: string): any { + try { + return JSON.parse(jsonString) + } catch { + return jsonString + } + } + /** * Converts standard Tool format to Ollama-specific tool format * Ollama uses OpenAI-compatible tool format diff --git a/packages/typescript/ai-openai/src/openai-adapter.ts b/packages/typescript/ai-openai/src/openai-adapter.ts index 3c51e77f..fbd8b537 100644 --- a/packages/typescript/ai-openai/src/openai-adapter.ts +++ b/packages/typescript/ai-openai/src/openai-adapter.ts @@ -210,10 +210,14 @@ export class OpenAI extends BaseAdapter< // Track if we've been streaming deltas to avoid duplicating content from done events let hasStreamedContentDeltas = false let hasStreamedReasoningDeltas = false + let hasEmittedTextMessageStart = false + let hasEmittedStepStarted = false // Preserve response metadata across events let responseId: string | null = null let model: string = options.model + let messageId: string | null = null + let stepId: string | null = null const eventTypeCounts = new Map() @@ -229,21 +233,20 @@ export class OpenAI extends 
BaseAdapter< if (contentPart.type === 'output_text') { accumulatedContent += contentPart.text return { - type: 'content', - id: responseId || generateId(), + type: 'TEXT_MESSAGE_CONTENT', + messageId: messageId || generateId(), model: model || options.model, timestamp, delta: contentPart.text, content: accumulatedContent, - role: 'assistant', } } if (contentPart.type === 'reasoning_text') { accumulatedReasoning += contentPart.text return { - type: 'thinking', - id: responseId || generateId(), + type: 'STEP_FINISHED', + stepId: stepId || generateId(), model: model || options.model, timestamp, delta: contentPart.text, @@ -251,8 +254,8 @@ export class OpenAI extends BaseAdapter< } } return { - type: 'error', - id: responseId || generateId(), + type: 'RUN_ERROR', + runId: responseId || undefined, model: model || options.model, timestamp, error: { @@ -268,15 +271,30 @@ export class OpenAI extends BaseAdapter< ) { responseId = chunk.response.id model = chunk.response.model + messageId = generateId() + stepId = generateId() // Reset streaming flags for new response hasStreamedContentDeltas = false hasStreamedReasoningDeltas = false + hasEmittedTextMessageStart = false + hasEmittedStepStarted = false accumulatedContent = '' accumulatedReasoning = '' + + // Emit RUN_STARTED event + if (chunk.type === 'response.created') { + yield { + type: 'RUN_STARTED', + runId: chunk.response.id, + model: chunk.response.model, + timestamp, + } + } + if (chunk.response.error) { yield { - type: 'error', - id: chunk.response.id, + type: 'RUN_ERROR', + runId: chunk.response.id, model: chunk.response.model, timestamp, error: chunk.response.error, @@ -284,8 +302,8 @@ export class OpenAI extends BaseAdapter< } if (chunk.response.incomplete_details) { yield { - type: 'error', - id: chunk.response.id, + type: 'RUN_ERROR', + runId: chunk.response.id, model: chunk.response.model, timestamp, error: { @@ -305,16 +323,27 @@ export class OpenAI extends BaseAdapter< : '' if (textDelta) { + // Emit 
TEXT_MESSAGE_START on first content + if (!hasEmittedTextMessageStart) { + hasEmittedTextMessageStart = true + yield { + type: 'TEXT_MESSAGE_START', + messageId: messageId || generateId(), + model: model || options.model, + timestamp, + role: 'assistant', + } + } + accumulatedContent += textDelta hasStreamedContentDeltas = true yield { - type: 'content', - id: responseId || generateId(), + type: 'TEXT_MESSAGE_CONTENT', + messageId: messageId || generateId(), model: model || options.model, timestamp, delta: textDelta, content: accumulatedContent, - role: 'assistant', } } } @@ -330,11 +359,23 @@ export class OpenAI extends BaseAdapter< : '' if (reasoningDelta) { + // Emit STEP_STARTED on first reasoning content + if (!hasEmittedStepStarted) { + hasEmittedStepStarted = true + yield { + type: 'STEP_STARTED', + stepId: stepId || generateId(), + model: model || options.model, + timestamp, + stepType: 'thinking', + } + } + accumulatedReasoning += reasoningDelta hasStreamedReasoningDeltas = true yield { - type: 'thinking', - id: responseId || generateId(), + type: 'STEP_FINISHED', + stepId: stepId || generateId(), model: model || options.model, timestamp, delta: reasoningDelta, @@ -355,7 +396,13 @@ export class OpenAI extends BaseAdapter< // Skip emitting chunks for content parts that we've already streamed via deltas // The done event is just a completion marker, not new content if (contentPart.type === 'output_text' && hasStreamedContentDeltas) { - // Content already accumulated from deltas, skip + // Emit TEXT_MESSAGE_END + yield { + type: 'TEXT_MESSAGE_END', + messageId: messageId || generateId(), + model: model || options.model, + timestamp, + } continue } if ( @@ -380,31 +427,35 @@ export class OpenAI extends BaseAdapter< index: chunk.output_index, name: item.name || '', }) + + // Emit TOOL_CALL_START when we first see the function call + yield { + type: 'TOOL_CALL_START', + toolCallId: item.id, + toolName: item.name || '', + model: model || options.model, + 
timestamp, + index: chunk.output_index, + } } } } if (chunk.type === 'response.function_call_arguments.done') { - const { item_id, output_index } = chunk + const { item_id } = chunk // Get the function name from metadata (captured in output_item.added) const metadata = toolCallMetadata.get(item_id) const name = metadata?.name || '' + // Emit TOOL_CALL_END with the complete arguments yield { - type: 'tool_call', - id: responseId || generateId(), + type: 'TOOL_CALL_END', + toolCallId: item_id, + toolName: name, model: model || options.model, timestamp, - index: output_index, - toolCall: { - id: item_id, - type: 'function', - function: { - name, - arguments: chunk.arguments, - }, - }, + input: this.safeJsonParse(chunk.arguments), } } @@ -416,8 +467,8 @@ export class OpenAI extends BaseAdapter< ) yield { - type: 'done', - id: responseId || generateId(), + type: 'RUN_FINISHED', + runId: responseId || generateId(), model: model || options.model, timestamp, usage: { @@ -431,8 +482,8 @@ export class OpenAI extends BaseAdapter< if (chunk.type === 'error') { yield { - type: 'error', - id: responseId || generateId(), + type: 'RUN_ERROR', + runId: responseId || undefined, model: model || options.model, timestamp, error: { @@ -452,8 +503,7 @@ export class OpenAI extends BaseAdapter< }, ) yield { - type: 'error', - id: generateId(), + type: 'RUN_ERROR', model: options.model, timestamp, error: { @@ -464,6 +514,14 @@ export class OpenAI extends BaseAdapter< } } + private safeJsonParse(jsonString: string): any { + try { + return JSON.parse(jsonString) + } catch { + return jsonString + } + } + /** * Maps common options to OpenAI-specific format * Handles translation of normalized options to OpenAI's API format diff --git a/packages/typescript/ai/src/core/chat.ts b/packages/typescript/ai/src/core/chat.ts index aebaf65a..9d5346d1 100644 --- a/packages/typescript/ai/src/core/chat.ts +++ b/packages/typescript/ai/src/core/chat.ts @@ -11,8 +11,8 @@ import type { AgentLoopStrategy, 
ChatOptions, ChatStreamOptionsForModel, - DoneStreamChunk, ModelMessage, + RunFinishedEvent, StreamChunk, Tool, ToolCall, @@ -53,7 +53,7 @@ class ChatEngine< private totalChunkCount = 0 private currentMessageId: string | null = null private accumulatedContent = '' - private doneChunk: DoneStreamChunk | null = null + private doneChunk: RunFinishedEvent | null = null private shouldEmitStreamEnd = true private earlyTermination = false private toolPhase: ToolPhaseResult = 'continue' @@ -215,68 +215,117 @@ class ChatEngine< private handleStreamChunk(chunk: StreamChunk): void { switch (chunk.type) { - case 'content': - this.handleContentChunk(chunk) + // AG-UI Event Types + case 'TEXT_MESSAGE_CONTENT': + this.handleTextMessageContentEvent(chunk) break - case 'tool_call': - this.handleToolCallChunk(chunk) + case 'TOOL_CALL_START': + this.handleToolCallStartEvent(chunk) break - case 'tool_result': - this.handleToolResultChunk(chunk) + case 'TOOL_CALL_ARGS': + this.handleToolCallArgsEvent(chunk) + break + case 'TOOL_CALL_END': + this.handleToolCallEndEvent(chunk) + break + case 'RUN_FINISHED': + this.handleRunFinishedEvent(chunk) + break + case 'RUN_ERROR': + this.handleRunErrorEvent(chunk) + break + case 'STEP_FINISHED': + this.handleStepFinishedEvent(chunk) + break + + // Legacy Event Types (backward compatibility) + case 'content': + this.handleLegacyContentChunk(chunk) break case 'done': - this.handleDoneChunk(chunk) + this.handleLegacyDoneChunk(chunk) break case 'error': - this.handleErrorChunk(chunk) + this.handleLegacyErrorChunk(chunk) break - case 'thinking': - this.handleThinkingChunk(chunk) + case 'tool_call': + this.handleLegacyToolCallChunk(chunk) + break + case 'tool_result': + this.handleLegacyToolResultChunk(chunk) break + default: + // RUN_STARTED, TEXT_MESSAGE_START, TEXT_MESSAGE_END, STEP_STARTED, + // STATE_SNAPSHOT, STATE_DELTA, CUSTOM, thinking, approval-requested, + // tool-input-available - no special handling needed break } } - private 
handleContentChunk(chunk: Extract) { - this.accumulatedContent = chunk.content + private handleTextMessageContentEvent( + chunk: Extract, + ) { + if (chunk.content) { + this.accumulatedContent = chunk.content + } else { + this.accumulatedContent += chunk.delta + } aiEventClient.emit('stream:chunk:content', { streamId: this.streamId, messageId: this.currentMessageId || undefined, - content: chunk.content, + content: this.accumulatedContent, delta: chunk.delta, timestamp: Date.now(), }) } - private handleToolCallChunk( - chunk: Extract, + private handleToolCallStartEvent( + chunk: Extract, ): void { - this.toolCallManager.addToolCallChunk(chunk) + this.toolCallManager.addToolCallStartEvent(chunk) aiEventClient.emit('stream:chunk:tool-call', { streamId: this.streamId, messageId: this.currentMessageId || undefined, - toolCallId: chunk.toolCall.id, - toolName: chunk.toolCall.function.name, - index: chunk.index, - arguments: chunk.toolCall.function.arguments, + toolCallId: chunk.toolCallId, + toolName: chunk.toolName, + index: chunk.index ?? 
0, + arguments: '', timestamp: Date.now(), }) } - private handleToolResultChunk( - chunk: Extract, + private handleToolCallArgsEvent( + chunk: Extract, ): void { - aiEventClient.emit('stream:chunk:tool-result', { + this.toolCallManager.addToolCallArgsEvent(chunk) + aiEventClient.emit('stream:chunk:tool-call', { streamId: this.streamId, messageId: this.currentMessageId || undefined, toolCallId: chunk.toolCallId, - result: chunk.content, + toolName: '', + index: 0, + arguments: chunk.delta, timestamp: Date.now(), }) } - private handleDoneChunk(chunk: DoneStreamChunk): void { + private handleToolCallEndEvent( + chunk: Extract, + ): void { + this.toolCallManager.completeToolCall(chunk.toolCallId, chunk.input) + if (chunk.result !== undefined) { + aiEventClient.emit('stream:chunk:tool-result', { + streamId: this.streamId, + messageId: this.currentMessageId || undefined, + toolCallId: chunk.toolCallId, + result: chunk.result, + timestamp: Date.now(), + }) + } + } + + private handleRunFinishedEvent(chunk: RunFinishedEvent): void { // Don't overwrite a tool_calls finishReason with a stop finishReason // This can happen when adapters send multiple done chunks if ( @@ -328,8 +377,8 @@ class ChatEngine< } } - private handleErrorChunk( - chunk: Extract, + private handleRunErrorEvent( + chunk: Extract, ): void { aiEventClient.emit('stream:chunk:error', { streamId: this.streamId, @@ -341,8 +390,8 @@ class ChatEngine< this.shouldEmitStreamEnd = false } - private handleThinkingChunk( - chunk: Extract, + private handleStepFinishedEvent( + chunk: Extract, ): void { aiEventClient.emit('stream:chunk:thinking', { streamId: this.streamId, @@ -353,6 +402,88 @@ class ChatEngine< }) } + // ============================================ + // Legacy Event Handlers (Backward Compatibility) + // ============================================ + + private handleLegacyContentChunk( + chunk: Extract, + ): void { + if (chunk.content) { + this.accumulatedContent = chunk.content + } else { + 
this.accumulatedContent += chunk.delta + } + aiEventClient.emit('stream:chunk:content', { + streamId: this.streamId, + messageId: this.currentMessageId || undefined, + content: this.accumulatedContent, + delta: chunk.delta, + timestamp: Date.now(), + }) + } + + private handleLegacyDoneChunk( + chunk: Extract, + ): void { + // Create a RUN_FINISHED-like chunk for compatibility + const runFinishedChunk: RunFinishedEvent = { + type: 'RUN_FINISHED', + runId: chunk.id, + model: chunk.model, + timestamp: chunk.timestamp, + finishReason: chunk.finishReason ?? 'stop', + usage: chunk.usage, + } + this.handleRunFinishedEvent(runFinishedChunk) + } + + private handleLegacyErrorChunk( + chunk: Extract, + ): void { + const errorMessage = + typeof chunk.error === 'string' ? chunk.error : chunk.error.message + aiEventClient.emit('stream:chunk:error', { + streamId: this.streamId, + messageId: this.currentMessageId || undefined, + error: errorMessage, + timestamp: Date.now(), + }) + this.earlyTermination = true + this.shouldEmitStreamEnd = false + } + + private handleLegacyToolCallChunk( + chunk: Extract, + ): void { + const toolCall = chunk.toolCall + this.toolCallManager.addToolCallChunk({ + toolCall, + index: chunk.index, + }) + aiEventClient.emit('stream:chunk:tool-call', { + streamId: this.streamId, + messageId: this.currentMessageId || undefined, + toolCallId: toolCall.id, + toolName: toolCall.function.name, + index: chunk.index, + arguments: toolCall.function.arguments, + timestamp: Date.now(), + }) + } + + private handleLegacyToolResultChunk( + chunk: Extract, + ): void { + aiEventClient.emit('stream:chunk:tool-result', { + streamId: this.streamId, + messageId: this.currentMessageId || undefined, + toolCallId: chunk.toolCallId, + result: chunk.content, + timestamp: Date.now(), + }) + } + private async *checkForPendingToolCalls(): AsyncGenerator< StreamChunk, ToolPhaseResult, @@ -542,7 +673,7 @@ class ChatEngine< private emitApprovalRequests( approvals: Array, - doneChunk: 
DoneStreamChunk, + doneChunk: RunFinishedEvent, ): Array { const chunks: Array = [] @@ -557,17 +688,20 @@ class ChatEngine< timestamp: Date.now(), }) + // Emit CUSTOM event for approval requests chunks.push({ - type: 'approval-requested', - id: doneChunk.id, - model: doneChunk.model, + type: 'CUSTOM', timestamp: Date.now(), - toolCallId: approval.toolCallId, - toolName: approval.toolName, - input: approval.input, - approval: { - id: approval.approvalId, - needsApproval: true, + model: doneChunk.model ?? '', + name: 'approval-requested', + value: { + toolCallId: approval.toolCallId, + toolName: approval.toolName, + input: approval.input, + approval: { + id: approval.approvalId, + needsApproval: true, + }, }, }) } @@ -577,7 +711,7 @@ class ChatEngine< private emitClientToolInputs( clientRequests: Array, - doneChunk: DoneStreamChunk, + doneChunk: RunFinishedEvent, ): Array { const chunks: Array = [] @@ -591,14 +725,17 @@ class ChatEngine< timestamp: Date.now(), }) + // Emit CUSTOM event for client tool inputs chunks.push({ - type: 'tool-input-available', - id: doneChunk.id, - model: doneChunk.model, + type: 'CUSTOM', timestamp: Date.now(), - toolCallId: clientTool.toolCallId, - toolName: clientTool.toolName, - input: clientTool.input, + model: doneChunk.model ?? '', + name: 'tool-input-available', + value: { + toolCallId: clientTool.toolCallId, + toolName: clientTool.toolName, + input: clientTool.input, + }, }) } @@ -607,7 +744,7 @@ class ChatEngine< private emitToolResults( results: Array, - doneChunk: DoneStreamChunk, + doneChunk: RunFinishedEvent, ): Array { const chunks: Array = [] @@ -624,16 +761,15 @@ class ChatEngine< }) const content = JSON.stringify(result.result) - const chunk: Extract = { - type: 'tool_result', - id: doneChunk.id, - model: doneChunk.model, + // Emit TOOL_CALL_END event with result + chunks.push({ + type: 'TOOL_CALL_END', timestamp: Date.now(), + model: doneChunk.model ?? 
'', toolCallId: result.toolCallId, - content, - } - - chunks.push(chunk) + toolName: result.toolName, + result: result.result, + }) this.messages = [ ...this.messages, @@ -670,10 +806,10 @@ class ChatEngine< return pending } - private createSyntheticDoneChunk(): DoneStreamChunk { + private createSyntheticDoneChunk(): RunFinishedEvent { return { - type: 'done', - id: this.createId('pending'), + type: 'RUN_FINISHED', + runId: this.createId('pending'), model: this.params.model, timestamp: Date.now(), finishReason: 'tool_calls', diff --git a/packages/typescript/ai/src/stream/processor.ts b/packages/typescript/ai/src/stream/processor.ts index d8441a33..d0585a05 100644 --- a/packages/typescript/ai/src/stream/processor.ts +++ b/packages/typescript/ai/src/stream/processor.ts @@ -426,36 +426,101 @@ export class StreamProcessor { } switch (chunk.type) { - case 'content': - this.handleContentChunk(chunk) + // Run lifecycle events + case 'RUN_STARTED': + // Run started - could be used for initialization break - case 'tool_call': - this.handleToolCallChunk(chunk) + case 'RUN_FINISHED': + this.handleRunFinishedEvent(chunk) break - case 'tool_result': - this.handleToolResultChunk(chunk) + case 'RUN_ERROR': + this.handleRunErrorEvent(chunk) + break + + // Text message events + case 'TEXT_MESSAGE_START': + // Message starting - could track message ID + break + + case 'TEXT_MESSAGE_CONTENT': + this.handleTextMessageContentEvent(chunk) + break + + case 'TEXT_MESSAGE_END': + // Message ended - finalize text if needed + break + + // Tool call events + case 'TOOL_CALL_START': + this.handleToolCallStartEvent(chunk) + break + + case 'TOOL_CALL_ARGS': + this.handleToolCallArgsEvent(chunk) + break + + case 'TOOL_CALL_END': + this.handleToolCallEndEvent(chunk) + break + + // Step/thinking events + case 'STEP_STARTED': + // Step started - could track step ID + break + + case 'STEP_FINISHED': + this.handleStepFinishedEvent(chunk) + break + + // State events + case 'STATE_SNAPSHOT': + // Full 
state sync - custom handling + break + + case 'STATE_DELTA': + // Incremental state update - custom handling + break + + // Custom events (including approval flows) + case 'CUSTOM': + this.handleCustomEvent(chunk) + break + + // ============================================ + // Legacy event types (backward compatibility) + // ============================================ + case 'content': + this.handleLegacyContentChunk(chunk) break case 'done': - this.handleDoneChunk(chunk) + this.handleLegacyDoneChunk(chunk) break case 'error': - this.handleErrorChunk(chunk) + this.handleLegacyErrorChunk(chunk) + break + + case 'tool_call': + this.handleLegacyToolCallChunk(chunk) + break + + case 'tool_result': + this.handleLegacyToolResultChunk(chunk) break case 'thinking': - this.handleThinkingChunk(chunk) + this.handleLegacyThinkingChunk(chunk) break case 'approval-requested': - this.handleApprovalRequestedChunk(chunk) + this.handleLegacyApprovalRequestedChunk(chunk) break case 'tool-input-available': - this.handleToolInputAvailableChunk(chunk) + this.handleLegacyToolInputAvailableChunk(chunk) break default: @@ -465,10 +530,10 @@ export class StreamProcessor { } /** - * Handle a content chunk + * Handle TEXT_MESSAGE_CONTENT event (AG-UI) */ - private handleContentChunk( - chunk: Extract, + private handleTextMessageContentEvent( + chunk: Extract, ): void { // Content arriving means all current tool calls are complete this.completeAllToolCalls() @@ -498,7 +563,7 @@ export class StreamProcessor { // Prefer delta over content - delta is the incremental change if (chunk.delta !== '') { nextText = currentText + chunk.delta - } else if (chunk.content !== '') { + } else if (chunk.content && chunk.content !== '') { // Fallback: use content if delta is not provided if (chunk.content.startsWith(currentText)) { nextText = chunk.content @@ -514,9 +579,8 @@ export class StreamProcessor { this.currentSegmentText = nextText this.totalTextContent += textDelta - // Use delta for chunk strategy 
if available - // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition - const chunkPortion = chunk.delta ?? chunk.content ?? '' + // Use delta for chunk strategy + const chunkPortion = chunk.delta const shouldEmit = this.chunkStrategy.shouldEmit( chunkPortion, this.currentSegmentText, @@ -527,37 +591,28 @@ export class StreamProcessor { } /** - * Handle a tool call chunk + * Handle TOOL_CALL_START event (AG-UI) */ - private handleToolCallChunk( - chunk: Extract, + private handleToolCallStartEvent( + chunk: Extract, ): void { // Mark that we've seen tool calls since the last text segment this.hasToolCallsSinceTextStart = true - const toolCallId = chunk.toolCall.id + const toolCallId = chunk.toolCallId const existingToolCall = this.toolCalls.get(toolCallId) if (!existingToolCall) { // New tool call starting - const initialState: ToolCallState = chunk.toolCall.function.arguments - ? 'input-streaming' - : 'awaiting-input' + const initialState: ToolCallState = 'awaiting-input' const newToolCall: InternalToolCallState = { - id: chunk.toolCall.id, - name: chunk.toolCall.function.name, - arguments: chunk.toolCall.function.arguments || '', + id: toolCallId, + name: chunk.toolName, + arguments: '', state: initialState, parsedArguments: undefined, - index: chunk.index, - } - - // Try to parse the arguments - if (chunk.toolCall.function.arguments) { - newToolCall.parsedArguments = this.jsonParser.parse( - chunk.toolCall.function.arguments, - ) + index: chunk.index ?? 
this.toolCallOrder.length, } this.toolCalls.set(toolCallId, newToolCall) @@ -567,60 +622,70 @@ export class StreamProcessor { const actualIndex = this.toolCallOrder.indexOf(toolCallId) // Emit legacy lifecycle event - this.handlers.onToolCallStart?.( - actualIndex, - chunk.toolCall.id, - chunk.toolCall.function.name, - ) + this.handlers.onToolCallStart?.(actualIndex, toolCallId, chunk.toolName) // Emit legacy state change event this.handlers.onToolCallStateChange?.( actualIndex, - chunk.toolCall.id, - chunk.toolCall.function.name, + toolCallId, + chunk.toolName, initialState, - chunk.toolCall.function.arguments || '', - newToolCall.parsedArguments, + '', + undefined, ) - // Emit initial delta - if (chunk.toolCall.function.arguments) { - this.handlers.onToolCallDelta?.( - actualIndex, - chunk.toolCall.function.arguments, - ) - } - // Update UIMessage if (this.currentAssistantMessageId) { this.messages = updateToolCallPart( this.messages, this.currentAssistantMessageId, { - id: chunk.toolCall.id, - name: chunk.toolCall.function.name, - arguments: chunk.toolCall.function.arguments || '', + id: toolCallId, + name: chunk.toolName, + arguments: '', state: initialState, }, ) + + // If there's approval metadata, update it + if (chunk.approval) { + this.messages = updateToolCallApproval( + this.messages, + this.currentAssistantMessageId, + toolCallId, + chunk.approval.id, + ) + } this.emitMessagesChange() // Emit new granular event this.events.onToolCallStateChange?.( this.currentAssistantMessageId, - chunk.toolCall.id, + toolCallId, initialState, - chunk.toolCall.function.arguments || '', + '', ) } - } else { - // Continuing existing tool call + } + } + + /** + * Handle TOOL_CALL_ARGS event (AG-UI) + */ + private handleToolCallArgsEvent( + chunk: Extract, + ): void { + const toolCallId = chunk.toolCallId + const existingToolCall = this.toolCalls.get(toolCallId) + + if (existingToolCall) { const wasAwaitingInput = existingToolCall.state === 'awaiting-input' - 
existingToolCall.arguments += chunk.toolCall.function.arguments || '' + // Append delta to arguments + existingToolCall.arguments += chunk.delta || '' // Update state - if (wasAwaitingInput && chunk.toolCall.function.arguments) { + if (wasAwaitingInput && chunk.delta) { existingToolCall.state = 'input-streaming' } @@ -643,11 +708,8 @@ export class StreamProcessor { ) // Emit delta - if (chunk.toolCall.function.arguments) { - this.handlers.onToolCallDelta?.( - actualIndex, - chunk.toolCall.function.arguments, - ) + if (chunk.delta) { + this.handlers.onToolCallDelta?.(actualIndex, chunk.delta) } // Update UIMessage @@ -676,47 +738,79 @@ export class StreamProcessor { } /** - * Handle a tool result chunk + * Handle TOOL_CALL_END event (AG-UI) + * This handles both tool completion and tool results */ - private handleToolResultChunk( - chunk: Extract, + private handleToolCallEndEvent( + chunk: Extract, ): void { - const state: ToolResultState = 'complete' + const toolCallId = chunk.toolCallId + const existingToolCall = this.toolCalls.get(toolCallId) - // Emit legacy handler - this.handlers.onToolResultStateChange?.( - chunk.toolCallId, - chunk.content, - state, - ) + if (existingToolCall) { + // Mark tool call as complete + existingToolCall.state = 'input-complete' + if (chunk.input) { + existingToolCall.parsedArguments = chunk.input + } + } - // Update UIMessage if we have a current assistant message - if (this.currentAssistantMessageId) { - this.messages = updateToolResultPart( - this.messages, - this.currentAssistantMessageId, - chunk.toolCallId, - chunk.content, - state, + // If there's a result, this is a tool result + if (chunk.result !== undefined) { + const state: ToolResultState = 'complete' + const resultContent = + typeof chunk.result === 'string' + ? 
chunk.result + : JSON.stringify(chunk.result) + + // Emit handler + this.handlers.onToolResultStateChange?.(toolCallId, resultContent, state) + + // Update UIMessage if we have a current assistant message + if (this.currentAssistantMessageId) { + this.messages = updateToolResultPart( + this.messages, + this.currentAssistantMessageId, + toolCallId, + resultContent, + state, + ) + this.emitMessagesChange() + } + } else if (chunk.input !== undefined) { + // This is tool input available (client tool ready for execution) + // Emit legacy handler + this.handlers.onToolInputAvailable?.( + toolCallId, + chunk.toolName, + chunk.input, ) - this.emitMessagesChange() + + // Emit new event + this.events.onToolCall?.({ + toolCallId, + toolName: chunk.toolName, + input: chunk.input, + }) } } /** - * Handle a done chunk + * Handle RUN_FINISHED event (AG-UI) */ - private handleDoneChunk(chunk: Extract): void { + private handleRunFinishedEvent( + chunk: Extract, + ): void { this.finishReason = chunk.finishReason this.isDone = true this.completeAllToolCalls() } /** - * Handle an error chunk + * Handle RUN_ERROR event (AG-UI) */ - private handleErrorChunk( - chunk: Extract, + private handleRunErrorEvent( + chunk: Extract, ): void { // Emit legacy handler this.handlers.onError?.(chunk.error) @@ -726,16 +820,16 @@ export class StreamProcessor { } /** - * Handle a thinking chunk + * Handle STEP_FINISHED event (AG-UI) - for thinking/reasoning content */ - private handleThinkingChunk( - chunk: Extract, + private handleStepFinishedEvent( + chunk: Extract, ): void { const previous = this.thinkingContent let nextThinking = previous // Prefer delta over content - if (chunk.delta !== '') { + if (chunk.delta && chunk.delta !== '') { nextThinking = previous + chunk.delta } else if (chunk.content !== '') { if (chunk.content.startsWith(previous)) { @@ -770,69 +864,57 @@ export class StreamProcessor { } /** - * Handle an approval-requested chunk + * Handle CUSTOM event (AG-UI) - for approval flows 
and other custom events */ - private handleApprovalRequestedChunk( - chunk: Extract, + private handleCustomEvent( + chunk: Extract, ): void { - // Emit legacy handler - this.handlers.onApprovalRequested?.( - chunk.toolCallId, - chunk.toolName, - chunk.input, - chunk.approval.id, - ) + // Handle approval-requested custom event + if (chunk.name === 'approval-requested') { + const value = chunk.value as { + toolCallId: string + toolName: string + input: any + approval: { id: string } + } - // Update UIMessage with approval metadata - if (this.currentAssistantMessageId) { - this.messages = updateToolCallApproval( - this.messages, - this.currentAssistantMessageId, - chunk.toolCallId, - chunk.approval.id, + // Emit legacy handler + this.handlers.onApprovalRequested?.( + value.toolCallId, + value.toolName, + value.input, + value.approval.id, ) - this.emitMessagesChange() - } - - // Emit new event - this.events.onApprovalRequest?.({ - toolCallId: chunk.toolCallId, - toolName: chunk.toolName, - input: chunk.input, - approvalId: chunk.approval.id, - }) - } - /** - * Handle a tool-input-available chunk - */ - private handleToolInputAvailableChunk( - chunk: Extract, - ): void { - // Emit legacy handler - this.handlers.onToolInputAvailable?.( - chunk.toolCallId, - chunk.toolName, - chunk.input, - ) + // Update UIMessage with approval metadata + if (this.currentAssistantMessageId) { + this.messages = updateToolCallApproval( + this.messages, + this.currentAssistantMessageId, + value.toolCallId, + value.approval.id, + ) + this.emitMessagesChange() + } - // Emit new event - this.events.onToolCall?.({ - toolCallId: chunk.toolCallId, - toolName: chunk.toolName, - input: chunk.input, - }) + // Emit new event + this.events.onApprovalRequest?.({ + toolCallId: value.toolCallId, + toolName: value.toolName, + input: value.input, + approvalId: value.approval.id, + }) + } } /** * Detect if an incoming content chunk represents a NEW text segment */ private isNewTextSegment( - chunk: Extract, 
+ chunk: Extract, previous: string, ): boolean { - // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition - if (chunk.delta !== undefined && chunk.content !== undefined) { + if (chunk.content !== undefined) { if (chunk.content.length < previous.length) { return true } @@ -1018,6 +1100,411 @@ export class StreamProcessor { } } + // ============================================ + // Legacy Event Handlers (Backward Compatibility) + // ============================================ + + /** + * Handle legacy 'content' chunk type + */ + private handleLegacyContentChunk( + chunk: Extract, + ): void { + // Convert to TEXT_MESSAGE_CONTENT handling + this.completeAllToolCalls() + + const previousSegment = this.currentSegmentText + + const isNewSegment = + this.hasToolCallsSinceTextStart && + previousSegment.length > 0 && + this.isNewTextSegmentFromLegacy(chunk, previousSegment) + + if (isNewSegment) { + if (previousSegment !== this.lastEmittedText) { + this.emitTextUpdate() + } + this.currentSegmentText = '' + this.lastEmittedText = '' + this.hasToolCallsSinceTextStart = false + } + + const currentText = this.currentSegmentText + let nextText = currentText + + // In a new segment (after tool call), prefer content field if it contains more + // than just the delta (i.e., it includes accumulated text from before) + if ( + isNewSegment && + chunk.content && + chunk.content !== '' && + chunk.content.length > chunk.delta.length + ) { + // Use content field for accumulated text in new segments + nextText = chunk.content + } else if (chunk.delta && chunk.delta !== '') { + nextText = currentText + chunk.delta + } else if (chunk.content && chunk.content !== '') { + if (chunk.content.startsWith(currentText)) { + nextText = chunk.content + } else if (currentText.startsWith(chunk.content)) { + nextText = currentText + } else { + nextText = currentText + chunk.content + } + } + + const textDelta = nextText.slice(currentText.length) + this.currentSegmentText = nextText + 
this.totalTextContent += textDelta + + // Reset hasToolCallsSinceTextStart after processing content + // This prevents subsequent chunks in the same segment from being treated as new segments + this.hasToolCallsSinceTextStart = false + + const chunkPortion = chunk.delta || chunk.content + const shouldEmit = this.chunkStrategy.shouldEmit( + chunkPortion, + this.currentSegmentText, + ) + if (shouldEmit && this.currentSegmentText !== this.lastEmittedText) { + this.emitTextUpdate() + } + } + + /** + * Check if this is a new text segment from legacy chunk + */ + private isNewTextSegmentFromLegacy( + chunk: Extract, + previousSegment: string, + ): boolean { + if (!chunk.delta && !chunk.content) return false + const newContent = chunk.delta || chunk.content || '' + return ( + !newContent.startsWith(previousSegment.slice(0, 10)) && + !previousSegment.endsWith(newContent.slice(-10)) + ) + } + + /** + * Handle legacy 'done' chunk type + */ + private handleLegacyDoneChunk( + chunk: Extract, + ): void { + this.finishReason = chunk.finishReason ?? 'stop' + this.isDone = true + } + + /** + * Handle legacy 'error' chunk type + */ + private handleLegacyErrorChunk( + chunk: Extract, + ): void { + const errorMessage = + typeof chunk.error === 'string' ? chunk.error : chunk.error.message + const errorCode = + typeof chunk.error === 'object' ? chunk.error.code : chunk.code + this.handlers.onError?.(new Error(errorMessage)) + this.events.onError?.(new Error(`${errorCode}: ${errorMessage}`)) + } + + /** + * Handle legacy 'tool_call' chunk type + */ + private handleLegacyToolCallChunk( + chunk: Extract, + ): void { + this.hasToolCallsSinceTextStart = true + + const toolCall = chunk.toolCall + const toolCallId = toolCall.id + const existingToolCall = this.toolCalls.get(toolCallId) + + if (!existingToolCall) { + // New tool call - use approval-requested state if approval is needed + const initialState: ToolCallState = chunk.approval?.needsApproval + ? 
'approval-requested' + : 'awaiting-input' + + const actualIndex = this.toolCallOrder.length + + const newToolCall: InternalToolCallState = { + id: toolCallId, + name: toolCall.function.name, + arguments: toolCall.function.arguments, + state: initialState, + parsedArguments: undefined, + index: actualIndex, + } + + this.toolCalls.set(toolCallId, newToolCall) + this.toolCallOrder.push(toolCallId) + + // Emit legacy lifecycle event + this.handlers.onToolCallStart?.(actualIndex, toolCallId, toolCall.function.name) + + // Emit legacy delta for initial arguments + if (toolCall.function.arguments) { + this.handlers.onToolCallDelta?.(actualIndex, toolCall.function.arguments) + } + + // Update UIMessage + if (this.currentAssistantMessageId) { + this.messages = updateToolCallPart( + this.messages, + this.currentAssistantMessageId, + { + id: toolCallId, + name: toolCall.function.name, + arguments: toolCall.function.arguments, + state: initialState, + }, + ) + if (chunk.approval) { + this.messages = updateToolCallApproval( + this.messages, + this.currentAssistantMessageId, + toolCallId, + chunk.approval.id, + ) + } + this.emitMessagesChange() + } + + this.handlers.onToolCallStateChange?.( + actualIndex, + toolCallId, + toolCall.function.name, + initialState, + toolCall.function.arguments, + this.jsonParser.parse(toolCall.function.arguments), + ) + + this.events.onToolCallStateChange?.( + this.currentAssistantMessageId || '', + toolCallId, + initialState, + toolCall.function.arguments, + ) + } else { + // Update existing tool call arguments + existingToolCall.name = + existingToolCall.name || toolCall.function.name + existingToolCall.arguments += toolCall.function.arguments + + // Emit delta event for additional arguments + if (toolCall.function.arguments) { + this.handlers.onToolCallDelta?.(existingToolCall.index, toolCall.function.arguments) + } + } + } + + /** + * Handle legacy 'tool_result' chunk type + */ + private handleLegacyToolResultChunk( + chunk: Extract, + ): void { 
+ const toolCallId = chunk.toolCallId + const existingToolCall = this.toolCalls.get(toolCallId) + + if (existingToolCall) { + existingToolCall.state = 'input-complete' + } + + if (this.currentAssistantMessageId) { + // Add a tool-result part (separate from the tool-call part) + this.messages = updateToolResultPart( + this.messages, + this.currentAssistantMessageId, + toolCallId, + chunk.content, + 'complete', + ) + this.emitMessagesChange() + } + + this.handlers.onToolResultStateChange?.( + toolCallId, + chunk.content, + 'complete', + ) + } + + /** + * Handle legacy 'thinking' chunk type + */ + private handleLegacyThinkingChunk( + chunk: Extract, + ): void { + const previousThinking = this.thinkingContent + + if (chunk.delta && chunk.delta !== '') { + this.thinkingContent = previousThinking + chunk.delta + } else if (chunk.content) { + if (chunk.content.startsWith(previousThinking)) { + this.thinkingContent = chunk.content + } else if (previousThinking.startsWith(chunk.content)) { + // Current thinking already includes this content + } else { + this.thinkingContent = previousThinking + chunk.content + } + } + + this.handlers.onThinkingUpdate?.(this.thinkingContent) + + if (this.currentAssistantMessageId) { + this.messages = updateThinkingPart( + this.messages, + this.currentAssistantMessageId, + this.thinkingContent, + ) + this.emitMessagesChange() + + // Emit new granular event + this.events.onThinkingUpdate?.( + this.currentAssistantMessageId, + this.thinkingContent, + ) + } + } + + /** + * Handle legacy 'approval-requested' chunk type + */ + private handleLegacyApprovalRequestedChunk( + chunk: Extract, + ): void { + const toolCallId = chunk.toolCallId + const existingToolCall = this.toolCalls.get(toolCallId) + + if (existingToolCall) { + existingToolCall.state = 'approval-requested' + existingToolCall.parsedArguments = chunk.input + + if (this.currentAssistantMessageId) { + this.messages = updateToolCallPart( + this.messages, + this.currentAssistantMessageId, + 
{ + id: toolCallId, + name: existingToolCall.name, + arguments: JSON.stringify(chunk.input), + state: 'approval-requested', + }, + ) + if (chunk.approval) { + this.messages = updateToolCallApproval( + this.messages, + this.currentAssistantMessageId, + toolCallId, + chunk.approval.id, + ) + } + this.emitMessagesChange() + } + + this.handlers.onToolCallStateChange?.( + existingToolCall.index, + toolCallId, + chunk.toolName, + 'approval-requested', + JSON.stringify(chunk.input), + chunk.input, + ) + + this.events.onToolCallStateChange?.( + this.currentAssistantMessageId || '', + toolCallId, + 'approval-requested', + JSON.stringify(chunk.input), + ) + } + + // Always call onApprovalRequested and onApprovalRequest regardless of existingToolCall + this.handlers.onApprovalRequested?.( + toolCallId, + chunk.toolName, + chunk.input, + chunk.approval?.id || '', + ) + + this.events.onApprovalRequest?.({ + toolCallId, + toolName: chunk.toolName, + input: chunk.input, + approvalId: chunk.approval?.id || '', + }) + } + + /** + * Handle legacy 'tool-input-available' chunk type + */ + private handleLegacyToolInputAvailableChunk( + chunk: Extract, + ): void { + const toolCallId = chunk.toolCallId + const existingToolCall = this.toolCalls.get(toolCallId) + + if (existingToolCall) { + existingToolCall.state = 'input-complete' + existingToolCall.parsedArguments = chunk.input + + if (this.currentAssistantMessageId) { + this.messages = updateToolCallPart( + this.messages, + this.currentAssistantMessageId, + { + id: toolCallId, + name: existingToolCall.name, + arguments: JSON.stringify(chunk.input), + state: 'input-complete', + }, + ) + this.emitMessagesChange() + } + + this.handlers.onToolCallStateChange?.( + existingToolCall.index, + toolCallId, + chunk.toolName, + 'input-complete', + JSON.stringify(chunk.input), + chunk.input, + ) + + this.events.onToolCallStateChange?.( + this.currentAssistantMessageId || '', + toolCallId, + 'input-complete', + JSON.stringify(chunk.input), + ) + + // 
Also invoke the onToolCall handler + this.events.onToolCall?.({ + toolCallId, + toolName: chunk.toolName, + input: chunk.input, + }) + } + + // Always call onToolInputAvailable and onToolCall regardless of existingToolCall + this.handlers.onToolInputAvailable?.( + toolCallId, + chunk.toolName, + chunk.input, + ) + + this.events.onToolCall?.({ + toolCallId, + toolName: chunk.toolName, + input: chunk.input, + }) + } + /** * Start recording chunks */ diff --git a/packages/typescript/ai/src/tools/tool-calls.ts b/packages/typescript/ai/src/tools/tool-calls.ts index 4329f6dc..2b453f82 100644 --- a/packages/typescript/ai/src/tools/tool-calls.ts +++ b/packages/typescript/ai/src/tools/tool-calls.ts @@ -1,9 +1,11 @@ import type { - DoneStreamChunk, ModelMessage, + RunFinishedEvent, Tool, ToolCall, - ToolResultStreamChunk, + ToolCallArgsEvent, + ToolCallEndEvent, + ToolCallStartEvent, } from '../types' /** @@ -47,8 +49,49 @@ export class ToolCallManager { } /** - * Add a tool call chunk to the accumulator + * Add a TOOL_CALL_START event to begin tracking a tool call + */ + addToolCallStartEvent(chunk: ToolCallStartEvent): void { + const index = chunk.index ?? 
this.toolCallsMap.size + this.toolCallsMap.set(index, { + id: chunk.toolCallId, + type: 'function', + function: { + name: chunk.toolName, + arguments: '', + }, + }) + } + + /** + * Add a TOOL_CALL_ARGS event to accumulate arguments + */ + addToolCallArgsEvent(chunk: ToolCallArgsEvent): void { + // Find the tool call by ID + for (const [, toolCall] of this.toolCallsMap.entries()) { + if (toolCall.id === chunk.toolCallId) { + toolCall.function.arguments += chunk.delta + break + } + } + } + + /** + * Complete a tool call with its final input + */ + completeToolCall(toolCallId: string, input?: any): void { + for (const [, toolCall] of this.toolCallsMap.entries()) { + if (toolCall.id === toolCallId && input !== undefined) { + toolCall.function.arguments = JSON.stringify(input) + break + } + } + } + + /** + * Add a tool call chunk to the accumulator (legacy format) * Handles streaming tool calls by accumulating arguments + * @deprecated Use addToolCallStartEvent and addToolCallArgsEvent instead */ addToolCallChunk(chunk: { toolCall: { @@ -106,11 +149,11 @@ export class ToolCallManager { /** * Execute all tool calls and return tool result messages - * Also yields tool_result chunks for streaming + * Also yields TOOL_CALL_END events for streaming */ async *executeTools( - doneChunk: DoneStreamChunk, - ): AsyncGenerator, void> { + doneChunk: RunFinishedEvent, + ): AsyncGenerator, void> { const toolCallsArray = this.getToolCalls() const toolResults: Array = [] @@ -166,14 +209,14 @@ export class ToolCallManager { toolResultContent = `Tool ${toolCall.function.name} does not have an execute function` } - // Emit tool_result chunk so callers can track tool execution + // Emit TOOL_CALL_END event with result yield { - type: 'tool_result', - id: doneChunk.id, - model: doneChunk.model, + type: 'TOOL_CALL_END', timestamp: Date.now(), + model: doneChunk.model, toolCallId: toolCall.id, - content: toolResultContent, + toolName: toolCall.function.name, + result: toolResultContent, } // 
Add tool result message diff --git a/packages/typescript/ai/src/types.ts b/packages/typescript/ai/src/types.ts index c4d71402..ae7dc992 100644 --- a/packages/typescript/ai/src/types.ts +++ b/packages/typescript/ai/src/types.ts @@ -502,51 +502,71 @@ export interface ChatOptions< abortController?: AbortController } -export type StreamChunkType = +// ============================================================================ +// AG-UI Protocol Event Types +// ============================================================================ + +/** + * AG-UI Protocol event types. + * Based on the AG-UI specification for agent-user interaction. + * @see https://docs.ag-ui.com/concepts/events + * + * Includes legacy type aliases for backward compatibility during migration. + */ +export type EventType = + // AG-UI Standard Events + | 'RUN_STARTED' + | 'RUN_FINISHED' + | 'RUN_ERROR' + | 'TEXT_MESSAGE_START' + | 'TEXT_MESSAGE_CONTENT' + | 'TEXT_MESSAGE_END' + | 'TOOL_CALL_START' + | 'TOOL_CALL_ARGS' + | 'TOOL_CALL_END' + | 'STEP_STARTED' + | 'STEP_FINISHED' + | 'STATE_SNAPSHOT' + | 'STATE_DELTA' + | 'CUSTOM' + // Legacy types (deprecated, for backward compatibility) | 'content' - | 'tool_call' - | 'tool_result' | 'done' | 'error' + | 'tool_call' + | 'tool_result' + | 'thinking' | 'approval-requested' | 'tool-input-available' - | 'thinking' -export interface BaseStreamChunk { - type: StreamChunkType - id: string - model: string +/** + * Base structure for all AG-UI events. + * Extends AG-UI spec with TanStack AI additions (model field). 
+ */ +export interface BaseEvent { + type: EventType timestamp: number + /** TanStack AI addition: Model identifier for multi-model support */ + model?: string + /** Original provider event for debugging/advanced use cases */ + rawEvent?: unknown } -export interface ContentStreamChunk extends BaseStreamChunk { - type: 'content' - delta: string // The incremental content token - content: string // Full accumulated content so far - role?: 'assistant' -} - -export interface ToolCallStreamChunk extends BaseStreamChunk { - type: 'tool_call' - toolCall: { - id: string - type: 'function' - function: { - name: string - arguments: string // Incremental JSON arguments - } - } - index: number -} - -export interface ToolResultStreamChunk extends BaseStreamChunk { - type: 'tool_result' - toolCallId: string - content: string +/** + * Emitted when a run starts. + */ +export interface RunStartedEvent extends BaseEvent { + type: 'RUN_STARTED' + runId: string + threadId?: string } -export interface DoneStreamChunk extends BaseStreamChunk { - type: 'done' +/** + * Emitted when a run completes successfully. + */ +export interface RunFinishedEvent extends BaseEvent { + type: 'RUN_FINISHED' + runId: string finishReason: 'stop' | 'length' | 'content_filter' | 'tool_calls' | null usage?: { promptTokens: number @@ -555,50 +575,292 @@ export interface DoneStreamChunk extends BaseStreamChunk { } } -export interface ErrorStreamChunk extends BaseStreamChunk { - type: 'error' +/** + * Emitted when an error occurs during a run. + */ +export interface RunErrorEvent extends BaseEvent { + type: 'RUN_ERROR' + runId?: string error: { message: string code?: string } } -export interface ApprovalRequestedStreamChunk extends BaseStreamChunk { - type: 'approval-requested' +/** + * Emitted when a text message starts. 
+ */ +export interface TextMessageStartEvent extends BaseEvent { + type: 'TEXT_MESSAGE_START' + messageId: string + role: 'assistant' +} + +/** + * Emitted when text content is generated (streaming tokens). + */ +export interface TextMessageContentEvent extends BaseEvent { + type: 'TEXT_MESSAGE_CONTENT' + messageId: string + delta: string + /** TanStack AI addition: Full accumulated content so far */ + content?: string +} + +/** + * Emitted when a text message completes. + */ +export interface TextMessageEndEvent extends BaseEvent { + type: 'TEXT_MESSAGE_END' + messageId: string +} + +/** + * Emitted when a tool call starts. + */ +export interface ToolCallStartEvent extends BaseEvent { + type: 'TOOL_CALL_START' toolCallId: string toolName: string - input: any - approval: { + /** Index for parallel tool calls */ + index?: number + /** Approval metadata if tool requires user approval */ + approval?: { id: string needsApproval: true } } -export interface ToolInputAvailableStreamChunk extends BaseStreamChunk { - type: 'tool-input-available' +/** + * Emitted when tool call arguments are streaming. + */ +export interface ToolCallArgsEvent extends BaseEvent { + type: 'TOOL_CALL_ARGS' + toolCallId: string + /** Incremental JSON arguments delta */ + delta: string + /** Full accumulated arguments so far */ + args?: string +} + +/** + * Emitted when a tool call completes (with optional result). + */ +export interface ToolCallEndEvent extends BaseEvent { + type: 'TOOL_CALL_END' toolCallId: string toolName: string - input: any + /** Final parsed input arguments */ + input?: unknown + /** Tool execution result (present when tool has executed) */ + result?: unknown } -export interface ThinkingStreamChunk extends BaseStreamChunk { - type: 'thinking' - delta?: string // The incremental thinking token - content: string // Full accumulated thinking content so far +/** + * Emitted when a reasoning/thinking step starts. 
+ */ +export interface StepStartedEvent extends BaseEvent { + type: 'STEP_STARTED' + stepId: string + stepType: 'thinking' | 'reasoning' | 'planning' } /** - * Chunk returned by the sdk during streaming chat completions. + * Emitted when a reasoning/thinking step completes or streams content. + */ +export interface StepFinishedEvent extends BaseEvent { + type: 'STEP_FINISHED' + stepId: string + /** Incremental thinking token */ + delta?: string + /** Full accumulated thinking content */ + content: string +} + +/** + * Emitted for full state synchronization. + */ +export interface StateSnapshotEvent extends BaseEvent { + type: 'STATE_SNAPSHOT' + state: Record +} + +/** + * Emitted for incremental state updates. + */ +export interface StateDeltaEvent extends BaseEvent { + type: 'STATE_DELTA' + delta: Array<{ + op: 'add' | 'remove' | 'replace' + path: string + value?: unknown + }> +} + +/** + * Custom event for extensibility. + * Used for features not covered by standard AG-UI events (e.g., approval flows). + */ +export interface CustomEvent extends BaseEvent { + type: 'CUSTOM' + name: string + value: unknown +} + +/** + * Union type for all AG-UI events. + * This is the primary type for streaming chat completions. + * Includes legacy types for backward compatibility. 
*/ export type StreamChunk = + // AG-UI Standard Events + | RunStartedEvent + | RunFinishedEvent + | RunErrorEvent + | TextMessageStartEvent + | TextMessageContentEvent + | TextMessageEndEvent + | ToolCallStartEvent + | ToolCallArgsEvent + | ToolCallEndEvent + | StepStartedEvent + | StepFinishedEvent + | StateSnapshotEvent + | StateDeltaEvent + | CustomEvent + // Legacy types (deprecated) | ContentStreamChunk - | ToolCallStreamChunk - | ToolResultStreamChunk | DoneStreamChunk | ErrorStreamChunk + | ToolCallStreamChunk + | ToolResultStreamChunk + | ThinkingStreamChunk | ApprovalRequestedStreamChunk | ToolInputAvailableStreamChunk - | ThinkingStreamChunk + +// Legacy type aliases for transition (can be removed in future version) +export type StreamChunkType = EventType + +// ============================================================================ +// Legacy Chunk Type Aliases (Deprecated - for backward compatibility) +// ============================================================================ +// These types provide backward compatibility during the transition to AG-UI. +// They map old chunk type names to the new AG-UI event types. +// These will be removed in a future major version. 
+ +/** + * @deprecated Use TextMessageContentEvent instead + */ +export interface ContentStreamChunk { + type: 'content' + id: string + model: string + timestamp: number + /** Incremental text delta */ + delta: string + /** Full accumulated content so far */ + content: string + /** Role of the message */ + role?: 'assistant' +} + +/** + * @deprecated Use RunFinishedEvent instead + */ +export interface DoneStreamChunk { + type: 'done' + id: string + model: string + timestamp: number + finishReason?: 'stop' | 'length' | 'content_filter' | 'tool_calls' | null + usage?: { + promptTokens: number + completionTokens: number + totalTokens: number + } +} + +/** + * @deprecated Use RunErrorEvent instead + */ +export interface ErrorStreamChunk { + type: 'error' + id: string + model: string + timestamp: number + error: string | { message: string; code?: string } + code?: string +} + +/** + * @deprecated Use ToolCallStartEvent and ToolCallArgsEvent instead + */ +export interface ToolCallStreamChunk { + type: 'tool_call' + id: string + model: string + timestamp: number + toolCall: ToolCall + index: number + approval?: { + id: string + needsApproval: true + } +} + +/** + * @deprecated Use ToolCallEndEvent instead + */ +export interface ToolResultStreamChunk { + type: 'tool_result' + id: string + model: string + timestamp: number + toolCallId: string + content: string +} + +/** + * @deprecated Use StepStartedEvent/StepFinishedEvent instead + */ +export interface ThinkingStreamChunk { + type: 'thinking' + id: string + model: string + timestamp: number + delta?: string + content: string +} + +/** + * @deprecated Use CustomEvent with name='approval-requested' instead + */ +export interface ApprovalRequestedStreamChunk { + type: 'approval-requested' + id: string + model: string + timestamp: number + toolCallId: string + toolName: string + input: Record + approval?: { + id: string + needsApproval: true + } +} + +/** + * @deprecated Use CustomEvent with name='tool-input-available' 
instead + */ +export interface ToolInputAvailableStreamChunk { + type: 'tool-input-available' + id: string + model: string + timestamp: number + toolCallId: string + toolName: string + input: Record +} // Simple streaming format for basic chat completions // Converted to StreamChunk format by convertChatCompletionStream() diff --git a/packages/typescript/ai/src/utilities/stream-to-response.ts b/packages/typescript/ai/src/utilities/stream-to-response.ts index 19069204..186b9cbd 100644 --- a/packages/typescript/ai/src/utilities/stream-to-response.ts +++ b/packages/typescript/ai/src/utilities/stream-to-response.ts @@ -50,11 +50,12 @@ export function toServerSentEventsStream( return } - // Send error chunk + // Send error event (AG-UI RUN_ERROR) controller.enqueue( encoder.encode( `data: ${JSON.stringify({ - type: 'error', + type: 'RUN_ERROR', + timestamp: Date.now(), error: { message: error.message || 'Unknown error occurred', code: error.code, diff --git a/packages/typescript/ai/tests/ai-chat.test.ts b/packages/typescript/ai/tests/ai-chat.test.ts index 2ec8a650..156a39de 100644 --- a/packages/typescript/ai/tests/ai-chat.test.ts +++ b/packages/typescript/ai/tests/ai-chat.test.ts @@ -455,7 +455,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect(tool.execute).toHaveBeenCalledWith({ location: 'Paris' }) expect(adapter.chatStreamCallCount).toBeGreaterThanOrEqual(2) - const toolResultChunks = chunks.filter((c) => c.type === 'tool_result') + const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) expect(toolResultChunks).toHaveLength(1) // Check events @@ -560,7 +560,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Tool should be executed with complete arguments expect(tool.execute).toHaveBeenCalledWith({ a: 10, b: 20 }) - const toolResultChunks = chunks.filter((c) => c.type === 'tool_result') + const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as 
any).result !== undefined) expect(toolResultChunks.length).toBeGreaterThan(0) }) @@ -652,7 +652,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect(tool1.execute).toHaveBeenCalled() expect(tool2.execute).toHaveBeenCalled() - const toolResultChunks = chunks.filter((c) => c.type === 'tool_result') + const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) expect(toolResultChunks).toHaveLength(2) // Check iteration event @@ -950,12 +950,11 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const toolResultChunks = chunks.filter((c) => c.type === 'tool_result') + const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) expect(toolResultChunks).toHaveLength(1) const resultChunk = toolResultChunks[0] as any - const result = JSON.parse(resultChunk.content) - expect(result.result).toBe('success') + expect(resultChunk.result.result).toBe('success') // Check tool:call-completed event const completedEvents = capturedEvents.filter( @@ -1104,12 +1103,11 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const toolResultChunks = chunks.filter((c) => c.type === 'tool_result') + const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) expect(toolResultChunks).toHaveLength(1) const resultChunk = toolResultChunks[0] as any - const result = JSON.parse(resultChunk.content) - expect(result.error).toBe('Tool execution failed') + expect(resultChunk.result.error).toBe('Tool execution failed') }) it('should handle unknown tool calls', async () => { @@ -1158,12 +1156,11 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) // Should still produce a tool_result with error - const toolResultChunks = chunks.filter((c) => c.type === 'tool_result') + const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) 
expect(toolResultChunks.length).toBeGreaterThan(0) const resultChunk = toolResultChunks[0] as any - const result = JSON.parse(resultChunk.content) - expect(result.error).toContain('Unknown tool') + expect(resultChunk.result.error).toContain('Unknown tool') }) }) @@ -1219,13 +1216,13 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) const approvalChunks = chunks.filter( - (c) => c.type === 'approval-requested', + (c) => c.type === 'CUSTOM' && (c as any).name === 'approval-requested', ) expect(approvalChunks).toHaveLength(1) const approvalChunk = approvalChunks[0] as any - expect(approvalChunk.toolName).toBe('delete_file') - expect(approvalChunk.approval.needsApproval).toBe(true) + expect(approvalChunk.value.toolName).toBe('delete_file') + expect(approvalChunk.value.approval.needsApproval).toBe(true) // Tool should NOT be executed yet expect(tool.execute).not.toHaveBeenCalled() @@ -1285,13 +1282,13 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) const inputChunks = chunks.filter( - (c) => c.type === 'tool-input-available', + (c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', ) expect(inputChunks).toHaveLength(1) const inputChunk = inputChunks[0] as any - expect(inputChunk.toolName).toBe('client_tool') - expect(inputChunk.input).toEqual({ input: 'test' }) + expect(inputChunk.value.toolName).toBe('client_tool') + expect(inputChunk.value.input).toEqual({ input: 'test' }) // Should emit tool-input-available event expect( @@ -1387,10 +1384,10 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Approval and client tools should request intervention const approvalChunks = chunks.filter( - (c) => c.type === 'approval-requested', + (c) => c.type === 'CUSTOM' && (c as any).name === 'approval-requested', ) const inputChunks = chunks.filter( - (c) => c.type === 'tool-input-available', + (c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', ) expect(approvalChunks.length + 
inputChunks.length).toBeGreaterThan(0) @@ -1488,7 +1485,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }) const chunks = await collectChunks(stream) - expect(chunks[0]?.type).toBe('tool_result') + expect(chunks[0]?.type).toBe('TOOL_CALL_END') expect(toolExecute).toHaveBeenCalledWith({ path: '/tmp/test.txt' }) expect(adapter.chatStreamCallCount).toBe(1) }) @@ -2408,26 +2405,24 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { async *chatStream(options: ChatOptions): AsyncIterable { this.trackStreamCall(options) yield { - type: 'content', - id: 'test-id', + type: 'TEXT_MESSAGE_CONTENT', model: 'test-model', timestamp: Date.now(), delta: 'Using tool', content: 'Using tool', - role: 'assistant', } - // Adapter sends tool_result chunk directly (from previous execution) + // Adapter sends TOOL_CALL_END with result (from previous execution) yield { - type: 'tool_result', - id: 'test-id', + type: 'TOOL_CALL_END', model: 'test-model', timestamp: Date.now(), toolCallId: 'call-previous', - content: JSON.stringify({ result: 'previous result' }), + toolName: 'previousTool', + result: { result: 'previous result' }, } yield { - type: 'done', - id: 'test-id', + type: 'RUN_FINISHED', + runId: 'test-run', model: 'test-model', timestamp: Date.now(), finishReason: 'stop', @@ -2451,9 +2446,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) expect(toolResultEvents.length).toBeGreaterThan(0) expect(toolResultEvents[0]?.data.toolCallId).toBe('call-previous') - expect(toolResultEvents[0]?.data.result).toBe( - JSON.stringify({ result: 'previous result' }), - ) + expect(toolResultEvents[0]?.data.result).toEqual({ result: 'previous result' }) }) }) @@ -2554,7 +2547,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }) const chunks1 = await collectChunks(stream1) - const approvalChunk = chunks1.find((c) => c.type === 'approval-requested') + const approvalChunk = chunks1.find((c) => c.type === 'CUSTOM' && (c as any).name 
=== 'approval-requested') expect(approvalChunk).toBeDefined() // Second call - with approval response in message parts @@ -2677,7 +2670,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }) const chunks1 = await collectChunks(stream1) - const inputChunk = chunks1.find((c) => c.type === 'tool-input-available') + const inputChunk = chunks1.find((c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available') expect(inputChunk).toBeDefined() // Second call - with client tool output in message parts @@ -2976,7 +2969,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { const chunks = await collectChunks(stream) const toolCallChunks = chunks.filter((c) => c.type === 'tool_call') - const toolResultChunks = chunks.filter((c) => c.type === 'tool_result') + const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) // We should have received tool_call chunks expect(toolCallChunks.length).toBeGreaterThan(0) diff --git a/packages/typescript/ai/tests/stream-to-response.test.ts b/packages/typescript/ai/tests/stream-to-response.test.ts index 38bc3853..713278df 100644 --- a/packages/typescript/ai/tests/stream-to-response.test.ts +++ b/packages/typescript/ai/tests/stream-to-response.test.ts @@ -227,7 +227,7 @@ describe('toServerSentEventsStream', () => { const sseStream = toServerSentEventsStream(errorStream()) const output = await readStream(sseStream) - expect(output).toContain('"type":"error"') + expect(output).toContain('"type":"RUN_ERROR"') expect(output).toContain('"message":"Stream error"') }) diff --git a/packages/typescript/ai/tests/tool-call-manager.test.ts b/packages/typescript/ai/tests/tool-call-manager.test.ts index 9d74205c..aa4a81d7 100644 --- a/packages/typescript/ai/tests/tool-call-manager.test.ts +++ b/packages/typescript/ai/tests/tool-call-manager.test.ts @@ -1,12 +1,12 @@ import { describe, expect, it, vi } from 'vitest' import { z } from 'zod' import { ToolCallManager } 
from '../src/tools/tool-calls' -import type { DoneStreamChunk, Tool } from '../src/types' +import type { RunFinishedEvent, Tool } from '../src/types' describe('ToolCallManager', () => { - const mockDoneChunk: DoneStreamChunk = { - type: 'done', - id: 'test-id', + const mockDoneChunk: RunFinishedEvent = { + type: 'RUN_FINISHED', + runId: 'test-run', model: 'gpt-4', timestamp: Date.now(), finishReason: 'tool_calls', @@ -94,7 +94,7 @@ describe('ToolCallManager', () => { expect(toolCalls[0]?.id).toBe('call_123') }) - it('should execute tools and emit tool_result chunks', async () => { + it('should execute tools and emit TOOL_CALL_END events', async () => { const manager = new ToolCallManager([mockWeatherTool]) manager.addToolCallChunk({ @@ -109,11 +109,11 @@ describe('ToolCallManager', () => { const { chunks: emittedChunks, result: finalResult } = await collectGeneratorOutput(manager.executeTools(mockDoneChunk)) - // Should emit one tool_result chunk + // Should emit one TOOL_CALL_END event expect(emittedChunks).toHaveLength(1) - expect(emittedChunks[0]?.type).toBe('tool_result') + expect(emittedChunks[0]?.type).toBe('TOOL_CALL_END') expect(emittedChunks[0]?.toolCallId).toBe('call_123') - expect(emittedChunks[0]?.content).toContain('temp') + expect(emittedChunks[0]?.result).toContain('temp') // Should return one tool result message expect(finalResult).toHaveLength(1) @@ -150,9 +150,9 @@ describe('ToolCallManager', () => { manager.executeTools(mockDoneChunk), ) - // Should still emit chunk with error message + // Should still emit event with error message expect(chunks).toHaveLength(1) - expect(chunks[0]?.content).toContain('Error executing tool: Tool failed') + expect(chunks[0]?.result).toContain('Error executing tool: Tool failed') // Should still return tool result message expect(toolResults).toHaveLength(1) @@ -182,7 +182,7 @@ describe('ToolCallManager', () => { manager.executeTools(mockDoneChunk), ) - expect(chunks[0]?.content).toContain('does not have an execute 
function') + expect(chunks[0]?.result).toContain('does not have an execute function') expect(toolResults[0]?.content).toContain( 'does not have an execute function', ) @@ -248,7 +248,7 @@ describe('ToolCallManager', () => { manager.executeTools(mockDoneChunk), ) - // Should emit two tool_result chunks + // Should emit two TOOL_CALL_END events expect(chunks).toHaveLength(2) expect(chunks[0]?.toolCallId).toBe('call_weather') expect(chunks[1]?.toolCallId).toBe('call_calc') diff --git a/packages/typescript/smoke-tests/adapters/src/harness.ts b/packages/typescript/smoke-tests/adapters/src/harness.ts index 25d01ee6..17cc3fca 100644 --- a/packages/typescript/smoke-tests/adapters/src/harness.ts +++ b/packages/typescript/smoke-tests/adapters/src/harness.ts @@ -168,78 +168,115 @@ export async function captureStream(opts: { model: chunk.model, } - if (chunk.type === 'content') { + if (chunk.type === 'TEXT_MESSAGE_CONTENT') { chunkData.delta = chunk.delta chunkData.content = chunk.content - chunkData.role = chunk.role - const delta = chunk.delta || chunk.content || '' + const delta = chunk.delta || '' fullResponse += delta - if (chunk.role === 'assistant') { - if (!assistantDraft) { - assistantDraft = { - role: 'assistant', - content: chunk.content || '', - toolCalls: [], - } - } else { - assistantDraft.content = (assistantDraft.content || '') + delta + if (!assistantDraft) { + assistantDraft = { + role: 'assistant', + content: delta, + toolCalls: [], } + } else { + assistantDraft.content = (assistantDraft.content || '') + delta } - } else if (chunk.type === 'tool_call') { - const id = chunk.toolCall.id + } else if (chunk.type === 'TOOL_CALL_START') { + const id = chunk.toolCallId const existing = toolCallMap.get(id) || { id, - name: chunk.toolCall.function.name, + name: chunk.toolName, arguments: '', } - existing.arguments += chunk.toolCall.function.arguments || '' toolCallMap.set(id, existing) - chunkData.toolCall = chunk.toolCall + chunkData.toolCallId = chunk.toolCallId + 
chunkData.toolName = chunk.toolName if (!assistantDraft) { assistantDraft = { role: 'assistant', content: null, toolCalls: [] } } - const existingToolCall = assistantDraft.toolCalls?.find( - (tc: any) => tc.id === id, - ) - if (existingToolCall) { - existingToolCall.function.arguments = existing.arguments - } else { - assistantDraft.toolCalls?.push({ - ...chunk.toolCall, - function: { - ...chunk.toolCall.function, - arguments: existing.arguments, - }, - }) + assistantDraft.toolCalls?.push({ + id, + type: 'function', + function: { + name: chunk.toolName, + arguments: '', + }, + }) + } else if (chunk.type === 'TOOL_CALL_ARGS') { + const id = chunk.toolCallId + const existing = toolCallMap.get(id) + if (existing) { + existing.arguments += chunk.delta || '' + toolCallMap.set(id, existing) } - } else if (chunk.type === 'tool_result') { + chunkData.toolCallId = chunk.toolCallId - chunkData.content = chunk.content - toolResults.push({ - toolCallId: chunk.toolCallId, - content: chunk.content, - }) - reconstructedMessages.push({ - role: 'tool', - toolCallId: chunk.toolCallId, - content: chunk.content, - }) - } else if (chunk.type === 'approval-requested') { - const approval: ApprovalCapture = { - toolCallId: chunk.toolCallId, - toolName: chunk.toolName, - input: chunk.input, - approval: chunk.approval, + chunkData.delta = chunk.delta + + if (assistantDraft) { + const existingToolCall = assistantDraft.toolCalls?.find( + (tc: any) => tc.id === id, + ) + if (existingToolCall) { + existingToolCall.function.arguments += chunk.delta || '' + } } + } else if (chunk.type === 'TOOL_CALL_END') { chunkData.toolCallId = chunk.toolCallId - chunkData.toolName = chunk.toolName - chunkData.input = chunk.input - chunkData.approval = chunk.approval + + // Capture input/arguments from TOOL_CALL_END (OpenAI sends complete args here) + if (chunk.input !== undefined) { + const id = chunk.toolCallId + const existing = toolCallMap.get(id) + if (existing) { + existing.arguments = 
JSON.stringify(chunk.input) + toolCallMap.set(id, existing) + } + + // Update the assistant draft's tool call arguments + if (assistantDraft) { + const existingToolCall = assistantDraft.toolCalls?.find( + (tc: any) => tc.id === id, + ) + if (existingToolCall) { + existingToolCall.function.arguments = JSON.stringify(chunk.input) + } + } + } + + // Tool result is included in TOOL_CALL_END for server-executed tools + if (chunk.result !== undefined) { + const content = + typeof chunk.result === 'string' + ? chunk.result + : JSON.stringify(chunk.result) + toolResults.push({ + toolCallId: chunk.toolCallId, + content, + }) + reconstructedMessages.push({ + role: 'tool', + toolCallId: chunk.toolCallId, + content, + }) + } + } else if (chunk.type === 'CUSTOM' && chunk.name === 'approval-requested') { + const approval: ApprovalCapture = { + toolCallId: chunk.value.toolCallId, + toolName: chunk.value.toolName, + input: chunk.value.input, + approval: chunk.value.approval, + } + chunkData.toolCallId = chunk.value.toolCallId + chunkData.toolName = chunk.value.toolName + chunkData.input = chunk.value.input + chunkData.approval = chunk.value.approval approvalRequests.push(approval) - } else if (chunk.type === 'done') { + } else if (chunk.type === 'RUN_FINISHED') { chunkData.finishReason = chunk.finishReason chunkData.usage = chunk.usage if (chunk.finishReason === 'stop' && assistantDraft) { From 9a65ad5bf8fdc685949ec68e23e1be6dc67f861d Mon Sep 17 00:00:00 2001 From: Jack Herrington Date: Fri, 5 Dec 2025 13:20:40 -0800 Subject: [PATCH 2/7] updating all the docs --- ...x-e559659f-ceb7-4b86-879d-a603788b0b56.png | 0 docs/adapters/anthropic.md | 2 +- docs/adapters/openai.md | 2 +- docs/api/ai.md | 75 ++++++++++++++----- docs/guides/agentic-cycle.md | 10 +-- docs/guides/client-tools.md | 36 ++++----- docs/guides/server-tools.md | 11 +-- docs/guides/streaming.md | 26 ++++--- docs/guides/tool-architecture.md | 37 ++++----- 9 files changed, 121 insertions(+), 78 deletions(-) delete mode 
100644 assets/CleanShot_2025-12-03_at_09.07.34_2x-e559659f-ceb7-4b86-879d-a603788b0b56.png diff --git a/assets/CleanShot_2025-12-03_at_09.07.34_2x-e559659f-ceb7-4b86-879d-a603788b0b56.png b/assets/CleanShot_2025-12-03_at_09.07.34_2x-e559659f-ceb7-4b86-879d-a603788b0b56.png deleted file mode 100644 index e69de29b..00000000 diff --git a/docs/adapters/anthropic.md b/docs/adapters/anthropic.md index 4a37757a..b602ae00 100644 --- a/docs/adapters/anthropic.md +++ b/docs/adapters/anthropic.md @@ -132,7 +132,7 @@ const stream = chat({ ### Thinking (Extended Thinking) -Enable extended thinking with a token budget. This allows Claude to show its reasoning process, which is streamed as `thinking` chunks and displayed as `ThinkingPart` in messages: +Enable extended thinking with a token budget. This allows Claude to show its reasoning process, which is streamed as `STEP_STARTED` and `STEP_FINISHED` events and displayed as `ThinkingPart` in messages: ```typescript providerOptions: { diff --git a/docs/adapters/openai.md b/docs/adapters/openai.md index 33e0b2c4..093d3f53 100644 --- a/docs/adapters/openai.md +++ b/docs/adapters/openai.md @@ -140,7 +140,7 @@ const stream = chat({ ### Reasoning -Enable reasoning for models that support it (e.g., GPT-5). This allows the model to show its reasoning process, which is streamed as `thinking` chunks: +Enable reasoning for models that support it (e.g., GPT-5). This allows the model to show its reasoning process, which is streamed as `STEP_STARTED` and `STEP_FINISHED` events: ```typescript providerOptions: { diff --git a/docs/api/ai.md b/docs/api/ai.md index 6f9240f4..ee4ed9ad 100644 --- a/docs/api/ai.md +++ b/docs/api/ai.md @@ -247,33 +247,68 @@ interface ModelMessage { ### `StreamChunk` +TanStack AI implements the [AG-UI Protocol](https://docs.ag-ui.com/introduction) for streaming. 
All events share a common base structure: + ```typescript -type StreamChunk = - | ContentStreamChunk - | ThinkingStreamChunk - | ToolCallStreamChunk - | ToolResultStreamChunk - | DoneStreamChunk - | ErrorStreamChunk; - -interface ThinkingStreamChunk { - type: "thinking"; - id: string; - model: string; +interface BaseEvent { + type: EventType; timestamp: number; - delta?: string; // Incremental thinking token + model?: string; + rawEvent?: unknown; +} + +type EventType = + | 'RUN_STARTED' // Run lifecycle begins + | 'RUN_FINISHED' // Run completed successfully + | 'RUN_ERROR' // Error occurred + | 'TEXT_MESSAGE_START' // Text message begins + | 'TEXT_MESSAGE_CONTENT' // Text content streaming + | 'TEXT_MESSAGE_END' // Text message completes + | 'TOOL_CALL_START' // Tool invocation begins + | 'TOOL_CALL_ARGS' // Tool arguments streaming + | 'TOOL_CALL_END' // Tool call completes (with result) + | 'STEP_STARTED' // Thinking/reasoning step begins + | 'STEP_FINISHED' // Thinking/reasoning step completes + | 'STATE_SNAPSHOT' // Full state synchronization + | 'STATE_DELTA' // Incremental state update + | 'CUSTOM'; // Custom extensibility events + +type StreamChunk = + | RunStartedEvent + | RunFinishedEvent + | RunErrorEvent + | TextMessageStartEvent + | TextMessageContentEvent + | TextMessageEndEvent + | ToolCallStartEvent + | ToolCallArgsEvent + | ToolCallEndEvent + | StepStartedEvent + | StepFinishedEvent + | StateSnapshotEvent + | StateDeltaEvent + | CustomEvent; + +// Example: Thinking/reasoning event +interface StepFinishedEvent extends BaseEvent { + type: "STEP_FINISHED"; + stepId: string; + delta?: string; // Incremental thinking token content: string; // Accumulated thinking content } ``` -Stream chunks represent different types of data in the stream: +Stream events represent different types of data in the stream: + +- **`RUN_STARTED` / `RUN_FINISHED`** - Run lifecycle events +- **`TEXT_MESSAGE_*`** - Text content being generated +- **`STEP_STARTED` / 
`STEP_FINISHED`** - Model's reasoning process (thinking) +- **`TOOL_CALL_*`** - Tool invocation and results +- **`RUN_ERROR`** - Stream errors +- **`STATE_*`** - Shared state updates +- **`CUSTOM`** - Custom extensibility events -- **Content chunks** - Text content being generated -- **Thinking chunks** - Model's reasoning process (when supported by the model) -- **Tool call chunks** - When the model calls a tool -- **Tool result chunks** - Results from tool execution -- **Done chunks** - Stream completion -- **Error chunks** - Stream errors +See [AG-UI Event Definitions](../protocol/chunk-definitions) for full details. ### `Tool` diff --git a/docs/guides/agentic-cycle.md b/docs/guides/agentic-cycle.md index 2e68c1e7..9e1356f5 100644 --- a/docs/guides/agentic-cycle.md +++ b/docs/guides/agentic-cycle.md @@ -46,21 +46,21 @@ sequenceDiagram Note over LLM: Cycle 1: Call first tool - LLM->>Server: tool_call: get_weather(SF) + LLM->>Server: TOOL_CALL_START/ARGS: get_weather(SF) Server->>Tools: Execute get_weather Tools-->>Server: {temp: 65, conditions: "sunny"} - Server->>LLM: tool_result + Server->>LLM: TOOL_CALL_END with result Note over LLM: Cycle 2: Call second tool - LLM->>Server: tool_call: get_weather(LA) + LLM->>Server: TOOL_CALL_START/ARGS: get_weather(LA) Server->>Tools: Execute get_weather Tools-->>Server: {temp: 75, conditions: "clear"} - Server->>LLM: tool_result + Server->>LLM: TOOL_CALL_END with result Note over LLM: Cycle 3: Generate answer - LLM-->>Server: content: "SF is 65°F..." + LLM-->>Server: TEXT_MESSAGE_CONTENT: "SF is 65°F..." Server-->>Client: Stream response Client->>User: Display answer ``` diff --git a/docs/guides/client-tools.md b/docs/guides/client-tools.md index a7b19cb5..6d74eaa4 100644 --- a/docs/guides/client-tools.md +++ b/docs/guides/client-tools.md @@ -12,23 +12,24 @@ sequenceDiagram participant Browser participant ClientTool - LLM Service->>Server: tool_call chunk
{name: "updateUI", args: {...}} + LLM Service->>Server: TOOL_CALL_START event
{toolName: "updateUI", toolCallId: "..."} + LLM Service->>Server: TOOL_CALL_ARGS event
{delta: "{...}"} Server->>Server: Check if tool has
server execute Note over Server: No execute function
= client tool - Server->>Browser: Forward tool-input-available
chunk via SSE/HTTP - Browser->>Browser: onToolCall callback
triggered + Server->>Browser: Forward CUSTOM event
(tool-input-available) via SSE/HTTP + Browser->>Browser: Client tool handler
triggered Browser->>ClientTool: execute(args) ClientTool->>ClientTool: Update UI,
localStorage, etc. ClientTool-->>Browser: Return result Browser->>Server: POST tool result - Server->>LLM Service: Add tool_result
to conversation + Server->>LLM Service: Add TOOL_CALL_END with
result to conversation Note over LLM Service: Model uses result
to continue - LLM Service-->>Server: Stream response - Server-->>Browser: Forward chunks + LLM Service-->>Server: Stream TEXT_MESSAGE_CONTENT events + Server-->>Browser: Forward events ``` ## When to Use Client Tools @@ -41,15 +42,16 @@ sequenceDiagram ## How It Works -1. **Tool Call from LLM**: LLM decides to call a client tool -2. **Server Detection**: Server sees the tool has no `execute` function -3. **Client Notification**: Server sends a `tool-input-available` chunk to the browser -4. **Client Execution**: Browser's `onToolCall` callback is triggered with: +1. **Tool Call from LLM**: LLM decides to call a client tool via `TOOL_CALL_START` event +2. **Arguments Streaming**: Tool arguments stream via `TOOL_CALL_ARGS` events +3. **Server Detection**: Server sees the tool has no `execute` function +4. **Client Notification**: Server sends a `CUSTOM` event (name: `tool-input-available`) to the browser +5. **Client Execution**: Browser's client tool handler is triggered with: - `toolName`: Name of the tool to execute - `input`: Parsed arguments -5. **Result Return**: Client executes the tool and returns the result -6. **Server Update**: Result is sent back to the server and added to the conversation -7. **LLM Continuation**: LLM receives the result and continues the conversation +6. **Result Return**: Client executes the tool and returns the result +7. **Server Update**: Result is sent back as a `TOOL_CALL_END` event with the result +8. **LLM Continuation**: LLM receives the result and continues the conversation ## Defining Client Tools @@ -199,12 +201,12 @@ function MessageComponent({ message }: { message: ChatMessages[number] }) { ## Automatic Execution -Client tools are **automatically executed** when the model calls them. No manual `onToolCall` callback needed! The flow is: +Client tools are **automatically executed** when the model calls them. No manual callback needed! The flow is: -1. LLM calls a client tool -2. 
Server sends `tool-input-available` chunk to browser +1. LLM calls a client tool via `TOOL_CALL_START` and `TOOL_CALL_ARGS` events +2. Server sends `CUSTOM` event (name: `tool-input-available`) to browser 3. Client automatically executes the matching tool implementation -4. Result is sent back to server +4. Result is sent back to server as a `TOOL_CALL_END` event 5. Conversation continues with the result ## Type Safety Benefits diff --git a/docs/guides/server-tools.md b/docs/guides/server-tools.md index 8e159d45..01be858c 100644 --- a/docs/guides/server-tools.md +++ b/docs/guides/server-tools.md @@ -12,24 +12,25 @@ sequenceDiagram participant Tool participant Database/API - LLM Service->>Server: tool_call chunk
{name: "getUserData", args: {...}} + LLM Service->>Server: TOOL_CALL_START event
{toolName: "getUserData", toolCallId: "..."} + LLM Service->>Server: TOOL_CALL_ARGS event
{delta: "{...}"} Server->>Server: Parse tool call
arguments Server->>Tool: execute(parsedArgs) Tool->>Database/API: Query/Fetch data Database/API-->>Tool: Return data Tool-->>Server: Return result - Server->>Server: Create tool_result
message - Server->>LLM Service: Continue chat with
tool_result in history + Server->>Server: Create TOOL_CALL_END
with result + Server->>LLM Service: Continue chat with
tool result in history Note over LLM Service: Model uses result
to generate response - LLM Service-->>Server: Stream content chunks + LLM Service-->>Server: Stream TEXT_MESSAGE_CONTENT events Server-->>Server: Stream to client ``` ## How It Works -1. **Tool Call Received**: Server receives a `tool_call` chunk from the LLM +1. **Tool Call Received**: Server receives `TOOL_CALL_START` and `TOOL_CALL_ARGS` events from the LLM 2. **Argument Parsing**: The tool arguments (JSON string) are parsed and validated against the input schema 3. **Execution**: The tool's `execute` function is called with the parsed arguments 4. **Result Processing**: The result is: diff --git a/docs/guides/streaming.md b/docs/guides/streaming.md index da2a806b..8e64fa50 100644 --- a/docs/guides/streaming.md +++ b/docs/guides/streaming.md @@ -64,30 +64,34 @@ messages.forEach((message) => { }); ``` -## Stream Chunks +## Stream Events -Stream chunks contain different types of data: +TanStack AI implements the [AG-UI Protocol](https://docs.ag-ui.com/introduction) for streaming. Stream events contain different types of data: -- **Content chunks** - Text content being generated -- **Thinking chunks** - Model's internal reasoning process (when supported) -- **Tool call chunks** - When the model calls a tool -- **Tool result chunks** - Results from tool execution -- **Done chunks** - Stream completion +- **`RUN_STARTED` / `RUN_FINISHED`** - Run lifecycle events +- **`TEXT_MESSAGE_START` / `TEXT_MESSAGE_CONTENT` / `TEXT_MESSAGE_END`** - Text content streaming +- **`STEP_STARTED` / `STEP_FINISHED`** - Model's internal reasoning process (thinking) +- **`TOOL_CALL_START` / `TOOL_CALL_ARGS` / `TOOL_CALL_END`** - Tool invocation and results +- **`STATE_SNAPSHOT` / `STATE_DELTA`** - Shared state updates +- **`CUSTOM`** - Custom extensibility events -### Thinking Chunks +### Thinking Events -Thinking chunks represent the model's reasoning process. 
They stream separately from the final response text: +Thinking events (`STEP_STARTED` / `STEP_FINISHED`) represent the model's reasoning process. They stream separately from the final response text: ```typescript for await (const chunk of stream) { - if (chunk.type === "thinking") { + if (chunk.type === "STEP_STARTED") { + console.log("Thinking started:", chunk.stepId); + } + if (chunk.type === "STEP_FINISHED") { console.log("Thinking:", chunk.content); // Accumulated thinking content console.log("Delta:", chunk.delta); // Incremental thinking token } } ``` -Thinking chunks are automatically converted to `ThinkingPart` in `UIMessage` objects. They are UI-only and excluded from messages sent back to the model. +Thinking events are automatically converted to `ThinkingPart` in `UIMessage` objects. They are UI-only and excluded from messages sent back to the model. ## Connection Adapters diff --git a/docs/guides/tool-architecture.md b/docs/guides/tool-architecture.md index f691f510..0367da36 100644 --- a/docs/guides/tool-architecture.md +++ b/docs/guides/tool-architecture.md @@ -36,9 +36,9 @@ sequenceDiagram Note over LLM Service: Model analyzes tools
and decides to use one - LLM Service-->>Server: Stream chunks:
tool_call, content, done - Server-->>Browser: Forward chunks via SSE/HTTP - Browser->>Browser: Parse chunks &
update UI + LLM Service-->>Server: Stream AG-UI events:
TOOL_CALL_*, TEXT_MESSAGE_*, RUN_FINISHED + Server-->>Browser: Forward events via SSE/HTTP + Browser->>Browser: Parse events &
update UI Browser->>User: Show response ``` @@ -57,11 +57,11 @@ sequenceDiagram - Analyzes the conversation and available tools - Decides whether to call a tool based on the user's request - Generates tool calls with arguments -5. **Streaming Response**: The LLM streams back chunks: - - `tool_call` chunks with tool name and arguments - - `content` chunks with text responses - - `done` chunk when complete -6. **Client Updates**: The browser receives chunks and updates the UI in real-time +5. **Streaming Response**: The LLM streams back AG-UI events: + - `TOOL_CALL_START` / `TOOL_CALL_ARGS` / `TOOL_CALL_END` events for tool invocations + - `TEXT_MESSAGE_CONTENT` events for text responses + - `RUN_FINISHED` event when complete +6. **Client Updates**: The browser receives events and updates the UI in real-time ### Code Example @@ -114,14 +114,14 @@ Tools progress through different states during their lifecycle. Understanding th ```mermaid stateDiagram-v2 - [*] --> AwaitingInput: tool_call received - AwaitingInput --> InputStreaming: partial arguments + [*] --> AwaitingInput: TOOL_CALL_START received + AwaitingInput --> InputStreaming: TOOL_CALL_ARGS (partial) InputStreaming --> InputComplete: all arguments received InputComplete --> ApprovalRequested: needsApproval=true InputComplete --> Executing: needsApproval=false ApprovalRequested --> Executing: user approves ApprovalRequested --> Cancelled: user denies - Executing --> OutputAvailable: success + Executing --> OutputAvailable: TOOL_CALL_END (success) Executing --> OutputError: error OutputAvailable --> [*] OutputError --> [*] @@ -210,15 +210,16 @@ sequenceDiagram participant LLM participant Tool - LLM->>Server: tool_call: send_email + LLM->>Server: TOOL_CALL_START: send_email + LLM->>Server: TOOL_CALL_ARGS: {to, subject, body} Server->>Server: Check needsApproval - Server->>Client: approval-requested chunk + Server->>Client: CUSTOM event (approval-requested) Client->>Client: Show approval UI User->>Client: Clicks 
"Approve" Client->>Server: POST approval response Server->>Tool: execute(args) Tool-->>Server: result - Server->>LLM: tool_result + Server->>LLM: TOOL_CALL_END with result LLM-->>Client: Generate response ``` @@ -325,13 +326,13 @@ The LLM can call multiple tools in parallel for efficiency: ```mermaid graph TD - A[LLM decides to call 3 tools] --> B[tool_call index: 0] - A --> C[tool_call index: 1] - A --> D[tool_call index: 2] + A[LLM decides to call 3 tools] --> B[TOOL_CALL_START index: 0] + A --> C[TOOL_CALL_START index: 1] + A --> D[TOOL_CALL_START index: 2] B --> E[Execute in parallel] C --> E D --> E - E --> F[Collect all results] + E --> F[Collect all TOOL_CALL_END results] F --> G[Continue with results] ``` From 55228acebe3cba145df38bb1468cf91a43873515 Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Fri, 5 Dec 2025 21:23:29 +0000 Subject: [PATCH 3/7] ci: apply automated fixes --- docs/reference/classes/BaseAdapter.md | 310 ------------- docs/reference/classes/BatchStrategy.md | 87 ---- docs/reference/classes/CompositeStrategy.md | 91 ---- docs/reference/classes/ImmediateStrategy.md | 58 --- docs/reference/classes/PartialJSONParser.md | 57 --- docs/reference/classes/PunctuationStrategy.md | 61 --- docs/reference/classes/StreamProcessor.md | 407 ------------------ docs/reference/classes/ToolCallManager.md | 196 --------- .../reference/classes/WordBoundaryStrategy.md | 61 --- docs/reference/functions/chat.md | 55 --- docs/reference/functions/chatOptions.md | 32 -- docs/reference/functions/combineStrategies.md | 44 -- .../convertMessagesToModelMessages.md | 35 -- .../functions/convertZodToJsonSchema.md | 50 --- .../reference/functions/createReplayStream.md | 24 -- docs/reference/functions/embedding.md | 30 -- docs/reference/functions/generateMessageId.md | 18 - docs/reference/functions/maxIterations.md | 40 -- docs/reference/functions/messages.md | 94 ---- .../functions/modelMessageToUIMessage.md | 39 -- 
.../functions/modelMessagesToUIMessages.md | 33 -- .../functions/normalizeToUIMessage.md | 38 -- docs/reference/functions/parsePartialJSON.md | 28 -- docs/reference/functions/summarize.md | 30 -- .../functions/toServerSentEventsStream.md | 47 -- docs/reference/functions/toStreamResponse.md | 51 --- docs/reference/functions/toolDefinition.md | 87 ---- .../functions/uiMessageToModelMessages.md | 39 -- docs/reference/functions/untilFinishReason.md | 40 -- docs/reference/index.md | 120 ------ docs/reference/interfaces/AIAdapter.md | 214 --------- docs/reference/interfaces/AIAdapterConfig.md | 58 --- docs/reference/interfaces/AgentLoopState.md | 49 --- .../ApprovalRequestedStreamChunk.md | 120 ------ docs/reference/interfaces/AudioPart.md | 52 --- docs/reference/interfaces/BaseStreamChunk.md | 59 --- .../interfaces/ChatCompletionChunk.md | 86 ---- docs/reference/interfaces/ChatOptions.md | 161 ------- docs/reference/interfaces/ChunkRecording.md | 88 ---- docs/reference/interfaces/ChunkStrategy.md | 58 --- docs/reference/interfaces/ClientTool.md | 114 ----- .../reference/interfaces/ContentPartSource.md | 39 -- .../interfaces/ContentStreamChunk.md | 98 ----- .../DefaultMessageMetadataByModality.md | 61 --- docs/reference/interfaces/DocumentPart.md | 52 --- docs/reference/interfaces/DoneStreamChunk.md | 106 ----- docs/reference/interfaces/EmbeddingOptions.md | 38 -- docs/reference/interfaces/EmbeddingResult.md | 60 --- docs/reference/interfaces/ErrorStreamChunk.md | 90 ---- docs/reference/interfaces/ImagePart.md | 52 --- .../interfaces/InternalToolCallState.md | 70 --- docs/reference/interfaces/JSONParser.md | 36 -- docs/reference/interfaces/ModelMessage.md | 64 --- docs/reference/interfaces/ProcessorResult.md | 50 --- docs/reference/interfaces/ProcessorState.md | 70 --- docs/reference/interfaces/ResponseFormat.md | 151 ------- docs/reference/interfaces/ServerTool.md | 231 ---------- .../interfaces/StreamProcessorEvents.md | 228 ---------- 
.../interfaces/StreamProcessorHandlers.md | 317 -------------- .../interfaces/StreamProcessorOptions.md | 94 ---- .../interfaces/SummarizationOptions.md | 58 --- .../interfaces/SummarizationResult.md | 66 --- docs/reference/interfaces/TextPart.md | 46 -- docs/reference/interfaces/ThinkingPart.md | 28 -- .../interfaces/ThinkingStreamChunk.md | 88 ---- docs/reference/interfaces/Tool.md | 204 --------- docs/reference/interfaces/ToolCall.md | 50 --- docs/reference/interfaces/ToolCallPart.md | 100 ----- .../interfaces/ToolCallStreamChunk.md | 118 ----- docs/reference/interfaces/ToolConfig.md | 14 - docs/reference/interfaces/ToolDefinition.md | 279 ------------ .../interfaces/ToolDefinitionConfig.md | 84 ---- .../interfaces/ToolDefinitionInstance.md | 235 ---------- .../ToolInputAvailableStreamChunk.md | 98 ----- docs/reference/interfaces/ToolResultPart.md | 58 --- .../interfaces/ToolResultStreamChunk.md | 88 ---- docs/reference/interfaces/UIMessage.md | 51 --- docs/reference/interfaces/VideoPart.md | 52 --- .../type-aliases/AgentLoopStrategy.md | 35 -- docs/reference/type-aliases/AnyClientTool.md | 16 - .../type-aliases/ChatStreamOptionsForModel.md | 26 -- .../type-aliases/ChatStreamOptionsUnion.md | 18 - .../type-aliases/ConstrainedContent.md | 44 -- .../type-aliases/ConstrainedModelMessage.md | 49 --- docs/reference/type-aliases/ContentPart.md | 49 --- .../type-aliases/ContentPartForModalities.md | 43 -- .../type-aliases/ExtractModalitiesForModel.md | 24 -- .../type-aliases/ExtractModelsFromAdapter.md | 18 - docs/reference/type-aliases/InferToolInput.md | 20 - docs/reference/type-aliases/InferToolName.md | 20 - .../reference/type-aliases/InferToolOutput.md | 20 - docs/reference/type-aliases/MessagePart.md | 16 - .../type-aliases/ModalitiesArrayToUnion.md | 21 - docs/reference/type-aliases/Modality.md | 19 - docs/reference/type-aliases/StreamChunk.md | 22 - .../reference/type-aliases/StreamChunkType.md | 20 - docs/reference/type-aliases/ToolCallState.md | 19 - 
.../reference/type-aliases/ToolResultState.md | 14 - docs/reference/variables/aiEventClient.md | 12 - docs/reference/variables/defaultJSONParser.md | 14 - .../ai-devtools/src/store/ai-context.tsx | 7 +- .../typescript/ai/src/stream/processor.ts | 19 +- packages/typescript/ai/src/types.ts | 2 +- packages/typescript/ai/tests/ai-chat.test.ts | 47 +- .../smoke-tests/adapters/src/harness.ts | 6 +- 105 files changed, 57 insertions(+), 7548 deletions(-) delete mode 100644 docs/reference/classes/BaseAdapter.md delete mode 100644 docs/reference/classes/BatchStrategy.md delete mode 100644 docs/reference/classes/CompositeStrategy.md delete mode 100644 docs/reference/classes/ImmediateStrategy.md delete mode 100644 docs/reference/classes/PartialJSONParser.md delete mode 100644 docs/reference/classes/PunctuationStrategy.md delete mode 100644 docs/reference/classes/StreamProcessor.md delete mode 100644 docs/reference/classes/ToolCallManager.md delete mode 100644 docs/reference/classes/WordBoundaryStrategy.md delete mode 100644 docs/reference/functions/chat.md delete mode 100644 docs/reference/functions/chatOptions.md delete mode 100644 docs/reference/functions/combineStrategies.md delete mode 100644 docs/reference/functions/convertMessagesToModelMessages.md delete mode 100644 docs/reference/functions/convertZodToJsonSchema.md delete mode 100644 docs/reference/functions/createReplayStream.md delete mode 100644 docs/reference/functions/embedding.md delete mode 100644 docs/reference/functions/generateMessageId.md delete mode 100644 docs/reference/functions/maxIterations.md delete mode 100644 docs/reference/functions/messages.md delete mode 100644 docs/reference/functions/modelMessageToUIMessage.md delete mode 100644 docs/reference/functions/modelMessagesToUIMessages.md delete mode 100644 docs/reference/functions/normalizeToUIMessage.md delete mode 100644 docs/reference/functions/parsePartialJSON.md delete mode 100644 docs/reference/functions/summarize.md delete mode 100644 
docs/reference/functions/toServerSentEventsStream.md delete mode 100644 docs/reference/functions/toStreamResponse.md delete mode 100644 docs/reference/functions/toolDefinition.md delete mode 100644 docs/reference/functions/uiMessageToModelMessages.md delete mode 100644 docs/reference/functions/untilFinishReason.md delete mode 100644 docs/reference/index.md delete mode 100644 docs/reference/interfaces/AIAdapter.md delete mode 100644 docs/reference/interfaces/AIAdapterConfig.md delete mode 100644 docs/reference/interfaces/AgentLoopState.md delete mode 100644 docs/reference/interfaces/ApprovalRequestedStreamChunk.md delete mode 100644 docs/reference/interfaces/AudioPart.md delete mode 100644 docs/reference/interfaces/BaseStreamChunk.md delete mode 100644 docs/reference/interfaces/ChatCompletionChunk.md delete mode 100644 docs/reference/interfaces/ChatOptions.md delete mode 100644 docs/reference/interfaces/ChunkRecording.md delete mode 100644 docs/reference/interfaces/ChunkStrategy.md delete mode 100644 docs/reference/interfaces/ClientTool.md delete mode 100644 docs/reference/interfaces/ContentPartSource.md delete mode 100644 docs/reference/interfaces/ContentStreamChunk.md delete mode 100644 docs/reference/interfaces/DefaultMessageMetadataByModality.md delete mode 100644 docs/reference/interfaces/DocumentPart.md delete mode 100644 docs/reference/interfaces/DoneStreamChunk.md delete mode 100644 docs/reference/interfaces/EmbeddingOptions.md delete mode 100644 docs/reference/interfaces/EmbeddingResult.md delete mode 100644 docs/reference/interfaces/ErrorStreamChunk.md delete mode 100644 docs/reference/interfaces/ImagePart.md delete mode 100644 docs/reference/interfaces/InternalToolCallState.md delete mode 100644 docs/reference/interfaces/JSONParser.md delete mode 100644 docs/reference/interfaces/ModelMessage.md delete mode 100644 docs/reference/interfaces/ProcessorResult.md delete mode 100644 docs/reference/interfaces/ProcessorState.md delete mode 100644 
docs/reference/interfaces/ResponseFormat.md delete mode 100644 docs/reference/interfaces/ServerTool.md delete mode 100644 docs/reference/interfaces/StreamProcessorEvents.md delete mode 100644 docs/reference/interfaces/StreamProcessorHandlers.md delete mode 100644 docs/reference/interfaces/StreamProcessorOptions.md delete mode 100644 docs/reference/interfaces/SummarizationOptions.md delete mode 100644 docs/reference/interfaces/SummarizationResult.md delete mode 100644 docs/reference/interfaces/TextPart.md delete mode 100644 docs/reference/interfaces/ThinkingPart.md delete mode 100644 docs/reference/interfaces/ThinkingStreamChunk.md delete mode 100644 docs/reference/interfaces/Tool.md delete mode 100644 docs/reference/interfaces/ToolCall.md delete mode 100644 docs/reference/interfaces/ToolCallPart.md delete mode 100644 docs/reference/interfaces/ToolCallStreamChunk.md delete mode 100644 docs/reference/interfaces/ToolConfig.md delete mode 100644 docs/reference/interfaces/ToolDefinition.md delete mode 100644 docs/reference/interfaces/ToolDefinitionConfig.md delete mode 100644 docs/reference/interfaces/ToolDefinitionInstance.md delete mode 100644 docs/reference/interfaces/ToolInputAvailableStreamChunk.md delete mode 100644 docs/reference/interfaces/ToolResultPart.md delete mode 100644 docs/reference/interfaces/ToolResultStreamChunk.md delete mode 100644 docs/reference/interfaces/UIMessage.md delete mode 100644 docs/reference/interfaces/VideoPart.md delete mode 100644 docs/reference/type-aliases/AgentLoopStrategy.md delete mode 100644 docs/reference/type-aliases/AnyClientTool.md delete mode 100644 docs/reference/type-aliases/ChatStreamOptionsForModel.md delete mode 100644 docs/reference/type-aliases/ChatStreamOptionsUnion.md delete mode 100644 docs/reference/type-aliases/ConstrainedContent.md delete mode 100644 docs/reference/type-aliases/ConstrainedModelMessage.md delete mode 100644 docs/reference/type-aliases/ContentPart.md delete mode 100644 
docs/reference/type-aliases/ContentPartForModalities.md delete mode 100644 docs/reference/type-aliases/ExtractModalitiesForModel.md delete mode 100644 docs/reference/type-aliases/ExtractModelsFromAdapter.md delete mode 100644 docs/reference/type-aliases/InferToolInput.md delete mode 100644 docs/reference/type-aliases/InferToolName.md delete mode 100644 docs/reference/type-aliases/InferToolOutput.md delete mode 100644 docs/reference/type-aliases/MessagePart.md delete mode 100644 docs/reference/type-aliases/ModalitiesArrayToUnion.md delete mode 100644 docs/reference/type-aliases/Modality.md delete mode 100644 docs/reference/type-aliases/StreamChunk.md delete mode 100644 docs/reference/type-aliases/StreamChunkType.md delete mode 100644 docs/reference/type-aliases/ToolCallState.md delete mode 100644 docs/reference/type-aliases/ToolResultState.md delete mode 100644 docs/reference/variables/aiEventClient.md delete mode 100644 docs/reference/variables/defaultJSONParser.md diff --git a/docs/reference/classes/BaseAdapter.md b/docs/reference/classes/BaseAdapter.md deleted file mode 100644 index 1127e644..00000000 --- a/docs/reference/classes/BaseAdapter.md +++ /dev/null @@ -1,310 +0,0 @@ ---- -id: BaseAdapter -title: BaseAdapter ---- - -# Abstract Class: BaseAdapter\ - -Defined in: [base-adapter.ts:26](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L26) - -Base adapter class with support for endpoint-specific models and provider options. 
- -Generic parameters: -- TChatModels: Models that support chat/text completion -- TEmbeddingModels: Models that support embeddings -- TChatProviderOptions: Provider-specific options for chat endpoint -- TEmbeddingProviderOptions: Provider-specific options for embedding endpoint -- TModelProviderOptionsByName: Provider-specific options for model by name -- TModelInputModalitiesByName: Map from model name to its supported input modalities -- TMessageMetadataByModality: Map from modality type to adapter-specific metadata types - -## Type Parameters - -### TChatModels - -`TChatModels` *extends* `ReadonlyArray`\<`string`\> = `ReadonlyArray`\<`string`\> - -### TEmbeddingModels - -`TEmbeddingModels` *extends* `ReadonlyArray`\<`string`\> = `ReadonlyArray`\<`string`\> - -### TChatProviderOptions - -`TChatProviderOptions` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> - -### TEmbeddingProviderOptions - -`TEmbeddingProviderOptions` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> - -### TModelProviderOptionsByName - -`TModelProviderOptionsByName` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> - -### TModelInputModalitiesByName - -`TModelInputModalitiesByName` *extends* `Record`\<`string`, `ReadonlyArray`\<[`Modality`](../type-aliases/Modality.md)\>\> = `Record`\<`string`, `ReadonlyArray`\<[`Modality`](../type-aliases/Modality.md)\>\> - -### TMessageMetadataByModality - -`TMessageMetadataByModality` *extends* `object` = [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md) - -## Implements - -- [`AIAdapter`](../interfaces/AIAdapter.md)\<`TChatModels`, `TEmbeddingModels`, `TChatProviderOptions`, `TEmbeddingProviderOptions`, `TModelProviderOptionsByName`, `TModelInputModalitiesByName`, `TMessageMetadataByModality`\> - -## Constructors - -### Constructor - -```ts -new BaseAdapter(config): BaseAdapter; -``` - -Defined in: 
[base-adapter.ts:70](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L70) - -#### Parameters - -##### config - -[`AIAdapterConfig`](../interfaces/AIAdapterConfig.md) = `{}` - -#### Returns - -`BaseAdapter`\<`TChatModels`, `TEmbeddingModels`, `TChatProviderOptions`, `TEmbeddingProviderOptions`, `TModelProviderOptionsByName`, `TModelInputModalitiesByName`, `TMessageMetadataByModality`\> - -## Properties - -### \_chatProviderOptions? - -```ts -optional _chatProviderOptions: TChatProviderOptions; -``` - -Defined in: [base-adapter.ts:61](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L61) - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`_chatProviderOptions`](../interfaces/AIAdapter.md#_chatprovideroptions) - -*** - -### \_embeddingProviderOptions? - -```ts -optional _embeddingProviderOptions: TEmbeddingProviderOptions; -``` - -Defined in: [base-adapter.ts:62](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L62) - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`_embeddingProviderOptions`](../interfaces/AIAdapter.md#_embeddingprovideroptions) - -*** - -### \_messageMetadataByModality? - -```ts -optional _messageMetadataByModality: TMessageMetadataByModality; -``` - -Defined in: [base-adapter.ts:68](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L68) - -Type-only map from modality type to adapter-specific metadata types. -Used to provide type-safe autocomplete for metadata on content parts. - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`_messageMetadataByModality`](../interfaces/AIAdapter.md#_messagemetadatabymodality) - -*** - -### \_modelInputModalitiesByName? 
- -```ts -optional _modelInputModalitiesByName: TModelInputModalitiesByName; -``` - -Defined in: [base-adapter.ts:66](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L66) - -Type-only map from model name to its supported input modalities. -Used by the core AI types to narrow ContentPart types based on the selected model. -Must be provided by all adapters. - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`_modelInputModalitiesByName`](../interfaces/AIAdapter.md#_modelinputmodalitiesbyname) - -*** - -### \_modelProviderOptionsByName - -```ts -_modelProviderOptionsByName: TModelProviderOptionsByName; -``` - -Defined in: [base-adapter.ts:64](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L64) - -Type-only map from model name to its specific provider options. -Used by the core AI types to narrow providerOptions based on the selected model. -Must be provided by all adapters. - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`_modelProviderOptionsByName`](../interfaces/AIAdapter.md#_modelprovideroptionsbyname) - -*** - -### \_providerOptions? - -```ts -optional _providerOptions: TChatProviderOptions; -``` - -Defined in: [base-adapter.ts:60](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L60) - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`_providerOptions`](../interfaces/AIAdapter.md#_provideroptions) - -*** - -### config - -```ts -protected config: AIAdapterConfig; -``` - -Defined in: [base-adapter.ts:57](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L57) - -*** - -### embeddingModels? 
- -```ts -optional embeddingModels: TEmbeddingModels; -``` - -Defined in: [base-adapter.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L56) - -Models that support embeddings - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`embeddingModels`](../interfaces/AIAdapter.md#embeddingmodels) - -*** - -### models - -```ts -abstract models: TChatModels; -``` - -Defined in: [base-adapter.ts:55](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L55) - -Models that support chat/text completion - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`models`](../interfaces/AIAdapter.md#models) - -*** - -### name - -```ts -abstract name: string; -``` - -Defined in: [base-adapter.ts:54](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L54) - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`name`](../interfaces/AIAdapter.md#name) - -## Methods - -### chatStream() - -```ts -abstract chatStream(options): AsyncIterable; -``` - -Defined in: [base-adapter.ts:74](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L74) - -#### Parameters - -##### options - -[`ChatOptions`](../interfaces/ChatOptions.md) - -#### Returns - -`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`chatStream`](../interfaces/AIAdapter.md#chatstream) - -*** - -### createEmbeddings() - -```ts -abstract createEmbeddings(options): Promise; -``` - -Defined in: [base-adapter.ts:79](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L79) - -#### Parameters - -##### options - -[`EmbeddingOptions`](../interfaces/EmbeddingOptions.md) - -#### Returns - -`Promise`\<[`EmbeddingResult`](../interfaces/EmbeddingResult.md)\> - -#### Implementation of - 
-[`AIAdapter`](../interfaces/AIAdapter.md).[`createEmbeddings`](../interfaces/AIAdapter.md#createembeddings) - -*** - -### generateId() - -```ts -protected generateId(): string; -``` - -Defined in: [base-adapter.ts:81](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L81) - -#### Returns - -`string` - -*** - -### summarize() - -```ts -abstract summarize(options): Promise; -``` - -Defined in: [base-adapter.ts:76](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L76) - -#### Parameters - -##### options - -[`SummarizationOptions`](../interfaces/SummarizationOptions.md) - -#### Returns - -`Promise`\<[`SummarizationResult`](../interfaces/SummarizationResult.md)\> - -#### Implementation of - -[`AIAdapter`](../interfaces/AIAdapter.md).[`summarize`](../interfaces/AIAdapter.md#summarize) diff --git a/docs/reference/classes/BatchStrategy.md b/docs/reference/classes/BatchStrategy.md deleted file mode 100644 index a437b0aa..00000000 --- a/docs/reference/classes/BatchStrategy.md +++ /dev/null @@ -1,87 +0,0 @@ ---- -id: BatchStrategy -title: BatchStrategy ---- - -# Class: BatchStrategy - -Defined in: [stream/strategies.ts:34](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L34) - -Batch Strategy - emit every N chunks -Useful for reducing UI update frequency - -## Implements - -- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) - -## Constructors - -### Constructor - -```ts -new BatchStrategy(batchSize): BatchStrategy; -``` - -Defined in: [stream/strategies.ts:37](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L37) - -#### Parameters - -##### batchSize - -`number` = `5` - -#### Returns - -`BatchStrategy` - -## Methods - -### reset() - -```ts -reset(): void; -``` - -Defined in: [stream/strategies.ts:48](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L48) - -Optional: Reset strategy state 
(called when streaming starts) - -#### Returns - -`void` - -#### Implementation of - -[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`reset`](../interfaces/ChunkStrategy.md#reset) - -*** - -### shouldEmit() - -```ts -shouldEmit(_chunk, _accumulated): boolean; -``` - -Defined in: [stream/strategies.ts:39](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L39) - -Called for each text chunk received - -#### Parameters - -##### \_chunk - -`string` - -##### \_accumulated - -`string` - -#### Returns - -`boolean` - -true if an update should be emitted now - -#### Implementation of - -[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/classes/CompositeStrategy.md b/docs/reference/classes/CompositeStrategy.md deleted file mode 100644 index 5c6f71e3..00000000 --- a/docs/reference/classes/CompositeStrategy.md +++ /dev/null @@ -1,91 +0,0 @@ ---- -id: CompositeStrategy -title: CompositeStrategy ---- - -# Class: CompositeStrategy - -Defined in: [stream/strategies.ts:68](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L68) - -Composite Strategy - combine multiple strategies (OR logic) -Emits if ANY strategy says to emit - -## Implements - -- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) - -## Constructors - -### Constructor - -```ts -new CompositeStrategy(strategies): CompositeStrategy; -``` - -Defined in: [stream/strategies.ts:69](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L69) - -#### Parameters - -##### strategies - -[`ChunkStrategy`](../interfaces/ChunkStrategy.md)[] - -#### Returns - -`CompositeStrategy` - -## Methods - -### reset() - -```ts -reset(): void; -``` - -Defined in: [stream/strategies.ts:75](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L75) - -Optional: Reset strategy state (called when streaming starts) - 
-#### Returns - -`void` - -#### Implementation of - -[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`reset`](../interfaces/ChunkStrategy.md#reset) - -*** - -### shouldEmit() - -```ts -shouldEmit(chunk, accumulated): boolean; -``` - -Defined in: [stream/strategies.ts:71](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L71) - -Called for each text chunk received - -#### Parameters - -##### chunk - -`string` - -The new chunk of text (delta) - -##### accumulated - -`string` - -All text accumulated so far - -#### Returns - -`boolean` - -true if an update should be emitted now - -#### Implementation of - -[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/classes/ImmediateStrategy.md b/docs/reference/classes/ImmediateStrategy.md deleted file mode 100644 index fcf89ea0..00000000 --- a/docs/reference/classes/ImmediateStrategy.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -id: ImmediateStrategy -title: ImmediateStrategy ---- - -# Class: ImmediateStrategy - -Defined in: [stream/strategies.ts:12](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L12) - -Immediate Strategy - emit on every chunk (default behavior) - -## Implements - -- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) - -## Constructors - -### Constructor - -```ts -new ImmediateStrategy(): ImmediateStrategy; -``` - -#### Returns - -`ImmediateStrategy` - -## Methods - -### shouldEmit() - -```ts -shouldEmit(_chunk, _accumulated): boolean; -``` - -Defined in: [stream/strategies.ts:13](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L13) - -Called for each text chunk received - -#### Parameters - -##### \_chunk - -`string` - -##### \_accumulated - -`string` - -#### Returns - -`boolean` - -true if an update should be emitted now - -#### Implementation of - 
-[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/classes/PartialJSONParser.md b/docs/reference/classes/PartialJSONParser.md deleted file mode 100644 index 59fdcfb0..00000000 --- a/docs/reference/classes/PartialJSONParser.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -id: PartialJSONParser -title: PartialJSONParser ---- - -# Class: PartialJSONParser - -Defined in: [stream/json-parser.ts:25](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L25) - -Partial JSON Parser implementation using the partial-json library -This parser can handle incomplete JSON strings during streaming - -## Implements - -- [`JSONParser`](../interfaces/JSONParser.md) - -## Constructors - -### Constructor - -```ts -new PartialJSONParser(): PartialJSONParser; -``` - -#### Returns - -`PartialJSONParser` - -## Methods - -### parse() - -```ts -parse(jsonString): any; -``` - -Defined in: [stream/json-parser.ts:31](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L31) - -Parse a potentially incomplete JSON string - -#### Parameters - -##### jsonString - -`string` - -The JSON string to parse (may be incomplete) - -#### Returns - -`any` - -The parsed object, or undefined if parsing fails - -#### Implementation of - -[`JSONParser`](../interfaces/JSONParser.md).[`parse`](../interfaces/JSONParser.md#parse) diff --git a/docs/reference/classes/PunctuationStrategy.md b/docs/reference/classes/PunctuationStrategy.md deleted file mode 100644 index f5e1ebe4..00000000 --- a/docs/reference/classes/PunctuationStrategy.md +++ /dev/null @@ -1,61 +0,0 @@ ---- -id: PunctuationStrategy -title: PunctuationStrategy ---- - -# Class: PunctuationStrategy - -Defined in: [stream/strategies.ts:22](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L22) - -Punctuation Strategy - emit when chunk contains punctuation -Useful for 
natural text flow in UI - -## Implements - -- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) - -## Constructors - -### Constructor - -```ts -new PunctuationStrategy(): PunctuationStrategy; -``` - -#### Returns - -`PunctuationStrategy` - -## Methods - -### shouldEmit() - -```ts -shouldEmit(chunk, _accumulated): boolean; -``` - -Defined in: [stream/strategies.ts:25](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L25) - -Called for each text chunk received - -#### Parameters - -##### chunk - -`string` - -The new chunk of text (delta) - -##### \_accumulated - -`string` - -#### Returns - -`boolean` - -true if an update should be emitted now - -#### Implementation of - -[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/classes/StreamProcessor.md b/docs/reference/classes/StreamProcessor.md deleted file mode 100644 index 8acdf904..00000000 --- a/docs/reference/classes/StreamProcessor.md +++ /dev/null @@ -1,407 +0,0 @@ ---- -id: StreamProcessor -title: StreamProcessor ---- - -# Class: StreamProcessor - -Defined in: [stream/processor.ts:171](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L171) - -StreamProcessor - State machine for processing AI response streams - -Manages the full UIMessage[] conversation and emits events on changes. - -State tracking: -- Full message array -- Current assistant message being streamed -- Text content accumulation -- Multiple parallel tool calls -- Tool call completion detection - -Tool call completion is detected when: -1. A new tool call starts at a different index -2. Text content arrives -3. 
Stream ends - -## Constructors - -### Constructor - -```ts -new StreamProcessor(options): StreamProcessor; -``` - -Defined in: [stream/processor.ts:200](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L200) - -#### Parameters - -##### options - -[`StreamProcessorOptions`](../interfaces/StreamProcessorOptions.md) = `{}` - -#### Returns - -`StreamProcessor` - -## Methods - -### addToolApprovalResponse() - -```ts -addToolApprovalResponse(approvalId, approved): void; -``` - -Defined in: [stream/processor.ts:314](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L314) - -Add an approval response (called by client after handling onApprovalRequest) - -#### Parameters - -##### approvalId - -`string` - -##### approved - -`boolean` - -#### Returns - -`void` - -*** - -### addToolResult() - -```ts -addToolResult( - toolCallId, - output, - error?): void; -``` - -Defined in: [stream/processor.ts:270](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L270) - -Add a tool result (called by client after handling onToolCall) - -#### Parameters - -##### toolCallId - -`string` - -##### output - -`any` - -##### error? 
- -`string` - -#### Returns - -`void` - -*** - -### addUserMessage() - -```ts -addUserMessage(content): UIMessage; -``` - -Defined in: [stream/processor.ts:228](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L228) - -Add a user message to the conversation - -#### Parameters - -##### content - -`string` - -#### Returns - -[`UIMessage`](../interfaces/UIMessage.md) - -*** - -### areAllToolsComplete() - -```ts -areAllToolsComplete(): boolean; -``` - -Defined in: [stream/processor.ts:345](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L345) - -Check if all tool calls in the last assistant message are complete -Useful for auto-continue logic - -#### Returns - -`boolean` - -*** - -### clearMessages() - -```ts -clearMessages(): void; -``` - -Defined in: [stream/processor.ts:377](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L377) - -Clear all messages - -#### Returns - -`void` - -*** - -### finalizeStream() - -```ts -finalizeStream(): void; -``` - -Defined in: [stream/processor.ts:951](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L951) - -Finalize the stream - complete all pending operations - -#### Returns - -`void` - -*** - -### getMessages() - -```ts -getMessages(): UIMessage[]; -``` - -Defined in: [stream/processor.ts:337](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L337) - -Get current messages - -#### Returns - -[`UIMessage`](../interfaces/UIMessage.md)[] - -*** - -### getRecording() - -```ts -getRecording(): ChunkRecording | null; -``` - -Defined in: [stream/processor.ts:1037](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1037) - -Get the current recording - -#### Returns - -[`ChunkRecording`](../interfaces/ChunkRecording.md) \| `null` - -*** - -### getState() - -```ts -getState(): ProcessorState; -``` - -Defined in: 
[stream/processor.ts:1010](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1010) - -Get current processor state (legacy) - -#### Returns - -[`ProcessorState`](../interfaces/ProcessorState.md) - -*** - -### process() - -```ts -process(stream): Promise; -``` - -Defined in: [stream/processor.ts:390](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L390) - -Process a stream and emit events through handlers - -#### Parameters - -##### stream - -`AsyncIterable`\<`any`\> - -#### Returns - -`Promise`\<[`ProcessorResult`](../interfaces/ProcessorResult.md)\> - -*** - -### processChunk() - -```ts -processChunk(chunk): void; -``` - -Defined in: [stream/processor.ts:418](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L418) - -Process a single chunk from the stream - -#### Parameters - -##### chunk - -[`StreamChunk`](../type-aliases/StreamChunk.md) - -#### Returns - -`void` - -*** - -### removeMessagesAfter() - -```ts -removeMessagesAfter(index): void; -``` - -Defined in: [stream/processor.ts:369](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L369) - -Remove messages after a certain index (for reload/retry) - -#### Parameters - -##### index - -`number` - -#### Returns - -`void` - -*** - -### reset() - -```ts -reset(): void; -``` - -Defined in: [stream/processor.ts:1060](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1060) - -Full reset (including messages) - -#### Returns - -`void` - -*** - -### setMessages() - -```ts -setMessages(messages): void; -``` - -Defined in: [stream/processor.ts:220](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L220) - -Set the messages array (e.g., from persisted state) - -#### Parameters - -##### messages - -[`UIMessage`](../interfaces/UIMessage.md)[] - -#### Returns - -`void` - -*** - -### 
startAssistantMessage() - -```ts -startAssistantMessage(): string; -``` - -Defined in: [stream/processor.ts:246](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L246) - -Start streaming a new assistant message -Returns the message ID - -#### Returns - -`string` - -*** - -### startRecording() - -```ts -startRecording(): void; -``` - -Defined in: [stream/processor.ts:1024](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1024) - -Start recording chunks - -#### Returns - -`void` - -*** - -### toModelMessages() - -```ts -toModelMessages(): ModelMessage< - | string - | ContentPart[] - | null>[]; -``` - -Defined in: [stream/processor.ts:326](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L326) - -Get the conversation as ModelMessages (for sending to LLM) - -#### Returns - -[`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] - -*** - -### replay() - -```ts -static replay(recording, options?): Promise; -``` - -Defined in: [stream/processor.ts:1069](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1069) - -Replay a recording through the processor - -#### Parameters - -##### recording - -[`ChunkRecording`](../interfaces/ChunkRecording.md) - -##### options? 
- -[`StreamProcessorOptions`](../interfaces/StreamProcessorOptions.md) - -#### Returns - -`Promise`\<[`ProcessorResult`](../interfaces/ProcessorResult.md)\> diff --git a/docs/reference/classes/ToolCallManager.md b/docs/reference/classes/ToolCallManager.md deleted file mode 100644 index e0ae159c..00000000 --- a/docs/reference/classes/ToolCallManager.md +++ /dev/null @@ -1,196 +0,0 @@ ---- -id: ToolCallManager -title: ToolCallManager ---- - -# Class: ToolCallManager - -Defined in: [tools/tool-calls.ts:41](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L41) - -Manages tool call accumulation and execution for the chat() method's automatic tool execution loop. - -Responsibilities: -- Accumulates streaming tool call chunks (ID, name, arguments) -- Validates tool calls (filters out incomplete ones) -- Executes tool `execute` functions with parsed arguments -- Emits `tool_result` chunks for client visibility -- Returns tool result messages for conversation history - -This class is used internally by the AI.chat() method to handle the automatic -tool execution loop. It can also be used independently for custom tool execution logic. 
- -## Example - -```typescript -const manager = new ToolCallManager(tools); - -// During streaming, accumulate tool calls -for await (const chunk of stream) { - if (chunk.type === "tool_call") { - manager.addToolCallChunk(chunk); - } -} - -// After stream completes, execute tools -if (manager.hasToolCalls()) { - const toolResults = yield* manager.executeTools(doneChunk); - messages = [...messages, ...toolResults]; - manager.clear(); -} -``` - -## Constructors - -### Constructor - -```ts -new ToolCallManager(tools): ToolCallManager; -``` - -Defined in: [tools/tool-calls.ts:45](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L45) - -#### Parameters - -##### tools - -readonly [`Tool`](../interfaces/Tool.md)\<`ZodType`\<`unknown`, `unknown`, `$ZodTypeInternals`\<`unknown`, `unknown`\>\>, `ZodType`\<`unknown`, `unknown`, `$ZodTypeInternals`\<`unknown`, `unknown`\>\>, `string`\>[] - -#### Returns - -`ToolCallManager` - -## Methods - -### addToolCallChunk() - -```ts -addToolCallChunk(chunk): void; -``` - -Defined in: [tools/tool-calls.ts:53](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L53) - -Add a tool call chunk to the accumulator -Handles streaming tool calls by accumulating arguments - -#### Parameters - -##### chunk - -###### index - -`number` - -###### toolCall - -\{ - `function`: \{ - `arguments`: `string`; - `name`: `string`; - \}; - `id`: `string`; - `type`: `"function"`; -\} - -###### toolCall.function - -\{ - `arguments`: `string`; - `name`: `string`; -\} - -###### toolCall.function.arguments - -`string` - -###### toolCall.function.name - -`string` - -###### toolCall.id - -`string` - -###### toolCall.type - -`"function"` - -#### Returns - -`void` - -*** - -### clear() - -```ts -clear(): void; -``` - -Defined in: [tools/tool-calls.ts:193](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L193) - -Clear the tool calls map for the next 
iteration - -#### Returns - -`void` - -*** - -### executeTools() - -```ts -executeTools(doneChunk): AsyncGenerator[] -| null>[], void>; -``` - -Defined in: [tools/tool-calls.ts:111](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L111) - -Execute all tool calls and return tool result messages -Also yields tool_result chunks for streaming - -#### Parameters - -##### doneChunk - -[`DoneStreamChunk`](../interfaces/DoneStreamChunk.md) - -#### Returns - -`AsyncGenerator`\<[`ToolResultStreamChunk`](../interfaces/ToolResultStreamChunk.md), [`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[], `void`\> - -*** - -### getToolCalls() - -```ts -getToolCalls(): ToolCall[]; -``` - -Defined in: [tools/tool-calls.ts:101](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L101) - -Get all complete tool calls (filtered for valid ID and name) - -#### Returns - -[`ToolCall`](../interfaces/ToolCall.md)[] - -*** - -### hasToolCalls() - -```ts -hasToolCalls(): boolean; -``` - -Defined in: [tools/tool-calls.ts:94](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L94) - -Check if there are any complete tool calls to execute - -#### Returns - -`boolean` diff --git a/docs/reference/classes/WordBoundaryStrategy.md b/docs/reference/classes/WordBoundaryStrategy.md deleted file mode 100644 index 985ce4f6..00000000 --- a/docs/reference/classes/WordBoundaryStrategy.md +++ /dev/null @@ -1,61 +0,0 @@ ---- -id: WordBoundaryStrategy -title: WordBoundaryStrategy ---- - -# Class: WordBoundaryStrategy - -Defined in: [stream/strategies.ts:57](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L57) - -Word Boundary Strategy - emit at word boundaries -Prevents cutting words in half - -## Implements - -- 
[`ChunkStrategy`](../interfaces/ChunkStrategy.md) - -## Constructors - -### Constructor - -```ts -new WordBoundaryStrategy(): WordBoundaryStrategy; -``` - -#### Returns - -`WordBoundaryStrategy` - -## Methods - -### shouldEmit() - -```ts -shouldEmit(chunk, _accumulated): boolean; -``` - -Defined in: [stream/strategies.ts:58](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L58) - -Called for each text chunk received - -#### Parameters - -##### chunk - -`string` - -The new chunk of text (delta) - -##### \_accumulated - -`string` - -#### Returns - -`boolean` - -true if an update should be emitted now - -#### Implementation of - -[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/functions/chat.md b/docs/reference/functions/chat.md deleted file mode 100644 index 16934d76..00000000 --- a/docs/reference/functions/chat.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -id: chat -title: chat ---- - -# Function: chat() - -```ts -function chat(options): AsyncIterable; -``` - -Defined in: [core/chat.ts:741](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/core/chat.ts#L741) - -Standalone chat streaming function with type inference from adapter -Returns an async iterable of StreamChunks for streaming responses -Includes automatic tool execution loop - -## Type Parameters - -### TAdapter - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `any`, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> - -### TModel - -`TModel` *extends* `any` - -## Parameters - -### options - -[`ChatStreamOptionsForModel`](../type-aliases/ChatStreamOptionsForModel.md)\<`TAdapter`, `TModel`\> - -Chat options - -## Returns - -`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> - -## Example - -```typescript -const stream = chat({ - adapter: openai(), - model: 'gpt-4o', 
- messages: [{ role: 'user', content: 'Hello!' }], - tools: [weatherTool], // Optional: auto-executed when called -}); - -for await (const chunk of stream) { - if (chunk.type === 'content') { - console.log(chunk.delta); - } -} -``` diff --git a/docs/reference/functions/chatOptions.md b/docs/reference/functions/chatOptions.md deleted file mode 100644 index d776680b..00000000 --- a/docs/reference/functions/chatOptions.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -id: chatOptions -title: chatOptions ---- - -# Function: chatOptions() - -```ts -function chatOptions(options): Omit, "model" | "providerOptions" | "messages" | "abortController"> & object; -``` - -Defined in: [utilities/chat-options.ts:3](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/chat-options.ts#L3) - -## Type Parameters - -### TAdapter - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `Record`\<`string`, readonly [`Modality`](../type-aliases/Modality.md)[]\>, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> - -### TModel - -`TModel` *extends* `any` - -## Parameters - -### options - -`Omit`\<[`ChatStreamOptionsUnion`](../type-aliases/ChatStreamOptionsUnion.md)\<`TAdapter`\>, `"model"` \| `"providerOptions"` \| `"messages"` \| `"abortController"`\> & `object` - -## Returns - -`Omit`\<[`ChatStreamOptionsUnion`](../type-aliases/ChatStreamOptionsUnion.md)\<`TAdapter`\>, `"model"` \| `"providerOptions"` \| `"messages"` \| `"abortController"`\> & `object` diff --git a/docs/reference/functions/combineStrategies.md b/docs/reference/functions/combineStrategies.md deleted file mode 100644 index 454a0f33..00000000 --- a/docs/reference/functions/combineStrategies.md +++ /dev/null @@ -1,44 +0,0 @@ ---- -id: combineStrategies -title: combineStrategies ---- - -# Function: combineStrategies() - -```ts -function combineStrategies(strategies): AgentLoopStrategy; -``` - -Defined in: 
[utilities/agent-loop-strategies.ts:79](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/agent-loop-strategies.ts#L79) - -Creates a strategy that combines multiple strategies with AND logic -All strategies must return true to continue - -## Parameters - -### strategies - -[`AgentLoopStrategy`](../type-aliases/AgentLoopStrategy.md)[] - -Array of strategies to combine - -## Returns - -[`AgentLoopStrategy`](../type-aliases/AgentLoopStrategy.md) - -AgentLoopStrategy that continues only if all strategies return true - -## Example - -```typescript -const stream = chat({ - adapter: openai(), - model: "gpt-4o", - messages: [...], - tools: [weatherTool], - agentLoopStrategy: combineStrategies([ - maxIterations(10), - ({ messages }) => messages.length < 100, - ]), -}); -``` diff --git a/docs/reference/functions/convertMessagesToModelMessages.md b/docs/reference/functions/convertMessagesToModelMessages.md deleted file mode 100644 index 00224ace..00000000 --- a/docs/reference/functions/convertMessagesToModelMessages.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -id: convertMessagesToModelMessages -title: convertMessagesToModelMessages ---- - -# Function: convertMessagesToModelMessages() - -```ts -function convertMessagesToModelMessages(messages): ModelMessage< - | string - | ContentPart[] - | null>[]; -``` - -Defined in: [message-converters.ts:38](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L38) - -Convert UIMessages or ModelMessages to ModelMessages - -## Parameters - -### messages - -( - \| [`UIMessage`](../interfaces/UIMessage.md) - \| [`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>)[] - -## Returns - -[`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, 
`unknown`\>[] - \| `null`\>[] diff --git a/docs/reference/functions/convertZodToJsonSchema.md b/docs/reference/functions/convertZodToJsonSchema.md deleted file mode 100644 index 8b0526ab..00000000 --- a/docs/reference/functions/convertZodToJsonSchema.md +++ /dev/null @@ -1,50 +0,0 @@ ---- -id: convertZodToJsonSchema -title: convertZodToJsonSchema ---- - -# Function: convertZodToJsonSchema() - -```ts -function convertZodToJsonSchema(schema): Record | undefined; -``` - -Defined in: [tools/zod-converter.ts:31](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/zod-converter.ts#L31) - -Converts a Zod schema to JSON Schema format compatible with LLM providers. - -## Parameters - -### schema - -Zod schema to convert - -`ZodType`\<`unknown`, `unknown`, `$ZodTypeInternals`\<`unknown`, `unknown`\>\> | `undefined` - -## Returns - -`Record`\<`string`, `any`\> \| `undefined` - -JSON Schema object that can be sent to LLM providers - -## Example - -```typescript -import { z } from 'zod'; - -const schema = z.object({ - location: z.string().describe('City name'), - unit: z.enum(['celsius', 'fahrenheit']).optional() -}); - -const jsonSchema = convertZodToJsonSchema(schema); -// Returns: -// { -// type: 'object', -// properties: { -// location: { type: 'string', description: 'City name' }, -// unit: { type: 'string', enum: ['celsius', 'fahrenheit'] } -// }, -// required: ['location'] -// } -``` diff --git a/docs/reference/functions/createReplayStream.md b/docs/reference/functions/createReplayStream.md deleted file mode 100644 index 39ec2a49..00000000 --- a/docs/reference/functions/createReplayStream.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -id: createReplayStream -title: createReplayStream ---- - -# Function: createReplayStream() - -```ts -function createReplayStream(recording): AsyncIterable; -``` - -Defined in: [stream/processor.ts:1081](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1081) - -Create an async iterable 
from a recording - -## Parameters - -### recording - -[`ChunkRecording`](../interfaces/ChunkRecording.md) - -## Returns - -`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> diff --git a/docs/reference/functions/embedding.md b/docs/reference/functions/embedding.md deleted file mode 100644 index 058e3ff4..00000000 --- a/docs/reference/functions/embedding.md +++ /dev/null @@ -1,30 +0,0 @@ ---- -id: embedding -title: embedding ---- - -# Function: embedding() - -```ts -function embedding(options): Promise; -``` - -Defined in: [core/embedding.ts:16](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/core/embedding.ts#L16) - -Standalone embedding function with type inference from adapter - -## Type Parameters - -### TAdapter - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `Record`\<`string`, readonly [`Modality`](../type-aliases/Modality.md)[]\>, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> - -## Parameters - -### options - -`Omit`\<[`EmbeddingOptions`](../interfaces/EmbeddingOptions.md), `"model"`\> & `object` - -## Returns - -`Promise`\<[`EmbeddingResult`](../interfaces/EmbeddingResult.md)\> diff --git a/docs/reference/functions/generateMessageId.md b/docs/reference/functions/generateMessageId.md deleted file mode 100644 index 44568ce3..00000000 --- a/docs/reference/functions/generateMessageId.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -id: generateMessageId -title: generateMessageId ---- - -# Function: generateMessageId() - -```ts -function generateMessageId(): string; -``` - -Defined in: [message-converters.ts:283](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L283) - -Generate a unique message ID - -## Returns - -`string` diff --git a/docs/reference/functions/maxIterations.md b/docs/reference/functions/maxIterations.md deleted file mode 100644 index 1ab98cda..00000000 --- 
a/docs/reference/functions/maxIterations.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: maxIterations -title: maxIterations ---- - -# Function: maxIterations() - -```ts -function maxIterations(max): AgentLoopStrategy; -``` - -Defined in: [utilities/agent-loop-strategies.ts:20](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/agent-loop-strategies.ts#L20) - -Creates a strategy that continues for a maximum number of iterations - -## Parameters - -### max - -`number` - -Maximum number of iterations to allow - -## Returns - -[`AgentLoopStrategy`](../type-aliases/AgentLoopStrategy.md) - -AgentLoopStrategy that stops after max iterations - -## Example - -```typescript -const stream = chat({ - adapter: openai(), - model: "gpt-4o", - messages: [...], - tools: [weatherTool], - agentLoopStrategy: maxIterations(3), // Max 3 iterations -}); -``` diff --git a/docs/reference/functions/messages.md b/docs/reference/functions/messages.md deleted file mode 100644 index b426dc57..00000000 --- a/docs/reference/functions/messages.md +++ /dev/null @@ -1,94 +0,0 @@ ---- -id: messages -title: messages ---- - -# Function: messages() - -```ts -function messages(_options, msgs): TAdapter extends AIAdapter ? TModel extends keyof ModelInputModalities ? ModelInputModalities[TModel] extends readonly Modality[] ? ConstrainedModelMessage[] : ModelMessage< - | string - | ContentPart[] - | null>[] : ModelMessage< - | string - | ContentPart[] - | null>[] : ModelMessage< - | string - | ContentPart[] - | null>[]; -``` - -Defined in: [utilities/messages.ts:33](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/messages.ts#L33) - -Type-safe helper to create a messages array constrained by a model's supported modalities. - -This function provides compile-time checking that your messages only contain -content types supported by the specified model. It's particularly useful when -combining typed messages with untyped data (like from request.json()). 
- -## Type Parameters - -### TAdapter - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `any`, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> - -### TModel - -`TModel` *extends* `any` - -## Parameters - -### \_options - -#### adapter - -`TAdapter` - -#### model - -`TModel` - -### msgs - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `ModelInputModalities`, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> ? `TModel` *extends* keyof `ModelInputModalities` ? `ModelInputModalities`\[`TModel`\<`TModel`\>\] *extends* readonly [`Modality`](../type-aliases/Modality.md)[] ? [`ConstrainedModelMessage`](../type-aliases/ConstrainedModelMessage.md)\<`any`\[`any`\]\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] - -## Returns - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `ModelInputModalities`, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> ? `TModel` *extends* keyof `ModelInputModalities` ? `ModelInputModalities`\[`TModel`\<`TModel`\>\] *extends* readonly [`Modality`](../type-aliases/Modality.md)[] ? 
[`ConstrainedModelMessage`](../type-aliases/ConstrainedModelMessage.md)\<`any`\[`any`\]\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] - -## Example - -```typescript -import { messages, chat } from '@tanstack/ai' -import { openai } from '@tanstack/ai-openai' - -const adapter = openai() - -// This will error at compile time because gpt-4o only supports text+image -const msgs = messages({ adapter, model: 'gpt-4o' }, [ - { - role: 'user', - content: [ - { type: 'video', source: { type: 'url', value: '...' } } // Error! 
- ] - } -]) -``` diff --git a/docs/reference/functions/modelMessageToUIMessage.md b/docs/reference/functions/modelMessageToUIMessage.md deleted file mode 100644 index 02e80f8d..00000000 --- a/docs/reference/functions/modelMessageToUIMessage.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -id: modelMessageToUIMessage -title: modelMessageToUIMessage ---- - -# Function: modelMessageToUIMessage() - -```ts -function modelMessageToUIMessage(modelMessage, id?): UIMessage; -``` - -Defined in: [message-converters.ts:158](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L158) - -Convert a ModelMessage to UIMessage - -This conversion creates a parts-based structure: -- content field → TextPart -- toolCalls array → ToolCallPart[] -- role="tool" messages should be converted separately and merged - -## Parameters - -### modelMessage - -[`ModelMessage`](../interfaces/ModelMessage.md) - -The ModelMessage to convert - -### id? - -`string` - -Optional ID for the UIMessage (generated if not provided) - -## Returns - -[`UIMessage`](../interfaces/UIMessage.md) - -A UIMessage with parts diff --git a/docs/reference/functions/modelMessagesToUIMessages.md b/docs/reference/functions/modelMessagesToUIMessages.md deleted file mode 100644 index dd50df71..00000000 --- a/docs/reference/functions/modelMessagesToUIMessages.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -id: modelMessagesToUIMessages -title: modelMessagesToUIMessages ---- - -# Function: modelMessagesToUIMessages() - -```ts -function modelMessagesToUIMessages(modelMessages): UIMessage[]; -``` - -Defined in: [message-converters.ts:211](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L211) - -Convert an array of ModelMessages to UIMessages - -This handles merging tool result messages with their corresponding assistant messages - -## Parameters - -### modelMessages - -[`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| 
[`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] - -Array of ModelMessages to convert - -## Returns - -[`UIMessage`](../interfaces/UIMessage.md)[] - -Array of UIMessages diff --git a/docs/reference/functions/normalizeToUIMessage.md b/docs/reference/functions/normalizeToUIMessage.md deleted file mode 100644 index a42310e3..00000000 --- a/docs/reference/functions/normalizeToUIMessage.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -id: normalizeToUIMessage -title: normalizeToUIMessage ---- - -# Function: normalizeToUIMessage() - -```ts -function normalizeToUIMessage(message, generateId): UIMessage; -``` - -Defined in: [message-converters.ts:260](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L260) - -Normalize a message (UIMessage or ModelMessage) to a UIMessage -Ensures the message has an ID and createdAt timestamp - -## Parameters - -### message - -Either a UIMessage or ModelMessage - -[`UIMessage`](../interfaces/UIMessage.md) | [`ModelMessage`](../interfaces/ModelMessage.md)\< -\| `string` -\| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] -\| `null`\> - -### generateId - -() => `string` - -Function to generate a message ID if needed - -## Returns - -[`UIMessage`](../interfaces/UIMessage.md) - -A UIMessage with guaranteed id and createdAt diff --git a/docs/reference/functions/parsePartialJSON.md b/docs/reference/functions/parsePartialJSON.md deleted file mode 100644 index c3fb3806..00000000 --- a/docs/reference/functions/parsePartialJSON.md +++ /dev/null @@ -1,28 +0,0 @@ ---- -id: parsePartialJSON -title: parsePartialJSON ---- - -# Function: parsePartialJSON() - -```ts -function parsePartialJSON(jsonString): any; -``` - -Defined in: [stream/json-parser.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L56) - -Parse partial JSON string (convenience 
function) - -## Parameters - -### jsonString - -`string` - -The JSON string to parse (may be incomplete) - -## Returns - -`any` - -The parsed object, or undefined if parsing fails diff --git a/docs/reference/functions/summarize.md b/docs/reference/functions/summarize.md deleted file mode 100644 index 2ae9aead..00000000 --- a/docs/reference/functions/summarize.md +++ /dev/null @@ -1,30 +0,0 @@ ---- -id: summarize -title: summarize ---- - -# Function: summarize() - -```ts -function summarize(options): Promise; -``` - -Defined in: [core/summarize.ts:16](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/core/summarize.ts#L16) - -Standalone summarize function with type inference from adapter - -## Type Parameters - -### TAdapter - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `Record`\<`string`, readonly [`Modality`](../type-aliases/Modality.md)[]\>, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> - -## Parameters - -### options - -`Omit`\<[`SummarizationOptions`](../interfaces/SummarizationOptions.md), `"model"`\> & `object` - -## Returns - -`Promise`\<[`SummarizationResult`](../interfaces/SummarizationResult.md)\> diff --git a/docs/reference/functions/toServerSentEventsStream.md b/docs/reference/functions/toServerSentEventsStream.md deleted file mode 100644 index 65582450..00000000 --- a/docs/reference/functions/toServerSentEventsStream.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -id: toServerSentEventsStream -title: toServerSentEventsStream ---- - -# Function: toServerSentEventsStream() - -```ts -function toServerSentEventsStream(stream, abortController?): ReadableStream>; -``` - -Defined in: [utilities/stream-to-response.ts:22](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/stream-to-response.ts#L22) - -Convert a StreamChunk async iterable to a ReadableStream in Server-Sent Events format - -This creates a ReadableStream that 
emits chunks in SSE format: -- Each chunk is prefixed with "data: " -- Each chunk is followed by "\n\n" -- Stream ends with "data: [DONE]\n\n" - -## Parameters - -### stream - -`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> - -AsyncIterable of StreamChunks from chat() - -### abortController? - -`AbortController` - -Optional AbortController to abort when stream is cancelled - -## Returns - -`ReadableStream`\<`Uint8Array`\<`ArrayBufferLike`\>\> - -ReadableStream in Server-Sent Events format - -## Example - -```typescript -const stream = chat({ adapter: openai(), model: "gpt-4o", messages: [...] }); -const readableStream = toServerSentEventsStream(stream); -// Use with Response, or any API that accepts ReadableStream -``` diff --git a/docs/reference/functions/toStreamResponse.md b/docs/reference/functions/toStreamResponse.md deleted file mode 100644 index 0753057c..00000000 --- a/docs/reference/functions/toStreamResponse.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -id: toStreamResponse -title: toStreamResponse ---- - -# Function: toStreamResponse() - -```ts -function toStreamResponse(stream, init?): Response; -``` - -Defined in: [utilities/stream-to-response.ts:102](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/stream-to-response.ts#L102) - -Create a streaming HTTP response from a StreamChunk async iterable -Includes proper headers for Server-Sent Events - -## Parameters - -### stream - -`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> - -AsyncIterable of StreamChunks from chat() - -### init? 
- -`ResponseInit` & `object` - -Optional Response initialization options - -## Returns - -`Response` - -Response object with SSE headers and streaming body - -## Example - -```typescript -export async function POST(request: Request) { - const { messages } = await request.json(); - const abortController = new AbortController(); - const stream = chat({ - adapter: openai(), - model: "gpt-4o", - messages, - options: { abortSignal: abortController.signal } - }); - return toStreamResponse(stream, undefined, abortController); -} -``` diff --git a/docs/reference/functions/toolDefinition.md b/docs/reference/functions/toolDefinition.md deleted file mode 100644 index 46017b07..00000000 --- a/docs/reference/functions/toolDefinition.md +++ /dev/null @@ -1,87 +0,0 @@ ---- -id: toolDefinition -title: toolDefinition ---- - -# Function: toolDefinition() - -```ts -function toolDefinition(config): ToolDefinition; -``` - -Defined in: [tools/tool-definition.ts:170](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L170) - -Create an isomorphic tool definition that can be used directly or instantiated for server/client - -The definition contains all tool metadata (name, description, schemas) and can be: -1. Used directly in chat() on the server (as a tool definition without execute) -2. Instantiated as a server tool with .server() -3. 
Instantiated as a client tool with .client() - -## Type Parameters - -### TInput - -`TInput` *extends* `ZodType`\<`unknown`, `unknown`, `$ZodTypeInternals`\<`unknown`, `unknown`\>\> = `ZodAny` - -### TOutput - -`TOutput` *extends* `ZodType`\<`unknown`, `unknown`, `$ZodTypeInternals`\<`unknown`, `unknown`\>\> = `ZodAny` - -### TName - -`TName` *extends* `string` = `string` - -## Parameters - -### config - -[`ToolDefinitionConfig`](../interfaces/ToolDefinitionConfig.md)\<`TInput`, `TOutput`, `TName`\> - -## Returns - -[`ToolDefinition`](../interfaces/ToolDefinition.md)\<`TInput`, `TOutput`, `TName`\> - -## Example - -```typescript -import { toolDefinition } from '@tanstack/ai'; -import { z } from 'zod'; - -const addToCartTool = toolDefinition({ - name: 'addToCart', - description: 'Add a guitar to the shopping cart (requires approval)', - needsApproval: true, - inputSchema: z.object({ - guitarId: z.string(), - quantity: z.number(), - }), - outputSchema: z.object({ - success: z.boolean(), - cartId: z.string(), - totalItems: z.number(), - }), -}); - -// Use directly in chat (server-side, no execute function) -chat({ - tools: [addToCartTool], - // ... 
-}); - -// Or create server-side implementation -const addToCartServer = addToCartTool.server(async (args) => { - // args is typed as { guitarId: string; quantity: number } - return { - success: true, - cartId: 'CART_' + Date.now(), - totalItems: args.quantity, - }; -}); - -// Or create client-side implementation -const addToCartClient = addToCartTool.client(async (args) => { - // Client-specific logic (e.g., localStorage) - return { success: true, cartId: 'local', totalItems: 1 }; -}); -``` diff --git a/docs/reference/functions/uiMessageToModelMessages.md b/docs/reference/functions/uiMessageToModelMessages.md deleted file mode 100644 index 9b295ad6..00000000 --- a/docs/reference/functions/uiMessageToModelMessages.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -id: uiMessageToModelMessages -title: uiMessageToModelMessages ---- - -# Function: uiMessageToModelMessages() - -```ts -function uiMessageToModelMessages(uiMessage): ModelMessage< - | string - | ContentPart[] - | null>[]; -``` - -Defined in: [message-converters.ts:65](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L65) - -Convert a UIMessage to ModelMessage(s) - -This conversion handles the parts-based structure: -- Text parts → content field -- ToolCall parts → toolCalls array -- ToolResult parts → separate role="tool" messages - -## Parameters - -### uiMessage - -[`UIMessage`](../interfaces/UIMessage.md) - -The UIMessage to convert - -## Returns - -[`ModelMessage`](../interfaces/ModelMessage.md)\< - \| `string` - \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] - \| `null`\>[] - -An array of ModelMessages (may be multiple if tool results are present) diff --git a/docs/reference/functions/untilFinishReason.md b/docs/reference/functions/untilFinishReason.md deleted file mode 100644 index 2522b3f0..00000000 --- a/docs/reference/functions/untilFinishReason.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: untilFinishReason 
-title: untilFinishReason ---- - -# Function: untilFinishReason() - -```ts -function untilFinishReason(stopReasons): AgentLoopStrategy; -``` - -Defined in: [utilities/agent-loop-strategies.ts:41](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/agent-loop-strategies.ts#L41) - -Creates a strategy that continues until a specific finish reason is encountered - -## Parameters - -### stopReasons - -`string`[] - -Finish reasons that should stop the loop - -## Returns - -[`AgentLoopStrategy`](../type-aliases/AgentLoopStrategy.md) - -AgentLoopStrategy that stops on specific finish reasons - -## Example - -```typescript -const stream = chat({ - adapter: openai(), - model: "gpt-4o", - messages: [...], - tools: [weatherTool], - agentLoopStrategy: untilFinishReason(["stop", "length"]), -}); -``` diff --git a/docs/reference/index.md b/docs/reference/index.md deleted file mode 100644 index 118e4642..00000000 --- a/docs/reference/index.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -id: "@tanstack/ai" -title: "@tanstack/ai" ---- - -# @tanstack/ai - -## Classes - -- [BaseAdapter](classes/BaseAdapter.md) -- [BatchStrategy](classes/BatchStrategy.md) -- [CompositeStrategy](classes/CompositeStrategy.md) -- [ImmediateStrategy](classes/ImmediateStrategy.md) -- [PartialJSONParser](classes/PartialJSONParser.md) -- [PunctuationStrategy](classes/PunctuationStrategy.md) -- [StreamProcessor](classes/StreamProcessor.md) -- [ToolCallManager](classes/ToolCallManager.md) -- [WordBoundaryStrategy](classes/WordBoundaryStrategy.md) - -## Interfaces - -- [AgentLoopState](interfaces/AgentLoopState.md) -- [AIAdapter](interfaces/AIAdapter.md) -- [AIAdapterConfig](interfaces/AIAdapterConfig.md) -- [ApprovalRequestedStreamChunk](interfaces/ApprovalRequestedStreamChunk.md) -- [AudioPart](interfaces/AudioPart.md) -- [BaseStreamChunk](interfaces/BaseStreamChunk.md) -- [ChatCompletionChunk](interfaces/ChatCompletionChunk.md) -- [ChatOptions](interfaces/ChatOptions.md) -- 
[ChunkRecording](interfaces/ChunkRecording.md) -- [ChunkStrategy](interfaces/ChunkStrategy.md) -- [ClientTool](interfaces/ClientTool.md) -- [ContentPartSource](interfaces/ContentPartSource.md) -- [ContentStreamChunk](interfaces/ContentStreamChunk.md) -- [DefaultMessageMetadataByModality](interfaces/DefaultMessageMetadataByModality.md) -- [DocumentPart](interfaces/DocumentPart.md) -- [DoneStreamChunk](interfaces/DoneStreamChunk.md) -- [EmbeddingOptions](interfaces/EmbeddingOptions.md) -- [EmbeddingResult](interfaces/EmbeddingResult.md) -- [ErrorStreamChunk](interfaces/ErrorStreamChunk.md) -- [ImagePart](interfaces/ImagePart.md) -- [InternalToolCallState](interfaces/InternalToolCallState.md) -- [JSONParser](interfaces/JSONParser.md) -- [ModelMessage](interfaces/ModelMessage.md) -- [ProcessorResult](interfaces/ProcessorResult.md) -- [ProcessorState](interfaces/ProcessorState.md) -- [ResponseFormat](interfaces/ResponseFormat.md) -- [ServerTool](interfaces/ServerTool.md) -- [StreamProcessorEvents](interfaces/StreamProcessorEvents.md) -- [StreamProcessorHandlers](interfaces/StreamProcessorHandlers.md) -- [StreamProcessorOptions](interfaces/StreamProcessorOptions.md) -- [SummarizationOptions](interfaces/SummarizationOptions.md) -- [SummarizationResult](interfaces/SummarizationResult.md) -- [TextPart](interfaces/TextPart.md) -- [ThinkingPart](interfaces/ThinkingPart.md) -- [ThinkingStreamChunk](interfaces/ThinkingStreamChunk.md) -- [Tool](interfaces/Tool.md) -- [ToolCall](interfaces/ToolCall.md) -- [ToolCallPart](interfaces/ToolCallPart.md) -- [ToolCallStreamChunk](interfaces/ToolCallStreamChunk.md) -- [ToolConfig](interfaces/ToolConfig.md) -- [ToolDefinition](interfaces/ToolDefinition.md) -- [ToolDefinitionConfig](interfaces/ToolDefinitionConfig.md) -- [ToolDefinitionInstance](interfaces/ToolDefinitionInstance.md) -- [ToolInputAvailableStreamChunk](interfaces/ToolInputAvailableStreamChunk.md) -- [ToolResultPart](interfaces/ToolResultPart.md) -- 
[ToolResultStreamChunk](interfaces/ToolResultStreamChunk.md) -- [UIMessage](interfaces/UIMessage.md) -- [VideoPart](interfaces/VideoPart.md) - -## Type Aliases - -- [AgentLoopStrategy](type-aliases/AgentLoopStrategy.md) -- [AnyClientTool](type-aliases/AnyClientTool.md) -- [ChatStreamOptionsForModel](type-aliases/ChatStreamOptionsForModel.md) -- [ChatStreamOptionsUnion](type-aliases/ChatStreamOptionsUnion.md) -- [ConstrainedContent](type-aliases/ConstrainedContent.md) -- [ConstrainedModelMessage](type-aliases/ConstrainedModelMessage.md) -- [ContentPart](type-aliases/ContentPart.md) -- [ContentPartForModalities](type-aliases/ContentPartForModalities.md) -- [ExtractModalitiesForModel](type-aliases/ExtractModalitiesForModel.md) -- [ExtractModelsFromAdapter](type-aliases/ExtractModelsFromAdapter.md) -- [InferToolInput](type-aliases/InferToolInput.md) -- [InferToolName](type-aliases/InferToolName.md) -- [InferToolOutput](type-aliases/InferToolOutput.md) -- [MessagePart](type-aliases/MessagePart.md) -- [ModalitiesArrayToUnion](type-aliases/ModalitiesArrayToUnion.md) -- [Modality](type-aliases/Modality.md) -- [StreamChunk](type-aliases/StreamChunk.md) -- [StreamChunkType](type-aliases/StreamChunkType.md) -- [ToolCallState](type-aliases/ToolCallState.md) -- [ToolResultState](type-aliases/ToolResultState.md) - -## Variables - -- [aiEventClient](variables/aiEventClient.md) -- [defaultJSONParser](variables/defaultJSONParser.md) - -## Functions - -- [chat](functions/chat.md) -- [chatOptions](functions/chatOptions.md) -- [combineStrategies](functions/combineStrategies.md) -- [convertMessagesToModelMessages](functions/convertMessagesToModelMessages.md) -- [convertZodToJsonSchema](functions/convertZodToJsonSchema.md) -- [createReplayStream](functions/createReplayStream.md) -- [embedding](functions/embedding.md) -- [generateMessageId](functions/generateMessageId.md) -- [maxIterations](functions/maxIterations.md) -- [messages](functions/messages.md) -- 
[modelMessagesToUIMessages](functions/modelMessagesToUIMessages.md) -- [modelMessageToUIMessage](functions/modelMessageToUIMessage.md) -- [normalizeToUIMessage](functions/normalizeToUIMessage.md) -- [parsePartialJSON](functions/parsePartialJSON.md) -- [summarize](functions/summarize.md) -- [toolDefinition](functions/toolDefinition.md) -- [toServerSentEventsStream](functions/toServerSentEventsStream.md) -- [toStreamResponse](functions/toStreamResponse.md) -- [uiMessageToModelMessages](functions/uiMessageToModelMessages.md) -- [untilFinishReason](functions/untilFinishReason.md) diff --git a/docs/reference/interfaces/AIAdapter.md b/docs/reference/interfaces/AIAdapter.md deleted file mode 100644 index f0e61468..00000000 --- a/docs/reference/interfaces/AIAdapter.md +++ /dev/null @@ -1,214 +0,0 @@ ---- -id: AIAdapter -title: AIAdapter ---- - -# Interface: AIAdapter\ - -Defined in: [types.ts:684](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L684) - -AI adapter interface with support for endpoint-specific models and provider options. 
- -Generic parameters: -- TChatModels: Models that support chat/text completion -- TEmbeddingModels: Models that support embeddings -- TChatProviderOptions: Provider-specific options for chat endpoint -- TEmbeddingProviderOptions: Provider-specific options for embedding endpoint -- TModelProviderOptionsByName: Map from model name to its specific provider options -- TModelInputModalitiesByName: Map from model name to its supported input modalities -- TMessageMetadataByModality: Map from modality type to adapter-specific metadata types - -## Type Parameters - -### TChatModels - -`TChatModels` *extends* `ReadonlyArray`\<`string`\> = `ReadonlyArray`\<`string`\> - -### TEmbeddingModels - -`TEmbeddingModels` *extends* `ReadonlyArray`\<`string`\> = `ReadonlyArray`\<`string`\> - -### TChatProviderOptions - -`TChatProviderOptions` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> - -### TEmbeddingProviderOptions - -`TEmbeddingProviderOptions` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> - -### TModelProviderOptionsByName - -`TModelProviderOptionsByName` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> - -### TModelInputModalitiesByName - -`TModelInputModalitiesByName` *extends* `Record`\<`string`, `ReadonlyArray`\<[`Modality`](../type-aliases/Modality.md)\>\> = `Record`\<`string`, `ReadonlyArray`\<[`Modality`](../type-aliases/Modality.md)\>\> - -### TMessageMetadataByModality - -`TMessageMetadataByModality` *extends* `object` = [`DefaultMessageMetadataByModality`](DefaultMessageMetadataByModality.md) - -## Properties - -### \_chatProviderOptions? - -```ts -optional _chatProviderOptions: TChatProviderOptions; -``` - -Defined in: [types.ts:711](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L711) - -*** - -### \_embeddingProviderOptions? 
- -```ts -optional _embeddingProviderOptions: TEmbeddingProviderOptions; -``` - -Defined in: [types.ts:712](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L712) - -*** - -### \_messageMetadataByModality? - -```ts -optional _messageMetadataByModality: TMessageMetadataByModality; -``` - -Defined in: [types.ts:729](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L729) - -Type-only map from modality type to adapter-specific metadata types. -Used to provide type-safe autocomplete for metadata on content parts. - -*** - -### \_modelInputModalitiesByName? - -```ts -optional _modelInputModalitiesByName: TModelInputModalitiesByName; -``` - -Defined in: [types.ts:724](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L724) - -Type-only map from model name to its supported input modalities. -Used by the core AI types to narrow ContentPart types based on the selected model. -Must be provided by all adapters. - -*** - -### \_modelProviderOptionsByName - -```ts -_modelProviderOptionsByName: TModelProviderOptionsByName; -``` - -Defined in: [types.ts:718](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L718) - -Type-only map from model name to its specific provider options. -Used by the core AI types to narrow providerOptions based on the selected model. -Must be provided by all adapters. - -*** - -### \_providerOptions? 
- -```ts -optional _providerOptions: TChatProviderOptions; -``` - -Defined in: [types.ts:710](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L710) - -*** - -### chatStream() - -```ts -chatStream: (options) => AsyncIterable; -``` - -Defined in: [types.ts:732](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L732) - -#### Parameters - -##### options - -[`ChatOptions`](ChatOptions.md)\<`string`, `TChatProviderOptions`\> - -#### Returns - -`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> - -*** - -### createEmbeddings() - -```ts -createEmbeddings: (options) => Promise; -``` - -Defined in: [types.ts:740](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L740) - -#### Parameters - -##### options - -[`EmbeddingOptions`](EmbeddingOptions.md) - -#### Returns - -`Promise`\<[`EmbeddingResult`](EmbeddingResult.md)\> - -*** - -### embeddingModels? - -```ts -optional embeddingModels: TEmbeddingModels; -``` - -Defined in: [types.ts:707](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L707) - -Models that support embeddings - -*** - -### models - -```ts -models: TChatModels; -``` - -Defined in: [types.ts:704](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L704) - -Models that support chat/text completion - -*** - -### name - -```ts -name: string; -``` - -Defined in: [types.ts:702](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L702) - -*** - -### summarize() - -```ts -summarize: (options) => Promise; -``` - -Defined in: [types.ts:737](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L737) - -#### Parameters - -##### options - -[`SummarizationOptions`](SummarizationOptions.md) - -#### Returns - -`Promise`\<[`SummarizationResult`](SummarizationResult.md)\> diff --git a/docs/reference/interfaces/AIAdapterConfig.md b/docs/reference/interfaces/AIAdapterConfig.md deleted 
file mode 100644 index 20ecbd53..00000000 --- a/docs/reference/interfaces/AIAdapterConfig.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -id: AIAdapterConfig -title: AIAdapterConfig ---- - -# Interface: AIAdapterConfig - -Defined in: [types.ts:743](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L743) - -## Properties - -### apiKey? - -```ts -optional apiKey: string; -``` - -Defined in: [types.ts:744](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L744) - -*** - -### baseUrl? - -```ts -optional baseUrl: string; -``` - -Defined in: [types.ts:745](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L745) - -*** - -### headers? - -```ts -optional headers: Record; -``` - -Defined in: [types.ts:748](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L748) - -*** - -### maxRetries? - -```ts -optional maxRetries: number; -``` - -Defined in: [types.ts:747](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L747) - -*** - -### timeout? 
- -```ts -optional timeout: number; -``` - -Defined in: [types.ts:746](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L746) diff --git a/docs/reference/interfaces/AgentLoopState.md b/docs/reference/interfaces/AgentLoopState.md deleted file mode 100644 index 9c117b02..00000000 --- a/docs/reference/interfaces/AgentLoopState.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -id: AgentLoopState -title: AgentLoopState ---- - -# Interface: AgentLoopState - -Defined in: [types.ts:450](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L450) - -State passed to agent loop strategy for determining whether to continue - -## Properties - -### finishReason - -```ts -finishReason: string | null; -``` - -Defined in: [types.ts:456](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L456) - -Finish reason from the last response - -*** - -### iterationCount - -```ts -iterationCount: number; -``` - -Defined in: [types.ts:452](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L452) - -Current iteration count (0-indexed) - -*** - -### messages - -```ts -messages: ModelMessage< - | string - | ContentPart[] - | null>[]; -``` - -Defined in: [types.ts:454](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L454) - -Current messages array diff --git a/docs/reference/interfaces/ApprovalRequestedStreamChunk.md b/docs/reference/interfaces/ApprovalRequestedStreamChunk.md deleted file mode 100644 index f596477c..00000000 --- a/docs/reference/interfaces/ApprovalRequestedStreamChunk.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -id: ApprovalRequestedStreamChunk -title: ApprovalRequestedStreamChunk ---- - -# Interface: ApprovalRequestedStreamChunk - -Defined in: [types.ts:573](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L573) - -## Extends - -- [`BaseStreamChunk`](BaseStreamChunk.md) - -## Properties - -### approval - -```ts -approval: object; -``` - 
-Defined in: [types.ts:578](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L578) - -#### id - -```ts -id: string; -``` - -#### needsApproval - -```ts -needsApproval: true; -``` - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) - -*** - -### input - -```ts -input: any; -``` - -Defined in: [types.ts:577](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L577) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) - -*** - -### toolCallId - -```ts -toolCallId: string; -``` - -Defined in: [types.ts:575](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L575) - -*** - -### toolName - -```ts -toolName: string; -``` - -Defined in: [types.ts:576](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L576) - -*** - -### type - -```ts -type: "approval-requested"; -``` - -Defined in: [types.ts:574](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L574) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) diff --git a/docs/reference/interfaces/AudioPart.md b/docs/reference/interfaces/AudioPart.md deleted file mode 100644 index b4e8dc02..00000000 --- a/docs/reference/interfaces/AudioPart.md +++ /dev/null @@ -1,52 +0,0 @@ 
---- -id: AudioPart -title: AudioPart ---- - -# Interface: AudioPart\ - -Defined in: [types.ts:63](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L63) - -Audio content part for multimodal messages. - -## Type Parameters - -### TMetadata - -`TMetadata` = `unknown` - -Provider-specific metadata type - -## Properties - -### metadata? - -```ts -optional metadata: TMetadata; -``` - -Defined in: [types.ts:68](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L68) - -Provider-specific metadata (e.g., format, sample rate) - -*** - -### source - -```ts -source: ContentPartSource; -``` - -Defined in: [types.ts:66](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L66) - -Source of the audio content - -*** - -### type - -```ts -type: "audio"; -``` - -Defined in: [types.ts:64](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L64) diff --git a/docs/reference/interfaces/BaseStreamChunk.md b/docs/reference/interfaces/BaseStreamChunk.md deleted file mode 100644 index b8af3289..00000000 --- a/docs/reference/interfaces/BaseStreamChunk.md +++ /dev/null @@ -1,59 +0,0 @@ ---- -id: BaseStreamChunk -title: BaseStreamChunk ---- - -# Interface: BaseStreamChunk - -Defined in: [types.ts:522](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L522) - -## Extended by - -- [`ContentStreamChunk`](ContentStreamChunk.md) -- [`ToolCallStreamChunk`](ToolCallStreamChunk.md) -- [`ToolResultStreamChunk`](ToolResultStreamChunk.md) -- [`DoneStreamChunk`](DoneStreamChunk.md) -- [`ErrorStreamChunk`](ErrorStreamChunk.md) -- [`ApprovalRequestedStreamChunk`](ApprovalRequestedStreamChunk.md) -- [`ToolInputAvailableStreamChunk`](ToolInputAvailableStreamChunk.md) -- [`ThinkingStreamChunk`](ThinkingStreamChunk.md) - -## Properties - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) 
- -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -*** - -### type - -```ts -type: StreamChunkType; -``` - -Defined in: [types.ts:523](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L523) diff --git a/docs/reference/interfaces/ChatCompletionChunk.md b/docs/reference/interfaces/ChatCompletionChunk.md deleted file mode 100644 index dec3e35c..00000000 --- a/docs/reference/interfaces/ChatCompletionChunk.md +++ /dev/null @@ -1,86 +0,0 @@ ---- -id: ChatCompletionChunk -title: ChatCompletionChunk ---- - -# Interface: ChatCompletionChunk - -Defined in: [types.ts:612](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L612) - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [types.ts:615](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L615) - -*** - -### finishReason? - -```ts -optional finishReason: "length" | "stop" | "content_filter" | null; -``` - -Defined in: [types.ts:617](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L617) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:613](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L613) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:614](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L614) - -*** - -### role? - -```ts -optional role: "assistant"; -``` - -Defined in: [types.ts:616](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L616) - -*** - -### usage? 
- -```ts -optional usage: object; -``` - -Defined in: [types.ts:618](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L618) - -#### completionTokens - -```ts -completionTokens: number; -``` - -#### promptTokens - -```ts -promptTokens: number; -``` - -#### totalTokens - -```ts -totalTokens: number; -``` diff --git a/docs/reference/interfaces/ChatOptions.md b/docs/reference/interfaces/ChatOptions.md deleted file mode 100644 index 9a3efbf8..00000000 --- a/docs/reference/interfaces/ChatOptions.md +++ /dev/null @@ -1,161 +0,0 @@ ---- -id: ChatOptions -title: ChatOptions ---- - -# Interface: ChatOptions\ - -Defined in: [types.ts:476](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L476) - -Options passed into the SDK and further piped to the AI provider. - -## Type Parameters - -### TModel - -`TModel` *extends* `string` = `string` - -### TProviderOptionsSuperset - -`TProviderOptionsSuperset` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> - -### TOutput - -`TOutput` *extends* [`ResponseFormat`](ResponseFormat.md)\<`any`\> \| `undefined` = `undefined` - -### TProviderOptionsForModel - -`TProviderOptionsForModel` = `TProviderOptionsSuperset` - -## Properties - -### abortController? - -```ts -optional abortController: AbortController; -``` - -Defined in: [types.ts:509](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L509) - -AbortController for request cancellation. - -Allows you to cancel an in-progress request using an AbortController. -Useful for implementing timeouts or user-initiated cancellations. - -#### Example - -```ts -const abortController = new AbortController(); -setTimeout(() => abortController.abort(), 5000); // Cancel after 5 seconds -await chat({ ..., abortController }); -``` - -#### See - -https://developer.mozilla.org/en-US/docs/Web/API/AbortController - -*** - -### agentLoopStrategy? 
- -```ts -optional agentLoopStrategy: AgentLoopStrategy; -``` - -Defined in: [types.ts:486](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L486) - -*** - -### conversationId? - -```ts -optional conversationId: string; -``` - -Defined in: [types.ts:495](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L495) - -Conversation ID for correlating client and server-side devtools events. -When provided, server-side events will be linked to the client conversation in devtools. - -*** - -### messages - -```ts -messages: ModelMessage< - | string - | ContentPart[] - | null>[]; -``` - -Defined in: [types.ts:483](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L483) - -*** - -### model - -```ts -model: TModel; -``` - -Defined in: [types.ts:482](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L482) - -*** - -### options? - -```ts -optional options: CommonOptions; -``` - -Defined in: [types.ts:487](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L487) - -*** - -### output? - -```ts -optional output: TOutput; -``` - -Defined in: [types.ts:490](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L490) - -*** - -### providerOptions? - -```ts -optional providerOptions: TProviderOptionsForModel; -``` - -Defined in: [types.ts:488](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L488) - -*** - -### request? - -```ts -optional request: Request | RequestInit; -``` - -Defined in: [types.ts:489](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L489) - -*** - -### systemPrompts? - -```ts -optional systemPrompts: string[]; -``` - -Defined in: [types.ts:485](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L485) - -*** - -### tools? 
- 

```ts
optional tools: Tool<ZodType<unknown>, ZodType<unknown>, string>[];
```

Defined in: [types.ts:484](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L484) diff --git a/docs/reference/interfaces/ChunkRecording.md b/docs/reference/interfaces/ChunkRecording.md deleted file mode 100644 index 3833f041..00000000 --- a/docs/reference/interfaces/ChunkRecording.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -id: ChunkRecording -title: ChunkRecording ---- - -# Interface: ChunkRecording - -Defined in: [stream/types.ts:83](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L83) - -Recording format for replay testing - -## Properties - -### chunks - -```ts -chunks: object[]; -``` - -Defined in: [stream/types.ts:88](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L88) - -#### chunk - -```ts -chunk: StreamChunk; -``` - -#### index - -```ts -index: number; -``` - -#### timestamp - -```ts -timestamp: number; -``` - -*** - -### model? - -```ts -optional model: string; -``` - -Defined in: [stream/types.ts:86](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L86) - -*** - -### provider? - -```ts -optional provider: string; -``` - -Defined in: [stream/types.ts:87](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L87) - -*** - -### result? 
- -```ts -optional result: ProcessorResult; -``` - -Defined in: [stream/types.ts:93](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L93) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [stream/types.ts:85](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L85) - -*** - -### version - -```ts -version: "1.0"; -``` - -Defined in: [stream/types.ts:84](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L84) diff --git a/docs/reference/interfaces/ChunkStrategy.md b/docs/reference/interfaces/ChunkStrategy.md deleted file mode 100644 index c9b06168..00000000 --- a/docs/reference/interfaces/ChunkStrategy.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -id: ChunkStrategy -title: ChunkStrategy ---- - -# Interface: ChunkStrategy - -Defined in: [stream/types.ts:43](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L43) - -Strategy for determining when to emit text updates - -## Properties - -### reset()? 
- -```ts -optional reset: () => void; -``` - -Defined in: [stream/types.ts:55](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L55) - -Optional: Reset strategy state (called when streaming starts) - -#### Returns - -`void` - -*** - -### shouldEmit() - -```ts -shouldEmit: (chunk, accumulated) => boolean; -``` - -Defined in: [stream/types.ts:50](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L50) - -Called for each text chunk received - -#### Parameters - -##### chunk - -`string` - -The new chunk of text (delta) - -##### accumulated - -`string` - -All text accumulated so far - -#### Returns - -`boolean` - -true if an update should be emitted now diff --git a/docs/reference/interfaces/ClientTool.md b/docs/reference/interfaces/ClientTool.md deleted file mode 100644 index 819d729d..00000000 --- a/docs/reference/interfaces/ClientTool.md +++ /dev/null @@ -1,114 +0,0 @@ ---- -id: ClientTool -title: ClientTool ---- - -# Interface: ClientTool\ - -Defined in: [tools/tool-definition.ts:18](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L18) - -Marker type for client-side tools - -## Type Parameters - -### TInput - -`TInput` *extends* `z.ZodType` = `z.ZodType` - -### TOutput - -`TOutput` *extends* `z.ZodType` = `z.ZodType` - -### TName - -`TName` *extends* `string` = `string` - -## Properties - -### \_\_toolSide - -```ts -__toolSide: "client"; -``` - -Defined in: [tools/tool-definition.ts:23](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L23) - -*** - -### description - -```ts -description: string; -``` - -Defined in: [tools/tool-definition.ts:25](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L25) - -*** - -### execute()? 
- 

```ts
optional execute: (args) => output<TOutput> | Promise<output<TOutput>>;
```

Defined in: [tools/tool-definition.ts:30](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L30) - -#### Parameters - -##### args - -`output`\<`TInput`\> - -#### Returns - -`output`\<`TOutput`\> \| `Promise`\<`output`\<`TOutput`\>\> - -*** - -### inputSchema? - -```ts -optional inputSchema: TInput; -``` - -Defined in: [tools/tool-definition.ts:26](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L26) - -*** - -### metadata? - -```ts -optional metadata: Record<string, unknown>; -``` - -Defined in: [tools/tool-definition.ts:29](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L29) - -*** - -### name - -```ts -name: TName; -``` - -Defined in: [tools/tool-definition.ts:24](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L24) - -*** - -### needsApproval? - -```ts -optional needsApproval: boolean; -``` - -Defined in: [tools/tool-definition.ts:28](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L28) - -*** - -### outputSchema? - -```ts -optional outputSchema: TOutput; -``` - -Defined in: [tools/tool-definition.ts:27](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L27) diff --git a/docs/reference/interfaces/ContentPartSource.md b/docs/reference/interfaces/ContentPartSource.md deleted file mode 100644 index 54c2839c..00000000 --- a/docs/reference/interfaces/ContentPartSource.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -id: ContentPartSource -title: ContentPartSource ---- - -# Interface: ContentPartSource - -Defined in: [types.ts:32](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L32) - -Source specification for multimodal content. -Supports both inline data (base64) and URL-based content. 
- -## Properties - -### type - -```ts -type: "data" | "url"; -``` - -Defined in: [types.ts:38](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L38) - -The type of source: -- 'data': Inline data (typically base64 encoded) -- 'url': URL reference to the content - -*** - -### value - -```ts -value: string; -``` - -Defined in: [types.ts:44](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L44) - -The actual content value: -- For 'data': base64-encoded string -- For 'url': HTTP(S) URL or data URI diff --git a/docs/reference/interfaces/ContentStreamChunk.md b/docs/reference/interfaces/ContentStreamChunk.md deleted file mode 100644 index 1763ca48..00000000 --- a/docs/reference/interfaces/ContentStreamChunk.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -id: ContentStreamChunk -title: ContentStreamChunk ---- - -# Interface: ContentStreamChunk - -Defined in: [types.ts:529](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L529) - -## Extends - -- [`BaseStreamChunk`](BaseStreamChunk.md) - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [types.ts:532](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L532) - -*** - -### delta - -```ts -delta: string; -``` - -Defined in: [types.ts:531](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L531) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) - -*** - -### role? 
- -```ts -optional role: "assistant"; -``` - -Defined in: [types.ts:533](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L533) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) - -*** - -### type - -```ts -type: "content"; -``` - -Defined in: [types.ts:530](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L530) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) diff --git a/docs/reference/interfaces/DefaultMessageMetadataByModality.md b/docs/reference/interfaces/DefaultMessageMetadataByModality.md deleted file mode 100644 index 6b4593d5..00000000 --- a/docs/reference/interfaces/DefaultMessageMetadataByModality.md +++ /dev/null @@ -1,61 +0,0 @@ ---- -id: DefaultMessageMetadataByModality -title: DefaultMessageMetadataByModality ---- - -# Interface: DefaultMessageMetadataByModality - -Defined in: [types.ts:664](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L664) - -Default metadata type for adapters that don't define custom metadata. -Uses unknown for all modalities. 
- -## Properties - -### audio - -```ts -audio: unknown; -``` - -Defined in: [types.ts:667](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L667) - -*** - -### document - -```ts -document: unknown; -``` - -Defined in: [types.ts:669](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L669) - -*** - -### image - -```ts -image: unknown; -``` - -Defined in: [types.ts:666](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L666) - -*** - -### text - -```ts -text: unknown; -``` - -Defined in: [types.ts:665](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L665) - -*** - -### video - -```ts -video: unknown; -``` - -Defined in: [types.ts:668](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L668) diff --git a/docs/reference/interfaces/DocumentPart.md b/docs/reference/interfaces/DocumentPart.md deleted file mode 100644 index 3ee9232e..00000000 --- a/docs/reference/interfaces/DocumentPart.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -id: DocumentPart -title: DocumentPart ---- - -# Interface: DocumentPart\ - -Defined in: [types.ts:87](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L87) - -Document content part for multimodal messages (e.g., PDFs). - -## Type Parameters - -### TMetadata - -`TMetadata` = `unknown` - -Provider-specific metadata type (e.g., Anthropic's media_type) - -## Properties - -### metadata? 
- -```ts -optional metadata: TMetadata; -``` - -Defined in: [types.ts:92](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L92) - -Provider-specific metadata (e.g., media_type for PDFs) - -*** - -### source - -```ts -source: ContentPartSource; -``` - -Defined in: [types.ts:90](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L90) - -Source of the document content - -*** - -### type - -```ts -type: "document"; -``` - -Defined in: [types.ts:88](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L88) diff --git a/docs/reference/interfaces/DoneStreamChunk.md b/docs/reference/interfaces/DoneStreamChunk.md deleted file mode 100644 index 1f6343c9..00000000 --- a/docs/reference/interfaces/DoneStreamChunk.md +++ /dev/null @@ -1,106 +0,0 @@ ---- -id: DoneStreamChunk -title: DoneStreamChunk ---- - -# Interface: DoneStreamChunk - -Defined in: [types.ts:555](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L555) - -## Extends - -- [`BaseStreamChunk`](BaseStreamChunk.md) - -## Properties - -### finishReason - -```ts -finishReason: "length" | "stop" | "content_filter" | "tool_calls" | null; -``` - -Defined in: [types.ts:557](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L557) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: 
[types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) - -*** - -### type - -```ts -type: "done"; -``` - -Defined in: [types.ts:556](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L556) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) - -*** - -### usage? - -```ts -optional usage: object; -``` - -Defined in: [types.ts:558](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L558) - -#### completionTokens - -```ts -completionTokens: number; -``` - -#### promptTokens - -```ts -promptTokens: number; -``` - -#### totalTokens - -```ts -totalTokens: number; -``` diff --git a/docs/reference/interfaces/EmbeddingOptions.md b/docs/reference/interfaces/EmbeddingOptions.md deleted file mode 100644 index 3e65e0a5..00000000 --- a/docs/reference/interfaces/EmbeddingOptions.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -id: EmbeddingOptions -title: EmbeddingOptions ---- - -# Interface: EmbeddingOptions - -Defined in: [types.ts:644](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L644) - -## Properties - -### dimensions? 
- -```ts -optional dimensions: number; -``` - -Defined in: [types.ts:647](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L647) - -*** - -### input - -```ts -input: string | string[]; -``` - -Defined in: [types.ts:646](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L646) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:645](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L645) diff --git a/docs/reference/interfaces/EmbeddingResult.md b/docs/reference/interfaces/EmbeddingResult.md deleted file mode 100644 index e45be9ba..00000000 --- a/docs/reference/interfaces/EmbeddingResult.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -id: EmbeddingResult -title: EmbeddingResult ---- - -# Interface: EmbeddingResult - -Defined in: [types.ts:650](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L650) - -## Properties - -### embeddings - -```ts -embeddings: number[][]; -``` - -Defined in: [types.ts:653](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L653) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:651](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L651) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:652](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L652) - -*** - -### usage - -```ts -usage: object; -``` - -Defined in: [types.ts:654](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L654) - -#### promptTokens - -```ts -promptTokens: number; -``` - -#### totalTokens - -```ts -totalTokens: number; -``` diff --git a/docs/reference/interfaces/ErrorStreamChunk.md b/docs/reference/interfaces/ErrorStreamChunk.md deleted file mode 100644 index 96682fc6..00000000 --- a/docs/reference/interfaces/ErrorStreamChunk.md +++ /dev/null @@ -1,90 +0,0 @@ ---- -id: ErrorStreamChunk -title: 
ErrorStreamChunk ---- - -# Interface: ErrorStreamChunk - -Defined in: [types.ts:565](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L565) - -## Extends - -- [`BaseStreamChunk`](BaseStreamChunk.md) - -## Properties - -### error - -```ts -error: object; -``` - -Defined in: [types.ts:567](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L567) - -#### code? - -```ts -optional code: string; -``` - -#### message - -```ts -message: string; -``` - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) - -*** - -### type - -```ts -type: "error"; -``` - -Defined in: [types.ts:566](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L566) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) diff --git a/docs/reference/interfaces/ImagePart.md b/docs/reference/interfaces/ImagePart.md deleted file mode 100644 index 2111598b..00000000 --- a/docs/reference/interfaces/ImagePart.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -id: ImagePart -title: ImagePart ---- - -# Interface: ImagePart\ - -Defined in: [types.ts:51](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L51) - -Image content part for multimodal 
messages. - -## Type Parameters - -### TMetadata - -`TMetadata` = `unknown` - -Provider-specific metadata type (e.g., OpenAI's detail level) - -## Properties - -### metadata? - -```ts -optional metadata: TMetadata; -``` - -Defined in: [types.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L56) - -Provider-specific metadata (e.g., OpenAI's detail: 'auto' | 'low' | 'high') - -*** - -### source - -```ts -source: ContentPartSource; -``` - -Defined in: [types.ts:54](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L54) - -Source of the image content - -*** - -### type - -```ts -type: "image"; -``` - -Defined in: [types.ts:52](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L52) diff --git a/docs/reference/interfaces/InternalToolCallState.md b/docs/reference/interfaces/InternalToolCallState.md deleted file mode 100644 index e8607bce..00000000 --- a/docs/reference/interfaces/InternalToolCallState.md +++ /dev/null @@ -1,70 +0,0 @@ ---- -id: InternalToolCallState -title: InternalToolCallState ---- - -# Interface: InternalToolCallState - -Defined in: [stream/types.ts:31](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L31) - -Internal state for a tool call being tracked - -## Properties - -### arguments - -```ts -arguments: string; -``` - -Defined in: [stream/types.ts:34](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L34) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [stream/types.ts:32](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L32) - -*** - -### index - -```ts -index: number; -``` - -Defined in: [stream/types.ts:37](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L37) - -*** - -### name - -```ts -name: string; -``` - -Defined in: 
[stream/types.ts:33](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L33) - -*** - -### parsedArguments? - -```ts -optional parsedArguments: any; -``` - -Defined in: [stream/types.ts:36](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L36) - -*** - -### state - -```ts -state: ToolCallState; -``` - -Defined in: [stream/types.ts:35](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L35) diff --git a/docs/reference/interfaces/JSONParser.md b/docs/reference/interfaces/JSONParser.md deleted file mode 100644 index 228cd1a2..00000000 --- a/docs/reference/interfaces/JSONParser.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -id: JSONParser -title: JSONParser ---- - -# Interface: JSONParser - -Defined in: [stream/json-parser.ts:12](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L12) - -JSON Parser interface - allows for custom parser implementations - -## Properties - -### parse() - -```ts -parse: (jsonString) => any; -``` - -Defined in: [stream/json-parser.ts:18](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L18) - -Parse a JSON string (may be incomplete/partial) - -#### Parameters - -##### jsonString - -`string` - -The JSON string to parse - -#### Returns - -`any` - -The parsed object, or undefined if parsing fails diff --git a/docs/reference/interfaces/ModelMessage.md b/docs/reference/interfaces/ModelMessage.md deleted file mode 100644 index 4495276a..00000000 --- a/docs/reference/interfaces/ModelMessage.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -id: ModelMessage -title: ModelMessage ---- - -# Interface: ModelMessage\ - -Defined in: [types.ts:163](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L163) - -## Type Parameters - -### TContent - -`TContent` *extends* `string` \| `null` \| [`ContentPart`](../type-aliases/ContentPart.md)[] = `string` \| `null` \| 
[`ContentPart`](../type-aliases/ContentPart.md)[] - -## Properties - -### content - -```ts -content: TContent; -``` - -Defined in: [types.ts:170](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L170) - -*** - -### name? - -```ts -optional name: string; -``` - -Defined in: [types.ts:171](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L171) - -*** - -### role - -```ts -role: "user" | "assistant" | "tool"; -``` - -Defined in: [types.ts:169](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L169) - -*** - -### toolCallId? - -```ts -optional toolCallId: string; -``` - -Defined in: [types.ts:173](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L173) - -*** - -### toolCalls? - -```ts -optional toolCalls: ToolCall[]; -``` - -Defined in: [types.ts:172](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L172) diff --git a/docs/reference/interfaces/ProcessorResult.md b/docs/reference/interfaces/ProcessorResult.md deleted file mode 100644 index 9fb65250..00000000 --- a/docs/reference/interfaces/ProcessorResult.md +++ /dev/null @@ -1,50 +0,0 @@ ---- -id: ProcessorResult -title: ProcessorResult ---- - -# Interface: ProcessorResult - -Defined in: [stream/types.ts:61](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L61) - -Result from processing a stream - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [stream/types.ts:62](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L62) - -*** - -### finishReason? - -```ts -optional finishReason: string | null; -``` - -Defined in: [stream/types.ts:65](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L65) - -*** - -### thinking? 
- -```ts -optional thinking: string; -``` - -Defined in: [stream/types.ts:63](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L63) - -*** - -### toolCalls? - -```ts -optional toolCalls: ToolCall[]; -``` - -Defined in: [stream/types.ts:64](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L64) diff --git a/docs/reference/interfaces/ProcessorState.md b/docs/reference/interfaces/ProcessorState.md deleted file mode 100644 index 7ef5bba6..00000000 --- a/docs/reference/interfaces/ProcessorState.md +++ /dev/null @@ -1,70 +0,0 @@ ---- -id: ProcessorState -title: ProcessorState ---- - -# Interface: ProcessorState - -Defined in: [stream/types.ts:71](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L71) - -Current state of the processor - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [stream/types.ts:72](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L72) - -*** - -### done - -```ts -done: boolean; -``` - -Defined in: [stream/types.ts:77](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L77) - -*** - -### finishReason - -```ts -finishReason: string | null; -``` - -Defined in: [stream/types.ts:76](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L76) - -*** - -### thinking - -```ts -thinking: string; -``` - -Defined in: [stream/types.ts:73](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L73) - -*** - -### toolCallOrder - -```ts -toolCallOrder: string[]; -``` - -Defined in: [stream/types.ts:75](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L75) - -*** - -### toolCalls - -```ts -toolCalls: Map; -``` - -Defined in: [stream/types.ts:74](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L74) diff --git 
a/docs/reference/interfaces/ResponseFormat.md b/docs/reference/interfaces/ResponseFormat.md deleted file mode 100644 index 2d5f2962..00000000 --- a/docs/reference/interfaces/ResponseFormat.md +++ /dev/null @@ -1,151 +0,0 @@ ---- -id: ResponseFormat -title: ResponseFormat ---- - -# Interface: ResponseFormat\ - -Defined in: [types.ts:366](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L366) - -Structured output format specification. - -Constrains the model's output to match a specific JSON structure. -Useful for extracting structured data, form filling, or ensuring consistent response formats. - -## See - - - https://platform.openai.com/docs/guides/structured-outputs - - https://sdk.vercel.ai/docs/ai-sdk-core/structured-outputs - -## Type Parameters - -### TData - -`TData` = `any` - -TypeScript type of the expected data structure (for type safety) - -## Properties - -### \_\_data? - -```ts -optional __data: TData; -``` - -Defined in: [types.ts:444](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L444) - -**`Internal`** - -Type-only property to carry the inferred data type. - -This is never set at runtime - it only exists for TypeScript type inference. -Allows the SDK to know what type to expect when parsing the response. - -*** - -### json\_schema? - -```ts -optional json_schema: object; -``` - -Defined in: [types.ts:383](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L383) - -JSON schema specification (required when type is "json_schema"). - -Defines the exact structure the model's output must conform to. -OpenAI's structured outputs will guarantee the output matches this schema. - -#### description? - -```ts -optional description: string; -``` - -Optional description of what the schema represents. - -Helps document the purpose of this structured output. 
- -##### Example - -```ts -"User profile information including name, email, and preferences" -``` - -#### name - -```ts -name: string; -``` - -Unique name for the schema. - -Used to identify the schema in logs and debugging. -Should be descriptive (e.g., "user_profile", "search_results"). - -#### schema - -```ts -schema: Record<string, unknown>; -``` - -JSON Schema definition for the expected output structure. - -Must be a valid JSON Schema (draft 2020-12 or compatible). -The model's output will be validated against this schema. - -##### See - -https://json-schema.org/ - -##### Example - -```ts -{ -  type: "object", -  properties: { -    name: { type: "string" }, -    age: { type: "number" }, -    email: { type: "string", format: "email" } -  }, -  required: ["name", "email"], -  additionalProperties: false -} -``` - -#### strict? - -```ts -optional strict: boolean; -``` - -Whether to enforce strict schema validation. - -When true (recommended), the model guarantees output will match the schema exactly. -When false, the model will "best effort" match the schema. - -Default: true (for providers that support it) - -##### See - -https://platform.openai.com/docs/guides/structured-outputs#strict-mode - -*** - -### type - -```ts -type: "json_object" | "json_schema"; -``` - -Defined in: [types.ts:375](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L375) - -Type of structured output. 
- -- "json_object": Forces the model to output valid JSON (any structure) -- "json_schema": Validates output against a provided JSON Schema (strict structure) - -#### See - -https://platform.openai.com/docs/api-reference/chat/create#chat-create-response_format diff --git a/docs/reference/interfaces/ServerTool.md b/docs/reference/interfaces/ServerTool.md deleted file mode 100644 index 7dc40927..00000000 --- a/docs/reference/interfaces/ServerTool.md +++ /dev/null @@ -1,231 +0,0 @@ ---- -id: ServerTool -title: ServerTool ---- - -# Interface: ServerTool\ - -Defined in: [tools/tool-definition.ts:7](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L7) - -Marker type for server-side tools - -## Extends - -- [`Tool`](Tool.md)\<`TInput`, `TOutput`, `TName`\> - -## Type Parameters - -### TInput - -`TInput` *extends* `z.ZodType` = `z.ZodType` - -### TOutput - -`TOutput` *extends* `z.ZodType` = `z.ZodType` - -### TName - -`TName` *extends* `string` = `string` - -## Properties - -### \_\_toolSide - -```ts -__toolSide: "server"; -``` - -Defined in: [tools/tool-definition.ts:12](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L12) - -*** - -### description - -```ts -description: string; -``` - -Defined in: [types.ts:286](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L286) - -Clear description of what the tool does. - -This is crucial - the model uses this to decide when to call the tool. -Be specific about what the tool does, what parameters it needs, and what it returns. - -#### Example - -```ts -"Get the current weather in a given location. Returns temperature, conditions, and forecast." -``` - -#### Inherited from - -[`Tool`](Tool.md).[`description`](Tool.md#description) - -*** - -### execute()? 
- -```ts -optional execute: (args) => any; -``` - -Defined in: [types.ts:342](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L342) - -Optional function to execute when the model calls this tool. - -If provided, the SDK will automatically execute the function with the model's arguments -and feed the result back to the model. This enables autonomous tool use loops. - -Can return any value - will be automatically stringified if needed. - -#### Parameters - -##### args - -`any` - -The arguments parsed from the model's tool call (validated against inputSchema) - -#### Returns - -`any` - -Result to send back to the model (validated against outputSchema if provided) - -#### Example - -```ts -execute: async (args) => { - const weather = await fetchWeather(args.location); - return weather; // Can return object or string -} -``` - -#### Inherited from - -[`Tool`](Tool.md).[`execute`](Tool.md#execute) - -*** - -### inputSchema? - -```ts -optional inputSchema: TInput; -``` - -Defined in: [types.ts:305](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L305) - -Zod schema describing the tool's input parameters. - -Defines the structure and types of arguments the tool accepts. -The model will generate arguments matching this schema. -The schema is converted to JSON Schema for LLM providers. - -#### See - -https://zod.dev/ - -#### Example - -```ts -import { z } from 'zod'; - -z.object({ - location: z.string().describe("City name or coordinates"), - unit: z.enum(["celsius", "fahrenheit"]).optional() -}) -``` - -#### Inherited from - -[`Tool`](Tool.md).[`inputSchema`](Tool.md#inputschema) - -*** - -### metadata? 
- -```ts -optional metadata: Record; -``` - -Defined in: [types.ts:348](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L348) - -Additional metadata for adapters or custom extensions - -#### Inherited from - -[`Tool`](Tool.md).[`metadata`](Tool.md#metadata) - -*** - -### name - -```ts -name: TName; -``` - -Defined in: [types.ts:276](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L276) - -Unique name of the tool (used by the model to call it). - -Should be descriptive and follow naming conventions (e.g., snake_case or camelCase). -Must be unique within the tools array. - -#### Example - -```ts -"get_weather", "search_database", "sendEmail" -``` - -#### Inherited from - -[`Tool`](Tool.md).[`name`](Tool.md#name) - -*** - -### needsApproval? - -```ts -optional needsApproval: boolean; -``` - -Defined in: [types.ts:345](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L345) - -If true, tool execution requires user approval before running. Works with both server and client tools. - -#### Inherited from - -[`Tool`](Tool.md).[`needsApproval`](Tool.md#needsapproval) - -*** - -### outputSchema? - -```ts -optional outputSchema: TOutput; -``` - -Defined in: [types.ts:323](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L323) - -Optional Zod schema for validating tool output. - -If provided, tool results will be validated against this schema before -being sent back to the model. This catches bugs in tool implementations -and ensures consistent output formatting. - -Note: This is client-side validation only - not sent to LLM providers. 
- -#### Example - -```ts -z.object({ - temperature: z.number(), - conditions: z.string(), - forecast: z.array(z.string()).optional() -}) -``` - -#### Inherited from - -[`Tool`](Tool.md).[`outputSchema`](Tool.md#outputschema) diff --git a/docs/reference/interfaces/StreamProcessorEvents.md b/docs/reference/interfaces/StreamProcessorEvents.md deleted file mode 100644 index 313e1987..00000000 --- a/docs/reference/interfaces/StreamProcessorEvents.md +++ /dev/null @@ -1,228 +0,0 @@ ---- -id: StreamProcessorEvents -title: StreamProcessorEvents ---- - -# Interface: StreamProcessorEvents - -Defined in: [stream/processor.ts:51](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L51) - -Events emitted by the StreamProcessor - -## Properties - -### onApprovalRequest()? - -```ts -optional onApprovalRequest: (args) => void; -``` - -Defined in: [stream/processor.ts:66](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L66) - -#### Parameters - -##### args - -###### approvalId - -`string` - -###### input - -`any` - -###### toolCallId - -`string` - -###### toolName - -`string` - -#### Returns - -`void` - -*** - -### onError()? - -```ts -optional onError: (error) => void; -``` - -Defined in: [stream/processor.ts:58](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L58) - -#### Parameters - -##### error - -`Error` - -#### Returns - -`void` - -*** - -### onMessagesChange()? - -```ts -optional onMessagesChange: (messages) => void; -``` - -Defined in: [stream/processor.ts:53](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L53) - -#### Parameters - -##### messages - -[`UIMessage`](UIMessage.md)[] - -#### Returns - -`void` - -*** - -### onStreamEnd()? 
- -```ts -optional onStreamEnd: (message) => void; -``` - -Defined in: [stream/processor.ts:57](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L57) - -#### Parameters - -##### message - -[`UIMessage`](UIMessage.md) - -#### Returns - -`void` - -*** - -### onStreamStart()? - -```ts -optional onStreamStart: () => void; -``` - -Defined in: [stream/processor.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L56) - -#### Returns - -`void` - -*** - -### onTextUpdate()? - -```ts -optional onTextUpdate: (messageId, content) => void; -``` - -Defined in: [stream/processor.ts:74](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L74) - -#### Parameters - -##### messageId - -`string` - -##### content - -`string` - -#### Returns - -`void` - -*** - -### onThinkingUpdate()? - -```ts -optional onThinkingUpdate: (messageId, content) => void; -``` - -Defined in: [stream/processor.ts:81](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L81) - -#### Parameters - -##### messageId - -`string` - -##### content - -`string` - -#### Returns - -`void` - -*** - -### onToolCall()? - -```ts -optional onToolCall: (args) => void; -``` - -Defined in: [stream/processor.ts:61](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L61) - -#### Parameters - -##### args - -###### input - -`any` - -###### toolCallId - -`string` - -###### toolName - -`string` - -#### Returns - -`void` - -*** - -### onToolCallStateChange()? 
- -```ts -optional onToolCallStateChange: (messageId, toolCallId, state, args) => void; -``` - -Defined in: [stream/processor.ts:75](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L75) - -#### Parameters - -##### messageId - -`string` - -##### toolCallId - -`string` - -##### state - -[`ToolCallState`](../type-aliases/ToolCallState.md) - -##### args - -`string` - -#### Returns - -`void` diff --git a/docs/reference/interfaces/StreamProcessorHandlers.md b/docs/reference/interfaces/StreamProcessorHandlers.md deleted file mode 100644 index c4e50f56..00000000 --- a/docs/reference/interfaces/StreamProcessorHandlers.md +++ /dev/null @@ -1,317 +0,0 @@ ---- -id: StreamProcessorHandlers -title: StreamProcessorHandlers ---- - -# Interface: StreamProcessorHandlers - -Defined in: [stream/processor.ts:88](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L88) - -Legacy handlers for backward compatibility -These are the old callback-style handlers - -## Properties - -### onApprovalRequested()? - -```ts -optional onApprovalRequested: (toolCallId, toolName, input, approvalId) => void; -``` - -Defined in: [stream/processor.ts:119](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L119) - -#### Parameters - -##### toolCallId - -`string` - -##### toolName - -`string` - -##### input - -`any` - -##### approvalId - -`string` - -#### Returns - -`void` - -*** - -### onError()? - -```ts -optional onError: (error) => void; -``` - -Defined in: [stream/processor.ts:133](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L133) - -#### Parameters - -##### error - -###### code? - -`string` - -###### message - -`string` - -#### Returns - -`void` - -*** - -### onStreamEnd()? - -```ts -optional onStreamEnd: (content, toolCalls?) 
=> void; -``` - -Defined in: [stream/processor.ts:132](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L132) - -#### Parameters - -##### content - -`string` - -##### toolCalls? - -[`ToolCall`](ToolCall.md)[] - -#### Returns - -`void` - -*** - -### onTextUpdate()? - -```ts -optional onTextUpdate: (content) => void; -``` - -Defined in: [stream/processor.ts:89](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L89) - -#### Parameters - -##### content - -`string` - -#### Returns - -`void` - -*** - -### onThinkingUpdate()? - -```ts -optional onThinkingUpdate: (content) => void; -``` - -Defined in: [stream/processor.ts:90](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L90) - -#### Parameters - -##### content - -`string` - -#### Returns - -`void` - -*** - -### onToolCallComplete()? - -```ts -optional onToolCallComplete: (index, id, name, args) => void; -``` - -Defined in: [stream/processor.ts:95](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L95) - -#### Parameters - -##### index - -`number` - -##### id - -`string` - -##### name - -`string` - -##### args - -`string` - -#### Returns - -`void` - -*** - -### onToolCallDelta()? - -```ts -optional onToolCallDelta: (index, args) => void; -``` - -Defined in: [stream/processor.ts:94](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L94) - -#### Parameters - -##### index - -`number` - -##### args - -`string` - -#### Returns - -`void` - -*** - -### onToolCallStart()? - -```ts -optional onToolCallStart: (index, id, name) => void; -``` - -Defined in: [stream/processor.ts:93](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L93) - -#### Parameters - -##### index - -`number` - -##### id - -`string` - -##### name - -`string` - -#### Returns - -`void` - -*** - -### onToolCallStateChange()? 
- -```ts -optional onToolCallStateChange: (index, id, name, state, args, parsedArgs?) => void; -``` - -Defined in: [stream/processor.ts:101](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L101) - -#### Parameters - -##### index - -`number` - -##### id - -`string` - -##### name - -`string` - -##### state - -[`ToolCallState`](../type-aliases/ToolCallState.md) - -##### args - -`string` - -##### parsedArgs? - -`any` - -#### Returns - -`void` - -*** - -### onToolInputAvailable()? - -```ts -optional onToolInputAvailable: (toolCallId, toolName, input) => void; -``` - -Defined in: [stream/processor.ts:125](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L125) - -#### Parameters - -##### toolCallId - -`string` - -##### toolName - -`string` - -##### input - -`any` - -#### Returns - -`void` - -*** - -### onToolResultStateChange()? - -```ts -optional onToolResultStateChange: (toolCallId, content, state, error?) => void; -``` - -Defined in: [stream/processor.ts:111](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L111) - -#### Parameters - -##### toolCallId - -`string` - -##### content - -`string` - -##### state - -[`ToolResultState`](../type-aliases/ToolResultState.md) - -##### error? - -`string` - -#### Returns - -`void` diff --git a/docs/reference/interfaces/StreamProcessorOptions.md b/docs/reference/interfaces/StreamProcessorOptions.md deleted file mode 100644 index ebe37672..00000000 --- a/docs/reference/interfaces/StreamProcessorOptions.md +++ /dev/null @@ -1,94 +0,0 @@ ---- -id: StreamProcessorOptions -title: StreamProcessorOptions ---- - -# Interface: StreamProcessorOptions - -Defined in: [stream/processor.ts:139](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L139) - -Options for StreamProcessor - -## Properties - -### chunkStrategy? 
- -```ts -optional chunkStrategy: ChunkStrategy; -``` - -Defined in: [stream/processor.ts:140](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L140) - -*** - -### events? - -```ts -optional events: StreamProcessorEvents; -``` - -Defined in: [stream/processor.ts:142](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L142) - -New event-driven handlers - -*** - -### handlers? - -```ts -optional handlers: StreamProcessorHandlers; -``` - -Defined in: [stream/processor.ts:144](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L144) - -Legacy callback handlers (for backward compatibility) - -*** - -### initialMessages? - -```ts -optional initialMessages: UIMessage[]; -``` - -Defined in: [stream/processor.ts:151](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L151) - -Initial messages to populate the processor - -*** - -### jsonParser? - -```ts -optional jsonParser: object; -``` - -Defined in: [stream/processor.ts:145](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L145) - -#### parse() - -```ts -parse: (jsonString) => any; -``` - -##### Parameters - -###### jsonString - -`string` - -##### Returns - -`any` - -*** - -### recording? 
- -```ts -optional recording: boolean; -``` - -Defined in: [stream/processor.ts:149](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L149) - -Enable recording for replay testing diff --git a/docs/reference/interfaces/SummarizationOptions.md b/docs/reference/interfaces/SummarizationOptions.md deleted file mode 100644 index 3007ea69..00000000 --- a/docs/reference/interfaces/SummarizationOptions.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -id: SummarizationOptions -title: SummarizationOptions ---- - -# Interface: SummarizationOptions - -Defined in: [types.ts:625](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L625) - -## Properties - -### focus? - -```ts -optional focus: string[]; -``` - -Defined in: [types.ts:630](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L630) - -*** - -### maxLength? - -```ts -optional maxLength: number; -``` - -Defined in: [types.ts:628](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L628) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:626](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L626) - -*** - -### style? 
- -```ts -optional style: "bullet-points" | "paragraph" | "concise"; -``` - -Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) - -*** - -### text - -```ts -text: string; -``` - -Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) diff --git a/docs/reference/interfaces/SummarizationResult.md b/docs/reference/interfaces/SummarizationResult.md deleted file mode 100644 index fa025fe6..00000000 --- a/docs/reference/interfaces/SummarizationResult.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -id: SummarizationResult -title: SummarizationResult ---- - -# Interface: SummarizationResult - -Defined in: [types.ts:633](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L633) - -## Properties - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:634](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L634) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:635](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L635) - -*** - -### summary - -```ts -summary: string; -``` - -Defined in: [types.ts:636](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L636) - -*** - -### usage - -```ts -usage: object; -``` - -Defined in: [types.ts:637](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L637) - -#### completionTokens - -```ts -completionTokens: number; -``` - -#### promptTokens - -```ts -promptTokens: number; -``` - -#### totalTokens - -```ts -totalTokens: number; -``` diff --git a/docs/reference/interfaces/TextPart.md b/docs/reference/interfaces/TextPart.md deleted file mode 100644 index 34df956e..00000000 --- a/docs/reference/interfaces/TextPart.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -id: TextPart -title: TextPart ---- - -# Interface: TextPart\ - -Defined in: 
[types.ts:179](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L179) - -Message parts - building blocks of UIMessage - -## Type Parameters - -### TMetadata - -`TMetadata` = `unknown` - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [types.ts:181](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L181) - -*** - -### metadata? - -```ts -optional metadata: TMetadata; -``` - -Defined in: [types.ts:182](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L182) - -*** - -### type - -```ts -type: "text"; -``` - -Defined in: [types.ts:180](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L180) diff --git a/docs/reference/interfaces/ThinkingPart.md b/docs/reference/interfaces/ThinkingPart.md deleted file mode 100644 index 2b226b21..00000000 --- a/docs/reference/interfaces/ThinkingPart.md +++ /dev/null @@ -1,28 +0,0 @@ ---- -id: ThinkingPart -title: ThinkingPart ---- - -# Interface: ThinkingPart - -Defined in: [types.ts:209](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L209) - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [types.ts:211](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L211) - -*** - -### type - -```ts -type: "thinking"; -``` - -Defined in: [types.ts:210](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L210) diff --git a/docs/reference/interfaces/ThinkingStreamChunk.md b/docs/reference/interfaces/ThinkingStreamChunk.md deleted file mode 100644 index 05828450..00000000 --- a/docs/reference/interfaces/ThinkingStreamChunk.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -id: ThinkingStreamChunk -title: ThinkingStreamChunk ---- - -# Interface: ThinkingStreamChunk - -Defined in: [types.ts:591](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L591) - -## Extends - -- 
[`BaseStreamChunk`](BaseStreamChunk.md) - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [types.ts:594](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L594) - -*** - -### delta? - -```ts -optional delta: string; -``` - -Defined in: [types.ts:593](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L593) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) - -*** - -### type - -```ts -type: "thinking"; -``` - -Defined in: [types.ts:592](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L592) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) diff --git a/docs/reference/interfaces/Tool.md b/docs/reference/interfaces/Tool.md deleted file mode 100644 index 3841b042..00000000 --- a/docs/reference/interfaces/Tool.md +++ /dev/null @@ -1,204 +0,0 @@ ---- -id: Tool -title: Tool ---- - -# Interface: Tool\ - -Defined in: [types.ts:263](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L263) - -Tool/Function definition for function calling. - -Tools allow the model to interact with external systems, APIs, or perform computations. 
-The model will decide when to call tools based on the user's request and the tool descriptions. - -Tools use Zod schemas for runtime validation and type safety. - -## See - - - https://platform.openai.com/docs/guides/function-calling - - https://docs.anthropic.com/claude/docs/tool-use - -## Extended by - -- [`ToolDefinitionInstance`](ToolDefinitionInstance.md) -- [`ServerTool`](ServerTool.md) - -## Type Parameters - -### TInput - -`TInput` *extends* `z.ZodType` = `z.ZodType` - -### TOutput - -`TOutput` *extends* `z.ZodType` = `z.ZodType` - -### TName - -`TName` *extends* `string` = `string` - -## Properties - -### description - -```ts -description: string; -``` - -Defined in: [types.ts:286](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L286) - -Clear description of what the tool does. - -This is crucial - the model uses this to decide when to call the tool. -Be specific about what the tool does, what parameters it needs, and what it returns. - -#### Example - -```ts -"Get the current weather in a given location. Returns temperature, conditions, and forecast." -``` - -*** - -### execute()? - -```ts -optional execute: (args) => any; -``` - -Defined in: [types.ts:342](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L342) - -Optional function to execute when the model calls this tool. - -If provided, the SDK will automatically execute the function with the model's arguments -and feed the result back to the model. This enables autonomous tool use loops. - -Can return any value - will be automatically stringified if needed. 
- -#### Parameters - -##### args - -`any` - -The arguments parsed from the model's tool call (validated against inputSchema) - -#### Returns - -`any` - -Result to send back to the model (validated against outputSchema if provided) - -#### Example - -```ts -execute: async (args) => { - const weather = await fetchWeather(args.location); - return weather; // Can return object or string -} -``` - -*** - -### inputSchema? - -```ts -optional inputSchema: TInput; -``` - -Defined in: [types.ts:305](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L305) - -Zod schema describing the tool's input parameters. - -Defines the structure and types of arguments the tool accepts. -The model will generate arguments matching this schema. -The schema is converted to JSON Schema for LLM providers. - -#### See - -https://zod.dev/ - -#### Example - -```ts -import { z } from 'zod'; - -z.object({ - location: z.string().describe("City name or coordinates"), - unit: z.enum(["celsius", "fahrenheit"]).optional() -}) -``` - -*** - -### metadata? - -```ts -optional metadata: Record; -``` - -Defined in: [types.ts:348](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L348) - -Additional metadata for adapters or custom extensions - -*** - -### name - -```ts -name: TName; -``` - -Defined in: [types.ts:276](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L276) - -Unique name of the tool (used by the model to call it). - -Should be descriptive and follow naming conventions (e.g., snake_case or camelCase). -Must be unique within the tools array. - -#### Example - -```ts -"get_weather", "search_database", "sendEmail" -``` - -*** - -### needsApproval? - -```ts -optional needsApproval: boolean; -``` - -Defined in: [types.ts:345](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L345) - -If true, tool execution requires user approval before running. Works with both server and client tools. 
- -*** - -### outputSchema? - -```ts -optional outputSchema: TOutput; -``` - -Defined in: [types.ts:323](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L323) - -Optional Zod schema for validating tool output. - -If provided, tool results will be validated against this schema before -being sent back to the model. This catches bugs in tool implementations -and ensures consistent output formatting. - -Note: This is client-side validation only - not sent to LLM providers. - -#### Example - -```ts -z.object({ - temperature: z.number(), - conditions: z.string(), - forecast: z.array(z.string()).optional() -}) -``` diff --git a/docs/reference/interfaces/ToolCall.md b/docs/reference/interfaces/ToolCall.md deleted file mode 100644 index ac9a60d0..00000000 --- a/docs/reference/interfaces/ToolCall.md +++ /dev/null @@ -1,50 +0,0 @@ ---- -id: ToolCall -title: ToolCall ---- - -# Interface: ToolCall - -Defined in: [types.ts:5](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L5) - -## Properties - -### function - -```ts -function: object; -``` - -Defined in: [types.ts:8](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L8) - -#### arguments - -```ts -arguments: string; -``` - -#### name - -```ts -name: string; -``` - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:6](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L6) - -*** - -### type - -```ts -type: "function"; -``` - -Defined in: [types.ts:7](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L7) diff --git a/docs/reference/interfaces/ToolCallPart.md b/docs/reference/interfaces/ToolCallPart.md deleted file mode 100644 index 66052c25..00000000 --- a/docs/reference/interfaces/ToolCallPart.md +++ /dev/null @@ -1,100 +0,0 @@ ---- -id: ToolCallPart -title: ToolCallPart ---- - -# Interface: ToolCallPart - -Defined in: 
[types.ts:185](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L185) - -## Properties - -### approval? - -```ts -optional approval: object; -``` - -Defined in: [types.ts:192](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L192) - -Approval metadata if tool requires user approval - -#### approved? - -```ts -optional approved: boolean; -``` - -#### id - -```ts -id: string; -``` - -#### needsApproval - -```ts -needsApproval: boolean; -``` - -*** - -### arguments - -```ts -arguments: string; -``` - -Defined in: [types.ts:189](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L189) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:187](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L187) - -*** - -### name - -```ts -name: string; -``` - -Defined in: [types.ts:188](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L188) - -*** - -### output? 
- -```ts -optional output: any; -``` - -Defined in: [types.ts:198](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L198) - -Tool execution output (for client tools or after approval) - -*** - -### state - -```ts -state: ToolCallState; -``` - -Defined in: [types.ts:190](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L190) - -*** - -### type - -```ts -type: "tool-call"; -``` - -Defined in: [types.ts:186](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L186) diff --git a/docs/reference/interfaces/ToolCallStreamChunk.md b/docs/reference/interfaces/ToolCallStreamChunk.md deleted file mode 100644 index 85be420d..00000000 --- a/docs/reference/interfaces/ToolCallStreamChunk.md +++ /dev/null @@ -1,118 +0,0 @@ ---- -id: ToolCallStreamChunk -title: ToolCallStreamChunk ---- - -# Interface: ToolCallStreamChunk - -Defined in: [types.ts:536](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L536) - -## Extends - -- [`BaseStreamChunk`](BaseStreamChunk.md) - -## Properties - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) - -*** - -### index - -```ts -index: number; -``` - -Defined in: [types.ts:546](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L546) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -#### Inherited from - 
-[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) - -*** - -### toolCall - -```ts -toolCall: object; -``` - -Defined in: [types.ts:538](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L538) - -#### function - -```ts -function: object; -``` - -##### function.arguments - -```ts -arguments: string; -``` - -##### function.name - -```ts -name: string; -``` - -#### id - -```ts -id: string; -``` - -#### type - -```ts -type: "function"; -``` - -*** - -### type - -```ts -type: "tool_call"; -``` - -Defined in: [types.ts:537](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L537) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) diff --git a/docs/reference/interfaces/ToolConfig.md b/docs/reference/interfaces/ToolConfig.md deleted file mode 100644 index 4af8fcf8..00000000 --- a/docs/reference/interfaces/ToolConfig.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -id: ToolConfig -title: ToolConfig ---- - -# Interface: ToolConfig - -Defined in: [types.ts:351](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L351) - -## Indexable - -```ts -[key: string]: Tool>, ZodType>, string> -``` diff --git a/docs/reference/interfaces/ToolDefinition.md b/docs/reference/interfaces/ToolDefinition.md deleted file mode 100644 index ee4ccf6a..00000000 --- a/docs/reference/interfaces/ToolDefinition.md +++ /dev/null @@ -1,279 +0,0 @@ ---- -id: ToolDefinition -title: ToolDefinition ---- - -# Interface: ToolDefinition\ - -Defined in: [tools/tool-definition.ts:95](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L95) - -Tool definition builder that allows creating server or client tools from a shared definition - -## Extends - -- [`ToolDefinitionInstance`](ToolDefinitionInstance.md)\<`TInput`, `TOutput`, `TName`\> - -## Type Parameters - -### TInput - -`TInput` *extends* `z.ZodType` = `z.ZodType` - -### 
TOutput - -`TOutput` *extends* `z.ZodType` = `z.ZodType` - -### TName - -`TName` *extends* `string` = `string` - -## Properties - -### \_\_toolSide - -```ts -__toolSide: "definition"; -``` - -Defined in: [tools/tool-definition.ts:43](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L43) - -#### Inherited from - -[`ToolDefinitionInstance`](ToolDefinitionInstance.md).[`__toolSide`](ToolDefinitionInstance.md#__toolside) - -*** - -### client() - -```ts -client: (execute?) => ClientTool; -``` - -Defined in: [tools/tool-definition.ts:112](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L112) - -Create a client-side tool with optional execute function - -#### Parameters - -##### execute? - -(`args`) => `output`\<`TOutput`\> \| `Promise`\<`output`\<`TOutput`\>\> - -#### Returns - -[`ClientTool`](ClientTool.md)\<`TInput`, `TOutput`, `TName`\> - -*** - -### description - -```ts -description: string; -``` - -Defined in: [types.ts:286](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L286) - -Clear description of what the tool does. - -This is crucial - the model uses this to decide when to call the tool. -Be specific about what the tool does, what parameters it needs, and what it returns. - -#### Example - -```ts -"Get the current weather in a given location. Returns temperature, conditions, and forecast." -``` - -#### Inherited from - -[`ToolDefinitionInstance`](ToolDefinitionInstance.md).[`description`](ToolDefinitionInstance.md#description) - -*** - -### execute()? - -```ts -optional execute: (args) => any; -``` - -Defined in: [types.ts:342](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L342) - -Optional function to execute when the model calls this tool. - -If provided, the SDK will automatically execute the function with the model's arguments -and feed the result back to the model. This enables autonomous tool use loops. 
- -Can return any value - will be automatically stringified if needed. - -#### Parameters - -##### args - -`any` - -The arguments parsed from the model's tool call (validated against inputSchema) - -#### Returns - -`any` - -Result to send back to the model (validated against outputSchema if provided) - -#### Example - -```ts -execute: async (args) => { - const weather = await fetchWeather(args.location); - return weather; // Can return object or string -} -``` - -#### Inherited from - -[`ToolDefinitionInstance`](ToolDefinitionInstance.md).[`execute`](ToolDefinitionInstance.md#execute) - -*** - -### inputSchema? - -```ts -optional inputSchema: TInput; -``` - -Defined in: [types.ts:305](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L305) - -Zod schema describing the tool's input parameters. - -Defines the structure and types of arguments the tool accepts. -The model will generate arguments matching this schema. -The schema is converted to JSON Schema for LLM providers. - -#### See - -https://zod.dev/ - -#### Example - -```ts -import { z } from 'zod'; - -z.object({ - location: z.string().describe("City name or coordinates"), - unit: z.enum(["celsius", "fahrenheit"]).optional() -}) -``` - -#### Inherited from - -[`ToolDefinitionInstance`](ToolDefinitionInstance.md).[`inputSchema`](ToolDefinitionInstance.md#inputschema) - -*** - -### metadata? - -```ts -optional metadata: Record; -``` - -Defined in: [types.ts:348](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L348) - -Additional metadata for adapters or custom extensions - -#### Inherited from - -[`ToolDefinitionInstance`](ToolDefinitionInstance.md).[`metadata`](ToolDefinitionInstance.md#metadata) - -*** - -### name - -```ts -name: TName; -``` - -Defined in: [types.ts:276](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L276) - -Unique name of the tool (used by the model to call it). 
- -Should be descriptive and follow naming conventions (e.g., snake_case or camelCase). -Must be unique within the tools array. - -#### Example - -```ts -"get_weather", "search_database", "sendEmail" -``` - -#### Inherited from - -[`ToolDefinitionInstance`](ToolDefinitionInstance.md).[`name`](ToolDefinitionInstance.md#name) - -*** - -### needsApproval? - -```ts -optional needsApproval: boolean; -``` - -Defined in: [types.ts:345](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L345) - -If true, tool execution requires user approval before running. Works with both server and client tools. - -#### Inherited from - -[`ToolDefinitionInstance`](ToolDefinitionInstance.md).[`needsApproval`](ToolDefinitionInstance.md#needsapproval) - -*** - -### outputSchema? - -```ts -optional outputSchema: TOutput; -``` - -Defined in: [types.ts:323](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L323) - -Optional Zod schema for validating tool output. - -If provided, tool results will be validated against this schema before -being sent back to the model. This catches bugs in tool implementations -and ensures consistent output formatting. - -Note: This is client-side validation only - not sent to LLM providers. 
- -#### Example - -```ts -z.object({ - temperature: z.number(), - conditions: z.string(), - forecast: z.array(z.string()).optional() -}) -``` - -#### Inherited from - -[`ToolDefinitionInstance`](ToolDefinitionInstance.md).[`outputSchema`](ToolDefinitionInstance.md#outputschema) - -*** - -### server() - -```ts -server: (execute) => ServerTool; -``` - -Defined in: [tools/tool-definition.ts:103](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L103) - -Create a server-side tool with execute function - -#### Parameters - -##### execute - -(`args`) => `output`\<`TOutput`\> \| `Promise`\<`output`\<`TOutput`\>\> - -#### Returns - -[`ServerTool`](ServerTool.md)\<`TInput`, `TOutput`, `TName`\> diff --git a/docs/reference/interfaces/ToolDefinitionConfig.md b/docs/reference/interfaces/ToolDefinitionConfig.md deleted file mode 100644 index df55b556..00000000 --- a/docs/reference/interfaces/ToolDefinitionConfig.md +++ /dev/null @@ -1,84 +0,0 @@ ---- -id: ToolDefinitionConfig -title: ToolDefinitionConfig ---- - -# Interface: ToolDefinitionConfig\ - -Defined in: [tools/tool-definition.ts:79](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L79) - -Tool definition configuration - -## Type Parameters - -### TInput - -`TInput` *extends* `z.ZodType` = `z.ZodType` - -### TOutput - -`TOutput` *extends* `z.ZodType` = `z.ZodType` - -### TName - -`TName` *extends* `string` = `string` - -## Properties - -### description - -```ts -description: string; -``` - -Defined in: [tools/tool-definition.ts:85](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L85) - -*** - -### inputSchema? - -```ts -optional inputSchema: TInput; -``` - -Defined in: [tools/tool-definition.ts:86](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L86) - -*** - -### metadata? 
- -```ts -optional metadata: Record; -``` - -Defined in: [tools/tool-definition.ts:89](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L89) - -*** - -### name - -```ts -name: TName; -``` - -Defined in: [tools/tool-definition.ts:84](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L84) - -*** - -### needsApproval? - -```ts -optional needsApproval: boolean; -``` - -Defined in: [tools/tool-definition.ts:88](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L88) - -*** - -### outputSchema? - -```ts -optional outputSchema: TOutput; -``` - -Defined in: [tools/tool-definition.ts:87](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L87) diff --git a/docs/reference/interfaces/ToolDefinitionInstance.md b/docs/reference/interfaces/ToolDefinitionInstance.md deleted file mode 100644 index 6884845c..00000000 --- a/docs/reference/interfaces/ToolDefinitionInstance.md +++ /dev/null @@ -1,235 +0,0 @@ ---- -id: ToolDefinitionInstance -title: ToolDefinitionInstance ---- - -# Interface: ToolDefinitionInstance\ - -Defined in: [tools/tool-definition.ts:38](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L38) - -Tool definition that can be used directly or instantiated for server/client - -## Extends - -- [`Tool`](Tool.md)\<`TInput`, `TOutput`, `TName`\> - -## Extended by - -- [`ToolDefinition`](ToolDefinition.md) - -## Type Parameters - -### TInput - -`TInput` *extends* `z.ZodType` = `z.ZodType` - -### TOutput - -`TOutput` *extends* `z.ZodType` = `z.ZodType` - -### TName - -`TName` *extends* `string` = `string` - -## Properties - -### \_\_toolSide - -```ts -__toolSide: "definition"; -``` - -Defined in: [tools/tool-definition.ts:43](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L43) - -*** - -### description - -```ts 
-description: string; -``` - -Defined in: [types.ts:286](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L286) - -Clear description of what the tool does. - -This is crucial - the model uses this to decide when to call the tool. -Be specific about what the tool does, what parameters it needs, and what it returns. - -#### Example - -```ts -"Get the current weather in a given location. Returns temperature, conditions, and forecast." -``` - -#### Inherited from - -[`Tool`](Tool.md).[`description`](Tool.md#description) - -*** - -### execute()? - -```ts -optional execute: (args) => any; -``` - -Defined in: [types.ts:342](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L342) - -Optional function to execute when the model calls this tool. - -If provided, the SDK will automatically execute the function with the model's arguments -and feed the result back to the model. This enables autonomous tool use loops. - -Can return any value - will be automatically stringified if needed. - -#### Parameters - -##### args - -`any` - -The arguments parsed from the model's tool call (validated against inputSchema) - -#### Returns - -`any` - -Result to send back to the model (validated against outputSchema if provided) - -#### Example - -```ts -execute: async (args) => { - const weather = await fetchWeather(args.location); - return weather; // Can return object or string -} -``` - -#### Inherited from - -[`Tool`](Tool.md).[`execute`](Tool.md#execute) - -*** - -### inputSchema? - -```ts -optional inputSchema: TInput; -``` - -Defined in: [types.ts:305](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L305) - -Zod schema describing the tool's input parameters. - -Defines the structure and types of arguments the tool accepts. -The model will generate arguments matching this schema. -The schema is converted to JSON Schema for LLM providers. 
- -#### See - -https://zod.dev/ - -#### Example - -```ts -import { z } from 'zod'; - -z.object({ - location: z.string().describe("City name or coordinates"), - unit: z.enum(["celsius", "fahrenheit"]).optional() -}) -``` - -#### Inherited from - -[`Tool`](Tool.md).[`inputSchema`](Tool.md#inputschema) - -*** - -### metadata? - -```ts -optional metadata: Record; -``` - -Defined in: [types.ts:348](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L348) - -Additional metadata for adapters or custom extensions - -#### Inherited from - -[`Tool`](Tool.md).[`metadata`](Tool.md#metadata) - -*** - -### name - -```ts -name: TName; -``` - -Defined in: [types.ts:276](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L276) - -Unique name of the tool (used by the model to call it). - -Should be descriptive and follow naming conventions (e.g., snake_case or camelCase). -Must be unique within the tools array. - -#### Example - -```ts -"get_weather", "search_database", "sendEmail" -``` - -#### Inherited from - -[`Tool`](Tool.md).[`name`](Tool.md#name) - -*** - -### needsApproval? - -```ts -optional needsApproval: boolean; -``` - -Defined in: [types.ts:345](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L345) - -If true, tool execution requires user approval before running. Works with both server and client tools. - -#### Inherited from - -[`Tool`](Tool.md).[`needsApproval`](Tool.md#needsapproval) - -*** - -### outputSchema? - -```ts -optional outputSchema: TOutput; -``` - -Defined in: [types.ts:323](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L323) - -Optional Zod schema for validating tool output. - -If provided, tool results will be validated against this schema before -being sent back to the model. This catches bugs in tool implementations -and ensures consistent output formatting. - -Note: This is client-side validation only - not sent to LLM providers. 
- -#### Example - -```ts -z.object({ - temperature: z.number(), - conditions: z.string(), - forecast: z.array(z.string()).optional() -}) -``` - -#### Inherited from - -[`Tool`](Tool.md).[`outputSchema`](Tool.md#outputschema) diff --git a/docs/reference/interfaces/ToolInputAvailableStreamChunk.md b/docs/reference/interfaces/ToolInputAvailableStreamChunk.md deleted file mode 100644 index a22783a7..00000000 --- a/docs/reference/interfaces/ToolInputAvailableStreamChunk.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -id: ToolInputAvailableStreamChunk -title: ToolInputAvailableStreamChunk ---- - -# Interface: ToolInputAvailableStreamChunk - -Defined in: [types.ts:584](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L584) - -## Extends - -- [`BaseStreamChunk`](BaseStreamChunk.md) - -## Properties - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) - -*** - -### input - -```ts -input: any; -``` - -Defined in: [types.ts:588](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L588) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) - -*** - -### toolCallId - -```ts -toolCallId: string; -``` - -Defined in: [types.ts:586](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L586) - -*** - -### toolName - -```ts -toolName: string; 
-``` - -Defined in: [types.ts:587](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L587) - -*** - -### type - -```ts -type: "tool-input-available"; -``` - -Defined in: [types.ts:585](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L585) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) diff --git a/docs/reference/interfaces/ToolResultPart.md b/docs/reference/interfaces/ToolResultPart.md deleted file mode 100644 index 35204171..00000000 --- a/docs/reference/interfaces/ToolResultPart.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -id: ToolResultPart -title: ToolResultPart ---- - -# Interface: ToolResultPart - -Defined in: [types.ts:201](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L201) - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [types.ts:204](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L204) - -*** - -### error? 
- -```ts -optional error: string; -``` - -Defined in: [types.ts:206](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L206) - -*** - -### state - -```ts -state: ToolResultState; -``` - -Defined in: [types.ts:205](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L205) - -*** - -### toolCallId - -```ts -toolCallId: string; -``` - -Defined in: [types.ts:203](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L203) - -*** - -### type - -```ts -type: "tool-result"; -``` - -Defined in: [types.ts:202](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L202) diff --git a/docs/reference/interfaces/ToolResultStreamChunk.md b/docs/reference/interfaces/ToolResultStreamChunk.md deleted file mode 100644 index c6911e59..00000000 --- a/docs/reference/interfaces/ToolResultStreamChunk.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -id: ToolResultStreamChunk -title: ToolResultStreamChunk ---- - -# Interface: ToolResultStreamChunk - -Defined in: [types.ts:549](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L549) - -## Extends - -- [`BaseStreamChunk`](BaseStreamChunk.md) - -## Properties - -### content - -```ts -content: string; -``` - -Defined in: [types.ts:552](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L552) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:524](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L524) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:525](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L525) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: 
[types.ts:526](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L526) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) - -*** - -### toolCallId - -```ts -toolCallId: string; -``` - -Defined in: [types.ts:551](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L551) - -*** - -### type - -```ts -type: "tool_result"; -``` - -Defined in: [types.ts:550](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L550) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) diff --git a/docs/reference/interfaces/UIMessage.md b/docs/reference/interfaces/UIMessage.md deleted file mode 100644 index 8bbe7522..00000000 --- a/docs/reference/interfaces/UIMessage.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -id: UIMessage -title: UIMessage ---- - -# Interface: UIMessage - -Defined in: [types.ts:224](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L224) - -UIMessage - Domain-specific message format optimized for building chat UIs -Contains parts that can be text, tool calls, or tool results - -## Properties - -### createdAt? 
- -```ts -optional createdAt: Date; -``` - -Defined in: [types.ts:228](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L228) - -*** - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:225](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L225) - -*** - -### parts - -```ts -parts: MessagePart[]; -``` - -Defined in: [types.ts:227](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L227) - -*** - -### role - -```ts -role: "user" | "assistant" | "system"; -``` - -Defined in: [types.ts:226](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L226) diff --git a/docs/reference/interfaces/VideoPart.md b/docs/reference/interfaces/VideoPart.md deleted file mode 100644 index 8768b04d..00000000 --- a/docs/reference/interfaces/VideoPart.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -id: VideoPart -title: VideoPart ---- - -# Interface: VideoPart\ - -Defined in: [types.ts:75](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L75) - -Video content part for multimodal messages. - -## Type Parameters - -### TMetadata - -`TMetadata` = `unknown` - -Provider-specific metadata type - -## Properties - -### metadata? 
- -```ts -optional metadata: TMetadata; -``` - -Defined in: [types.ts:80](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L80) - -Provider-specific metadata (e.g., duration, resolution) - -*** - -### source - -```ts -source: ContentPartSource; -``` - -Defined in: [types.ts:78](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L78) - -Source of the video content - -*** - -### type - -```ts -type: "video"; -``` - -Defined in: [types.ts:76](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L76) diff --git a/docs/reference/type-aliases/AgentLoopStrategy.md b/docs/reference/type-aliases/AgentLoopStrategy.md deleted file mode 100644 index c16fe5be..00000000 --- a/docs/reference/type-aliases/AgentLoopStrategy.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -id: AgentLoopStrategy -title: AgentLoopStrategy ---- - -# Type Alias: AgentLoopStrategy() - -```ts -type AgentLoopStrategy = (state) => boolean; -``` - -Defined in: [types.ts:471](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L471) - -Strategy function that determines whether the agent loop should continue - -## Parameters - -### state - -[`AgentLoopState`](../interfaces/AgentLoopState.md) - -Current state of the agent loop - -## Returns - -`boolean` - -true to continue looping, false to stop - -## Example - -```typescript -// Continue for up to 5 iterations -const strategy: AgentLoopStrategy = ({ iterationCount }) => iterationCount < 5; -``` diff --git a/docs/reference/type-aliases/AnyClientTool.md b/docs/reference/type-aliases/AnyClientTool.md deleted file mode 100644 index 4f395412..00000000 --- a/docs/reference/type-aliases/AnyClientTool.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -id: AnyClientTool -title: AnyClientTool ---- - -# Type Alias: AnyClientTool - -```ts -type AnyClientTool = - | ClientTool -| ToolDefinitionInstance; -``` - -Defined in: 
[tools/tool-definition.ts:49](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L49) - -Union type for any kind of client-side tool (client tool or definition) diff --git a/docs/reference/type-aliases/ChatStreamOptionsForModel.md b/docs/reference/type-aliases/ChatStreamOptionsForModel.md deleted file mode 100644 index 48bad9c2..00000000 --- a/docs/reference/type-aliases/ChatStreamOptionsForModel.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -id: ChatStreamOptionsForModel -title: ChatStreamOptionsForModel ---- - -# Type Alias: ChatStreamOptionsForModel\ - -```ts -type ChatStreamOptionsForModel = TAdapter extends AIAdapter ? Omit & object : never; -``` - -Defined in: [types.ts:811](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L811) - -Chat options constrained by a specific model's capabilities. -Unlike ChatStreamOptionsUnion which creates a union over all models, -this type takes a specific model and constrains messages accordingly. - -## Type Parameters - -### TAdapter - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `any`, `any`\> - -### TModel - -`TModel` *extends* `string` diff --git a/docs/reference/type-aliases/ChatStreamOptionsUnion.md b/docs/reference/type-aliases/ChatStreamOptionsUnion.md deleted file mode 100644 index e35ec3cd..00000000 --- a/docs/reference/type-aliases/ChatStreamOptionsUnion.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -id: ChatStreamOptionsUnion -title: ChatStreamOptionsUnion ---- - -# Type Alias: ChatStreamOptionsUnion\ - -```ts -type ChatStreamOptionsUnion = TAdapter extends AIAdapter ? Models[number] extends infer TModel ? TModel extends string ? 
Omit & object : never : never : never; -``` - -Defined in: [types.ts:751](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L751) - -## Type Parameters - -### TAdapter - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `any`, `any`\> diff --git a/docs/reference/type-aliases/ConstrainedContent.md b/docs/reference/type-aliases/ConstrainedContent.md deleted file mode 100644 index 781ee84d..00000000 --- a/docs/reference/type-aliases/ConstrainedContent.md +++ /dev/null @@ -1,44 +0,0 @@ ---- -id: ConstrainedContent -title: ConstrainedContent ---- - -# Type Alias: ConstrainedContent\ - -```ts -type ConstrainedContent = - | string - | null - | ContentPartForModalities, TImageMeta, TAudioMeta, TVideoMeta, TDocumentMeta, TTextMeta>[]; -``` - -Defined in: [types.ts:142](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L142) - -Type for message content constrained by supported modalities. -When modalities is ['text', 'image'], only TextPart and ImagePart are allowed in the array. 
- -## Type Parameters - -### TModalities - -`TModalities` *extends* `ReadonlyArray`\<[`Modality`](Modality.md)\> - -### TImageMeta - -`TImageMeta` = `unknown` - -### TAudioMeta - -`TAudioMeta` = `unknown` - -### TVideoMeta - -`TVideoMeta` = `unknown` - -### TDocumentMeta - -`TDocumentMeta` = `unknown` - -### TTextMeta - -`TTextMeta` = `unknown` diff --git a/docs/reference/type-aliases/ConstrainedModelMessage.md b/docs/reference/type-aliases/ConstrainedModelMessage.md deleted file mode 100644 index c67448d9..00000000 --- a/docs/reference/type-aliases/ConstrainedModelMessage.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -id: ConstrainedModelMessage -title: ConstrainedModelMessage ---- - -# Type Alias: ConstrainedModelMessage\ - -```ts -type ConstrainedModelMessage = Omit & object; -``` - -Defined in: [types.ts:234](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L234) - -A ModelMessage with content constrained to only allow content parts -matching the specified input modalities. 
- -## Type Declaration - -### content - -```ts -content: ConstrainedContent; -``` - -## Type Parameters - -### TModalities - -`TModalities` *extends* `ReadonlyArray`\<[`Modality`](Modality.md)\> - -### TImageMeta - -`TImageMeta` = `unknown` - -### TAudioMeta - -`TAudioMeta` = `unknown` - -### TVideoMeta - -`TVideoMeta` = `unknown` - -### TDocumentMeta - -`TDocumentMeta` = `unknown` - -### TTextMeta - -`TTextMeta` = `unknown` diff --git a/docs/reference/type-aliases/ContentPart.md b/docs/reference/type-aliases/ContentPart.md deleted file mode 100644 index 9ebc9aa4..00000000 --- a/docs/reference/type-aliases/ContentPart.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -id: ContentPart -title: ContentPart ---- - -# Type Alias: ContentPart\ - -```ts -type ContentPart = - | TextPart - | ImagePart - | AudioPart - | VideoPart -| DocumentPart; -``` - -Defined in: [types.ts:102](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L102) - -Union type for all multimodal content parts. 
- -## Type Parameters - -### TImageMeta - -`TImageMeta` = `unknown` - -Provider-specific image metadata type - -### TAudioMeta - -`TAudioMeta` = `unknown` - -Provider-specific audio metadata type - -### TVideoMeta - -`TVideoMeta` = `unknown` - -Provider-specific video metadata type - -### TDocumentMeta - -`TDocumentMeta` = `unknown` - -Provider-specific document metadata type - -### TTextMeta - -`TTextMeta` = `unknown` diff --git a/docs/reference/type-aliases/ContentPartForModalities.md b/docs/reference/type-aliases/ContentPartForModalities.md deleted file mode 100644 index 648c53f5..00000000 --- a/docs/reference/type-aliases/ContentPartForModalities.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -id: ContentPartForModalities -title: ContentPartForModalities ---- - -# Type Alias: ContentPartForModalities\ - -```ts -type ContentPartForModalities = Extract, { - type: TModalities; -}>; -``` - -Defined in: [types.ts:119](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L119) - -Helper type to filter ContentPart union to only include specific modalities. -Used to constrain message content based on model capabilities. - -## Type Parameters - -### TModalities - -`TModalities` *extends* [`Modality`](Modality.md) - -### TImageMeta - -`TImageMeta` = `unknown` - -### TAudioMeta - -`TAudioMeta` = `unknown` - -### TVideoMeta - -`TVideoMeta` = `unknown` - -### TDocumentMeta - -`TDocumentMeta` = `unknown` - -### TTextMeta - -`TTextMeta` = `unknown` diff --git a/docs/reference/type-aliases/ExtractModalitiesForModel.md b/docs/reference/type-aliases/ExtractModalitiesForModel.md deleted file mode 100644 index fe6e500e..00000000 --- a/docs/reference/type-aliases/ExtractModalitiesForModel.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -id: ExtractModalitiesForModel -title: ExtractModalitiesForModel ---- - -# Type Alias: ExtractModalitiesForModel\ - -```ts -type ExtractModalitiesForModel = TAdapter extends AIAdapter ? TModel extends keyof ModelInputModalities ? 
ModelInputModalities[TModel] : ReadonlyArray : ReadonlyArray; -``` - -Defined in: [types.ts:870](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L870) - -Extract the supported input modalities for a specific model from an adapter. - -## Type Parameters - -### TAdapter - -`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `any`\> - -### TModel - -`TModel` *extends* `string` diff --git a/docs/reference/type-aliases/ExtractModelsFromAdapter.md b/docs/reference/type-aliases/ExtractModelsFromAdapter.md deleted file mode 100644 index a35e672f..00000000 --- a/docs/reference/type-aliases/ExtractModelsFromAdapter.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -id: ExtractModelsFromAdapter -title: ExtractModelsFromAdapter ---- - -# Type Alias: ExtractModelsFromAdapter\ - -```ts -type ExtractModelsFromAdapter = T extends AIAdapter ? M[number] : never; -``` - -Defined in: [types.ts:864](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L864) - -## Type Parameters - -### T - -`T` diff --git a/docs/reference/type-aliases/InferToolInput.md b/docs/reference/type-aliases/InferToolInput.md deleted file mode 100644 index 0a3cc815..00000000 --- a/docs/reference/type-aliases/InferToolInput.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -id: InferToolInput -title: InferToolInput ---- - -# Type Alias: InferToolInput\ - -```ts -type InferToolInput = T extends object ? TInput extends z.ZodType ? 
z.infer : any : any; -``` - -Defined in: [tools/tool-definition.ts:61](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L61) - -Extract the input type from a tool (inferred from Zod schema) - -## Type Parameters - -### T - -`T` diff --git a/docs/reference/type-aliases/InferToolName.md b/docs/reference/type-aliases/InferToolName.md deleted file mode 100644 index 25b0aa93..00000000 --- a/docs/reference/type-aliases/InferToolName.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -id: InferToolName -title: InferToolName ---- - -# Type Alias: InferToolName\ - -```ts -type InferToolName = T extends object ? N : never; -``` - -Defined in: [tools/tool-definition.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L56) - -Extract the tool name as a literal type - -## Type Parameters - -### T - -`T` diff --git a/docs/reference/type-aliases/InferToolOutput.md b/docs/reference/type-aliases/InferToolOutput.md deleted file mode 100644 index f88a7393..00000000 --- a/docs/reference/type-aliases/InferToolOutput.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -id: InferToolOutput -title: InferToolOutput ---- - -# Type Alias: InferToolOutput\ - -```ts -type InferToolOutput = T extends object ? TOutput extends z.ZodType ? 
z.infer : any : any; -``` - -Defined in: [tools/tool-definition.ts:70](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L70) - -Extract the output type from a tool (inferred from Zod schema) - -## Type Parameters - -### T - -`T` diff --git a/docs/reference/type-aliases/MessagePart.md b/docs/reference/type-aliases/MessagePart.md deleted file mode 100644 index ab63fa58..00000000 --- a/docs/reference/type-aliases/MessagePart.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -id: MessagePart -title: MessagePart ---- - -# Type Alias: MessagePart - -```ts -type MessagePart = - | TextPart - | ToolCallPart - | ToolResultPart - | ThinkingPart; -``` - -Defined in: [types.ts:214](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L214) diff --git a/docs/reference/type-aliases/ModalitiesArrayToUnion.md b/docs/reference/type-aliases/ModalitiesArrayToUnion.md deleted file mode 100644 index 6190668d..00000000 --- a/docs/reference/type-aliases/ModalitiesArrayToUnion.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -id: ModalitiesArrayToUnion -title: ModalitiesArrayToUnion ---- - -# Type Alias: ModalitiesArrayToUnion\ - -```ts -type ModalitiesArrayToUnion = T[number]; -``` - -Defined in: [types.ts:135](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L135) - -Helper type to convert a readonly array of modalities to a union type. 
-e.g., readonly ['text', 'image'] -> 'text' | 'image' - -## Type Parameters - -### T - -`T` *extends* `ReadonlyArray`\<[`Modality`](Modality.md)\> diff --git a/docs/reference/type-aliases/Modality.md b/docs/reference/type-aliases/Modality.md deleted file mode 100644 index 8c8e527e..00000000 --- a/docs/reference/type-aliases/Modality.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -id: Modality -title: Modality ---- - -# Type Alias: Modality - -```ts -type Modality = "text" | "image" | "audio" | "video" | "document"; -``` - -Defined in: [types.ts:26](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L26) - -Supported input modality types for multimodal content. -- 'text': Plain text content -- 'image': Image content (base64 or URL) -- 'audio': Audio content (base64 or URL) -- 'video': Video content (base64 or URL) -- 'document': Document content like PDFs (base64 or URL) diff --git a/docs/reference/type-aliases/StreamChunk.md b/docs/reference/type-aliases/StreamChunk.md deleted file mode 100644 index e809e93d..00000000 --- a/docs/reference/type-aliases/StreamChunk.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -id: StreamChunk -title: StreamChunk ---- - -# Type Alias: StreamChunk - -```ts -type StreamChunk = - | ContentStreamChunk - | ToolCallStreamChunk - | ToolResultStreamChunk - | DoneStreamChunk - | ErrorStreamChunk - | ApprovalRequestedStreamChunk - | ToolInputAvailableStreamChunk - | ThinkingStreamChunk; -``` - -Defined in: [types.ts:600](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L600) - -Chunk returned by the sdk during streaming chat completions. 
diff --git a/docs/reference/type-aliases/StreamChunkType.md b/docs/reference/type-aliases/StreamChunkType.md deleted file mode 100644 index 0587e96a..00000000 --- a/docs/reference/type-aliases/StreamChunkType.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -id: StreamChunkType -title: StreamChunkType ---- - -# Type Alias: StreamChunkType - -```ts -type StreamChunkType = - | "content" - | "tool_call" - | "tool_result" - | "done" - | "error" - | "approval-requested" - | "tool-input-available" - | "thinking"; -``` - -Defined in: [types.ts:512](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L512) diff --git a/docs/reference/type-aliases/ToolCallState.md b/docs/reference/type-aliases/ToolCallState.md deleted file mode 100644 index 7ff8e334..00000000 --- a/docs/reference/type-aliases/ToolCallState.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -id: ToolCallState -title: ToolCallState ---- - -# Type Alias: ToolCallState - -```ts -type ToolCallState = - | "awaiting-input" - | "input-streaming" - | "input-complete" - | "approval-requested" - | "approval-responded"; -``` - -Defined in: [stream/types.ts:13](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L13) - -Tool call states - track the lifecycle of a tool call diff --git a/docs/reference/type-aliases/ToolResultState.md b/docs/reference/type-aliases/ToolResultState.md deleted file mode 100644 index d9d29e6e..00000000 --- a/docs/reference/type-aliases/ToolResultState.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -id: ToolResultState -title: ToolResultState ---- - -# Type Alias: ToolResultState - -```ts -type ToolResultState = "streaming" | "complete" | "error"; -``` - -Defined in: [stream/types.ts:23](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L23) - -Tool result states - track the lifecycle of a tool result diff --git a/docs/reference/variables/aiEventClient.md b/docs/reference/variables/aiEventClient.md deleted file mode 100644 index 
b7d52031..00000000 --- a/docs/reference/variables/aiEventClient.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -id: aiEventClient -title: aiEventClient ---- - -# Variable: aiEventClient - -```ts -const aiEventClient: AiEventClient; -``` - -Defined in: [event-client.ts:387](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/event-client.ts#L387) diff --git a/docs/reference/variables/defaultJSONParser.md b/docs/reference/variables/defaultJSONParser.md deleted file mode 100644 index e5e32dad..00000000 --- a/docs/reference/variables/defaultJSONParser.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -id: defaultJSONParser -title: defaultJSONParser ---- - -# Variable: defaultJSONParser - -```ts -const defaultJSONParser: PartialJSONParser; -``` - -Defined in: [stream/json-parser.ts:49](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L49) - -Default parser instance diff --git a/packages/typescript/ai-devtools/src/store/ai-context.tsx b/packages/typescript/ai-devtools/src/store/ai-context.tsx index c63a7c1e..d3c3a051 100644 --- a/packages/typescript/ai-devtools/src/store/ai-context.tsx +++ b/packages/typescript/ai-devtools/src/store/ai-context.tsx @@ -1042,9 +1042,10 @@ export const AIProvider: ParentComponent = (props) => { type: 'tool_result', messageId: e.payload.messageId, toolCallId: e.payload.toolCallId, - content: typeof e.payload.result === 'string' - ? e.payload.result - : JSON.stringify(e.payload.result), + content: + typeof e.payload.result === 'string' + ? 
e.payload.result + : JSON.stringify(e.payload.result), timestamp: e.payload.timestamp, chunkCount: 1, } diff --git a/packages/typescript/ai/src/stream/processor.ts b/packages/typescript/ai/src/stream/processor.ts index d0585a05..ded6f66b 100644 --- a/packages/typescript/ai/src/stream/processor.ts +++ b/packages/typescript/ai/src/stream/processor.ts @@ -1244,11 +1244,18 @@ export class StreamProcessor { this.toolCallOrder.push(toolCallId) // Emit legacy lifecycle event - this.handlers.onToolCallStart?.(actualIndex, toolCallId, toolCall.function.name) + this.handlers.onToolCallStart?.( + actualIndex, + toolCallId, + toolCall.function.name, + ) // Emit legacy delta for initial arguments if (toolCall.function.arguments) { - this.handlers.onToolCallDelta?.(actualIndex, toolCall.function.arguments) + this.handlers.onToolCallDelta?.( + actualIndex, + toolCall.function.arguments, + ) } // Update UIMessage @@ -1291,13 +1298,15 @@ export class StreamProcessor { ) } else { // Update existing tool call arguments - existingToolCall.name = - existingToolCall.name || toolCall.function.name + existingToolCall.name = existingToolCall.name || toolCall.function.name existingToolCall.arguments += toolCall.function.arguments // Emit delta event for additional arguments if (toolCall.function.arguments) { - this.handlers.onToolCallDelta?.(existingToolCall.index, toolCall.function.arguments) + this.handlers.onToolCallDelta?.( + existingToolCall.index, + toolCall.function.arguments, + ) } } } diff --git a/packages/typescript/ai/src/types.ts b/packages/typescript/ai/src/types.ts index f2f7cc36..80c1b3d5 100644 --- a/packages/typescript/ai/src/types.ts +++ b/packages/typescript/ai/src/types.ts @@ -517,7 +517,7 @@ export interface ChatOptions< * AG-UI Protocol event types. * Based on the AG-UI specification for agent-user interaction. * @see https://docs.ag-ui.com/concepts/events - * + * * Includes legacy type aliases for backward compatibility during migration. 
*/ export type EventType = diff --git a/packages/typescript/ai/tests/ai-chat.test.ts b/packages/typescript/ai/tests/ai-chat.test.ts index 156a39de..44fae68f 100644 --- a/packages/typescript/ai/tests/ai-chat.test.ts +++ b/packages/typescript/ai/tests/ai-chat.test.ts @@ -455,7 +455,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect(tool.execute).toHaveBeenCalledWith({ location: 'Paris' }) expect(adapter.chatStreamCallCount).toBeGreaterThanOrEqual(2) - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks).toHaveLength(1) // Check events @@ -560,7 +562,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Tool should be executed with complete arguments expect(tool.execute).toHaveBeenCalledWith({ a: 10, b: 20 }) - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks.length).toBeGreaterThan(0) }) @@ -652,7 +656,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect(tool1.execute).toHaveBeenCalled() expect(tool2.execute).toHaveBeenCalled() - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks).toHaveLength(2) // Check iteration event @@ -950,7 +956,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result 
!== undefined, + ) expect(toolResultChunks).toHaveLength(1) const resultChunk = toolResultChunks[0] as any @@ -1103,7 +1111,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks).toHaveLength(1) const resultChunk = toolResultChunks[0] as any @@ -1156,7 +1166,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) // Should still produce a tool_result with error - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks.length).toBeGreaterThan(0) const resultChunk = toolResultChunks[0] as any @@ -1282,7 +1294,8 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) const inputChunks = chunks.filter( - (c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', + (c) => + c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', ) expect(inputChunks).toHaveLength(1) @@ -1387,7 +1400,8 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { (c) => c.type === 'CUSTOM' && (c as any).name === 'approval-requested', ) const inputChunks = chunks.filter( - (c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', + (c) => + c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', ) expect(approvalChunks.length + inputChunks.length).toBeGreaterThan(0) @@ -2446,7 +2460,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) expect(toolResultEvents.length).toBeGreaterThan(0) expect(toolResultEvents[0]?.data.toolCallId).toBe('call-previous') - expect(toolResultEvents[0]?.data.result).toEqual({ result: 'previous result' }) 
+ expect(toolResultEvents[0]?.data.result).toEqual({ + result: 'previous result', + }) }) }) @@ -2547,7 +2563,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }) const chunks1 = await collectChunks(stream1) - const approvalChunk = chunks1.find((c) => c.type === 'CUSTOM' && (c as any).name === 'approval-requested') + const approvalChunk = chunks1.find( + (c) => c.type === 'CUSTOM' && (c as any).name === 'approval-requested', + ) expect(approvalChunk).toBeDefined() // Second call - with approval response in message parts @@ -2670,7 +2688,10 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }) const chunks1 = await collectChunks(stream1) - const inputChunk = chunks1.find((c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available') + const inputChunk = chunks1.find( + (c) => + c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', + ) expect(inputChunk).toBeDefined() // Second call - with client tool output in message parts @@ -2969,7 +2990,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { const chunks = await collectChunks(stream) const toolCallChunks = chunks.filter((c) => c.type === 'tool_call') - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) // We should have received tool_call chunks expect(toolCallChunks.length).toBeGreaterThan(0) diff --git a/packages/typescript/smoke-tests/adapters/src/harness.ts b/packages/typescript/smoke-tests/adapters/src/harness.ts index 17cc3fca..5e96825c 100644 --- a/packages/typescript/smoke-tests/adapters/src/harness.ts +++ b/packages/typescript/smoke-tests/adapters/src/harness.ts @@ -227,7 +227,7 @@ export async function captureStream(opts: { } } else if (chunk.type === 'TOOL_CALL_END') { chunkData.toolCallId = chunk.toolCallId - + // Capture input/arguments from TOOL_CALL_END 
(OpenAI sends complete args here) if (chunk.input !== undefined) { const id = chunk.toolCallId @@ -236,7 +236,7 @@ export async function captureStream(opts: { existing.arguments = JSON.stringify(chunk.input) toolCallMap.set(id, existing) } - + // Update the assistant draft's tool call arguments if (assistantDraft) { const existingToolCall = assistantDraft.toolCalls?.find( @@ -247,7 +247,7 @@ export async function captureStream(opts: { } } } - + // Tool result is included in TOOL_CALL_END for server-executed tools if (chunk.result !== undefined) { const content = From 11e979e4118cc7c1c177c41d2dc401b1b39c685c Mon Sep 17 00:00:00 2001 From: Jack Herrington Date: Wed, 10 Dec 2025 08:40:55 -0800 Subject: [PATCH 4/7] merging main --- packages/typescript/ai/src/core/chat.ts | 5 +- packages/typescript/ai/tests/ai-chat.test.ts | 48 +++++++++++++++----- 2 files changed, 40 insertions(+), 13 deletions(-) diff --git a/packages/typescript/ai/src/core/chat.ts b/packages/typescript/ai/src/core/chat.ts index 9d5346d1..eb376274 100644 --- a/packages/typescript/ai/src/core/chat.ts +++ b/packages/typescript/ai/src/core/chat.ts @@ -319,7 +319,10 @@ class ChatEngine< streamId: this.streamId, messageId: this.currentMessageId || undefined, toolCallId: chunk.toolCallId, - result: chunk.result, + result: + typeof chunk.result === 'string' + ? 
chunk.result + : JSON.stringify(chunk.result), timestamp: Date.now(), }) } diff --git a/packages/typescript/ai/tests/ai-chat.test.ts b/packages/typescript/ai/tests/ai-chat.test.ts index 156a39de..75e0ed1f 100644 --- a/packages/typescript/ai/tests/ai-chat.test.ts +++ b/packages/typescript/ai/tests/ai-chat.test.ts @@ -455,7 +455,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect(tool.execute).toHaveBeenCalledWith({ location: 'Paris' }) expect(adapter.chatStreamCallCount).toBeGreaterThanOrEqual(2) - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks).toHaveLength(1) // Check events @@ -560,7 +562,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Tool should be executed with complete arguments expect(tool.execute).toHaveBeenCalledWith({ a: 10, b: 20 }) - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks.length).toBeGreaterThan(0) }) @@ -652,7 +656,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect(tool1.execute).toHaveBeenCalled() expect(tool2.execute).toHaveBeenCalled() - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks).toHaveLength(2) // Check iteration event @@ -950,7 +956,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => 
c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks).toHaveLength(1) const resultChunk = toolResultChunks[0] as any @@ -1103,7 +1111,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks).toHaveLength(1) const resultChunk = toolResultChunks[0] as any @@ -1156,7 +1166,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) // Should still produce a tool_result with error - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) expect(toolResultChunks.length).toBeGreaterThan(0) const resultChunk = toolResultChunks[0] as any @@ -1282,7 +1294,8 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) const inputChunks = chunks.filter( - (c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', + (c) => + c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', ) expect(inputChunks).toHaveLength(1) @@ -1387,7 +1400,8 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { (c) => c.type === 'CUSTOM' && (c as any).name === 'approval-requested', ) const inputChunks = chunks.filter( - (c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', + (c) => + c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', ) expect(approvalChunks.length + inputChunks.length).toBeGreaterThan(0) @@ -2407,6 +2421,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { yield { type: 'TEXT_MESSAGE_CONTENT', model: 'test-model', + messageId: 'msg-1', timestamp: Date.now(), delta: 'Using tool', content: 'Using tool', @@ 
-2446,7 +2461,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { ) expect(toolResultEvents.length).toBeGreaterThan(0) expect(toolResultEvents[0]?.data.toolCallId).toBe('call-previous') - expect(toolResultEvents[0]?.data.result).toEqual({ result: 'previous result' }) + expect(toolResultEvents[0]?.data.result).toEqual( + JSON.stringify({ result: 'previous result' }), + ) }) }) @@ -2547,7 +2564,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }) const chunks1 = await collectChunks(stream1) - const approvalChunk = chunks1.find((c) => c.type === 'CUSTOM' && (c as any).name === 'approval-requested') + const approvalChunk = chunks1.find( + (c) => c.type === 'CUSTOM' && (c as any).name === 'approval-requested', + ) expect(approvalChunk).toBeDefined() // Second call - with approval response in message parts @@ -2670,7 +2689,10 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }) const chunks1 = await collectChunks(stream1) - const inputChunk = chunks1.find((c) => c.type === 'CUSTOM' && (c as any).name === 'tool-input-available') + const inputChunk = chunks1.find( + (c) => + c.type === 'CUSTOM' && (c as any).name === 'tool-input-available', + ) expect(inputChunk).toBeDefined() // Second call - with client tool output in message parts @@ -2969,7 +2991,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { const chunks = await collectChunks(stream) const toolCallChunks = chunks.filter((c) => c.type === 'tool_call') - const toolResultChunks = chunks.filter((c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined) + const toolResultChunks = chunks.filter( + (c) => c.type === 'TOOL_CALL_END' && (c as any).result !== undefined, + ) // We should have received tool_call chunks expect(toolCallChunks.length).toBeGreaterThan(0) From 9b7af51ad052bf041184776c0622b7134be5458f Mon Sep 17 00:00:00 2001 From: Jack Herrington Date: Wed, 10 Dec 2025 08:49:44 -0800 Subject: [PATCH 5/7] Regenerate docs after merge 
Updates API reference documentation to reflect current exports --- docs/reference/classes/BaseAdapter.md | 310 +++++++++++++ docs/reference/classes/BatchStrategy.md | 87 ++++ docs/reference/classes/CompositeStrategy.md | 91 ++++ docs/reference/classes/ImmediateStrategy.md | 58 +++ docs/reference/classes/PartialJSONParser.md | 57 +++ docs/reference/classes/PunctuationStrategy.md | 61 +++ docs/reference/classes/StreamProcessor.md | 407 ++++++++++++++++++ docs/reference/classes/ToolCallManager.md | 100 ++++- .../reference/classes/WordBoundaryStrategy.md | 61 +++ docs/reference/functions/chat.md | 55 +++ docs/reference/functions/chatOptions.md | 32 ++ docs/reference/functions/combineStrategies.md | 44 ++ .../convertMessagesToModelMessages.md | 35 ++ .../reference/functions/createReplayStream.md | 24 ++ docs/reference/functions/embedding.md | 30 ++ docs/reference/functions/generateMessageId.md | 18 + docs/reference/functions/maxIterations.md | 40 ++ docs/reference/functions/messages.md | 94 ++++ .../functions/modelMessageToUIMessage.md | 39 ++ .../functions/modelMessagesToUIMessages.md | 33 ++ .../functions/normalizeToUIMessage.md | 38 ++ docs/reference/functions/parsePartialJSON.md | 28 ++ docs/reference/functions/summarize.md | 30 ++ .../functions/toServerSentEventsStream.md | 47 ++ docs/reference/functions/toStreamResponse.md | 51 +++ .../functions/uiMessageToModelMessages.md | 39 ++ docs/reference/functions/untilFinishReason.md | 40 ++ docs/reference/index.md | 33 +- docs/reference/interfaces/AIAdapter.md | 26 +- docs/reference/interfaces/AIAdapterConfig.md | 12 +- .../ApprovalRequestedStreamChunk.md | 64 ++- docs/reference/interfaces/BaseEvent.md | 72 ++++ docs/reference/interfaces/BaseStreamChunk.md | 59 --- .../interfaces/ChatCompletionChunk.md | 14 +- docs/reference/interfaces/ChunkRecording.md | 88 ++++ docs/reference/interfaces/ChunkStrategy.md | 58 +++ .../interfaces/ContentStreamChunk.md | 58 ++- docs/reference/interfaces/CustomEvent.md | 95 ++++ 
.../DefaultMessageMetadataByModality.md | 12 +- docs/reference/interfaces/DoneStreamChunk.md | 56 +-- docs/reference/interfaces/EmbeddingOptions.md | 8 +- docs/reference/interfaces/EmbeddingResult.md | 10 +- docs/reference/interfaces/ErrorStreamChunk.md | 63 ++- .../interfaces/InternalToolCallState.md | 70 +++ docs/reference/interfaces/JSONParser.md | 36 ++ docs/reference/interfaces/ProcessorResult.md | 50 +++ docs/reference/interfaces/ProcessorState.md | 70 +++ docs/reference/interfaces/RunErrorEvent.md | 106 +++++ docs/reference/interfaces/RunFinishedEvent.md | 122 ++++++ docs/reference/interfaces/RunStartedEvent.md | 94 ++++ docs/reference/interfaces/StateDeltaEvent.md | 102 +++++ .../interfaces/StateSnapshotEvent.md | 84 ++++ .../reference/interfaces/StepFinishedEvent.md | 108 +++++ docs/reference/interfaces/StepStartedEvent.md | 94 ++++ .../interfaces/StreamProcessorEvents.md | 228 ++++++++++ .../interfaces/StreamProcessorHandlers.md | 317 ++++++++++++++ .../interfaces/StreamProcessorOptions.md | 94 ++++ .../interfaces/SummarizationOptions.md | 12 +- .../interfaces/SummarizationResult.md | 10 +- .../interfaces/TextMessageContentEvent.md | 106 +++++ .../interfaces/TextMessageEndEvent.md | 84 ++++ .../interfaces/TextMessageStartEvent.md | 94 ++++ .../interfaces/ThinkingStreamChunk.md | 48 +-- .../reference/interfaces/ToolCallArgsEvent.md | 108 +++++ docs/reference/interfaces/ToolCallEndEvent.md | 118 +++++ .../interfaces/ToolCallStartEvent.md | 130 ++++++ .../interfaces/ToolCallStreamChunk.md | 88 ++-- .../ToolInputAvailableStreamChunk.md | 54 +-- .../interfaces/ToolResultStreamChunk.md | 48 +-- docs/reference/type-aliases/AnyClientTool.md | 16 + .../type-aliases/ChatStreamOptionsForModel.md | 2 +- .../type-aliases/ChatStreamOptionsUnion.md | 2 +- docs/reference/type-aliases/EventType.md | 43 ++ .../type-aliases/ExtractModalitiesForModel.md | 2 +- .../type-aliases/ExtractModelsFromAdapter.md | 2 +- docs/reference/type-aliases/InferToolName.md | 20 + 
docs/reference/type-aliases/StreamChunk.md | 28 +- .../reference/type-aliases/StreamChunkType.md | 12 +- docs/reference/type-aliases/ToolCallState.md | 19 + .../reference/type-aliases/ToolResultState.md | 14 + docs/reference/variables/defaultJSONParser.md | 14 + 81 files changed, 4870 insertions(+), 456 deletions(-) create mode 100644 docs/reference/classes/BaseAdapter.md create mode 100644 docs/reference/classes/BatchStrategy.md create mode 100644 docs/reference/classes/CompositeStrategy.md create mode 100644 docs/reference/classes/ImmediateStrategy.md create mode 100644 docs/reference/classes/PartialJSONParser.md create mode 100644 docs/reference/classes/PunctuationStrategy.md create mode 100644 docs/reference/classes/StreamProcessor.md create mode 100644 docs/reference/classes/WordBoundaryStrategy.md create mode 100644 docs/reference/functions/chat.md create mode 100644 docs/reference/functions/chatOptions.md create mode 100644 docs/reference/functions/combineStrategies.md create mode 100644 docs/reference/functions/convertMessagesToModelMessages.md create mode 100644 docs/reference/functions/createReplayStream.md create mode 100644 docs/reference/functions/embedding.md create mode 100644 docs/reference/functions/generateMessageId.md create mode 100644 docs/reference/functions/maxIterations.md create mode 100644 docs/reference/functions/messages.md create mode 100644 docs/reference/functions/modelMessageToUIMessage.md create mode 100644 docs/reference/functions/modelMessagesToUIMessages.md create mode 100644 docs/reference/functions/normalizeToUIMessage.md create mode 100644 docs/reference/functions/parsePartialJSON.md create mode 100644 docs/reference/functions/summarize.md create mode 100644 docs/reference/functions/toServerSentEventsStream.md create mode 100644 docs/reference/functions/toStreamResponse.md create mode 100644 docs/reference/functions/uiMessageToModelMessages.md create mode 100644 docs/reference/functions/untilFinishReason.md create mode 100644 
docs/reference/interfaces/BaseEvent.md delete mode 100644 docs/reference/interfaces/BaseStreamChunk.md create mode 100644 docs/reference/interfaces/ChunkRecording.md create mode 100644 docs/reference/interfaces/ChunkStrategy.md create mode 100644 docs/reference/interfaces/CustomEvent.md create mode 100644 docs/reference/interfaces/InternalToolCallState.md create mode 100644 docs/reference/interfaces/JSONParser.md create mode 100644 docs/reference/interfaces/ProcessorResult.md create mode 100644 docs/reference/interfaces/ProcessorState.md create mode 100644 docs/reference/interfaces/RunErrorEvent.md create mode 100644 docs/reference/interfaces/RunFinishedEvent.md create mode 100644 docs/reference/interfaces/RunStartedEvent.md create mode 100644 docs/reference/interfaces/StateDeltaEvent.md create mode 100644 docs/reference/interfaces/StateSnapshotEvent.md create mode 100644 docs/reference/interfaces/StepFinishedEvent.md create mode 100644 docs/reference/interfaces/StepStartedEvent.md create mode 100644 docs/reference/interfaces/StreamProcessorEvents.md create mode 100644 docs/reference/interfaces/StreamProcessorHandlers.md create mode 100644 docs/reference/interfaces/StreamProcessorOptions.md create mode 100644 docs/reference/interfaces/TextMessageContentEvent.md create mode 100644 docs/reference/interfaces/TextMessageEndEvent.md create mode 100644 docs/reference/interfaces/TextMessageStartEvent.md create mode 100644 docs/reference/interfaces/ToolCallArgsEvent.md create mode 100644 docs/reference/interfaces/ToolCallEndEvent.md create mode 100644 docs/reference/interfaces/ToolCallStartEvent.md create mode 100644 docs/reference/type-aliases/AnyClientTool.md create mode 100644 docs/reference/type-aliases/EventType.md create mode 100644 docs/reference/type-aliases/InferToolName.md create mode 100644 docs/reference/type-aliases/ToolCallState.md create mode 100644 docs/reference/type-aliases/ToolResultState.md create mode 100644 
docs/reference/variables/defaultJSONParser.md diff --git a/docs/reference/classes/BaseAdapter.md b/docs/reference/classes/BaseAdapter.md new file mode 100644 index 00000000..e31a4893 --- /dev/null +++ b/docs/reference/classes/BaseAdapter.md @@ -0,0 +1,310 @@ +--- +id: BaseAdapter +title: BaseAdapter +--- + +# Abstract Class: BaseAdapter\ + +Defined in: [base-adapter.ts:26](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L26) + +Base adapter class with support for endpoint-specific models and provider options. + +Generic parameters: +- TChatModels: Models that support chat/text completion +- TEmbeddingModels: Models that support embeddings +- TChatProviderOptions: Provider-specific options for chat endpoint +- TEmbeddingProviderOptions: Provider-specific options for embedding endpoint +- TModelProviderOptionsByName: Provider-specific options for model by name +- TModelInputModalitiesByName: Map from model name to its supported input modalities +- TMessageMetadataByModality: Map from modality type to adapter-specific metadata types + +## Type Parameters + +### TChatModels + +`TChatModels` *extends* `ReadonlyArray`\<`string`\> = `ReadonlyArray`\<`string`\> + +### TEmbeddingModels + +`TEmbeddingModels` *extends* `ReadonlyArray`\<`string`\> = `ReadonlyArray`\<`string`\> + +### TChatProviderOptions + +`TChatProviderOptions` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> + +### TEmbeddingProviderOptions + +`TEmbeddingProviderOptions` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> + +### TModelProviderOptionsByName + +`TModelProviderOptionsByName` *extends* `Record`\<`string`, `any`\> = `Record`\<`string`, `any`\> + +### TModelInputModalitiesByName + +`TModelInputModalitiesByName` *extends* `Record`\<`string`, `ReadonlyArray`\<[`Modality`](../type-aliases/Modality.md)\>\> = `Record`\<`string`, `ReadonlyArray`\<[`Modality`](../type-aliases/Modality.md)\>\> + +### TMessageMetadataByModality + 
+`TMessageMetadataByModality` *extends* `object` = [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md) + +## Implements + +- [`AIAdapter`](../interfaces/AIAdapter.md)\<`TChatModels`, `TEmbeddingModels`, `TChatProviderOptions`, `TEmbeddingProviderOptions`, `TModelProviderOptionsByName`, `TModelInputModalitiesByName`, `TMessageMetadataByModality`\> + +## Constructors + +### Constructor + +```ts +new BaseAdapter(config): BaseAdapter; +``` + +Defined in: [base-adapter.ts:66](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L66) + +#### Parameters + +##### config + +[`AIAdapterConfig`](../interfaces/AIAdapterConfig.md) = `{}` + +#### Returns + +`BaseAdapter`\<`TChatModels`, `TEmbeddingModels`, `TChatProviderOptions`, `TEmbeddingProviderOptions`, `TModelProviderOptionsByName`, `TModelInputModalitiesByName`, `TMessageMetadataByModality`\> + +## Properties + +### \_chatProviderOptions? + +```ts +optional _chatProviderOptions: TChatProviderOptions; +``` + +Defined in: [base-adapter.ts:57](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L57) + +#### Implementation of + +[`AIAdapter`](../interfaces/AIAdapter.md).[`_chatProviderOptions`](../interfaces/AIAdapter.md#_chatprovideroptions) + +*** + +### \_embeddingProviderOptions? + +```ts +optional _embeddingProviderOptions: TEmbeddingProviderOptions; +``` + +Defined in: [base-adapter.ts:58](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L58) + +#### Implementation of + +[`AIAdapter`](../interfaces/AIAdapter.md).[`_embeddingProviderOptions`](../interfaces/AIAdapter.md#_embeddingprovideroptions) + +*** + +### \_messageMetadataByModality? 
+ +```ts +optional _messageMetadataByModality: TMessageMetadataByModality; +``` + +Defined in: [base-adapter.ts:64](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L64) + +Type-only map from modality type to adapter-specific metadata types. +Used to provide type-safe autocomplete for metadata on content parts. + +#### Implementation of + +[`AIAdapter`](../interfaces/AIAdapter.md).[`_messageMetadataByModality`](../interfaces/AIAdapter.md#_messagemetadatabymodality) + +*** + +### \_modelInputModalitiesByName? + +```ts +optional _modelInputModalitiesByName: TModelInputModalitiesByName; +``` + +Defined in: [base-adapter.ts:62](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L62) + +Type-only map from model name to its supported input modalities. +Used by the core AI types to narrow ContentPart types based on the selected model. +Must be provided by all adapters. + +#### Implementation of + +[`AIAdapter`](../interfaces/AIAdapter.md).[`_modelInputModalitiesByName`](../interfaces/AIAdapter.md#_modelinputmodalitiesbyname) + +*** + +### \_modelProviderOptionsByName + +```ts +_modelProviderOptionsByName: TModelProviderOptionsByName; +``` + +Defined in: [base-adapter.ts:60](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L60) + +Type-only map from model name to its specific provider options. +Used by the core AI types to narrow providerOptions based on the selected model. +Must be provided by all adapters. + +#### Implementation of + +[`AIAdapter`](../interfaces/AIAdapter.md).[`_modelProviderOptionsByName`](../interfaces/AIAdapter.md#_modelprovideroptionsbyname) + +*** + +### \_providerOptions? 
+
+```ts
+optional _providerOptions: TChatProviderOptions;
+```
+
+Defined in: [base-adapter.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L56)
+
+#### Implementation of
+
+[`AIAdapter`](../interfaces/AIAdapter.md).[`_providerOptions`](../interfaces/AIAdapter.md#_provideroptions)
+
+***
+
+### config
+
+```ts
+protected config: AIAdapterConfig;
+```
+
+Defined in: [base-adapter.ts:53](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L53)
+
+***
+
+### embeddingModels?
+
+```ts
+optional embeddingModels: TEmbeddingModels;
+```
+
+Defined in: [base-adapter.ts:52](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L52)
+
+Models that support embeddings
+
+#### Implementation of
+
+[`AIAdapter`](../interfaces/AIAdapter.md).[`embeddingModels`](../interfaces/AIAdapter.md#embeddingmodels)
+
+***
+
+### models
+
+```ts
+abstract models: TChatModels;
+```
+
+Defined in: [base-adapter.ts:51](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L51)
+
+Models that support chat/text completion
+
+#### Implementation of
+
+[`AIAdapter`](../interfaces/AIAdapter.md).[`models`](../interfaces/AIAdapter.md#models)
+
+***
+
+### name
+
+```ts
+abstract name: string;
+```
+
+Defined in: [base-adapter.ts:50](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L50)
+
+#### Implementation of
+
+[`AIAdapter`](../interfaces/AIAdapter.md).[`name`](../interfaces/AIAdapter.md#name)
+
+## Methods
+
+### chatStream()
+
+```ts
+abstract chatStream(options): AsyncIterable<StreamChunk>;
+```
+
+Defined in: [base-adapter.ts:70](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L70)
+
+#### Parameters
+
+##### options
+
+[`ChatOptions`](../interfaces/ChatOptions.md)
+
+#### Returns
+
+`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\>
+
+#### Implementation of
+
+[`AIAdapter`](../interfaces/AIAdapter.md).[`chatStream`](../interfaces/AIAdapter.md#chatstream) + +*** + +### createEmbeddings() + +```ts +abstract createEmbeddings(options): Promise; +``` + +Defined in: [base-adapter.ts:75](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L75) + +#### Parameters + +##### options + +[`EmbeddingOptions`](../interfaces/EmbeddingOptions.md) + +#### Returns + +`Promise`\<[`EmbeddingResult`](../interfaces/EmbeddingResult.md)\> + +#### Implementation of + +[`AIAdapter`](../interfaces/AIAdapter.md).[`createEmbeddings`](../interfaces/AIAdapter.md#createembeddings) + +*** + +### generateId() + +```ts +protected generateId(): string; +``` + +Defined in: [base-adapter.ts:77](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L77) + +#### Returns + +`string` + +*** + +### summarize() + +```ts +abstract summarize(options): Promise; +``` + +Defined in: [base-adapter.ts:72](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/base-adapter.ts#L72) + +#### Parameters + +##### options + +[`SummarizationOptions`](../interfaces/SummarizationOptions.md) + +#### Returns + +`Promise`\<[`SummarizationResult`](../interfaces/SummarizationResult.md)\> + +#### Implementation of + +[`AIAdapter`](../interfaces/AIAdapter.md).[`summarize`](../interfaces/AIAdapter.md#summarize) diff --git a/docs/reference/classes/BatchStrategy.md b/docs/reference/classes/BatchStrategy.md new file mode 100644 index 00000000..a437b0aa --- /dev/null +++ b/docs/reference/classes/BatchStrategy.md @@ -0,0 +1,87 @@ +--- +id: BatchStrategy +title: BatchStrategy +--- + +# Class: BatchStrategy + +Defined in: [stream/strategies.ts:34](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L34) + +Batch Strategy - emit every N chunks +Useful for reducing UI update frequency + +## Implements + +- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) + +## Constructors + +### 
Constructor + +```ts +new BatchStrategy(batchSize): BatchStrategy; +``` + +Defined in: [stream/strategies.ts:37](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L37) + +#### Parameters + +##### batchSize + +`number` = `5` + +#### Returns + +`BatchStrategy` + +## Methods + +### reset() + +```ts +reset(): void; +``` + +Defined in: [stream/strategies.ts:48](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L48) + +Optional: Reset strategy state (called when streaming starts) + +#### Returns + +`void` + +#### Implementation of + +[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`reset`](../interfaces/ChunkStrategy.md#reset) + +*** + +### shouldEmit() + +```ts +shouldEmit(_chunk, _accumulated): boolean; +``` + +Defined in: [stream/strategies.ts:39](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L39) + +Called for each text chunk received + +#### Parameters + +##### \_chunk + +`string` + +##### \_accumulated + +`string` + +#### Returns + +`boolean` + +true if an update should be emitted now + +#### Implementation of + +[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/classes/CompositeStrategy.md b/docs/reference/classes/CompositeStrategy.md new file mode 100644 index 00000000..5c6f71e3 --- /dev/null +++ b/docs/reference/classes/CompositeStrategy.md @@ -0,0 +1,91 @@ +--- +id: CompositeStrategy +title: CompositeStrategy +--- + +# Class: CompositeStrategy + +Defined in: [stream/strategies.ts:68](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L68) + +Composite Strategy - combine multiple strategies (OR logic) +Emits if ANY strategy says to emit + +## Implements + +- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) + +## Constructors + +### Constructor + +```ts +new CompositeStrategy(strategies): CompositeStrategy; +``` + 
+Defined in: [stream/strategies.ts:69](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L69) + +#### Parameters + +##### strategies + +[`ChunkStrategy`](../interfaces/ChunkStrategy.md)[] + +#### Returns + +`CompositeStrategy` + +## Methods + +### reset() + +```ts +reset(): void; +``` + +Defined in: [stream/strategies.ts:75](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L75) + +Optional: Reset strategy state (called when streaming starts) + +#### Returns + +`void` + +#### Implementation of + +[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`reset`](../interfaces/ChunkStrategy.md#reset) + +*** + +### shouldEmit() + +```ts +shouldEmit(chunk, accumulated): boolean; +``` + +Defined in: [stream/strategies.ts:71](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L71) + +Called for each text chunk received + +#### Parameters + +##### chunk + +`string` + +The new chunk of text (delta) + +##### accumulated + +`string` + +All text accumulated so far + +#### Returns + +`boolean` + +true if an update should be emitted now + +#### Implementation of + +[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/classes/ImmediateStrategy.md b/docs/reference/classes/ImmediateStrategy.md new file mode 100644 index 00000000..fcf89ea0 --- /dev/null +++ b/docs/reference/classes/ImmediateStrategy.md @@ -0,0 +1,58 @@ +--- +id: ImmediateStrategy +title: ImmediateStrategy +--- + +# Class: ImmediateStrategy + +Defined in: [stream/strategies.ts:12](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L12) + +Immediate Strategy - emit on every chunk (default behavior) + +## Implements + +- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) + +## Constructors + +### Constructor + +```ts +new ImmediateStrategy(): ImmediateStrategy; +``` + +#### Returns + 
+`ImmediateStrategy` + +## Methods + +### shouldEmit() + +```ts +shouldEmit(_chunk, _accumulated): boolean; +``` + +Defined in: [stream/strategies.ts:13](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L13) + +Called for each text chunk received + +#### Parameters + +##### \_chunk + +`string` + +##### \_accumulated + +`string` + +#### Returns + +`boolean` + +true if an update should be emitted now + +#### Implementation of + +[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/classes/PartialJSONParser.md b/docs/reference/classes/PartialJSONParser.md new file mode 100644 index 00000000..59fdcfb0 --- /dev/null +++ b/docs/reference/classes/PartialJSONParser.md @@ -0,0 +1,57 @@ +--- +id: PartialJSONParser +title: PartialJSONParser +--- + +# Class: PartialJSONParser + +Defined in: [stream/json-parser.ts:25](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L25) + +Partial JSON Parser implementation using the partial-json library +This parser can handle incomplete JSON strings during streaming + +## Implements + +- [`JSONParser`](../interfaces/JSONParser.md) + +## Constructors + +### Constructor + +```ts +new PartialJSONParser(): PartialJSONParser; +``` + +#### Returns + +`PartialJSONParser` + +## Methods + +### parse() + +```ts +parse(jsonString): any; +``` + +Defined in: [stream/json-parser.ts:31](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L31) + +Parse a potentially incomplete JSON string + +#### Parameters + +##### jsonString + +`string` + +The JSON string to parse (may be incomplete) + +#### Returns + +`any` + +The parsed object, or undefined if parsing fails + +#### Implementation of + +[`JSONParser`](../interfaces/JSONParser.md).[`parse`](../interfaces/JSONParser.md#parse) diff --git a/docs/reference/classes/PunctuationStrategy.md 
b/docs/reference/classes/PunctuationStrategy.md new file mode 100644 index 00000000..f5e1ebe4 --- /dev/null +++ b/docs/reference/classes/PunctuationStrategy.md @@ -0,0 +1,61 @@ +--- +id: PunctuationStrategy +title: PunctuationStrategy +--- + +# Class: PunctuationStrategy + +Defined in: [stream/strategies.ts:22](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L22) + +Punctuation Strategy - emit when chunk contains punctuation +Useful for natural text flow in UI + +## Implements + +- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) + +## Constructors + +### Constructor + +```ts +new PunctuationStrategy(): PunctuationStrategy; +``` + +#### Returns + +`PunctuationStrategy` + +## Methods + +### shouldEmit() + +```ts +shouldEmit(chunk, _accumulated): boolean; +``` + +Defined in: [stream/strategies.ts:25](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L25) + +Called for each text chunk received + +#### Parameters + +##### chunk + +`string` + +The new chunk of text (delta) + +##### \_accumulated + +`string` + +#### Returns + +`boolean` + +true if an update should be emitted now + +#### Implementation of + +[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/classes/StreamProcessor.md b/docs/reference/classes/StreamProcessor.md new file mode 100644 index 00000000..d7d3e141 --- /dev/null +++ b/docs/reference/classes/StreamProcessor.md @@ -0,0 +1,407 @@ +--- +id: StreamProcessor +title: StreamProcessor +--- + +# Class: StreamProcessor + +Defined in: [stream/processor.ts:171](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L171) + +StreamProcessor - State machine for processing AI response streams + +Manages the full UIMessage[] conversation and emits events on changes. 
+ +State tracking: +- Full message array +- Current assistant message being streamed +- Text content accumulation +- Multiple parallel tool calls +- Tool call completion detection + +Tool call completion is detected when: +1. A new tool call starts at a different index +2. Text content arrives +3. Stream ends + +## Constructors + +### Constructor + +```ts +new StreamProcessor(options): StreamProcessor; +``` + +Defined in: [stream/processor.ts:200](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L200) + +#### Parameters + +##### options + +[`StreamProcessorOptions`](../interfaces/StreamProcessorOptions.md) = `{}` + +#### Returns + +`StreamProcessor` + +## Methods + +### addToolApprovalResponse() + +```ts +addToolApprovalResponse(approvalId, approved): void; +``` + +Defined in: [stream/processor.ts:314](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L314) + +Add an approval response (called by client after handling onApprovalRequest) + +#### Parameters + +##### approvalId + +`string` + +##### approved + +`boolean` + +#### Returns + +`void` + +*** + +### addToolResult() + +```ts +addToolResult( + toolCallId, + output, + error?): void; +``` + +Defined in: [stream/processor.ts:270](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L270) + +Add a tool result (called by client after handling onToolCall) + +#### Parameters + +##### toolCallId + +`string` + +##### output + +`any` + +##### error? 
+ +`string` + +#### Returns + +`void` + +*** + +### addUserMessage() + +```ts +addUserMessage(content): UIMessage; +``` + +Defined in: [stream/processor.ts:228](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L228) + +Add a user message to the conversation + +#### Parameters + +##### content + +`string` + +#### Returns + +[`UIMessage`](../interfaces/UIMessage.md) + +*** + +### areAllToolsComplete() + +```ts +areAllToolsComplete(): boolean; +``` + +Defined in: [stream/processor.ts:345](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L345) + +Check if all tool calls in the last assistant message are complete +Useful for auto-continue logic + +#### Returns + +`boolean` + +*** + +### clearMessages() + +```ts +clearMessages(): void; +``` + +Defined in: [stream/processor.ts:377](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L377) + +Clear all messages + +#### Returns + +`void` + +*** + +### finalizeStream() + +```ts +finalizeStream(): void; +``` + +Defined in: [stream/processor.ts:1033](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1033) + +Finalize the stream - complete all pending operations + +#### Returns + +`void` + +*** + +### getMessages() + +```ts +getMessages(): UIMessage[]; +``` + +Defined in: [stream/processor.ts:337](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L337) + +Get current messages + +#### Returns + +[`UIMessage`](../interfaces/UIMessage.md)[] + +*** + +### getRecording() + +```ts +getRecording(): ChunkRecording | null; +``` + +Defined in: [stream/processor.ts:1533](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1533) + +Get the current recording + +#### Returns + +[`ChunkRecording`](../interfaces/ChunkRecording.md) \| `null` + +*** + +### getState() + +```ts +getState(): ProcessorState; +``` + +Defined 
in: [stream/processor.ts:1092](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1092) + +Get current processor state (legacy) + +#### Returns + +[`ProcessorState`](../interfaces/ProcessorState.md) + +*** + +### process() + +```ts +process(stream): Promise; +``` + +Defined in: [stream/processor.ts:390](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L390) + +Process a stream and emit events through handlers + +#### Parameters + +##### stream + +`AsyncIterable`\<`any`\> + +#### Returns + +`Promise`\<[`ProcessorResult`](../interfaces/ProcessorResult.md)\> + +*** + +### processChunk() + +```ts +processChunk(chunk): void; +``` + +Defined in: [stream/processor.ts:418](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L418) + +Process a single chunk from the stream + +#### Parameters + +##### chunk + +[`StreamChunk`](../type-aliases/StreamChunk.md) + +#### Returns + +`void` + +*** + +### removeMessagesAfter() + +```ts +removeMessagesAfter(index): void; +``` + +Defined in: [stream/processor.ts:369](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L369) + +Remove messages after a certain index (for reload/retry) + +#### Parameters + +##### index + +`number` + +#### Returns + +`void` + +*** + +### reset() + +```ts +reset(): void; +``` + +Defined in: [stream/processor.ts:1556](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1556) + +Full reset (including messages) + +#### Returns + +`void` + +*** + +### setMessages() + +```ts +setMessages(messages): void; +``` + +Defined in: [stream/processor.ts:220](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L220) + +Set the messages array (e.g., from persisted state) + +#### Parameters + +##### messages + +[`UIMessage`](../interfaces/UIMessage.md)[] + +#### Returns + +`void` + +*** + +### 
startAssistantMessage() + +```ts +startAssistantMessage(): string; +``` + +Defined in: [stream/processor.ts:246](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L246) + +Start streaming a new assistant message +Returns the message ID + +#### Returns + +`string` + +*** + +### startRecording() + +```ts +startRecording(): void; +``` + +Defined in: [stream/processor.ts:1520](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1520) + +Start recording chunks + +#### Returns + +`void` + +*** + +### toModelMessages() + +```ts +toModelMessages(): ModelMessage< + | string + | ContentPart[] + | null>[]; +``` + +Defined in: [stream/processor.ts:326](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L326) + +Get the conversation as ModelMessages (for sending to LLM) + +#### Returns + +[`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] + +*** + +### replay() + +```ts +static replay(recording, options?): Promise; +``` + +Defined in: [stream/processor.ts:1565](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1565) + +Replay a recording through the processor + +#### Parameters + +##### recording + +[`ChunkRecording`](../interfaces/ChunkRecording.md) + +##### options? 
+ +[`StreamProcessorOptions`](../interfaces/StreamProcessorOptions.md) + +#### Returns + +`Promise`\<[`ProcessorResult`](../interfaces/ProcessorResult.md)\> diff --git a/docs/reference/classes/ToolCallManager.md b/docs/reference/classes/ToolCallManager.md index 7b0c7894..234aa302 100644 --- a/docs/reference/classes/ToolCallManager.md +++ b/docs/reference/classes/ToolCallManager.md @@ -5,7 +5,7 @@ title: ToolCallManager # Class: ToolCallManager -Defined in: [tools/tool-calls.ts:51](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L51) +Defined in: [tools/tool-calls.ts:53](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L53) Manages tool call accumulation and execution for the chat() method's automatic tool execution loop. @@ -47,7 +47,7 @@ if (manager.hasToolCalls()) { new ToolCallManager(tools): ToolCallManager; ``` -Defined in: [tools/tool-calls.ts:55](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L55) +Defined in: [tools/tool-calls.ts:57](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L57) #### Parameters @@ -61,15 +61,37 @@ readonly [`Tool`](../interfaces/Tool.md)\<`ZodType`\<`unknown`, `unknown`, `$Zod ## Methods -### addToolCallChunk() +### addToolCallArgsEvent() + +```ts +addToolCallArgsEvent(chunk): void; +``` + +Defined in: [tools/tool-calls.ts:79](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L79) + +Add a TOOL_CALL_ARGS event to accumulate arguments + +#### Parameters + +##### chunk + +[`ToolCallArgsEvent`](../interfaces/ToolCallArgsEvent.md) + +#### Returns + +`void` + +*** + +### ~~addToolCallChunk()~~ ```ts addToolCallChunk(chunk): void; ``` -Defined in: [tools/tool-calls.ts:63](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L63) +Defined in: 
[tools/tool-calls.ts:106](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L106) -Add a tool call chunk to the accumulator +Add a tool call chunk to the accumulator (legacy format) Handles streaming tool calls by accumulating arguments #### Parameters @@ -118,6 +140,32 @@ Handles streaming tool calls by accumulating arguments `void` +#### Deprecated + +Use addToolCallStartEvent and addToolCallArgsEvent instead + +*** + +### addToolCallStartEvent() + +```ts +addToolCallStartEvent(chunk): void; +``` + +Defined in: [tools/tool-calls.ts:64](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L64) + +Add a TOOL_CALL_START event to begin tracking a tool call + +#### Parameters + +##### chunk + +[`ToolCallStartEvent`](../interfaces/ToolCallStartEvent.md) + +#### Returns + +`void` + *** ### clear() @@ -126,7 +174,7 @@ Handles streaming tool calls by accumulating arguments clear(): void; ``` -Defined in: [tools/tool-calls.ts:208](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L208) +Defined in: [tools/tool-calls.ts:251](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L251) Clear the tool calls map for the next iteration @@ -136,29 +184,55 @@ Clear the tool calls map for the next iteration *** +### completeToolCall() + +```ts +completeToolCall(toolCallId, input?): void; +``` + +Defined in: [tools/tool-calls.ts:92](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L92) + +Complete a tool call with its final input + +#### Parameters + +##### toolCallId + +`string` + +##### input? 
+ +`any` + +#### Returns + +`void` + +*** + ### executeTools() ```ts -executeTools(doneChunk): AsyncGenerator[] | null>[], void>; ``` -Defined in: [tools/tool-calls.ts:121](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L121) +Defined in: [tools/tool-calls.ts:164](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L164) Execute all tool calls and return tool result messages -Also yields tool_result chunks for streaming +Also yields TOOL_CALL_END events for streaming #### Parameters ##### doneChunk -[`DoneStreamChunk`](../interfaces/DoneStreamChunk.md) +[`RunFinishedEvent`](../interfaces/RunFinishedEvent.md) #### Returns -`AsyncGenerator`\<[`ToolResultStreamChunk`](../interfaces/ToolResultStreamChunk.md), [`ModelMessage`](../interfaces/ModelMessage.md)\< +`AsyncGenerator`\<[`ToolCallEndEvent`](../interfaces/ToolCallEndEvent.md), [`ModelMessage`](../interfaces/ModelMessage.md)\< \| `string` \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] \| `null`\>[], `void`\> @@ -171,7 +245,7 @@ Also yields tool_result chunks for streaming getToolCalls(): ToolCall[]; ``` -Defined in: [tools/tool-calls.ts:111](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L111) +Defined in: [tools/tool-calls.ts:154](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L154) Get all complete tool calls (filtered for valid ID and name) @@ -187,7 +261,7 @@ Get all complete tool calls (filtered for valid ID and name) hasToolCalls(): boolean; ``` -Defined in: [tools/tool-calls.ts:104](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L104) +Defined in: [tools/tool-calls.ts:147](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-calls.ts#L147) Check if there are any complete tool calls to execute diff --git 
a/docs/reference/classes/WordBoundaryStrategy.md b/docs/reference/classes/WordBoundaryStrategy.md new file mode 100644 index 00000000..985ce4f6 --- /dev/null +++ b/docs/reference/classes/WordBoundaryStrategy.md @@ -0,0 +1,61 @@ +--- +id: WordBoundaryStrategy +title: WordBoundaryStrategy +--- + +# Class: WordBoundaryStrategy + +Defined in: [stream/strategies.ts:57](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L57) + +Word Boundary Strategy - emit at word boundaries +Prevents cutting words in half + +## Implements + +- [`ChunkStrategy`](../interfaces/ChunkStrategy.md) + +## Constructors + +### Constructor + +```ts +new WordBoundaryStrategy(): WordBoundaryStrategy; +``` + +#### Returns + +`WordBoundaryStrategy` + +## Methods + +### shouldEmit() + +```ts +shouldEmit(chunk, _accumulated): boolean; +``` + +Defined in: [stream/strategies.ts:58](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/strategies.ts#L58) + +Called for each text chunk received + +#### Parameters + +##### chunk + +`string` + +The new chunk of text (delta) + +##### \_accumulated + +`string` + +#### Returns + +`boolean` + +true if an update should be emitted now + +#### Implementation of + +[`ChunkStrategy`](../interfaces/ChunkStrategy.md).[`shouldEmit`](../interfaces/ChunkStrategy.md#shouldemit) diff --git a/docs/reference/functions/chat.md b/docs/reference/functions/chat.md new file mode 100644 index 00000000..5a37f3f8 --- /dev/null +++ b/docs/reference/functions/chat.md @@ -0,0 +1,55 @@ +--- +id: chat +title: chat +--- + +# Function: chat() + +```ts +function chat(options): AsyncIterable; +``` + +Defined in: [core/chat.ts:880](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/core/chat.ts#L880) + +Standalone chat streaming function with type inference from adapter +Returns an async iterable of StreamChunks for streaming responses +Includes automatic tool execution loop + +## Type Parameters + +### TAdapter + 
+
+`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `any`, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\>
+
+### TModel
+
+`TModel` *extends* `any`
+
+## Parameters
+
+### options
+
+[`ChatStreamOptionsForModel`](../type-aliases/ChatStreamOptionsForModel.md)\<`TAdapter`, `TModel`\>
+
+Chat options
+
+## Returns
+
+`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\>
+
+## Example
+
+```typescript
+const stream = chat({
+  adapter: openai(),
+  model: 'gpt-4o',
+  messages: [{ role: 'user', content: 'Hello!' }],
+  tools: [weatherTool], // Optional: auto-executed when called
+});
+
+for await (const chunk of stream) {
+  if (chunk.type === 'TEXT_MESSAGE_CONTENT') {
+    console.log(chunk.delta);
+  }
+}
+```
diff --git a/docs/reference/functions/chatOptions.md b/docs/reference/functions/chatOptions.md
new file mode 100644
index 00000000..d776680b
--- /dev/null
+++ b/docs/reference/functions/chatOptions.md
@@ -0,0 +1,32 @@
+---
+id: chatOptions
+title: chatOptions
+---
+
+# Function: chatOptions()
+
+```ts
+function chatOptions(options): Omit<ChatStreamOptionsUnion<TAdapter>, "model" | "providerOptions" | "messages" | "abortController"> & object;
+```
+
+Defined in: [utilities/chat-options.ts:3](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/chat-options.ts#L3)
+
+## Type Parameters
+
+### TAdapter
+
+`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `Record`\<`string`, readonly [`Modality`](../type-aliases/Modality.md)[]\>, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\>
+
+### TModel
+
+`TModel` *extends* `any`
+
+## Parameters
+
+### options
+
+`Omit`\<[`ChatStreamOptionsUnion`](../type-aliases/ChatStreamOptionsUnion.md)\<`TAdapter`\>, `"model"` \| `"providerOptions"` \| `"messages"` \| `"abortController"`\> & `object`
+
+## Returns
+
+`Omit`\<[`ChatStreamOptionsUnion`](../type-aliases/ChatStreamOptionsUnion.md)\<`TAdapter`\>, `"model"` \| `"providerOptions"` \| `"messages"` \| `"abortController"`\> & `object` diff --git a/docs/reference/functions/combineStrategies.md b/docs/reference/functions/combineStrategies.md new file mode 100644 index 00000000..454a0f33 --- /dev/null +++ b/docs/reference/functions/combineStrategies.md @@ -0,0 +1,44 @@ +--- +id: combineStrategies +title: combineStrategies +--- + +# Function: combineStrategies() + +```ts +function combineStrategies(strategies): AgentLoopStrategy; +``` + +Defined in: [utilities/agent-loop-strategies.ts:79](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/agent-loop-strategies.ts#L79) + +Creates a strategy that combines multiple strategies with AND logic +All strategies must return true to continue + +## Parameters + +### strategies + +[`AgentLoopStrategy`](../type-aliases/AgentLoopStrategy.md)[] + +Array of strategies to combine + +## Returns + +[`AgentLoopStrategy`](../type-aliases/AgentLoopStrategy.md) + +AgentLoopStrategy that continues only if all strategies return true + +## Example + +```typescript +const stream = chat({ + adapter: openai(), + model: "gpt-4o", + messages: [...], + tools: [weatherTool], + agentLoopStrategy: combineStrategies([ + maxIterations(10), + ({ messages }) => messages.length < 100, + ]), +}); +``` diff --git a/docs/reference/functions/convertMessagesToModelMessages.md b/docs/reference/functions/convertMessagesToModelMessages.md new file mode 100644 index 00000000..00224ace --- /dev/null +++ b/docs/reference/functions/convertMessagesToModelMessages.md @@ -0,0 +1,35 @@ +--- +id: convertMessagesToModelMessages +title: convertMessagesToModelMessages +--- + +# Function: convertMessagesToModelMessages() + +```ts +function convertMessagesToModelMessages(messages): ModelMessage< + | string + | ContentPart[] + | null>[]; +``` + +Defined in: 
[message-converters.ts:38](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L38) + +Convert UIMessages or ModelMessages to ModelMessages + +## Parameters + +### messages + +( + \| [`UIMessage`](../interfaces/UIMessage.md) + \| [`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>)[] + +## Returns + +[`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] diff --git a/docs/reference/functions/createReplayStream.md b/docs/reference/functions/createReplayStream.md new file mode 100644 index 00000000..6df2e4e5 --- /dev/null +++ b/docs/reference/functions/createReplayStream.md @@ -0,0 +1,24 @@ +--- +id: createReplayStream +title: createReplayStream +--- + +# Function: createReplayStream() + +```ts +function createReplayStream(recording): AsyncIterable; +``` + +Defined in: [stream/processor.ts:1577](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L1577) + +Create an async iterable from a recording + +## Parameters + +### recording + +[`ChunkRecording`](../interfaces/ChunkRecording.md) + +## Returns + +`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> diff --git a/docs/reference/functions/embedding.md b/docs/reference/functions/embedding.md new file mode 100644 index 00000000..058e3ff4 --- /dev/null +++ b/docs/reference/functions/embedding.md @@ -0,0 +1,30 @@ +--- +id: embedding +title: embedding +--- + +# Function: embedding() + +```ts +function embedding(options): Promise; +``` + +Defined in: [core/embedding.ts:16](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/core/embedding.ts#L16) + +Standalone embedding function with type inference from adapter + +## Type Parameters + +### TAdapter 
+ +`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `Record`\<`string`, readonly [`Modality`](../type-aliases/Modality.md)[]\>, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> + +## Parameters + +### options + +`Omit`\<[`EmbeddingOptions`](../interfaces/EmbeddingOptions.md), `"model"`\> & `object` + +## Returns + +`Promise`\<[`EmbeddingResult`](../interfaces/EmbeddingResult.md)\> diff --git a/docs/reference/functions/generateMessageId.md b/docs/reference/functions/generateMessageId.md new file mode 100644 index 00000000..44568ce3 --- /dev/null +++ b/docs/reference/functions/generateMessageId.md @@ -0,0 +1,18 @@ +--- +id: generateMessageId +title: generateMessageId +--- + +# Function: generateMessageId() + +```ts +function generateMessageId(): string; +``` + +Defined in: [message-converters.ts:283](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L283) + +Generate a unique message ID + +## Returns + +`string` diff --git a/docs/reference/functions/maxIterations.md b/docs/reference/functions/maxIterations.md new file mode 100644 index 00000000..1ab98cda --- /dev/null +++ b/docs/reference/functions/maxIterations.md @@ -0,0 +1,40 @@ +--- +id: maxIterations +title: maxIterations +--- + +# Function: maxIterations() + +```ts +function maxIterations(max): AgentLoopStrategy; +``` + +Defined in: [utilities/agent-loop-strategies.ts:20](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/agent-loop-strategies.ts#L20) + +Creates a strategy that continues for a maximum number of iterations + +## Parameters + +### max + +`number` + +Maximum number of iterations to allow + +## Returns + +[`AgentLoopStrategy`](../type-aliases/AgentLoopStrategy.md) + +AgentLoopStrategy that stops after max iterations + +## Example + +```typescript +const stream = chat({ + adapter: openai(), + model: "gpt-4o", + messages: [...], + tools: 
[weatherTool], + agentLoopStrategy: maxIterations(3), // Max 3 iterations +}); +``` diff --git a/docs/reference/functions/messages.md b/docs/reference/functions/messages.md new file mode 100644 index 00000000..b426dc57 --- /dev/null +++ b/docs/reference/functions/messages.md @@ -0,0 +1,94 @@ +--- +id: messages +title: messages +--- + +# Function: messages() + +```ts +function messages(_options, msgs): TAdapter extends AIAdapter ? TModel extends keyof ModelInputModalities ? ModelInputModalities[TModel] extends readonly Modality[] ? ConstrainedModelMessage[] : ModelMessage< + | string + | ContentPart[] + | null>[] : ModelMessage< + | string + | ContentPart[] + | null>[] : ModelMessage< + | string + | ContentPart[] + | null>[]; +``` + +Defined in: [utilities/messages.ts:33](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/messages.ts#L33) + +Type-safe helper to create a messages array constrained by a model's supported modalities. + +This function provides compile-time checking that your messages only contain +content types supported by the specified model. It's particularly useful when +combining typed messages with untyped data (like from request.json()). + +## Type Parameters + +### TAdapter + +`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `any`, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> + +### TModel + +`TModel` *extends* `any` + +## Parameters + +### \_options + +#### adapter + +`TAdapter` + +#### model + +`TModel` + +### msgs + +`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `ModelInputModalities`, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> ? `TModel` *extends* keyof `ModelInputModalities` ? `ModelInputModalities`\[`TModel`\<`TModel`\>\] *extends* readonly [`Modality`](../type-aliases/Modality.md)[] ? 
[`ConstrainedModelMessage`](../type-aliases/ConstrainedModelMessage.md)\<`any`\[`any`\]\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] + +## Returns + +`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `ModelInputModalities`, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> ? `TModel` *extends* keyof `ModelInputModalities` ? `ModelInputModalities`\[`TModel`\<`TModel`\>\] *extends* readonly [`Modality`](../type-aliases/Modality.md)[] ? 
[`ConstrainedModelMessage`](../type-aliases/ConstrainedModelMessage.md)\<`any`\[`any`\]\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] : [`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] + +## Example + +```typescript +import { messages, chat } from '@tanstack/ai' +import { openai } from '@tanstack/ai-openai' + +const adapter = openai() + +// This will error at compile time because gpt-4o only supports text+image +const msgs = messages({ adapter, model: 'gpt-4o' }, [ + { + role: 'user', + content: [ + { type: 'video', source: { type: 'url', value: '...' } } // Error! 
+ ] + } +]) +``` diff --git a/docs/reference/functions/modelMessageToUIMessage.md b/docs/reference/functions/modelMessageToUIMessage.md new file mode 100644 index 00000000..02e80f8d --- /dev/null +++ b/docs/reference/functions/modelMessageToUIMessage.md @@ -0,0 +1,39 @@ +--- +id: modelMessageToUIMessage +title: modelMessageToUIMessage +--- + +# Function: modelMessageToUIMessage() + +```ts +function modelMessageToUIMessage(modelMessage, id?): UIMessage; +``` + +Defined in: [message-converters.ts:158](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L158) + +Convert a ModelMessage to UIMessage + +This conversion creates a parts-based structure: +- content field → TextPart +- toolCalls array → ToolCallPart[] +- role="tool" messages should be converted separately and merged + +## Parameters + +### modelMessage + +[`ModelMessage`](../interfaces/ModelMessage.md) + +The ModelMessage to convert + +### id? + +`string` + +Optional ID for the UIMessage (generated if not provided) + +## Returns + +[`UIMessage`](../interfaces/UIMessage.md) + +A UIMessage with parts diff --git a/docs/reference/functions/modelMessagesToUIMessages.md b/docs/reference/functions/modelMessagesToUIMessages.md new file mode 100644 index 00000000..dd50df71 --- /dev/null +++ b/docs/reference/functions/modelMessagesToUIMessages.md @@ -0,0 +1,33 @@ +--- +id: modelMessagesToUIMessages +title: modelMessagesToUIMessages +--- + +# Function: modelMessagesToUIMessages() + +```ts +function modelMessagesToUIMessages(modelMessages): UIMessage[]; +``` + +Defined in: [message-converters.ts:211](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L211) + +Convert an array of ModelMessages to UIMessages + +This handles merging tool result messages with their corresponding assistant messages + +## Parameters + +### modelMessages + +[`ModelMessage`](../interfaces/ModelMessage.md)\< + \| `string` + \| 
[`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] + \| `null`\>[] + +Array of ModelMessages to convert + +## Returns + +[`UIMessage`](../interfaces/UIMessage.md)[] + +Array of UIMessages diff --git a/docs/reference/functions/normalizeToUIMessage.md b/docs/reference/functions/normalizeToUIMessage.md new file mode 100644 index 00000000..a42310e3 --- /dev/null +++ b/docs/reference/functions/normalizeToUIMessage.md @@ -0,0 +1,38 @@ +--- +id: normalizeToUIMessage +title: normalizeToUIMessage +--- + +# Function: normalizeToUIMessage() + +```ts +function normalizeToUIMessage(message, generateId): UIMessage; +``` + +Defined in: [message-converters.ts:260](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L260) + +Normalize a message (UIMessage or ModelMessage) to a UIMessage +Ensures the message has an ID and createdAt timestamp + +## Parameters + +### message + +Either a UIMessage or ModelMessage + +[`UIMessage`](../interfaces/UIMessage.md) | [`ModelMessage`](../interfaces/ModelMessage.md)\< +\| `string` +\| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[] +\| `null`\> + +### generateId + +() => `string` + +Function to generate a message ID if needed + +## Returns + +[`UIMessage`](../interfaces/UIMessage.md) + +A UIMessage with guaranteed id and createdAt diff --git a/docs/reference/functions/parsePartialJSON.md b/docs/reference/functions/parsePartialJSON.md new file mode 100644 index 00000000..c3fb3806 --- /dev/null +++ b/docs/reference/functions/parsePartialJSON.md @@ -0,0 +1,28 @@ +--- +id: parsePartialJSON +title: parsePartialJSON +--- + +# Function: parsePartialJSON() + +```ts +function parsePartialJSON(jsonString): any; +``` + +Defined in: [stream/json-parser.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L56) + +Parse partial JSON string (convenience function) + 
+## Parameters + +### jsonString + +`string` + +The JSON string to parse (may be incomplete) + +## Returns + +`any` + +The parsed object, or undefined if parsing fails diff --git a/docs/reference/functions/summarize.md b/docs/reference/functions/summarize.md new file mode 100644 index 00000000..2ae9aead --- /dev/null +++ b/docs/reference/functions/summarize.md @@ -0,0 +1,30 @@ +--- +id: summarize +title: summarize +--- + +# Function: summarize() + +```ts +function summarize(options): Promise; +``` + +Defined in: [core/summarize.ts:16](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/core/summarize.ts#L16) + +Standalone summarize function with type inference from adapter + +## Type Parameters + +### TAdapter + +`TAdapter` *extends* [`AIAdapter`](../interfaces/AIAdapter.md)\<`any`, `any`, `any`, `any`, `any`, `Record`\<`string`, readonly [`Modality`](../type-aliases/Modality.md)[]\>, [`DefaultMessageMetadataByModality`](../interfaces/DefaultMessageMetadataByModality.md)\> + +## Parameters + +### options + +`Omit`\<[`SummarizationOptions`](../interfaces/SummarizationOptions.md), `"model"`\> & `object` + +## Returns + +`Promise`\<[`SummarizationResult`](../interfaces/SummarizationResult.md)\> diff --git a/docs/reference/functions/toServerSentEventsStream.md b/docs/reference/functions/toServerSentEventsStream.md new file mode 100644 index 00000000..65582450 --- /dev/null +++ b/docs/reference/functions/toServerSentEventsStream.md @@ -0,0 +1,47 @@ +--- +id: toServerSentEventsStream +title: toServerSentEventsStream +--- + +# Function: toServerSentEventsStream() + +```ts +function toServerSentEventsStream(stream, abortController?): ReadableStream>; +``` + +Defined in: [utilities/stream-to-response.ts:22](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/stream-to-response.ts#L22) + +Convert a StreamChunk async iterable to a ReadableStream in Server-Sent Events format + +This creates a ReadableStream that emits chunks in SSE 
format: +- Each chunk is prefixed with "data: " +- Each chunk is followed by "\n\n" +- Stream ends with "data: [DONE]\n\n" + +## Parameters + +### stream + +`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> + +AsyncIterable of StreamChunks from chat() + +### abortController? + +`AbortController` + +Optional AbortController to abort when stream is cancelled + +## Returns + +`ReadableStream`\<`Uint8Array`\<`ArrayBufferLike`\>\> + +ReadableStream in Server-Sent Events format + +## Example + +```typescript +const stream = chat({ adapter: openai(), model: "gpt-4o", messages: [...] }); +const readableStream = toServerSentEventsStream(stream); +// Use with Response, or any API that accepts ReadableStream +``` diff --git a/docs/reference/functions/toStreamResponse.md b/docs/reference/functions/toStreamResponse.md new file mode 100644 index 00000000..3d920408 --- /dev/null +++ b/docs/reference/functions/toStreamResponse.md @@ -0,0 +1,51 @@ +--- +id: toStreamResponse +title: toStreamResponse +--- + +# Function: toStreamResponse() + +```ts +function toStreamResponse(stream, init?): Response; +``` + +Defined in: [utilities/stream-to-response.ts:103](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/stream-to-response.ts#L103) + +Create a streaming HTTP response from a StreamChunk async iterable +Includes proper headers for Server-Sent Events + +## Parameters + +### stream + +`AsyncIterable`\<[`StreamChunk`](../type-aliases/StreamChunk.md)\> + +AsyncIterable of StreamChunks from chat() + +### init? 
+
+`ResponseInit` & `object`
+
+Optional Response initialization options
+
+## Returns
+
+`Response`
+
+Response object with SSE headers and streaming body
+
+## Example
+
+```typescript
+export async function POST(request: Request) {
+  const { messages } = await request.json();
+  const abortController = new AbortController();
+  const stream = chat({
+    adapter: openai(),
+    model: "gpt-4o",
+    messages,
+    options: { abortSignal: abortController.signal }
+  });
+  return toStreamResponse(stream);
+}
+```
diff --git a/docs/reference/functions/uiMessageToModelMessages.md b/docs/reference/functions/uiMessageToModelMessages.md
new file mode 100644
index 00000000..9b295ad6
--- /dev/null
+++ b/docs/reference/functions/uiMessageToModelMessages.md
@@ -0,0 +1,39 @@
+---
+id: uiMessageToModelMessages
+title: uiMessageToModelMessages
+---
+
+# Function: uiMessageToModelMessages()
+
+```ts
+function uiMessageToModelMessages(uiMessage): ModelMessage<
+  | string
+  | ContentPart[]
+  | null>[];
+```
+
+Defined in: [message-converters.ts:65](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/message-converters.ts#L65)
+
+Convert a UIMessage to ModelMessage(s)
+
+This conversion handles the parts-based structure:
+- Text parts → content field
+- ToolCall parts → toolCalls array
+- ToolResult parts → separate role="tool" messages
+
+## Parameters
+
+### uiMessage
+
+[`UIMessage`](../interfaces/UIMessage.md)
+
+The UIMessage to convert
+
+## Returns
+
+[`ModelMessage`](../interfaces/ModelMessage.md)\<
+  \| `string`
+  \| [`ContentPart`](../type-aliases/ContentPart.md)\<`unknown`, `unknown`, `unknown`, `unknown`, `unknown`\>[]
+  \| `null`\>[]
+
+An array of ModelMessages (may be multiple if tool results are present)
diff --git a/docs/reference/functions/untilFinishReason.md b/docs/reference/functions/untilFinishReason.md
new file mode 100644
index 00000000..2522b3f0
--- /dev/null
+++ b/docs/reference/functions/untilFinishReason.md
@@ -0,0 +1,40 @@
+--- +id: untilFinishReason +title: untilFinishReason +--- + +# Function: untilFinishReason() + +```ts +function untilFinishReason(stopReasons): AgentLoopStrategy; +``` + +Defined in: [utilities/agent-loop-strategies.ts:41](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/utilities/agent-loop-strategies.ts#L41) + +Creates a strategy that continues until a specific finish reason is encountered + +## Parameters + +### stopReasons + +`string`[] + +Finish reasons that should stop the loop + +## Returns + +[`AgentLoopStrategy`](../type-aliases/AgentLoopStrategy.md) + +AgentLoopStrategy that stops on specific finish reasons + +## Example + +```typescript +const stream = chat({ + adapter: openai(), + model: "gpt-4o", + messages: [...], + tools: [weatherTool], + agentLoopStrategy: untilFinishReason(["stop", "length"]), +}); +``` diff --git a/docs/reference/index.md b/docs/reference/index.md index a3d506c0..b6b4e4d4 100644 --- a/docs/reference/index.md +++ b/docs/reference/index.md @@ -22,22 +22,23 @@ title: "@tanstack/ai" - [AgentLoopState](interfaces/AgentLoopState.md) - [AIAdapter](interfaces/AIAdapter.md) - [AIAdapterConfig](interfaces/AIAdapterConfig.md) -- [ApprovalRequestedStreamChunk](interfaces/ApprovalRequestedStreamChunk.md) +- [~~ApprovalRequestedStreamChunk~~](interfaces/ApprovalRequestedStreamChunk.md) - [AudioPart](interfaces/AudioPart.md) -- [BaseStreamChunk](interfaces/BaseStreamChunk.md) +- [BaseEvent](interfaces/BaseEvent.md) - [ChatCompletionChunk](interfaces/ChatCompletionChunk.md) - [ChatOptions](interfaces/ChatOptions.md) - [ChunkRecording](interfaces/ChunkRecording.md) - [ChunkStrategy](interfaces/ChunkStrategy.md) - [ClientTool](interfaces/ClientTool.md) - [ContentPartSource](interfaces/ContentPartSource.md) -- [ContentStreamChunk](interfaces/ContentStreamChunk.md) +- [~~ContentStreamChunk~~](interfaces/ContentStreamChunk.md) +- [CustomEvent](interfaces/CustomEvent.md) - 
[DefaultMessageMetadataByModality](interfaces/DefaultMessageMetadataByModality.md) - [DocumentPart](interfaces/DocumentPart.md) -- [DoneStreamChunk](interfaces/DoneStreamChunk.md) +- [~~DoneStreamChunk~~](interfaces/DoneStreamChunk.md) - [EmbeddingOptions](interfaces/EmbeddingOptions.md) - [EmbeddingResult](interfaces/EmbeddingResult.md) -- [ErrorStreamChunk](interfaces/ErrorStreamChunk.md) +- [~~ErrorStreamChunk~~](interfaces/ErrorStreamChunk.md) - [ImagePart](interfaces/ImagePart.md) - [InternalToolCallState](interfaces/InternalToolCallState.md) - [JSONParser](interfaces/JSONParser.md) @@ -46,26 +47,39 @@ title: "@tanstack/ai" - [ProcessorResult](interfaces/ProcessorResult.md) - [ProcessorState](interfaces/ProcessorState.md) - [ResponseFormat](interfaces/ResponseFormat.md) +- [RunErrorEvent](interfaces/RunErrorEvent.md) +- [RunFinishedEvent](interfaces/RunFinishedEvent.md) +- [RunStartedEvent](interfaces/RunStartedEvent.md) - [ServerTool](interfaces/ServerTool.md) +- [StateDeltaEvent](interfaces/StateDeltaEvent.md) +- [StateSnapshotEvent](interfaces/StateSnapshotEvent.md) +- [StepFinishedEvent](interfaces/StepFinishedEvent.md) +- [StepStartedEvent](interfaces/StepStartedEvent.md) - [StreamProcessorEvents](interfaces/StreamProcessorEvents.md) - [StreamProcessorHandlers](interfaces/StreamProcessorHandlers.md) - [StreamProcessorOptions](interfaces/StreamProcessorOptions.md) - [SummarizationOptions](interfaces/SummarizationOptions.md) - [SummarizationResult](interfaces/SummarizationResult.md) +- [TextMessageContentEvent](interfaces/TextMessageContentEvent.md) +- [TextMessageEndEvent](interfaces/TextMessageEndEvent.md) +- [TextMessageStartEvent](interfaces/TextMessageStartEvent.md) - [TextPart](interfaces/TextPart.md) - [ThinkingPart](interfaces/ThinkingPart.md) -- [ThinkingStreamChunk](interfaces/ThinkingStreamChunk.md) +- [~~ThinkingStreamChunk~~](interfaces/ThinkingStreamChunk.md) - [Tool](interfaces/Tool.md) - [ToolCall](interfaces/ToolCall.md) +- 
[ToolCallArgsEvent](interfaces/ToolCallArgsEvent.md) +- [ToolCallEndEvent](interfaces/ToolCallEndEvent.md) - [ToolCallPart](interfaces/ToolCallPart.md) -- [ToolCallStreamChunk](interfaces/ToolCallStreamChunk.md) +- [ToolCallStartEvent](interfaces/ToolCallStartEvent.md) +- [~~ToolCallStreamChunk~~](interfaces/ToolCallStreamChunk.md) - [ToolConfig](interfaces/ToolConfig.md) - [ToolDefinition](interfaces/ToolDefinition.md) - [ToolDefinitionConfig](interfaces/ToolDefinitionConfig.md) - [ToolDefinitionInstance](interfaces/ToolDefinitionInstance.md) -- [ToolInputAvailableStreamChunk](interfaces/ToolInputAvailableStreamChunk.md) +- [~~ToolInputAvailableStreamChunk~~](interfaces/ToolInputAvailableStreamChunk.md) - [ToolResultPart](interfaces/ToolResultPart.md) -- [ToolResultStreamChunk](interfaces/ToolResultStreamChunk.md) +- [~~ToolResultStreamChunk~~](interfaces/ToolResultStreamChunk.md) - [UIMessage](interfaces/UIMessage.md) - [VideoPart](interfaces/VideoPart.md) @@ -79,6 +93,7 @@ title: "@tanstack/ai" - [ConstrainedModelMessage](type-aliases/ConstrainedModelMessage.md) - [ContentPart](type-aliases/ContentPart.md) - [ContentPartForModalities](type-aliases/ContentPartForModalities.md) +- [EventType](type-aliases/EventType.md) - [ExtractModalitiesForModel](type-aliases/ExtractModalitiesForModel.md) - [ExtractModelsFromAdapter](type-aliases/ExtractModelsFromAdapter.md) - [InferSchemaType](type-aliases/InferSchemaType.md) diff --git a/docs/reference/interfaces/AIAdapter.md b/docs/reference/interfaces/AIAdapter.md index 2ad47311..9794f317 100644 --- a/docs/reference/interfaces/AIAdapter.md +++ b/docs/reference/interfaces/AIAdapter.md @@ -5,7 +5,7 @@ title: AIAdapter # Interface: AIAdapter\ -Defined in: [types.ts:756](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L756) +Defined in: [types.ts:1018](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1018) AI adapter interface with support for endpoint-specific models and 
provider options. @@ -56,7 +56,7 @@ Generic parameters: optional _chatProviderOptions: TChatProviderOptions; ``` -Defined in: [types.ts:783](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L783) +Defined in: [types.ts:1043](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1043) *** @@ -66,7 +66,7 @@ Defined in: [types.ts:783](https://github.com/TanStack/ai/blob/main/packages/typ optional _embeddingProviderOptions: TEmbeddingProviderOptions; ``` -Defined in: [types.ts:784](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L784) +Defined in: [types.ts:1044](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1044) *** @@ -76,7 +76,7 @@ Defined in: [types.ts:784](https://github.com/TanStack/ai/blob/main/packages/typ optional _messageMetadataByModality: TMessageMetadataByModality; ``` -Defined in: [types.ts:801](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L801) +Defined in: [types.ts:1061](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1061) Type-only map from modality type to adapter-specific metadata types. Used to provide type-safe autocomplete for metadata on content parts. @@ -89,7 +89,7 @@ Used to provide type-safe autocomplete for metadata on content parts. optional _modelInputModalitiesByName: TModelInputModalitiesByName; ``` -Defined in: [types.ts:796](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L796) +Defined in: [types.ts:1056](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1056) Type-only map from model name to its supported input modalities. Used by the core AI types to narrow ContentPart types based on the selected model. @@ -103,7 +103,7 @@ Must be provided by all adapters. 
_modelProviderOptionsByName: TModelProviderOptionsByName; ``` -Defined in: [types.ts:790](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L790) +Defined in: [types.ts:1050](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1050) Type-only map from model name to its specific provider options. Used by the core AI types to narrow providerOptions based on the selected model. @@ -117,7 +117,7 @@ Must be provided by all adapters. optional _providerOptions: TChatProviderOptions; ``` -Defined in: [types.ts:782](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L782) +Defined in: [types.ts:1042](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1042) *** @@ -127,7 +127,7 @@ Defined in: [types.ts:782](https://github.com/TanStack/ai/blob/main/packages/typ chatStream: (options) => AsyncIterable; ``` -Defined in: [types.ts:804](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L804) +Defined in: [types.ts:1064](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1064) #### Parameters @@ -147,7 +147,7 @@ Defined in: [types.ts:804](https://github.com/TanStack/ai/blob/main/packages/typ createEmbeddings: (options) => Promise; ``` -Defined in: [types.ts:812](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L812) +Defined in: [types.ts:1072](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1072) #### Parameters @@ -167,7 +167,7 @@ Defined in: [types.ts:812](https://github.com/TanStack/ai/blob/main/packages/typ optional embeddingModels: TEmbeddingModels; ``` -Defined in: [types.ts:779](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L779) +Defined in: [types.ts:1039](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1039) Models that support embeddings @@ -179,7 +179,7 @@ Models that support embeddings models: 
TChatModels; ``` -Defined in: [types.ts:776](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L776) +Defined in: [types.ts:1036](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1036) Models that support chat/text completion @@ -191,7 +191,7 @@ Models that support chat/text completion name: string; ``` -Defined in: [types.ts:774](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L774) +Defined in: [types.ts:1034](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1034) *** @@ -201,7 +201,7 @@ Defined in: [types.ts:774](https://github.com/TanStack/ai/blob/main/packages/typ summarize: (options) => Promise; ``` -Defined in: [types.ts:809](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L809) +Defined in: [types.ts:1069](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1069) #### Parameters diff --git a/docs/reference/interfaces/AIAdapterConfig.md b/docs/reference/interfaces/AIAdapterConfig.md index 76abb781..97e4eac9 100644 --- a/docs/reference/interfaces/AIAdapterConfig.md +++ b/docs/reference/interfaces/AIAdapterConfig.md @@ -5,7 +5,7 @@ title: AIAdapterConfig # Interface: AIAdapterConfig -Defined in: [types.ts:815](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L815) +Defined in: [types.ts:1075](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1075) ## Properties @@ -15,7 +15,7 @@ Defined in: [types.ts:815](https://github.com/TanStack/ai/blob/main/packages/typ optional apiKey: string; ``` -Defined in: [types.ts:816](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L816) +Defined in: [types.ts:1076](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1076) *** @@ -25,7 +25,7 @@ Defined in: [types.ts:816](https://github.com/TanStack/ai/blob/main/packages/typ optional baseUrl: string; ``` 
-Defined in: [types.ts:817](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L817) +Defined in: [types.ts:1077](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1077) *** @@ -35,7 +35,7 @@ Defined in: [types.ts:817](https://github.com/TanStack/ai/blob/main/packages/typ optional headers: Record; ``` -Defined in: [types.ts:820](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L820) +Defined in: [types.ts:1080](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1080) *** @@ -45,7 +45,7 @@ Defined in: [types.ts:820](https://github.com/TanStack/ai/blob/main/packages/typ optional maxRetries: number; ``` -Defined in: [types.ts:819](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L819) +Defined in: [types.ts:1079](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1079) *** @@ -55,4 +55,4 @@ Defined in: [types.ts:819](https://github.com/TanStack/ai/blob/main/packages/typ optional timeout: number; ``` -Defined in: [types.ts:818](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L818) +Defined in: [types.ts:1078](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1078) diff --git a/docs/reference/interfaces/ApprovalRequestedStreamChunk.md b/docs/reference/interfaces/ApprovalRequestedStreamChunk.md index ab57b5dd..be85be3c 100644 --- a/docs/reference/interfaces/ApprovalRequestedStreamChunk.md +++ b/docs/reference/interfaces/ApprovalRequestedStreamChunk.md @@ -3,31 +3,31 @@ id: ApprovalRequestedStreamChunk title: ApprovalRequestedStreamChunk --- -# Interface: ApprovalRequestedStreamChunk +# ~~Interface: ApprovalRequestedStreamChunk~~ -Defined in: [types.ts:645](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L645) +Defined in: [types.ts:917](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L917) -## 
Extends +## Deprecated -- [`BaseStreamChunk`](BaseStreamChunk.md) +Use CustomEvent with name='approval-requested' instead ## Properties -### approval +### ~~approval?~~ ```ts -approval: object; +optional approval: object; ``` -Defined in: [types.ts:650](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L650) +Defined in: [types.ts:925](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L925) -#### id +#### ~~id~~ ```ts id: string; ``` -#### needsApproval +#### ~~needsApproval~~ ```ts needsApproval: true; @@ -35,86 +35,70 @@ needsApproval: true; *** -### id +### ~~id~~ ```ts id: string; ``` -Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) +Defined in: [types.ts:919](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L919) *** -### input +### ~~input~~ ```ts -input: any; +input: Record; ``` -Defined in: [types.ts:649](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L649) +Defined in: [types.ts:924](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L924) *** -### model +### ~~model~~ ```ts model: string; ``` -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) +Defined in: [types.ts:920](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L920) *** -### timestamp +### ~~timestamp~~ ```ts timestamp: number; ``` -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) +Defined in: 
[types.ts:921](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L921) *** -### toolCallId +### ~~toolCallId~~ ```ts toolCallId: string; ``` -Defined in: [types.ts:647](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L647) +Defined in: [types.ts:922](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L922) *** -### toolName +### ~~toolName~~ ```ts toolName: string; ``` -Defined in: [types.ts:648](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L648) +Defined in: [types.ts:923](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L923) *** -### type +### ~~type~~ ```ts type: "approval-requested"; ``` -Defined in: [types.ts:646](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L646) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) +Defined in: [types.ts:918](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L918) diff --git a/docs/reference/interfaces/BaseEvent.md b/docs/reference/interfaces/BaseEvent.md new file mode 100644 index 00000000..587c093b --- /dev/null +++ b/docs/reference/interfaces/BaseEvent.md @@ -0,0 +1,72 @@ +--- +id: BaseEvent +title: BaseEvent +--- + +# Interface: BaseEvent + +Defined in: [types.ts:625](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L625) + +Base structure for all AG-UI events. +Extends AG-UI spec with TanStack AI additions (model field). 
+ +## Extended by + +- [`RunStartedEvent`](RunStartedEvent.md) +- [`RunFinishedEvent`](RunFinishedEvent.md) +- [`RunErrorEvent`](RunErrorEvent.md) +- [`TextMessageStartEvent`](TextMessageStartEvent.md) +- [`TextMessageContentEvent`](TextMessageContentEvent.md) +- [`TextMessageEndEvent`](TextMessageEndEvent.md) +- [`ToolCallStartEvent`](ToolCallStartEvent.md) +- [`ToolCallArgsEvent`](ToolCallArgsEvent.md) +- [`ToolCallEndEvent`](ToolCallEndEvent.md) +- [`StepStartedEvent`](StepStartedEvent.md) +- [`StepFinishedEvent`](StepFinishedEvent.md) +- [`StateSnapshotEvent`](StateSnapshotEvent.md) +- [`StateDeltaEvent`](StateDeltaEvent.md) +- [`CustomEvent`](CustomEvent.md) + +## Properties + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +*** + +### type + +```ts +type: EventType; +``` + +Defined in: [types.ts:626](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L626) diff --git a/docs/reference/interfaces/BaseStreamChunk.md b/docs/reference/interfaces/BaseStreamChunk.md deleted file mode 100644 index 81481d16..00000000 --- a/docs/reference/interfaces/BaseStreamChunk.md +++ /dev/null @@ -1,59 +0,0 @@ ---- -id: BaseStreamChunk -title: BaseStreamChunk ---- - -# Interface: BaseStreamChunk - -Defined in: [types.ts:594](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L594) - -## Extended by - -- 
[`ContentStreamChunk`](ContentStreamChunk.md) -- [`ToolCallStreamChunk`](ToolCallStreamChunk.md) -- [`ToolResultStreamChunk`](ToolResultStreamChunk.md) -- [`DoneStreamChunk`](DoneStreamChunk.md) -- [`ErrorStreamChunk`](ErrorStreamChunk.md) -- [`ApprovalRequestedStreamChunk`](ApprovalRequestedStreamChunk.md) -- [`ToolInputAvailableStreamChunk`](ToolInputAvailableStreamChunk.md) -- [`ThinkingStreamChunk`](ThinkingStreamChunk.md) - -## Properties - -### id - -```ts -id: string; -``` - -Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -*** - -### model - -```ts -model: string; -``` - -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -*** - -### timestamp - -```ts -timestamp: number; -``` - -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -*** - -### type - -```ts -type: StreamChunkType; -``` - -Defined in: [types.ts:595](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L595) diff --git a/docs/reference/interfaces/ChatCompletionChunk.md b/docs/reference/interfaces/ChatCompletionChunk.md index 78235a12..3e4ac8ec 100644 --- a/docs/reference/interfaces/ChatCompletionChunk.md +++ b/docs/reference/interfaces/ChatCompletionChunk.md @@ -5,7 +5,7 @@ title: ChatCompletionChunk # Interface: ChatCompletionChunk -Defined in: [types.ts:684](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L684) +Defined in: [types.ts:946](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L946) ## Properties @@ -15,7 +15,7 @@ Defined in: [types.ts:684](https://github.com/TanStack/ai/blob/main/packages/typ content: string; ``` -Defined in: [types.ts:687](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L687) +Defined in: 
[types.ts:949](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L949) *** @@ -25,7 +25,7 @@ Defined in: [types.ts:687](https://github.com/TanStack/ai/blob/main/packages/typ optional finishReason: "length" | "stop" | "content_filter" | null; ``` -Defined in: [types.ts:689](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L689) +Defined in: [types.ts:951](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L951) *** @@ -35,7 +35,7 @@ Defined in: [types.ts:689](https://github.com/TanStack/ai/blob/main/packages/typ id: string; ``` -Defined in: [types.ts:685](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L685) +Defined in: [types.ts:947](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L947) *** @@ -45,7 +45,7 @@ Defined in: [types.ts:685](https://github.com/TanStack/ai/blob/main/packages/typ model: string; ``` -Defined in: [types.ts:686](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L686) +Defined in: [types.ts:948](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L948) *** @@ -55,7 +55,7 @@ Defined in: [types.ts:686](https://github.com/TanStack/ai/blob/main/packages/typ optional role: "assistant"; ``` -Defined in: [types.ts:688](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L688) +Defined in: [types.ts:950](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L950) *** @@ -65,7 +65,7 @@ Defined in: [types.ts:688](https://github.com/TanStack/ai/blob/main/packages/typ optional usage: object; ``` -Defined in: [types.ts:690](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L690) +Defined in: [types.ts:952](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L952) #### completionTokens diff --git a/docs/reference/interfaces/ChunkRecording.md 
b/docs/reference/interfaces/ChunkRecording.md new file mode 100644 index 00000000..3833f041 --- /dev/null +++ b/docs/reference/interfaces/ChunkRecording.md @@ -0,0 +1,88 @@ +--- +id: ChunkRecording +title: ChunkRecording +--- + +# Interface: ChunkRecording + +Defined in: [stream/types.ts:83](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L83) + +Recording format for replay testing + +## Properties + +### chunks + +```ts +chunks: object[]; +``` + +Defined in: [stream/types.ts:88](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L88) + +#### chunk + +```ts +chunk: StreamChunk; +``` + +#### index + +```ts +index: number; +``` + +#### timestamp + +```ts +timestamp: number; +``` + +*** + +### model? + +```ts +optional model: string; +``` + +Defined in: [stream/types.ts:86](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L86) + +*** + +### provider? + +```ts +optional provider: string; +``` + +Defined in: [stream/types.ts:87](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L87) + +*** + +### result? 
+ +```ts +optional result: ProcessorResult; +``` + +Defined in: [stream/types.ts:93](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L93) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [stream/types.ts:85](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L85) + +*** + +### version + +```ts +version: "1.0"; +``` + +Defined in: [stream/types.ts:84](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L84) diff --git a/docs/reference/interfaces/ChunkStrategy.md b/docs/reference/interfaces/ChunkStrategy.md new file mode 100644 index 00000000..c9b06168 --- /dev/null +++ b/docs/reference/interfaces/ChunkStrategy.md @@ -0,0 +1,58 @@ +--- +id: ChunkStrategy +title: ChunkStrategy +--- + +# Interface: ChunkStrategy + +Defined in: [stream/types.ts:43](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L43) + +Strategy for determining when to emit text updates + +## Properties + +### reset()? 
+ +```ts +optional reset: () => void; +``` + +Defined in: [stream/types.ts:55](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L55) + +Optional: Reset strategy state (called when streaming starts) + +#### Returns + +`void` + +*** + +### shouldEmit() + +```ts +shouldEmit: (chunk, accumulated) => boolean; +``` + +Defined in: [stream/types.ts:50](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L50) + +Called for each text chunk received + +#### Parameters + +##### chunk + +`string` + +The new chunk of text (delta) + +##### accumulated + +`string` + +All text accumulated so far + +#### Returns + +`boolean` + +true if an update should be emitted now diff --git a/docs/reference/interfaces/ContentStreamChunk.md b/docs/reference/interfaces/ContentStreamChunk.md index e03782a4..626e564b 100644 --- a/docs/reference/interfaces/ContentStreamChunk.md +++ b/docs/reference/interfaces/ContentStreamChunk.md @@ -3,96 +3,86 @@ id: ContentStreamChunk title: ContentStreamChunk --- -# Interface: ContentStreamChunk +# ~~Interface: ContentStreamChunk~~ -Defined in: [types.ts:601](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L601) +Defined in: [types.ts:833](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L833) -## Extends +## Deprecated -- [`BaseStreamChunk`](BaseStreamChunk.md) +Use TextMessageContentEvent instead ## Properties -### content +### ~~content~~ ```ts content: string; ``` -Defined in: [types.ts:604](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L604) +Defined in: [types.ts:841](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L841) + +Full accumulated content so far *** -### delta +### ~~delta~~ ```ts delta: string; ``` -Defined in: [types.ts:603](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L603) +Defined in: 
[types.ts:839](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L839) + +Incremental text delta *** -### id +### ~~id~~ ```ts id: string; ``` -Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) +Defined in: [types.ts:835](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L835) *** -### model +### ~~model~~ ```ts model: string; ``` -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) +Defined in: [types.ts:836](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L836) *** -### role? +### ~~role?~~ ```ts optional role: "assistant"; ``` -Defined in: [types.ts:605](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L605) +Defined in: [types.ts:843](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L843) + +Role of the message *** -### timestamp +### ~~timestamp~~ ```ts timestamp: number; ``` -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) +Defined in: [types.ts:837](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L837) *** -### type +### ~~type~~ ```ts type: "content"; ``` -Defined in: [types.ts:602](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L602) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) +Defined in: [types.ts:834](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L834) diff --git 
a/docs/reference/interfaces/CustomEvent.md b/docs/reference/interfaces/CustomEvent.md new file mode 100644 index 00000000..603163ae --- /dev/null +++ b/docs/reference/interfaces/CustomEvent.md @@ -0,0 +1,95 @@ +--- +id: CustomEvent +title: CustomEvent +--- + +# Interface: CustomEvent + +Defined in: [types.ts:783](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L783) + +Custom event for extensibility. +Used for features not covered by standard AG-UI events (e.g., approval flows). + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### name + +```ts +name: string; +``` + +Defined in: [types.ts:785](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L785) + +*** + +### rawEvent? 
+ +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "CUSTOM"; +``` + +Defined in: [types.ts:784](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L784) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) + +*** + +### value + +```ts +value: unknown; +``` + +Defined in: [types.ts:786](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L786) diff --git a/docs/reference/interfaces/DefaultMessageMetadataByModality.md b/docs/reference/interfaces/DefaultMessageMetadataByModality.md index d13b5888..688d91ae 100644 --- a/docs/reference/interfaces/DefaultMessageMetadataByModality.md +++ b/docs/reference/interfaces/DefaultMessageMetadataByModality.md @@ -5,7 +5,7 @@ title: DefaultMessageMetadataByModality # Interface: DefaultMessageMetadataByModality -Defined in: [types.ts:736](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L736) +Defined in: [types.ts:998](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L998) Default metadata type for adapters that don't define custom metadata. Uses unknown for all modalities. @@ -18,7 +18,7 @@ Uses unknown for all modalities. 
audio: unknown; ``` -Defined in: [types.ts:739](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L739) +Defined in: [types.ts:1001](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1001) *** @@ -28,7 +28,7 @@ Defined in: [types.ts:739](https://github.com/TanStack/ai/blob/main/packages/typ document: unknown; ``` -Defined in: [types.ts:741](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L741) +Defined in: [types.ts:1003](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1003) *** @@ -38,7 +38,7 @@ Defined in: [types.ts:741](https://github.com/TanStack/ai/blob/main/packages/typ image: unknown; ``` -Defined in: [types.ts:738](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L738) +Defined in: [types.ts:1000](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1000) *** @@ -48,7 +48,7 @@ Defined in: [types.ts:738](https://github.com/TanStack/ai/blob/main/packages/typ text: unknown; ``` -Defined in: [types.ts:737](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L737) +Defined in: [types.ts:999](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L999) *** @@ -58,4 +58,4 @@ Defined in: [types.ts:737](https://github.com/TanStack/ai/blob/main/packages/typ video: unknown; ``` -Defined in: [types.ts:740](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L740) +Defined in: [types.ts:1002](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1002) diff --git a/docs/reference/interfaces/DoneStreamChunk.md b/docs/reference/interfaces/DoneStreamChunk.md index a62e11e8..05a287ce 100644 --- a/docs/reference/interfaces/DoneStreamChunk.md +++ b/docs/reference/interfaces/DoneStreamChunk.md @@ -3,103 +3,87 @@ id: DoneStreamChunk title: DoneStreamChunk --- -# Interface: DoneStreamChunk +# ~~Interface: 
DoneStreamChunk~~ -Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) +Defined in: [types.ts:849](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L849) -## Extends +## Deprecated -- [`BaseStreamChunk`](BaseStreamChunk.md) +Use RunFinishedEvent instead ## Properties -### finishReason +### ~~finishReason?~~ ```ts -finishReason: "length" | "stop" | "content_filter" | "tool_calls" | null; +optional finishReason: "length" | "stop" | "content_filter" | "tool_calls" | null; ``` -Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) +Defined in: [types.ts:854](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L854) *** -### id +### ~~id~~ ```ts id: string; ``` -Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) +Defined in: [types.ts:851](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L851) *** -### model +### ~~model~~ ```ts model: string; ``` -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) +Defined in: [types.ts:852](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L852) *** -### timestamp +### ~~timestamp~~ ```ts timestamp: number; ``` -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) +Defined in: [types.ts:853](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L853) *** -### type +### ~~type~~ ```ts type: "done"; ``` -Defined in: 
[types.ts:628](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L628) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) +Defined in: [types.ts:850](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L850) *** -### usage? +### ~~usage?~~ ```ts optional usage: object; ``` -Defined in: [types.ts:630](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L630) +Defined in: [types.ts:855](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L855) -#### completionTokens +#### ~~completionTokens~~ ```ts completionTokens: number; ``` -#### promptTokens +#### ~~promptTokens~~ ```ts promptTokens: number; ``` -#### totalTokens +#### ~~totalTokens~~ ```ts totalTokens: number; diff --git a/docs/reference/interfaces/EmbeddingOptions.md b/docs/reference/interfaces/EmbeddingOptions.md index 471035f6..759a0c58 100644 --- a/docs/reference/interfaces/EmbeddingOptions.md +++ b/docs/reference/interfaces/EmbeddingOptions.md @@ -5,7 +5,7 @@ title: EmbeddingOptions # Interface: EmbeddingOptions -Defined in: [types.ts:716](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L716) +Defined in: [types.ts:978](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L978) ## Properties @@ -15,7 +15,7 @@ Defined in: [types.ts:716](https://github.com/TanStack/ai/blob/main/packages/typ optional dimensions: number; ``` -Defined in: [types.ts:719](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L719) +Defined in: [types.ts:981](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L981) *** @@ -25,7 +25,7 @@ Defined in: [types.ts:719](https://github.com/TanStack/ai/blob/main/packages/typ input: string | string[]; ``` -Defined in: [types.ts:718](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L718) +Defined in: 
[types.ts:980](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L980) *** @@ -35,4 +35,4 @@ Defined in: [types.ts:718](https://github.com/TanStack/ai/blob/main/packages/typ model: string; ``` -Defined in: [types.ts:717](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L717) +Defined in: [types.ts:979](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L979) diff --git a/docs/reference/interfaces/EmbeddingResult.md b/docs/reference/interfaces/EmbeddingResult.md index 39e385e4..2884de0f 100644 --- a/docs/reference/interfaces/EmbeddingResult.md +++ b/docs/reference/interfaces/EmbeddingResult.md @@ -5,7 +5,7 @@ title: EmbeddingResult # Interface: EmbeddingResult -Defined in: [types.ts:722](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L722) +Defined in: [types.ts:984](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L984) ## Properties @@ -15,7 +15,7 @@ Defined in: [types.ts:722](https://github.com/TanStack/ai/blob/main/packages/typ embeddings: number[][]; ``` -Defined in: [types.ts:725](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L725) +Defined in: [types.ts:987](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L987) *** @@ -25,7 +25,7 @@ Defined in: [types.ts:725](https://github.com/TanStack/ai/blob/main/packages/typ id: string; ``` -Defined in: [types.ts:723](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L723) +Defined in: [types.ts:985](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L985) *** @@ -35,7 +35,7 @@ Defined in: [types.ts:723](https://github.com/TanStack/ai/blob/main/packages/typ model: string; ``` -Defined in: [types.ts:724](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L724) +Defined in: 
[types.ts:986](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L986) *** @@ -45,7 +45,7 @@ Defined in: [types.ts:724](https://github.com/TanStack/ai/blob/main/packages/typ usage: object; ``` -Defined in: [types.ts:726](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L726) +Defined in: [types.ts:988](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L988) #### promptTokens diff --git a/docs/reference/interfaces/ErrorStreamChunk.md b/docs/reference/interfaces/ErrorStreamChunk.md index 57cb491d..5ade040b 100644 --- a/docs/reference/interfaces/ErrorStreamChunk.md +++ b/docs/reference/interfaces/ErrorStreamChunk.md @@ -3,88 +3,75 @@ id: ErrorStreamChunk title: ErrorStreamChunk --- -# Interface: ErrorStreamChunk +# ~~Interface: ErrorStreamChunk~~ -Defined in: [types.ts:637](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L637) +Defined in: [types.ts:865](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L865) -## Extends +## Deprecated -- [`BaseStreamChunk`](BaseStreamChunk.md) +Use RunErrorEvent instead ## Properties -### error +### ~~code?~~ ```ts -error: object; +optional code: string; ``` -Defined in: [types.ts:639](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L639) - -#### code? 
+Defined in: [types.ts:871](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L871) -```ts -optional code: string; -``` +*** -#### message +### ~~error~~ ```ts -message: string; +error: + | string + | { + code?: string; + message: string; +}; ``` +Defined in: [types.ts:870](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L870) + *** -### id +### ~~id~~ ```ts id: string; ``` -Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) +Defined in: [types.ts:867](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L867) *** -### model +### ~~model~~ ```ts model: string; ``` -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) +Defined in: [types.ts:868](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L868) *** -### timestamp +### ~~timestamp~~ ```ts timestamp: number; ``` -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) +Defined in: [types.ts:869](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L869) *** -### type +### ~~type~~ ```ts type: "error"; ``` -Defined in: [types.ts:638](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L638) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) +Defined in: [types.ts:866](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L866) diff --git a/docs/reference/interfaces/InternalToolCallState.md 
b/docs/reference/interfaces/InternalToolCallState.md new file mode 100644 index 00000000..e8607bce --- /dev/null +++ b/docs/reference/interfaces/InternalToolCallState.md @@ -0,0 +1,70 @@ +--- +id: InternalToolCallState +title: InternalToolCallState +--- + +# Interface: InternalToolCallState + +Defined in: [stream/types.ts:31](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L31) + +Internal state for a tool call being tracked + +## Properties + +### arguments + +```ts +arguments: string; +``` + +Defined in: [stream/types.ts:34](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L34) + +*** + +### id + +```ts +id: string; +``` + +Defined in: [stream/types.ts:32](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L32) + +*** + +### index + +```ts +index: number; +``` + +Defined in: [stream/types.ts:37](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L37) + +*** + +### name + +```ts +name: string; +``` + +Defined in: [stream/types.ts:33](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L33) + +*** + +### parsedArguments? 
+ +```ts +optional parsedArguments: any; +``` + +Defined in: [stream/types.ts:36](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L36) + +*** + +### state + +```ts +state: ToolCallState; +``` + +Defined in: [stream/types.ts:35](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L35) diff --git a/docs/reference/interfaces/JSONParser.md b/docs/reference/interfaces/JSONParser.md new file mode 100644 index 00000000..228cd1a2 --- /dev/null +++ b/docs/reference/interfaces/JSONParser.md @@ -0,0 +1,36 @@ +--- +id: JSONParser +title: JSONParser +--- + +# Interface: JSONParser + +Defined in: [stream/json-parser.ts:12](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L12) + +JSON Parser interface - allows for custom parser implementations + +## Properties + +### parse() + +```ts +parse: (jsonString) => any; +``` + +Defined in: [stream/json-parser.ts:18](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L18) + +Parse a JSON string (may be incomplete/partial) + +#### Parameters + +##### jsonString + +`string` + +The JSON string to parse + +#### Returns + +`any` + +The parsed object, or undefined if parsing fails diff --git a/docs/reference/interfaces/ProcessorResult.md b/docs/reference/interfaces/ProcessorResult.md new file mode 100644 index 00000000..9fb65250 --- /dev/null +++ b/docs/reference/interfaces/ProcessorResult.md @@ -0,0 +1,50 @@ +--- +id: ProcessorResult +title: ProcessorResult +--- + +# Interface: ProcessorResult + +Defined in: [stream/types.ts:61](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L61) + +Result from processing a stream + +## Properties + +### content + +```ts +content: string; +``` + +Defined in: [stream/types.ts:62](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L62) + +*** + +### finishReason? 
+ +```ts +optional finishReason: string | null; +``` + +Defined in: [stream/types.ts:65](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L65) + +*** + +### thinking? + +```ts +optional thinking: string; +``` + +Defined in: [stream/types.ts:63](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L63) + +*** + +### toolCalls? + +```ts +optional toolCalls: ToolCall[]; +``` + +Defined in: [stream/types.ts:64](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L64) diff --git a/docs/reference/interfaces/ProcessorState.md b/docs/reference/interfaces/ProcessorState.md new file mode 100644 index 00000000..7ef5bba6 --- /dev/null +++ b/docs/reference/interfaces/ProcessorState.md @@ -0,0 +1,70 @@ +--- +id: ProcessorState +title: ProcessorState +--- + +# Interface: ProcessorState + +Defined in: [stream/types.ts:71](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L71) + +Current state of the processor + +## Properties + +### content + +```ts +content: string; +``` + +Defined in: [stream/types.ts:72](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L72) + +*** + +### done + +```ts +done: boolean; +``` + +Defined in: [stream/types.ts:77](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L77) + +*** + +### finishReason + +```ts +finishReason: string | null; +``` + +Defined in: [stream/types.ts:76](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L76) + +*** + +### thinking + +```ts +thinking: string; +``` + +Defined in: [stream/types.ts:73](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L73) + +*** + +### toolCallOrder + +```ts +toolCallOrder: string[]; +``` + +Defined in: [stream/types.ts:75](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L75) + +*** + +### toolCalls + 
+```ts +toolCalls: Map; +``` + +Defined in: [stream/types.ts:74](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L74) diff --git a/docs/reference/interfaces/RunErrorEvent.md b/docs/reference/interfaces/RunErrorEvent.md new file mode 100644 index 00000000..95f8d0ef --- /dev/null +++ b/docs/reference/interfaces/RunErrorEvent.md @@ -0,0 +1,106 @@ +--- +id: RunErrorEvent +title: RunErrorEvent +--- + +# Interface: RunErrorEvent + +Defined in: [types.ts:660](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L660) + +Emitted when an error occurs during a run. + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### error + +```ts +error: object; +``` + +Defined in: [types.ts:663](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L663) + +#### code? + +```ts +optional code: string; +``` + +#### message + +```ts +message: string; +``` + +*** + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### runId? 
+ +```ts +optional runId: string; +``` + +Defined in: [types.ts:662](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L662) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "RUN_ERROR"; +``` + +Defined in: [types.ts:661](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L661) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/RunFinishedEvent.md b/docs/reference/interfaces/RunFinishedEvent.md new file mode 100644 index 00000000..3d2876e5 --- /dev/null +++ b/docs/reference/interfaces/RunFinishedEvent.md @@ -0,0 +1,122 @@ +--- +id: RunFinishedEvent +title: RunFinishedEvent +--- + +# Interface: RunFinishedEvent + +Defined in: [types.ts:646](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L646) + +Emitted when a run completes successfully. + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### finishReason + +```ts +finishReason: "length" | "stop" | "content_filter" | "tool_calls" | null; +``` + +Defined in: [types.ts:649](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L649) + +*** + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? 
+ +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### runId + +```ts +runId: string; +``` + +Defined in: [types.ts:648](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L648) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "RUN_FINISHED"; +``` + +Defined in: [types.ts:647](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L647) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) + +*** + +### usage? + +```ts +optional usage: object; +``` + +Defined in: [types.ts:650](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L650) + +#### completionTokens + +```ts +completionTokens: number; +``` + +#### promptTokens + +```ts +promptTokens: number; +``` + +#### totalTokens + +```ts +totalTokens: number; +``` diff --git a/docs/reference/interfaces/RunStartedEvent.md b/docs/reference/interfaces/RunStartedEvent.md new file mode 100644 index 00000000..3e976bb2 --- /dev/null +++ b/docs/reference/interfaces/RunStartedEvent.md @@ -0,0 +1,94 @@ +--- +id: RunStartedEvent +title: RunStartedEvent +--- + +# Interface: RunStartedEvent + +Defined in: [types.ts:637](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L637) + +Emitted when a run starts. + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### model? 
+ +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### runId + +```ts +runId: string; +``` + +Defined in: [types.ts:639](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L639) + +*** + +### threadId? + +```ts +optional threadId: string; +``` + +Defined in: [types.ts:640](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L640) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "RUN_STARTED"; +``` + +Defined in: [types.ts:638](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L638) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/StateDeltaEvent.md b/docs/reference/interfaces/StateDeltaEvent.md new file mode 100644 index 00000000..3c429088 --- /dev/null +++ b/docs/reference/interfaces/StateDeltaEvent.md @@ -0,0 +1,102 @@ +--- +id: StateDeltaEvent +title: StateDeltaEvent +--- + +# Interface: StateDeltaEvent + +Defined in: [types.ts:770](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L770) + +Emitted for incremental state updates. 
+ +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### delta + +```ts +delta: object[]; +``` + +Defined in: [types.ts:772](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L772) + +#### op + +```ts +op: "add" | "remove" | "replace"; +``` + +#### path + +```ts +path: string; +``` + +#### value? + +```ts +optional value: unknown; +``` + +*** + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "STATE_DELTA"; +``` + +Defined in: [types.ts:771](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L771) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/StateSnapshotEvent.md b/docs/reference/interfaces/StateSnapshotEvent.md new file mode 100644 index 00000000..2dd69737 --- /dev/null +++ b/docs/reference/interfaces/StateSnapshotEvent.md @@ -0,0 +1,84 @@ +--- +id: StateSnapshotEvent +title: StateSnapshotEvent +--- + +# Interface: StateSnapshotEvent + +Defined in: [types.ts:762](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L762) + 
+Emitted for full state synchronization.
+
+## Extends
+
+- [`BaseEvent`](BaseEvent.md)
+
+## Properties
+
+### model?
+
+```ts
+optional model: string;
+```
+
+Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629)
+
+TanStack AI addition: Model identifier for multi-model support
+
+#### Inherited from
+
+[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model)
+
+***
+
+### rawEvent?
+
+```ts
+optional rawEvent: unknown;
+```
+
+Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631)
+
+Original provider event for debugging/advanced use cases
+
+#### Inherited from
+
+[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent)
+
+***
+
+### state
+
+```ts
+state: Record<string, unknown>;
+```
+
+Defined in: [types.ts:764](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L764)
+
+***
+
+### timestamp
+
+```ts
+timestamp: number;
+```
+
+Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627)
+
+#### Inherited from
+
+[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp)
+
+***
+
+### type
+
+```ts
+type: "STATE_SNAPSHOT";
+```
+
+Defined in: [types.ts:763](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L763)
+
+#### Overrides
+
+[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type)
diff --git a/docs/reference/interfaces/StepFinishedEvent.md b/docs/reference/interfaces/StepFinishedEvent.md
new file mode 100644
index 00000000..3a1d39ae
--- /dev/null
+++ b/docs/reference/interfaces/StepFinishedEvent.md
@@ -0,0 +1,108 @@
+---
+id: StepFinishedEvent
+title: StepFinishedEvent
+---
+
+# Interface: StepFinishedEvent
+
+Defined in: [types.ts:750](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L750)
+
+Emitted when a reasoning/thinking step completes or streams content.
+ +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### content + +```ts +content: string; +``` + +Defined in: [types.ts:756](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L756) + +Full accumulated thinking content + +*** + +### delta? + +```ts +optional delta: string; +``` + +Defined in: [types.ts:754](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L754) + +Incremental thinking token + +*** + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### stepId + +```ts +stepId: string; +``` + +Defined in: [types.ts:752](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L752) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "STEP_FINISHED"; +``` + +Defined in: [types.ts:751](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L751) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/StepStartedEvent.md b/docs/reference/interfaces/StepStartedEvent.md new file mode 100644 index 00000000..b5cce5f6 --- /dev/null +++ 
b/docs/reference/interfaces/StepStartedEvent.md @@ -0,0 +1,94 @@ +--- +id: StepStartedEvent +title: StepStartedEvent +--- + +# Interface: StepStartedEvent + +Defined in: [types.ts:741](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L741) + +Emitted when a reasoning/thinking step starts. + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### stepId + +```ts +stepId: string; +``` + +Defined in: [types.ts:743](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L743) + +*** + +### stepType + +```ts +stepType: "thinking" | "reasoning" | "planning"; +``` + +Defined in: [types.ts:744](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L744) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "STEP_STARTED"; +``` + +Defined in: [types.ts:742](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L742) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/StreamProcessorEvents.md 
b/docs/reference/interfaces/StreamProcessorEvents.md new file mode 100644 index 00000000..313e1987 --- /dev/null +++ b/docs/reference/interfaces/StreamProcessorEvents.md @@ -0,0 +1,228 @@ +--- +id: StreamProcessorEvents +title: StreamProcessorEvents +--- + +# Interface: StreamProcessorEvents + +Defined in: [stream/processor.ts:51](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L51) + +Events emitted by the StreamProcessor + +## Properties + +### onApprovalRequest()? + +```ts +optional onApprovalRequest: (args) => void; +``` + +Defined in: [stream/processor.ts:66](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L66) + +#### Parameters + +##### args + +###### approvalId + +`string` + +###### input + +`any` + +###### toolCallId + +`string` + +###### toolName + +`string` + +#### Returns + +`void` + +*** + +### onError()? + +```ts +optional onError: (error) => void; +``` + +Defined in: [stream/processor.ts:58](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L58) + +#### Parameters + +##### error + +`Error` + +#### Returns + +`void` + +*** + +### onMessagesChange()? + +```ts +optional onMessagesChange: (messages) => void; +``` + +Defined in: [stream/processor.ts:53](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L53) + +#### Parameters + +##### messages + +[`UIMessage`](UIMessage.md)[] + +#### Returns + +`void` + +*** + +### onStreamEnd()? + +```ts +optional onStreamEnd: (message) => void; +``` + +Defined in: [stream/processor.ts:57](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L57) + +#### Parameters + +##### message + +[`UIMessage`](UIMessage.md) + +#### Returns + +`void` + +*** + +### onStreamStart()? 
+ +```ts +optional onStreamStart: () => void; +``` + +Defined in: [stream/processor.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L56) + +#### Returns + +`void` + +*** + +### onTextUpdate()? + +```ts +optional onTextUpdate: (messageId, content) => void; +``` + +Defined in: [stream/processor.ts:74](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L74) + +#### Parameters + +##### messageId + +`string` + +##### content + +`string` + +#### Returns + +`void` + +*** + +### onThinkingUpdate()? + +```ts +optional onThinkingUpdate: (messageId, content) => void; +``` + +Defined in: [stream/processor.ts:81](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L81) + +#### Parameters + +##### messageId + +`string` + +##### content + +`string` + +#### Returns + +`void` + +*** + +### onToolCall()? + +```ts +optional onToolCall: (args) => void; +``` + +Defined in: [stream/processor.ts:61](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L61) + +#### Parameters + +##### args + +###### input + +`any` + +###### toolCallId + +`string` + +###### toolName + +`string` + +#### Returns + +`void` + +*** + +### onToolCallStateChange()? 
+ +```ts +optional onToolCallStateChange: (messageId, toolCallId, state, args) => void; +``` + +Defined in: [stream/processor.ts:75](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L75) + +#### Parameters + +##### messageId + +`string` + +##### toolCallId + +`string` + +##### state + +[`ToolCallState`](../type-aliases/ToolCallState.md) + +##### args + +`string` + +#### Returns + +`void` diff --git a/docs/reference/interfaces/StreamProcessorHandlers.md b/docs/reference/interfaces/StreamProcessorHandlers.md new file mode 100644 index 00000000..c4e50f56 --- /dev/null +++ b/docs/reference/interfaces/StreamProcessorHandlers.md @@ -0,0 +1,317 @@ +--- +id: StreamProcessorHandlers +title: StreamProcessorHandlers +--- + +# Interface: StreamProcessorHandlers + +Defined in: [stream/processor.ts:88](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L88) + +Legacy handlers for backward compatibility +These are the old callback-style handlers + +## Properties + +### onApprovalRequested()? + +```ts +optional onApprovalRequested: (toolCallId, toolName, input, approvalId) => void; +``` + +Defined in: [stream/processor.ts:119](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L119) + +#### Parameters + +##### toolCallId + +`string` + +##### toolName + +`string` + +##### input + +`any` + +##### approvalId + +`string` + +#### Returns + +`void` + +*** + +### onError()? + +```ts +optional onError: (error) => void; +``` + +Defined in: [stream/processor.ts:133](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L133) + +#### Parameters + +##### error + +###### code? + +`string` + +###### message + +`string` + +#### Returns + +`void` + +*** + +### onStreamEnd()? + +```ts +optional onStreamEnd: (content, toolCalls?) 
=> void; +``` + +Defined in: [stream/processor.ts:132](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L132) + +#### Parameters + +##### content + +`string` + +##### toolCalls? + +[`ToolCall`](ToolCall.md)[] + +#### Returns + +`void` + +*** + +### onTextUpdate()? + +```ts +optional onTextUpdate: (content) => void; +``` + +Defined in: [stream/processor.ts:89](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L89) + +#### Parameters + +##### content + +`string` + +#### Returns + +`void` + +*** + +### onThinkingUpdate()? + +```ts +optional onThinkingUpdate: (content) => void; +``` + +Defined in: [stream/processor.ts:90](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L90) + +#### Parameters + +##### content + +`string` + +#### Returns + +`void` + +*** + +### onToolCallComplete()? + +```ts +optional onToolCallComplete: (index, id, name, args) => void; +``` + +Defined in: [stream/processor.ts:95](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L95) + +#### Parameters + +##### index + +`number` + +##### id + +`string` + +##### name + +`string` + +##### args + +`string` + +#### Returns + +`void` + +*** + +### onToolCallDelta()? + +```ts +optional onToolCallDelta: (index, args) => void; +``` + +Defined in: [stream/processor.ts:94](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L94) + +#### Parameters + +##### index + +`number` + +##### args + +`string` + +#### Returns + +`void` + +*** + +### onToolCallStart()? + +```ts +optional onToolCallStart: (index, id, name) => void; +``` + +Defined in: [stream/processor.ts:93](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L93) + +#### Parameters + +##### index + +`number` + +##### id + +`string` + +##### name + +`string` + +#### Returns + +`void` + +*** + +### onToolCallStateChange()? 
+ +```ts +optional onToolCallStateChange: (index, id, name, state, args, parsedArgs?) => void; +``` + +Defined in: [stream/processor.ts:101](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L101) + +#### Parameters + +##### index + +`number` + +##### id + +`string` + +##### name + +`string` + +##### state + +[`ToolCallState`](../type-aliases/ToolCallState.md) + +##### args + +`string` + +##### parsedArgs? + +`any` + +#### Returns + +`void` + +*** + +### onToolInputAvailable()? + +```ts +optional onToolInputAvailable: (toolCallId, toolName, input) => void; +``` + +Defined in: [stream/processor.ts:125](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L125) + +#### Parameters + +##### toolCallId + +`string` + +##### toolName + +`string` + +##### input + +`any` + +#### Returns + +`void` + +*** + +### onToolResultStateChange()? + +```ts +optional onToolResultStateChange: (toolCallId, content, state, error?) => void; +``` + +Defined in: [stream/processor.ts:111](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L111) + +#### Parameters + +##### toolCallId + +`string` + +##### content + +`string` + +##### state + +[`ToolResultState`](../type-aliases/ToolResultState.md) + +##### error? + +`string` + +#### Returns + +`void` diff --git a/docs/reference/interfaces/StreamProcessorOptions.md b/docs/reference/interfaces/StreamProcessorOptions.md new file mode 100644 index 00000000..ebe37672 --- /dev/null +++ b/docs/reference/interfaces/StreamProcessorOptions.md @@ -0,0 +1,94 @@ +--- +id: StreamProcessorOptions +title: StreamProcessorOptions +--- + +# Interface: StreamProcessorOptions + +Defined in: [stream/processor.ts:139](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L139) + +Options for StreamProcessor + +## Properties + +### chunkStrategy? 
+ +```ts +optional chunkStrategy: ChunkStrategy; +``` + +Defined in: [stream/processor.ts:140](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L140) + +*** + +### events? + +```ts +optional events: StreamProcessorEvents; +``` + +Defined in: [stream/processor.ts:142](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L142) + +New event-driven handlers + +*** + +### handlers? + +```ts +optional handlers: StreamProcessorHandlers; +``` + +Defined in: [stream/processor.ts:144](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L144) + +Legacy callback handlers (for backward compatibility) + +*** + +### initialMessages? + +```ts +optional initialMessages: UIMessage[]; +``` + +Defined in: [stream/processor.ts:151](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L151) + +Initial messages to populate the processor + +*** + +### jsonParser? + +```ts +optional jsonParser: object; +``` + +Defined in: [stream/processor.ts:145](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L145) + +#### parse() + +```ts +parse: (jsonString) => any; +``` + +##### Parameters + +###### jsonString + +`string` + +##### Returns + +`any` + +*** + +### recording? 
+ +```ts +optional recording: boolean; +``` + +Defined in: [stream/processor.ts:149](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/processor.ts#L149) + +Enable recording for replay testing diff --git a/docs/reference/interfaces/SummarizationOptions.md b/docs/reference/interfaces/SummarizationOptions.md index 575580e2..f009ab58 100644 --- a/docs/reference/interfaces/SummarizationOptions.md +++ b/docs/reference/interfaces/SummarizationOptions.md @@ -5,7 +5,7 @@ title: SummarizationOptions # Interface: SummarizationOptions -Defined in: [types.ts:697](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L697) +Defined in: [types.ts:959](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L959) ## Properties @@ -15,7 +15,7 @@ Defined in: [types.ts:697](https://github.com/TanStack/ai/blob/main/packages/typ optional focus: string[]; ``` -Defined in: [types.ts:702](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L702) +Defined in: [types.ts:964](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L964) *** @@ -25,7 +25,7 @@ Defined in: [types.ts:702](https://github.com/TanStack/ai/blob/main/packages/typ optional maxLength: number; ``` -Defined in: [types.ts:700](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L700) +Defined in: [types.ts:962](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L962) *** @@ -35,7 +35,7 @@ Defined in: [types.ts:700](https://github.com/TanStack/ai/blob/main/packages/typ model: string; ``` -Defined in: [types.ts:698](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L698) +Defined in: [types.ts:960](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L960) *** @@ -45,7 +45,7 @@ Defined in: [types.ts:698](https://github.com/TanStack/ai/blob/main/packages/typ optional style: "bullet-points" | "paragraph" | 
"concise"; ``` -Defined in: [types.ts:701](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L701) +Defined in: [types.ts:963](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L963) *** @@ -55,4 +55,4 @@ Defined in: [types.ts:701](https://github.com/TanStack/ai/blob/main/packages/typ text: string; ``` -Defined in: [types.ts:699](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L699) +Defined in: [types.ts:961](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L961) diff --git a/docs/reference/interfaces/SummarizationResult.md b/docs/reference/interfaces/SummarizationResult.md index 390814d8..1dcaa38e 100644 --- a/docs/reference/interfaces/SummarizationResult.md +++ b/docs/reference/interfaces/SummarizationResult.md @@ -5,7 +5,7 @@ title: SummarizationResult # Interface: SummarizationResult -Defined in: [types.ts:705](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L705) +Defined in: [types.ts:967](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L967) ## Properties @@ -15,7 +15,7 @@ Defined in: [types.ts:705](https://github.com/TanStack/ai/blob/main/packages/typ id: string; ``` -Defined in: [types.ts:706](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L706) +Defined in: [types.ts:968](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L968) *** @@ -25,7 +25,7 @@ Defined in: [types.ts:706](https://github.com/TanStack/ai/blob/main/packages/typ model: string; ``` -Defined in: [types.ts:707](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L707) +Defined in: [types.ts:969](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L969) *** @@ -35,7 +35,7 @@ Defined in: [types.ts:707](https://github.com/TanStack/ai/blob/main/packages/typ summary: string; ``` -Defined in: 
[types.ts:708](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L708) +Defined in: [types.ts:970](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L970) *** @@ -45,7 +45,7 @@ Defined in: [types.ts:708](https://github.com/TanStack/ai/blob/main/packages/typ usage: object; ``` -Defined in: [types.ts:709](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L709) +Defined in: [types.ts:971](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L971) #### completionTokens diff --git a/docs/reference/interfaces/TextMessageContentEvent.md b/docs/reference/interfaces/TextMessageContentEvent.md new file mode 100644 index 00000000..0e4274c4 --- /dev/null +++ b/docs/reference/interfaces/TextMessageContentEvent.md @@ -0,0 +1,106 @@ +--- +id: TextMessageContentEvent +title: TextMessageContentEvent +--- + +# Interface: TextMessageContentEvent + +Defined in: [types.ts:681](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L681) + +Emitted when text content is generated (streaming tokens). + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### content? + +```ts +optional content: string; +``` + +Defined in: [types.ts:686](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L686) + +TanStack AI addition: Full accumulated content so far + +*** + +### delta + +```ts +delta: string; +``` + +Defined in: [types.ts:684](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L684) + +*** + +### messageId + +```ts +messageId: string; +``` + +Defined in: [types.ts:683](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L683) + +*** + +### model? 
+ +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "TEXT_MESSAGE_CONTENT"; +``` + +Defined in: [types.ts:682](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L682) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/TextMessageEndEvent.md b/docs/reference/interfaces/TextMessageEndEvent.md new file mode 100644 index 00000000..88d072c8 --- /dev/null +++ b/docs/reference/interfaces/TextMessageEndEvent.md @@ -0,0 +1,84 @@ +--- +id: TextMessageEndEvent +title: TextMessageEndEvent +--- + +# Interface: TextMessageEndEvent + +Defined in: [types.ts:692](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L692) + +Emitted when a text message completes. + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### messageId + +```ts +messageId: string; +``` + +Defined in: [types.ts:694](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L694) + +*** + +### model? 
+ +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "TEXT_MESSAGE_END"; +``` + +Defined in: [types.ts:693](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L693) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/TextMessageStartEvent.md b/docs/reference/interfaces/TextMessageStartEvent.md new file mode 100644 index 00000000..6a2fefb6 --- /dev/null +++ b/docs/reference/interfaces/TextMessageStartEvent.md @@ -0,0 +1,94 @@ +--- +id: TextMessageStartEvent +title: TextMessageStartEvent +--- + +# Interface: TextMessageStartEvent + +Defined in: [types.ts:672](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L672) + +Emitted when a text message starts. + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### messageId + +```ts +messageId: string; +``` + +Defined in: [types.ts:674](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L674) + +*** + +### model? 
+ +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### role + +```ts +role: "assistant"; +``` + +Defined in: [types.ts:675](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L675) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### type + +```ts +type: "TEXT_MESSAGE_START"; +``` + +Defined in: [types.ts:673](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L673) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/ThinkingStreamChunk.md b/docs/reference/interfaces/ThinkingStreamChunk.md index d67abbe0..37a6e550 100644 --- a/docs/reference/interfaces/ThinkingStreamChunk.md +++ b/docs/reference/interfaces/ThinkingStreamChunk.md @@ -3,86 +3,70 @@ id: ThinkingStreamChunk title: ThinkingStreamChunk --- -# Interface: ThinkingStreamChunk +# ~~Interface: ThinkingStreamChunk~~ -Defined in: [types.ts:663](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L663) +Defined in: [types.ts:905](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L905) -## Extends +## Deprecated -- 
[`BaseStreamChunk`](BaseStreamChunk.md) +Use StepStartedEvent/StepFinishedEvent instead ## Properties -### content +### ~~content~~ ```ts content: string; ``` -Defined in: [types.ts:666](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L666) +Defined in: [types.ts:911](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L911) *** -### delta? +### ~~delta?~~ ```ts optional delta: string; ``` -Defined in: [types.ts:665](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L665) +Defined in: [types.ts:910](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L910) *** -### id +### ~~id~~ ```ts id: string; ``` -Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) +Defined in: [types.ts:907](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L907) *** -### model +### ~~model~~ ```ts model: string; ``` -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) +Defined in: [types.ts:908](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L908) *** -### timestamp +### ~~timestamp~~ ```ts timestamp: number; ``` -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) +Defined in: [types.ts:909](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L909) *** -### type +### ~~type~~ ```ts type: "thinking"; ``` -Defined in: [types.ts:664](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L664) - -#### Overrides - 
-[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) +Defined in: [types.ts:906](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L906) diff --git a/docs/reference/interfaces/ToolCallArgsEvent.md b/docs/reference/interfaces/ToolCallArgsEvent.md new file mode 100644 index 00000000..8db2650e --- /dev/null +++ b/docs/reference/interfaces/ToolCallArgsEvent.md @@ -0,0 +1,108 @@ +--- +id: ToolCallArgsEvent +title: ToolCallArgsEvent +--- + +# Interface: ToolCallArgsEvent + +Defined in: [types.ts:716](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L716) + +Emitted when tool call arguments are streaming. + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### args? + +```ts +optional args: string; +``` + +Defined in: [types.ts:722](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L722) + +Full accumulated arguments so far + +*** + +### delta + +```ts +delta: string; +``` + +Defined in: [types.ts:720](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L720) + +Incremental JSON arguments delta + +*** + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? 
+ +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### toolCallId + +```ts +toolCallId: string; +``` + +Defined in: [types.ts:718](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L718) + +*** + +### type + +```ts +type: "TOOL_CALL_ARGS"; +``` + +Defined in: [types.ts:717](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L717) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/ToolCallEndEvent.md b/docs/reference/interfaces/ToolCallEndEvent.md new file mode 100644 index 00000000..1b10572b --- /dev/null +++ b/docs/reference/interfaces/ToolCallEndEvent.md @@ -0,0 +1,118 @@ +--- +id: ToolCallEndEvent +title: ToolCallEndEvent +--- + +# Interface: ToolCallEndEvent + +Defined in: [types.ts:728](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L728) + +Emitted when a tool call completes (with optional result). + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### input? + +```ts +optional input: unknown; +``` + +Defined in: [types.ts:733](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L733) + +Final parsed input arguments + +*** + +### model? 
+ +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? + +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### result? + +```ts +optional result: unknown; +``` + +Defined in: [types.ts:735](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L735) + +Tool execution result (present when tool has executed) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### toolCallId + +```ts +toolCallId: string; +``` + +Defined in: [types.ts:730](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L730) + +*** + +### toolName + +```ts +toolName: string; +``` + +Defined in: [types.ts:731](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L731) + +*** + +### type + +```ts +type: "TOOL_CALL_END"; +``` + +Defined in: [types.ts:729](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L729) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/ToolCallStartEvent.md b/docs/reference/interfaces/ToolCallStartEvent.md new file mode 100644 index 00000000..75f1af0f --- /dev/null +++ b/docs/reference/interfaces/ToolCallStartEvent.md @@ -0,0 +1,130 @@ +--- +id: ToolCallStartEvent +title: 
ToolCallStartEvent +--- + +# Interface: ToolCallStartEvent + +Defined in: [types.ts:700](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L700) + +Emitted when a tool call starts. + +## Extends + +- [`BaseEvent`](BaseEvent.md) + +## Properties + +### approval? + +```ts +optional approval: object; +``` + +Defined in: [types.ts:707](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L707) + +Approval metadata if tool requires user approval + +#### id + +```ts +id: string; +``` + +#### needsApproval + +```ts +needsApproval: true; +``` + +*** + +### index? + +```ts +optional index: number; +``` + +Defined in: [types.ts:705](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L705) + +Index for parallel tool calls + +*** + +### model? + +```ts +optional model: string; +``` + +Defined in: [types.ts:629](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L629) + +TanStack AI addition: Model identifier for multi-model support + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`model`](BaseEvent.md#model) + +*** + +### rawEvent? 
+ +```ts +optional rawEvent: unknown; +``` + +Defined in: [types.ts:631](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L631) + +Original provider event for debugging/advanced use cases + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`rawEvent`](BaseEvent.md#rawevent) + +*** + +### timestamp + +```ts +timestamp: number; +``` + +Defined in: [types.ts:627](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L627) + +#### Inherited from + +[`BaseEvent`](BaseEvent.md).[`timestamp`](BaseEvent.md#timestamp) + +*** + +### toolCallId + +```ts +toolCallId: string; +``` + +Defined in: [types.ts:702](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L702) + +*** + +### toolName + +```ts +toolName: string; +``` + +Defined in: [types.ts:703](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L703) + +*** + +### type + +```ts +type: "TOOL_CALL_START"; +``` + +Defined in: [types.ts:701](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L701) + +#### Overrides + +[`BaseEvent`](BaseEvent.md).[`type`](BaseEvent.md#type) diff --git a/docs/reference/interfaces/ToolCallStreamChunk.md b/docs/reference/interfaces/ToolCallStreamChunk.md index 1f5ab343..9583d1e5 100644 --- a/docs/reference/interfaces/ToolCallStreamChunk.md +++ b/docs/reference/interfaces/ToolCallStreamChunk.md @@ -3,116 +3,92 @@ id: ToolCallStreamChunk title: ToolCallStreamChunk --- -# Interface: ToolCallStreamChunk +# ~~Interface: ToolCallStreamChunk~~ -Defined in: [types.ts:608](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L608) +Defined in: [types.ts:877](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L877) -## Extends +## Deprecated -- [`BaseStreamChunk`](BaseStreamChunk.md) +Use ToolCallStartEvent and ToolCallArgsEvent instead ## Properties -### id +### ~~approval?~~ ```ts -id: string; +optional approval: object; ``` 
-Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -#### Inherited from +Defined in: [types.ts:884](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L884) -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) +#### ~~id~~ -*** +```ts +id: string; +``` -### index +#### ~~needsApproval~~ ```ts -index: number; +needsApproval: true; ``` -Defined in: [types.ts:618](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L618) - *** -### model +### ~~id~~ ```ts -model: string; +id: string; ``` -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) +Defined in: [types.ts:879](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L879) *** -### timestamp +### ~~index~~ ```ts -timestamp: number; +index: number; ``` -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) +Defined in: [types.ts:883](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L883) *** -### toolCall +### ~~model~~ ```ts -toolCall: object; +model: string; ``` -Defined in: [types.ts:610](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L610) - -#### function +Defined in: [types.ts:880](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L880) -```ts -function: object; -``` +*** -##### function.arguments +### ~~timestamp~~ ```ts -arguments: string; +timestamp: number; ``` -##### function.name +Defined in: [types.ts:881](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L881) -```ts -name: string; -``` +*** -#### id +### ~~toolCall~~ 
```ts -id: string; +toolCall: ToolCall; ``` -#### type - -```ts -type: "function"; -``` +Defined in: [types.ts:882](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L882) *** -### type +### ~~type~~ ```ts type: "tool_call"; ``` -Defined in: [types.ts:609](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L609) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) +Defined in: [types.ts:878](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L878) diff --git a/docs/reference/interfaces/ToolInputAvailableStreamChunk.md b/docs/reference/interfaces/ToolInputAvailableStreamChunk.md index a7256939..a375e231 100644 --- a/docs/reference/interfaces/ToolInputAvailableStreamChunk.md +++ b/docs/reference/interfaces/ToolInputAvailableStreamChunk.md @@ -3,96 +3,80 @@ id: ToolInputAvailableStreamChunk title: ToolInputAvailableStreamChunk --- -# Interface: ToolInputAvailableStreamChunk +# ~~Interface: ToolInputAvailableStreamChunk~~ -Defined in: [types.ts:656](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L656) +Defined in: [types.ts:934](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L934) -## Extends +## Deprecated -- [`BaseStreamChunk`](BaseStreamChunk.md) +Use CustomEvent with name='tool-input-available' instead ## Properties -### id +### ~~id~~ ```ts id: string; ``` -Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) +Defined in: [types.ts:936](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L936) *** -### input +### ~~input~~ ```ts -input: any; +input: Record; ``` -Defined in: [types.ts:660](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L660) +Defined in: 
[types.ts:941](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L941) *** -### model +### ~~model~~ ```ts model: string; ``` -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) +Defined in: [types.ts:937](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L937) *** -### timestamp +### ~~timestamp~~ ```ts timestamp: number; ``` -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) +Defined in: [types.ts:938](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L938) *** -### toolCallId +### ~~toolCallId~~ ```ts toolCallId: string; ``` -Defined in: [types.ts:658](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L658) +Defined in: [types.ts:939](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L939) *** -### toolName +### ~~toolName~~ ```ts toolName: string; ``` -Defined in: [types.ts:659](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L659) +Defined in: [types.ts:940](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L940) *** -### type +### ~~type~~ ```ts type: "tool-input-available"; ``` -Defined in: [types.ts:657](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L657) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) +Defined in: [types.ts:935](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L935) diff --git a/docs/reference/interfaces/ToolResultStreamChunk.md b/docs/reference/interfaces/ToolResultStreamChunk.md index e5f728a7..57f32e32 100644 --- 
a/docs/reference/interfaces/ToolResultStreamChunk.md +++ b/docs/reference/interfaces/ToolResultStreamChunk.md @@ -3,86 +3,70 @@ id: ToolResultStreamChunk title: ToolResultStreamChunk --- -# Interface: ToolResultStreamChunk +# ~~Interface: ToolResultStreamChunk~~ -Defined in: [types.ts:621](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L621) +Defined in: [types.ts:893](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L893) -## Extends +## Deprecated -- [`BaseStreamChunk`](BaseStreamChunk.md) +Use ToolCallEndEvent instead ## Properties -### content +### ~~content~~ ```ts content: string; ``` -Defined in: [types.ts:624](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L624) +Defined in: [types.ts:899](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L899) *** -### id +### ~~id~~ ```ts id: string; ``` -Defined in: [types.ts:596](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L596) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`id`](BaseStreamChunk.md#id) +Defined in: [types.ts:895](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L895) *** -### model +### ~~model~~ ```ts model: string; ``` -Defined in: [types.ts:597](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L597) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`model`](BaseStreamChunk.md#model) +Defined in: [types.ts:896](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L896) *** -### timestamp +### ~~timestamp~~ ```ts timestamp: number; ``` -Defined in: [types.ts:598](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L598) - -#### Inherited from - -[`BaseStreamChunk`](BaseStreamChunk.md).[`timestamp`](BaseStreamChunk.md#timestamp) +Defined in: 
[types.ts:897](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L897) *** -### toolCallId +### ~~toolCallId~~ ```ts toolCallId: string; ``` -Defined in: [types.ts:623](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L623) +Defined in: [types.ts:898](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L898) *** -### type +### ~~type~~ ```ts type: "tool_result"; ``` -Defined in: [types.ts:622](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L622) - -#### Overrides - -[`BaseStreamChunk`](BaseStreamChunk.md).[`type`](BaseStreamChunk.md#type) +Defined in: [types.ts:894](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L894) diff --git a/docs/reference/type-aliases/AnyClientTool.md b/docs/reference/type-aliases/AnyClientTool.md new file mode 100644 index 00000000..4f395412 --- /dev/null +++ b/docs/reference/type-aliases/AnyClientTool.md @@ -0,0 +1,16 @@ +--- +id: AnyClientTool +title: AnyClientTool +--- + +# Type Alias: AnyClientTool + +```ts +type AnyClientTool = + | ClientTool +| ToolDefinitionInstance; +``` + +Defined in: [tools/tool-definition.ts:49](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L49) + +Union type for any kind of client-side tool (client tool or definition) diff --git a/docs/reference/type-aliases/ChatStreamOptionsForModel.md b/docs/reference/type-aliases/ChatStreamOptionsForModel.md index 651be480..8801022a 100644 --- a/docs/reference/type-aliases/ChatStreamOptionsForModel.md +++ b/docs/reference/type-aliases/ChatStreamOptionsForModel.md @@ -9,7 +9,7 @@ title: ChatStreamOptionsForModel type ChatStreamOptionsForModel = TAdapter extends AIAdapter ? 
Omit & object : never; ``` -Defined in: [types.ts:883](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L883) +Defined in: [types.ts:1143](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1143) Chat options constrained by a specific model's capabilities. Unlike ChatStreamOptionsUnion which creates a union over all models, diff --git a/docs/reference/type-aliases/ChatStreamOptionsUnion.md b/docs/reference/type-aliases/ChatStreamOptionsUnion.md index 02e3cb26..a94cc681 100644 --- a/docs/reference/type-aliases/ChatStreamOptionsUnion.md +++ b/docs/reference/type-aliases/ChatStreamOptionsUnion.md @@ -9,7 +9,7 @@ title: ChatStreamOptionsUnion type ChatStreamOptionsUnion = TAdapter extends AIAdapter ? Models[number] extends infer TModel ? TModel extends string ? Omit & object : never : never : never; ``` -Defined in: [types.ts:823](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L823) +Defined in: [types.ts:1083](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1083) ## Type Parameters diff --git a/docs/reference/type-aliases/EventType.md b/docs/reference/type-aliases/EventType.md new file mode 100644 index 00000000..ce5bea37 --- /dev/null +++ b/docs/reference/type-aliases/EventType.md @@ -0,0 +1,43 @@ +--- +id: EventType +title: EventType +--- + +# Type Alias: EventType + +```ts +type EventType = + | "RUN_STARTED" + | "RUN_FINISHED" + | "RUN_ERROR" + | "TEXT_MESSAGE_START" + | "TEXT_MESSAGE_CONTENT" + | "TEXT_MESSAGE_END" + | "TOOL_CALL_START" + | "TOOL_CALL_ARGS" + | "TOOL_CALL_END" + | "STEP_STARTED" + | "STEP_FINISHED" + | "STATE_SNAPSHOT" + | "STATE_DELTA" + | "CUSTOM" + | "content" + | "done" + | "error" + | "tool_call" + | "tool_result" + | "thinking" + | "approval-requested" + | "tool-input-available"; +``` + +Defined in: [types.ts:595](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L595) + +AG-UI Protocol event types. 
+Based on the AG-UI specification for agent-user interaction. + +## See + +https://docs.ag-ui.com/concepts/events + +Includes legacy type aliases for backward compatibility during migration. diff --git a/docs/reference/type-aliases/ExtractModalitiesForModel.md b/docs/reference/type-aliases/ExtractModalitiesForModel.md index fe165380..80b484e5 100644 --- a/docs/reference/type-aliases/ExtractModalitiesForModel.md +++ b/docs/reference/type-aliases/ExtractModalitiesForModel.md @@ -9,7 +9,7 @@ title: ExtractModalitiesForModel type ExtractModalitiesForModel = TAdapter extends AIAdapter ? TModel extends keyof ModelInputModalities ? ModelInputModalities[TModel] : ReadonlyArray : ReadonlyArray; ``` -Defined in: [types.ts:942](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L942) +Defined in: [types.ts:1202](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1202) Extract the supported input modalities for a specific model from an adapter. diff --git a/docs/reference/type-aliases/ExtractModelsFromAdapter.md b/docs/reference/type-aliases/ExtractModelsFromAdapter.md index 7b1edfb0..a8dd3e29 100644 --- a/docs/reference/type-aliases/ExtractModelsFromAdapter.md +++ b/docs/reference/type-aliases/ExtractModelsFromAdapter.md @@ -9,7 +9,7 @@ title: ExtractModelsFromAdapter type ExtractModelsFromAdapter = T extends AIAdapter ? 
M[number] : never; ``` -Defined in: [types.ts:936](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L936) +Defined in: [types.ts:1196](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L1196) ## Type Parameters diff --git a/docs/reference/type-aliases/InferToolName.md b/docs/reference/type-aliases/InferToolName.md new file mode 100644 index 00000000..25b0aa93 --- /dev/null +++ b/docs/reference/type-aliases/InferToolName.md @@ -0,0 +1,20 @@ +--- +id: InferToolName +title: InferToolName +--- + +# Type Alias: InferToolName\ + +```ts +type InferToolName = T extends object ? N : never; +``` + +Defined in: [tools/tool-definition.ts:56](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/tools/tool-definition.ts#L56) + +Extract the tool name as a literal type + +## Type Parameters + +### T + +`T` diff --git a/docs/reference/type-aliases/StreamChunk.md b/docs/reference/type-aliases/StreamChunk.md index 7227135c..ba01818f 100644 --- a/docs/reference/type-aliases/StreamChunk.md +++ b/docs/reference/type-aliases/StreamChunk.md @@ -7,16 +7,32 @@ title: StreamChunk ```ts type StreamChunk = + | RunStartedEvent + | RunFinishedEvent + | RunErrorEvent + | TextMessageStartEvent + | TextMessageContentEvent + | TextMessageEndEvent + | ToolCallStartEvent + | ToolCallArgsEvent + | ToolCallEndEvent + | StepStartedEvent + | StepFinishedEvent + | StateSnapshotEvent + | StateDeltaEvent + | CustomEvent | ContentStreamChunk - | ToolCallStreamChunk - | ToolResultStreamChunk | DoneStreamChunk | ErrorStreamChunk + | ToolCallStreamChunk + | ToolResultStreamChunk + | ThinkingStreamChunk | ApprovalRequestedStreamChunk - | ToolInputAvailableStreamChunk - | ThinkingStreamChunk; + | ToolInputAvailableStreamChunk; ``` -Defined in: [types.ts:672](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L672) +Defined in: 
[types.ts:794](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L794) -Chunk returned by the sdk during streaming chat completions. +Union type for all AG-UI events. +This is the primary type for streaming chat completions. +Includes legacy types for backward compatibility. diff --git a/docs/reference/type-aliases/StreamChunkType.md b/docs/reference/type-aliases/StreamChunkType.md index 3d83a468..eccefe89 100644 --- a/docs/reference/type-aliases/StreamChunkType.md +++ b/docs/reference/type-aliases/StreamChunkType.md @@ -6,15 +6,7 @@ title: StreamChunkType # Type Alias: StreamChunkType ```ts -type StreamChunkType = - | "content" - | "tool_call" - | "tool_result" - | "done" - | "error" - | "approval-requested" - | "tool-input-available" - | "thinking"; +type StreamChunkType = EventType; ``` -Defined in: [types.ts:584](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L584) +Defined in: [types.ts:821](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/types.ts#L821) diff --git a/docs/reference/type-aliases/ToolCallState.md b/docs/reference/type-aliases/ToolCallState.md new file mode 100644 index 00000000..7ff8e334 --- /dev/null +++ b/docs/reference/type-aliases/ToolCallState.md @@ -0,0 +1,19 @@ +--- +id: ToolCallState +title: ToolCallState +--- + +# Type Alias: ToolCallState + +```ts +type ToolCallState = + | "awaiting-input" + | "input-streaming" + | "input-complete" + | "approval-requested" + | "approval-responded"; +``` + +Defined in: [stream/types.ts:13](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L13) + +Tool call states - track the lifecycle of a tool call diff --git a/docs/reference/type-aliases/ToolResultState.md b/docs/reference/type-aliases/ToolResultState.md new file mode 100644 index 00000000..d9d29e6e --- /dev/null +++ b/docs/reference/type-aliases/ToolResultState.md @@ -0,0 +1,14 @@ +--- +id: ToolResultState +title: ToolResultState +--- + +# 
Type Alias: ToolResultState + +```ts +type ToolResultState = "streaming" | "complete" | "error"; +``` + +Defined in: [stream/types.ts:23](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/types.ts#L23) + +Tool result states - track the lifecycle of a tool result diff --git a/docs/reference/variables/defaultJSONParser.md b/docs/reference/variables/defaultJSONParser.md new file mode 100644 index 00000000..e5e32dad --- /dev/null +++ b/docs/reference/variables/defaultJSONParser.md @@ -0,0 +1,14 @@ +--- +id: defaultJSONParser +title: defaultJSONParser +--- + +# Variable: defaultJSONParser + +```ts +const defaultJSONParser: PartialJSONParser; +``` + +Defined in: [stream/json-parser.ts:49](https://github.com/TanStack/ai/blob/main/packages/typescript/ai/src/stream/json-parser.ts#L49) + +Default parser instance From 09a0e0c9890ce65089c230c05c21fbdec5cd5138 Mon Sep 17 00:00:00 2001 From: Jack Herrington Date: Wed, 10 Dec 2025 18:01:06 -0800 Subject: [PATCH 6/7] small tweaks and fixups --- .../tanstack-ai/src/tanstack_ai/__init__.py | 64 +++++++---- .../src/tanstack_ai/anthropic_adapter.py | 90 ++++++++-------- .../tanstack-ai/src/tanstack_ai/chat.py | 98 ++++++++++------- .../src/tanstack_ai/tool_manager.py | 2 - .../tanstack-ai/src/tanstack_ai/types.py | 7 +- packages/typescript/ai/package.json | 8 +- .../smoke-tests/adapters/src/index.ts | 13 ++- packages/typescript/smoke-tests/package.json | 3 +- pnpm-lock.yaml | 100 +++++++++++++++++- pnpm-workspace.yaml | 1 + testing/panel/package.json | 16 +-- 11 files changed, 267 insertions(+), 135 deletions(-) diff --git a/packages/python/tanstack-ai/src/tanstack_ai/__init__.py b/packages/python/tanstack-ai/src/tanstack_ai/__init__.py index a55f21e2..cff1a0ba 100644 --- a/packages/python/tanstack-ai/src/tanstack_ai/__init__.py +++ b/packages/python/tanstack-ai/src/tanstack_ai/__init__.py @@ -31,7 +31,7 @@ combine_strategies, ) -# Types +# Types - AG-UI Events from .types import ( # Core types Tool, @@ -39,16 
+39,27 @@ ModelMessage, ChatOptions, AIAdapterConfig, - # Stream chunk types + # AG-UI Event types + EventType, + BaseEvent, StreamChunk, - ContentStreamChunk, - ThinkingStreamChunk, - ToolCallStreamChunk, - ToolInputAvailableStreamChunk, - ApprovalRequestedStreamChunk, - ToolResultStreamChunk, - DoneStreamChunk, - ErrorStreamChunk, + UsageInfo, + ErrorInfo, + RunStartedEvent, + RunFinishedEvent, + RunErrorEvent, + TextMessageStartEvent, + TextMessageContentEvent, + TextMessageEndEvent, + ToolCallStartEvent, + ToolCallArgsEvent, + ToolCallEndEvent, + StepStartedEvent, + StepFinishedEvent, + StateSnapshotEvent, + StateDeltaEvent, + CustomEvent, + ApprovalInfo, # Agent loop types AgentLoopState, AgentLoopStrategy, @@ -83,23 +94,37 @@ "max_iterations", "until_finish_reason", "combine_strategies", - # Types + # Core types "Tool", "ToolCall", "ModelMessage", "ChatOptions", "AIAdapterConfig", + # AG-UI Event types + "EventType", + "BaseEvent", "StreamChunk", - "ContentStreamChunk", - "ThinkingStreamChunk", - "ToolCallStreamChunk", - "ToolInputAvailableStreamChunk", - "ApprovalRequestedStreamChunk", - "ToolResultStreamChunk", - "DoneStreamChunk", - "ErrorStreamChunk", + "UsageInfo", + "ErrorInfo", + "RunStartedEvent", + "RunFinishedEvent", + "RunErrorEvent", + "TextMessageStartEvent", + "TextMessageContentEvent", + "TextMessageEndEvent", + "ToolCallStartEvent", + "ToolCallArgsEvent", + "ToolCallEndEvent", + "StepStartedEvent", + "StepFinishedEvent", + "StateSnapshotEvent", + "StateDeltaEvent", + "CustomEvent", + "ApprovalInfo", + # Agent loop types "AgentLoopState", "AgentLoopStrategy", + # Other types "SummarizationOptions", "SummarizationResult", "EmbeddingOptions", @@ -115,4 +140,3 @@ ] __version__ = "0.1.0" - diff --git a/packages/python/tanstack-ai/src/tanstack_ai/anthropic_adapter.py b/packages/python/tanstack-ai/src/tanstack_ai/anthropic_adapter.py index a3ba0cd2..e6dcdf62 100644 --- a/packages/python/tanstack-ai/src/tanstack_ai/anthropic_adapter.py +++ 
b/packages/python/tanstack-ai/src/tanstack_ai/anthropic_adapter.py @@ -28,16 +28,15 @@ from .types import ( AIAdapterConfig, ChatOptions, - ContentStreamChunk, - DoneStreamChunk, EmbeddingOptions, EmbeddingResult, - ErrorStreamChunk, + RunErrorEvent, + RunFinishedEvent, StreamChunk, SummarizationOptions, SummarizationResult, - ThinkingStreamChunk, - ToolCallStreamChunk, + TextMessageContentEvent, + ToolCallStartEvent, ) @@ -102,7 +101,7 @@ async def chat_stream(self, options: ChatOptions) -> AsyncIterator[StreamChunk]: options: Chat options Yields: - StreamChunk objects + StreamChunk objects (AG-UI events) """ try: # Format messages for Anthropic (function returns tuple of (system, messages)) @@ -145,6 +144,7 @@ async def chat_stream(self, options: ChatOptions) -> AsyncIterator[StreamChunk]: # Make the streaming request message_id = self._generate_id() + run_id = self._generate_id() accumulated_content = "" accumulated_thinking = "" tool_calls: Dict[int, Dict[str, Any]] = {} @@ -169,11 +169,8 @@ async def chat_stream(self, options: ChatOptions) -> AsyncIterator[StreamChunk]: # Tool use block tool_calls[event.index] = { "id": block.id, - "type": "function", - "function": { - "name": block.name, - "arguments": "", - }, + "name": block.name, + "arguments": "", } elif event.type == "content_block_delta": @@ -182,35 +179,34 @@ async def chat_stream(self, options: ChatOptions) -> AsyncIterator[StreamChunk]: if delta.type == "text_delta": # Text content delta accumulated_content += delta.text - yield ContentStreamChunk( - type="content", - id=message_id, - model=options.model, - timestamp=timestamp, - delta=delta.text, - content=accumulated_content, - role="assistant", - ) + chunk: TextMessageContentEvent = { + "type": "TEXT_MESSAGE_CONTENT", + "messageId": message_id, + "model": options.model, + "timestamp": timestamp, + "delta": delta.text, + "content": accumulated_content, + } + yield chunk elif delta.type == "input_json_delta": # Tool input delta if event.index in 
tool_calls: - tool_calls[event.index]["function"][ - "arguments" - ] += delta.partial_json + tool_calls[event.index]["arguments"] += delta.partial_json elif event.type == "content_block_stop": # Content block completed if event.index in tool_calls: # Emit tool call chunk tool_call = tool_calls[event.index] - yield ToolCallStreamChunk( - type="tool_call", - id=message_id, - model=options.model, - timestamp=timestamp, - toolCall=tool_call, - index=event.index, - ) + chunk: ToolCallStartEvent = { + "type": "TOOL_CALL_START", + "timestamp": timestamp, + "model": options.model, + "toolCallId": tool_call["id"], + "toolName": tool_call["name"], + "index": event.index, + } + yield chunk elif event.type == "message_delta": # Message metadata delta (finish reason, usage) @@ -238,27 +234,29 @@ async def chat_stream(self, options: ChatOptions) -> AsyncIterator[StreamChunk]: elif final_message.stop_reason == "tool_use": finish_reason = "tool_calls" - yield DoneStreamChunk( - type="done", - id=message_id, - model=options.model, - timestamp=timestamp, - finishReason=finish_reason, - usage=usage, - ) + done_chunk: RunFinishedEvent = { + "type": "RUN_FINISHED", + "runId": run_id, + "model": options.model, + "timestamp": timestamp, + "finishReason": finish_reason, + "usage": usage, + } + yield done_chunk except Exception as e: # Emit error chunk - yield ErrorStreamChunk( - type="error", - id=self._generate_id(), - model=options.model, - timestamp=int(time.time() * 1000), - error={ + error_chunk: RunErrorEvent = { + "type": "RUN_ERROR", + "runId": self._generate_id(), + "model": options.model, + "timestamp": int(time.time() * 1000), + "error": { "message": str(e), "code": getattr(e, "code", None), }, - ) + } + yield error_chunk def _format_tools(self, tools: List[Any]) -> List[Dict[str, Any]]: """ diff --git a/packages/python/tanstack-ai/src/tanstack_ai/chat.py b/packages/python/tanstack-ai/src/tanstack_ai/chat.py index 93edd10d..126afd33 100644 --- 
a/packages/python/tanstack-ai/src/tanstack_ai/chat.py +++ b/packages/python/tanstack-ai/src/tanstack_ai/chat.py @@ -22,15 +22,14 @@ ) from .types import ( AgentLoopStrategy, - ApprovalRequestedStreamChunk, ChatOptions, - DoneStreamChunk, + CustomEvent, ModelMessage, + RunFinishedEvent, StreamChunk, Tool, ToolCall, - ToolInputAvailableStreamChunk, - ToolResultStreamChunk, + ToolCallEndEvent, ) @@ -119,7 +118,7 @@ def __init__( self.last_finish_reason: Optional[str] = None self.current_message_id: Optional[str] = None self.accumulated_content = "" - self.done_chunk: Optional[DoneStreamChunk] = None + self.done_chunk: Optional[RunFinishedEvent] = None self.should_emit_stream_end = True self.early_termination = False self.tool_phase: ToolPhaseResult = ToolPhaseResult.CONTINUE @@ -128,6 +127,7 @@ def __init__( # Generate IDs self.request_id = self._create_id("chat") self.stream_id = self._create_id("stream") + self.run_id = self._create_id("run") async def chat(self) -> AsyncIterator[StreamChunk]: """ @@ -215,17 +215,32 @@ def _handle_stream_chunk(self, chunk: StreamChunk) -> None: """ chunk_type = chunk.get("type") - if chunk_type == "content": - self.accumulated_content = chunk["content"] - elif chunk_type == "tool_call": - self.tool_call_manager.add_tool_call_chunk(chunk) - elif chunk_type == "done": + if chunk_type == "TEXT_MESSAGE_CONTENT": + self.accumulated_content = chunk.get("content", "") + elif chunk_type == "TOOL_CALL_START": + # Build legacy format for tool call manager + tool_call_chunk = { + "index": chunk.get("index", 0), + "toolCall": { + "id": chunk.get("toolCallId"), + "type": "function", + "function": { + "name": chunk.get("toolName"), + "arguments": "", + }, + }, + } + self.tool_call_manager.add_tool_call_chunk(tool_call_chunk) + elif chunk_type == "TOOL_CALL_ARGS": + # Accumulate arguments - find the tool call and append + pass # Tool call manager handles this via TOOL_CALL_START chunks + elif chunk_type == "RUN_FINISHED": 
self._handle_done_chunk(chunk) - elif chunk_type == "error": + elif chunk_type == "RUN_ERROR": self.early_termination = True self.should_emit_stream_end = False - def _handle_done_chunk(self, chunk: DoneStreamChunk) -> None: + def _handle_done_chunk(self, chunk: RunFinishedEvent) -> None: """Handle a done chunk.""" # Don't overwrite a tool_calls finishReason with a stop finishReason if ( @@ -373,21 +388,20 @@ def _collect_client_state(self) -> tuple[Dict[str, bool], Dict[str, Any]]: async def _emit_approval_requests( self, approval_requests: List[ApprovalRequest], - done_chunk: DoneStreamChunk, + done_chunk: RunFinishedEvent, ) -> AsyncIterator[StreamChunk]: """Emit approval request chunks.""" for approval in approval_requests: - chunk: ApprovalRequestedStreamChunk = { - "type": "approval-requested", - "id": done_chunk["id"], - "model": done_chunk["model"], + chunk: CustomEvent = { + "type": "CUSTOM", + "name": "approval-requested", "timestamp": int(time.time() * 1000), - "toolCallId": approval.tool_call_id, - "toolName": approval.tool_name, - "input": approval.input, - "approval": { - "id": approval.approval_id, - "needsApproval": True, + "model": self.options.model, + "value": { + "toolCallId": approval.tool_call_id, + "toolName": approval.tool_name, + "input": approval.input, + "approvalId": approval.approval_id, }, } yield chunk @@ -395,37 +409,39 @@ async def _emit_approval_requests( async def _emit_client_tool_inputs( self, client_requests: List[ClientToolRequest], - done_chunk: DoneStreamChunk, + done_chunk: RunFinishedEvent, ) -> AsyncIterator[StreamChunk]: """Emit tool-input-available chunks for client execution.""" for client_tool in client_requests: - chunk: ToolInputAvailableStreamChunk = { - "type": "tool-input-available", - "id": done_chunk["id"], - "model": done_chunk["model"], + chunk: CustomEvent = { + "type": "CUSTOM", + "name": "tool-input-available", "timestamp": int(time.time() * 1000), - "toolCallId": client_tool.tool_call_id, - "toolName": 
client_tool.tool_name, - "input": client_tool.input, + "model": self.options.model, + "value": { + "toolCallId": client_tool.tool_call_id, + "toolName": client_tool.tool_name, + "input": client_tool.input, + }, } yield chunk async def _emit_tool_results( self, results: List[ToolResult], - done_chunk: DoneStreamChunk, + done_chunk: RunFinishedEvent, ) -> AsyncIterator[StreamChunk]: """Emit tool result chunks and add to messages.""" for result in results: content = json.dumps(result.result) - chunk: ToolResultStreamChunk = { - "type": "tool_result", - "id": done_chunk["id"], - "model": done_chunk["model"], + chunk: ToolCallEndEvent = { + "type": "TOOL_CALL_END", "timestamp": int(time.time() * 1000), + "model": self.options.model, "toolCallId": result.tool_call_id, - "content": content, + "toolName": "", # We don't have the tool name here + "result": content, } yield chunk @@ -454,11 +470,11 @@ def _get_pending_tool_calls_from_messages(self) -> List[ToolCall]: return pending - def _create_synthetic_done_chunk(self) -> DoneStreamChunk: + def _create_synthetic_done_chunk(self) -> RunFinishedEvent: """Create a synthetic done chunk for pending tool calls.""" return { - "type": "done", - "id": self._create_id("pending"), + "type": "RUN_FINISHED", + "runId": self.run_id, "model": self.options.model, "timestamp": int(time.time() * 1000), "finishReason": "tool_calls", @@ -533,7 +549,7 @@ async def chat( ... messages=[{"role": "user", "content": "Hello!"}], ... tools=[weather_tool], ... ): - ... if chunk["type"] == "content": + ... if chunk["type"] == "TEXT_MESSAGE_CONTENT": ... 
print(chunk["delta"], end="", flush=True) """ chat_options = ChatOptions( diff --git a/packages/python/tanstack-ai/src/tanstack_ai/tool_manager.py b/packages/python/tanstack-ai/src/tanstack_ai/tool_manager.py index 07d07d5c..e28c1c33 100644 --- a/packages/python/tanstack-ai/src/tanstack_ai/tool_manager.py +++ b/packages/python/tanstack-ai/src/tanstack_ai/tool_manager.py @@ -10,11 +10,9 @@ from typing import Any, Dict, List, Optional, Tuple from .types import ( - DoneStreamChunk, ModelMessage, Tool, ToolCall, - ToolResultStreamChunk, ) diff --git a/packages/python/tanstack-ai/src/tanstack_ai/types.py b/packages/python/tanstack-ai/src/tanstack_ai/types.py index 1f82077a..361c74f5 100644 --- a/packages/python/tanstack-ai/src/tanstack_ai/types.py +++ b/packages/python/tanstack-ai/src/tanstack_ai/types.py @@ -1,8 +1,8 @@ """ Type definitions for TanStack AI Python package. -This module defines the core types used throughout the package, mirroring the -TypeScript implementation for consistency across platforms. +This module defines the core types used throughout the package, following the +AG-UI (Agent-User Interface) protocol for consistent event streaming. 
""" from dataclasses import dataclass, field @@ -104,9 +104,6 @@ class Tool: "CUSTOM", ] -# Legacy alias for backwards compatibility -StreamChunkType = EventType - class BaseEvent(TypedDict, total=False): """Base structure for all AG-UI events.""" diff --git a/packages/typescript/ai/package.json b/packages/typescript/ai/package.json index c395b371..5948d10c 100644 --- a/packages/typescript/ai/package.json +++ b/packages/typescript/ai/package.json @@ -52,14 +52,14 @@ ], "dependencies": { "@tanstack/devtools-event-client": "^0.4.0", - "partial-json": "^0.1.7" + "partial-json": "^0.1.7", + "zod": "^4.1.13" }, "peerDependencies": { "@alcyone-labs/zod-to-json-schema": "^4.0.0", - "zod": "^3.0.0 || ^4.0.0" + "zod": "^4.0.0" }, "devDependencies": { - "@vitest/coverage-v8": "4.0.14", - "zod": "^4.1.13" + "@vitest/coverage-v8": "4.0.14" } } diff --git a/packages/typescript/smoke-tests/adapters/src/index.ts b/packages/typescript/smoke-tests/adapters/src/index.ts index 43e19f41..87f2dfd1 100644 --- a/packages/typescript/smoke-tests/adapters/src/index.ts +++ b/packages/typescript/smoke-tests/adapters/src/index.ts @@ -1,19 +1,18 @@ import { config } from 'dotenv' + import { - chat, embedding, + maxIterations, summarize, toolDefinition, - maxIterations, - type Tool, } from '@tanstack/ai' import { z } from 'zod' import { createAnthropic } from '@tanstack/ai-anthropic' import { createGemini } from '@tanstack/ai-gemini' import { ollama } from '@tanstack/ai-ollama' import { createOpenAI } from '@tanstack/ai-openai' + import { - AdapterContext, buildApprovalMessages, captureStream, createDebugEnvelope, @@ -22,6 +21,10 @@ import { writeDebugFile, } from './harness' +import type { Tool } from '@tanstack/ai' + +import type { AdapterContext } from './harness' + // Load .env.local first (higher priority), then .env config({ path: '.env.local' }) config({ path: '.env' }) @@ -38,7 +41,7 @@ const OPENAI_SUMMARY_MODEL = process.env.OPENAI_SUMMARY_MODEL || OPENAI_MODEL const 
OPENAI_EMBEDDING_MODEL = process.env.OPENAI_EMBEDDING_MODEL || 'text-embedding-3-small' -const GEMINI_MODEL = process.env.GEMINI_MODEL || 'gemini-2.0-flash-lite' +const GEMINI_MODEL = process.env.GEMINI_MODEL || 'gemini-2.5-flash-lite' const GEMINI_SUMMARY_MODEL = process.env.GEMINI_SUMMARY_MODEL || GEMINI_MODEL const GEMINI_EMBEDDING_MODEL = process.env.GEMINI_EMBEDDING_MODEL || 'gemini-embedding-001' diff --git a/packages/typescript/smoke-tests/package.json b/packages/typescript/smoke-tests/package.json index 1146be21..ed24bab2 100644 --- a/packages/typescript/smoke-tests/package.json +++ b/packages/typescript/smoke-tests/package.json @@ -1,4 +1,5 @@ { "name": "smoke-tests", + "version": "0.0.0", "private": true -} +} \ No newline at end of file diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 214dfb20..aa55e008 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -605,13 +605,13 @@ importers: partial-json: specifier: ^0.1.7 version: 0.1.7 + zod: + specifier: ^4.1.13 + version: 4.1.13 devDependencies: '@vitest/coverage-v8': specifier: 4.0.14 version: 4.0.14(vitest@4.0.14(@types/node@24.10.1)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.2.0(postcss@8.5.6))(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - zod: - specifier: ^4.1.13 - version: 4.1.13 packages/typescript/ai-anthropic: dependencies: @@ -1133,6 +1133,100 @@ importers: specifier: ^2.11.10 version: 2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.10)(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + testing/panel: + dependencies: + '@alcyone-labs/zod-to-json-schema': + specifier: ^4.0.10 + version: 4.0.10(zod@4.1.13) + '@tailwindcss/vite': + specifier: ^4.1.17 + version: 4.1.17(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/ai': + specifier: workspace:* + version: link:../../packages/typescript/ai + '@tanstack/ai-anthropic': + specifier: workspace:* + version: 
link:../../packages/typescript/ai-anthropic + '@tanstack/ai-client': + specifier: workspace:* + version: link:../../packages/typescript/ai-client + '@tanstack/ai-gemini': + specifier: workspace:* + version: link:../../packages/typescript/ai-gemini + '@tanstack/ai-openai': + specifier: workspace:* + version: link:../../packages/typescript/ai-openai + '@tanstack/ai-react': + specifier: workspace:* + version: link:../../packages/typescript/ai-react + '@tanstack/ai-react-ui': + specifier: workspace:* + version: link:../../packages/typescript/ai-react-ui + '@tanstack/nitro-v2-vite-plugin': + specifier: ^1.139.0 + version: 1.139.0(rolldown@1.0.0-beta.53)(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/react-router': + specifier: ^1.139.7 + version: 1.139.7(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@tanstack/react-start': + specifier: ^1.139.8 + version: 1.139.8(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.10)(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + highlight.js: + specifier: ^11.11.1 + version: 11.11.1 + lucide-react: + specifier: ^0.555.0 + version: 0.555.0(react@19.2.0) + react: + specifier: ^19.2.0 + version: 19.2.0 + react-dom: + specifier: ^19.2.0 + version: 19.2.0(react@19.2.0) + react-markdown: + specifier: ^10.1.0 + version: 10.1.0(@types/react@19.2.7)(react@19.2.0) + rehype-highlight: + specifier: ^7.0.2 + version: 7.0.2 + rehype-raw: + specifier: ^7.0.0 + version: 7.0.0 + rehype-sanitize: + specifier: ^6.0.0 + version: 6.0.0 + remark-gfm: + specifier: ^4.0.1 + version: 4.0.1 + tailwindcss: + specifier: ^4.1.17 + version: 4.1.17 + vite-tsconfig-paths: + specifier: ^5.1.4 + version: 
5.1.4(typescript@5.9.3)(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + zod: + specifier: ^4.1.13 + version: 4.1.13 + devDependencies: + '@types/node': + specifier: ^24.10.1 + version: 24.10.1 + '@types/react': + specifier: ^19.2.7 + version: 19.2.7 + '@types/react-dom': + specifier: ^19.2.3 + version: 19.2.3(@types/react@19.2.7) + '@vitejs/plugin-react': + specifier: ^5.1.1 + version: 5.1.1(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + typescript: + specifier: 5.9.3 + version: 5.9.3 + vite: + specifier: ^7.2.4 + version: 7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + packages: '@acemir/cssom@0.9.24': diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 9ffeb0fe..47ed9ebd 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -6,3 +6,4 @@ packages: - 'packages/typescript/*' - 'packages/typescript/smoke-tests/*' - 'examples/*' + - 'testing/*' diff --git a/testing/panel/package.json b/testing/panel/package.json index bb38ee26..1c887439 100644 --- a/testing/panel/package.json +++ b/testing/panel/package.json @@ -8,6 +8,7 @@ "preview": "vite preview" }, "dependencies": { + "@alcyone-labs/zod-to-json-schema": "^4.0.10", "@tailwindcss/vite": "^4.1.17", "@tanstack/ai": "workspace:*", "@tanstack/ai-anthropic": "workspace:*", @@ -16,22 +17,21 @@ "@tanstack/ai-openai": "workspace:*", "@tanstack/ai-react": "workspace:*", "@tanstack/ai-react-ui": "workspace:*", - "@tanstack/nitro-v2-vite-plugin": "^1.139.7", + "@tanstack/nitro-v2-vite-plugin": "^1.139.0", "@tanstack/react-router": "^1.139.7", - "@tanstack/react-start": "^1.139.7", - "@tanstack/start": "^1.139.7", - "highlight.js": "^11.11.4", + "@tanstack/react-start": "^1.139.8", + "highlight.js": "^11.11.1", "lucide-react": "^0.555.0", "react": "^19.2.0", "react-dom": "^19.2.0", - "react-markdown": "^10.0.0", - "rehype-highlight": 
"^7.0.1", + "react-markdown": "^10.1.0", + "rehype-highlight": "^7.0.2", "rehype-raw": "^7.0.0", "rehype-sanitize": "^6.0.0", - "remark-gfm": "^4.0.0", + "remark-gfm": "^4.0.1", "tailwindcss": "^4.1.17", "vite-tsconfig-paths": "^5.1.4", - "zod": "^3.25.0" + "zod": "^4.1.13" }, "devDependencies": { "@types/node": "^24.10.1", From bb1b1f0312b75dd65bd779ce7f04d752943f06ea Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Thu, 11 Dec 2025 02:02:09 +0000 Subject: [PATCH 7/7] ci: apply automated fixes --- packages/typescript/smoke-tests/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/typescript/smoke-tests/package.json b/packages/typescript/smoke-tests/package.json index ed24bab2..a1d570e7 100644 --- a/packages/typescript/smoke-tests/package.json +++ b/packages/typescript/smoke-tests/package.json @@ -2,4 +2,4 @@ "name": "smoke-tests", "version": "0.0.0", "private": true -} \ No newline at end of file +}