|
1 | | -import type { PluginState } from "./index" |
| 1 | +import type { PluginState, ToolStatus } from "./index" |
2 | 2 | import type { Logger } from "../logger" |
3 | 3 |
|
/**
 * Maximum number of entries to keep in the tool parameters cache.
 * Enforced by trimToolParametersCache() (FIFO eviction, oldest first)
 * to prevent unbounded memory growth in long-running sessions.
 */
const MAX_TOOL_CACHE_SIZE = 500
4 | 7 | /** |
5 | | - * Cache tool parameters from OpenAI Chat Completions and Anthropic style messages. |
6 | | - * Extracts tool call IDs and their parameters from assistant messages. |
7 | | - * |
8 | | - * Supports: |
9 | | - * - OpenAI format: message.tool_calls[] with id, function.name, function.arguments |
10 | | - * - Anthropic format: message.content[] with type='tool_use', id, name, input |
| 8 | + * Sync tool parameters from OpenCode's session.messages() API. |
| 9 | + * This is the single source of truth for tool parameters, replacing |
| 10 | + * format-specific parsing from LLM API requests. |
11 | 11 | */ |
12 | | -export function cacheToolParametersFromMessages( |
13 | | - messages: any[], |
| 12 | +export async function syncToolParametersFromOpenCode( |
| 13 | + client: any, |
| 14 | + sessionId: string, |
14 | 15 | state: PluginState, |
15 | 16 | logger?: Logger |
16 | | -): void { |
17 | | - let openaiCached = 0 |
18 | | - let anthropicCached = 0 |
| 17 | +): Promise<void> { |
| 18 | + try { |
| 19 | + const messagesResponse = await client.session.messages({ |
| 20 | + path: { id: sessionId }, |
| 21 | + query: { limit: 100 } |
| 22 | + }) |
| 23 | + const messages = messagesResponse.data || messagesResponse |
19 | 24 |
|
20 | | - for (const message of messages) { |
21 | | - if (message.role !== 'assistant') { |
22 | | - continue |
| 25 | + if (!Array.isArray(messages)) { |
| 26 | + return |
23 | 27 | } |
24 | 28 |
|
25 | | - if (Array.isArray(message.tool_calls)) { |
26 | | - for (const toolCall of message.tool_calls) { |
27 | | - if (!toolCall.id || !toolCall.function) { |
28 | | - continue |
29 | | - } |
| 29 | + let synced = 0 |
30 | 30 |
|
31 | | - try { |
32 | | - const params = typeof toolCall.function.arguments === 'string' |
33 | | - ? JSON.parse(toolCall.function.arguments) |
34 | | - : toolCall.function.arguments |
35 | | - state.toolParameters.set(toolCall.id.toLowerCase(), { |
36 | | - tool: toolCall.function.name, |
37 | | - parameters: params |
38 | | - }) |
39 | | - openaiCached++ |
40 | | - } catch (error) { |
41 | | - } |
42 | | - } |
43 | | - } |
| 31 | + for (const msg of messages) { |
| 32 | + if (!msg.parts) continue |
44 | 33 |
|
45 | | - if (Array.isArray(message.content)) { |
46 | | - for (const part of message.content) { |
47 | | - if (part.type !== 'tool_use' || !part.id || !part.name) { |
48 | | - continue |
49 | | - } |
| 34 | + for (const part of msg.parts) { |
| 35 | + if (part.type !== "tool" || !part.callID) continue |
50 | 36 |
|
51 | | - state.toolParameters.set(part.id.toLowerCase(), { |
52 | | - tool: part.name, |
53 | | - parameters: part.input ?? {} |
| 37 | + const id = part.callID.toLowerCase() |
| 38 | + |
| 39 | + // Skip if already cached (optimization) |
| 40 | + if (state.toolParameters.has(id)) continue |
| 41 | + |
| 42 | + const status = part.state?.status as ToolStatus | undefined |
| 43 | + state.toolParameters.set(id, { |
| 44 | + tool: part.tool, |
| 45 | + parameters: part.state?.input ?? {}, |
| 46 | + status, |
| 47 | + error: status === "error" ? part.state?.error : undefined, |
54 | 48 | }) |
55 | | - anthropicCached++ |
| 49 | + synced++ |
56 | 50 | } |
57 | 51 | } |
58 | | - } |
59 | | - |
60 | | - if (logger && (openaiCached > 0 || anthropicCached > 0)) { |
61 | | - logger.debug("tool-cache", "Cached tool parameters from messages", { |
62 | | - openaiFormat: openaiCached, |
63 | | - anthropicFormat: anthropicCached, |
64 | | - totalCached: state.toolParameters.size |
65 | | - }) |
66 | | - } |
67 | | -} |
68 | | - |
69 | | -/** |
70 | | - * Cache tool parameters from OpenAI Responses API format. |
71 | | - * Extracts from input array items with type='function_call'. |
72 | | - */ |
73 | | -export function cacheToolParametersFromInput( |
74 | | - input: any[], |
75 | | - state: PluginState, |
76 | | - logger?: Logger |
77 | | -): void { |
78 | | - let cached = 0 |
79 | 52 |
|
80 | | - for (const item of input) { |
81 | | - if (item.type !== 'function_call' || !item.call_id || !item.name) { |
82 | | - continue |
83 | | - } |
| 53 | + trimToolParametersCache(state) |
84 | 54 |
|
85 | | - try { |
86 | | - const params = typeof item.arguments === 'string' |
87 | | - ? JSON.parse(item.arguments) |
88 | | - : item.arguments |
89 | | - state.toolParameters.set(item.call_id.toLowerCase(), { |
90 | | - tool: item.name, |
91 | | - parameters: params |
| 55 | + if (logger && synced > 0) { |
| 56 | + logger.debug("tool-cache", "Synced tool parameters from OpenCode", { |
| 57 | + sessionId: sessionId.slice(0, 8), |
| 58 | + synced, |
| 59 | + totalCached: state.toolParameters.size |
92 | 60 | }) |
93 | | - cached++ |
94 | | - } catch (error) { |
95 | 61 | } |
96 | | - } |
97 | | - |
98 | | - if (logger && cached > 0) { |
99 | | - logger.debug("tool-cache", "Cached tool parameters from input", { |
100 | | - responsesApiFormat: cached, |
101 | | - totalCached: state.toolParameters.size |
| 62 | + } catch (error) { |
| 63 | + logger?.warn("tool-cache", "Failed to sync tool parameters from OpenCode", { |
| 64 | + sessionId: sessionId.slice(0, 8), |
| 65 | + error: error instanceof Error ? error.message : String(error) |
102 | 66 | }) |
103 | 67 | } |
104 | 68 | } |
105 | 69 |
|
106 | | -/** Maximum number of entries to keep in the tool parameters cache */ |
107 | | -const MAX_TOOL_CACHE_SIZE = 500 |
108 | | - |
109 | 70 | /** |
110 | 71 | * Trim the tool parameters cache to prevent unbounded memory growth. |
111 | 72 | * Uses FIFO eviction - removes oldest entries first. |
|
0 commit comments