Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion lib/deduplicator.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,10 @@ export function detectDuplicates(
): DuplicateDetectionResult {
const signatureMap = new Map<string, string[]>()

const protectedToolsLower = protectedTools.map(t => t.toLowerCase())
const deduplicatableIds = unprunedToolCallIds.filter(id => {
const metadata = toolMetadata.get(id)
return !metadata || !protectedTools.includes(metadata.tool)
return !metadata || !protectedToolsLower.includes(metadata.tool.toLowerCase())
})

for (const id of deduplicatableIds) {
Expand Down
15 changes: 11 additions & 4 deletions lib/fetch-wrapper/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,8 @@ export function installFetchWrapper(
const body = JSON.parse(init.body)
const inputUrl = typeof input === 'string' ? input : 'URL object'
let modified = false
// Track tool IDs cached from this request for session-scoped deduplication
const cachedToolIds: string[] = []

// Try each format handler in order
// OpenAI Chat Completions & Anthropic style (body.messages)
Expand All @@ -61,6 +63,9 @@ export function installFetchWrapper(
if (result.modified) {
modified = true
}
if (result.cachedToolIds) {
cachedToolIds.push(...result.cachedToolIds)
}
}

// Google/Gemini style (body.contents)
Expand All @@ -77,15 +82,17 @@ export function installFetchWrapper(
if (result.modified) {
modified = true
}
if (result.cachedToolIds) {
cachedToolIds.push(...result.cachedToolIds)
}
}

// Run deduplication after handlers have populated toolParameters cache
// Run deduplication only on tool IDs from the current request (session-scoped)
const sessionId = state.lastSeenSessionId
if (sessionId && state.toolParameters.size > 0) {
const toolIds = Array.from(state.toolParameters.keys())
if (sessionId && cachedToolIds.length > 1) {
const alreadyPruned = state.prunedIds.get(sessionId) ?? []
const alreadyPrunedLower = new Set(alreadyPruned.map(id => id.toLowerCase()))
const unpruned = toolIds.filter(id => !alreadyPrunedLower.has(id.toLowerCase()))
const unpruned = cachedToolIds.filter(id => !alreadyPrunedLower.has(id.toLowerCase()))
if (unpruned.length > 1) {
const { duplicateIds } = detectDuplicates(state.toolParameters, unpruned, config.protectedTools)
if (duplicateIds.length > 0) {
Expand Down
10 changes: 5 additions & 5 deletions lib/fetch-wrapper/openai-chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ export async function handleOpenAIChatAndAnthropic(
return { modified: false, body }
}

// Cache tool parameters from messages
cacheToolParametersFromMessages(body.messages, ctx.state)
// Cache tool parameters from messages and track which IDs were cached
const cachedToolIds = cacheToolParametersFromMessages(body.messages, ctx.state)

let modified = false

Expand Down Expand Up @@ -64,7 +64,7 @@ export async function handleOpenAIChatAndAnthropic(
const { allSessions, allPrunedIds } = await getAllPrunedIds(ctx.client, ctx.state, ctx.logger)

if (toolMessages.length === 0 || allPrunedIds.size === 0) {
return { modified, body }
return { modified, body, cachedToolIds }
}

let replacedCount = 0
Expand Down Expand Up @@ -125,8 +125,8 @@ export async function handleOpenAIChatAndAnthropic(
)
}

return { modified: true, body }
return { modified: true, body, cachedToolIds }
}

return { modified, body }
return { modified, body, cachedToolIds }
}
12 changes: 6 additions & 6 deletions lib/fetch-wrapper/openai-responses.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ export async function handleOpenAIResponses(
return { modified: false, body }
}

// Cache tool parameters from input
cacheToolParametersFromInput(body.input, ctx.state)
// Cache tool parameters from input and track which IDs were cached
const cachedToolIds = cacheToolParametersFromInput(body.input, ctx.state)

let modified = false

Expand Down Expand Up @@ -52,13 +52,13 @@ export async function handleOpenAIResponses(
const functionOutputs = body.input.filter((item: any) => item.type === 'function_call_output')

if (functionOutputs.length === 0) {
return { modified, body }
return { modified, body, cachedToolIds }
}

const { allSessions, allPrunedIds } = await getAllPrunedIds(ctx.client, ctx.state, ctx.logger)

if (allPrunedIds.size === 0) {
return { modified, body }
return { modified, body, cachedToolIds }
}

let replacedCount = 0
Expand Down Expand Up @@ -99,8 +99,8 @@ export async function handleOpenAIResponses(
)
}

return { modified: true, body }
return { modified: true, body, cachedToolIds }
}

return { modified, body }
return { modified, body, cachedToolIds }
}
2 changes: 2 additions & 0 deletions lib/fetch-wrapper/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,8 @@ export interface FetchHandlerResult {
modified: boolean
/** The potentially modified body object */
body: any
/** Tool call IDs that were cached from this request (for session-scoped deduplication) */
cachedToolIds?: string[]
}

/** Session data returned from getAllPrunedIds */
Expand Down
31 changes: 29 additions & 2 deletions lib/tool-cache.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,32 @@
import type { PluginState } from "./state"

/** Maximum number of tool parameters to cache to prevent unbounded memory growth */
const MAX_TOOL_PARAMETERS_CACHE_SIZE = 500

/**
 * Caps the toolParameters cache at MAX_TOOL_PARAMETERS_CACHE_SIZE entries.
 *
 * When the cache is over the limit, evicts the oldest entries: JavaScript
 * Maps iterate keys in insertion order, so the first keys yielded by
 * `keys()` are the earliest-cached tool calls.
 */
function trimToolParametersCache(state: PluginState): void {
  const overflow = state.toolParameters.size - MAX_TOOL_PARAMETERS_CACHE_SIZE
  if (overflow <= 0) {
    return
  }
  // Snapshot the oldest keys before deleting to avoid mutating while iterating.
  const oldestKeys = Array.from(state.toolParameters.keys()).slice(0, overflow)
  for (const key of oldestKeys) {
    state.toolParameters.delete(key)
  }
}

/**
* Cache tool parameters from OpenAI Chat Completions style messages.
* Extracts tool call IDs and their parameters from assistant messages with tool_calls.
* Returns the list of tool call IDs that were cached from this request.
*/
export function cacheToolParametersFromMessages(
messages: any[],
state: PluginState
): void {
): string[] {
const cachedIds: string[] = []
for (const message of messages) {
if (message.role !== 'assistant' || !Array.isArray(message.tool_calls)) {
continue
Expand All @@ -26,21 +45,26 @@ export function cacheToolParametersFromMessages(
tool: toolCall.function.name,
parameters: params
})
cachedIds.push(toolCall.id)
} catch (error) {
// Silently ignore parse errors
}
}
}
trimToolParametersCache(state)
return cachedIds
}

/**
* Cache tool parameters from OpenAI Responses API format.
* Extracts from input array items with type='function_call'.
* Returns the list of tool call IDs that were cached from this request.
*/
export function cacheToolParametersFromInput(
input: any[],
state: PluginState
): void {
): string[] {
const cachedIds: string[] = []
for (const item of input) {
if (item.type !== 'function_call' || !item.call_id || !item.name) {
continue
Expand All @@ -54,8 +78,11 @@ export function cacheToolParametersFromInput(
tool: item.name,
parameters: params
})
cachedIds.push(item.call_id)
} catch (error) {
// Silently ignore parse errors
}
}
trimToolParametersCache(state)
return cachedIds
}