Skip to content

Commit 93bcb2f

Browse files
committed
fix: case-sensitive protected tools, cross-session dedup, unbounded cache
Fixes from PR #57 applied to the refactored file structure:
- Normalize tool names to lowercase for the protected-tools check
- Scope deduplication to tool IDs from the current request only
- Add trimToolParametersCache() with 500-entry FIFO eviction

Closes #57
1 parent 1b1fc0c commit 93bcb2f

File tree

5 files changed

+39
-6
lines changed

5 files changed

+39
-6
lines changed

lib/config.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ function createDefaultConfig(): void {
123123
"pruning_summary": "detailed",
124124
// How often to nudge the AI to prune (every N tool results, 0 = disabled)
125125
"nudge_freq": 10
126-
// Additional tools to protect from pruning (merged with built-in: task, todowrite, todoread, prune)
126+
// Additional tools to protect from pruning (merged with built-in: task, todowrite, todoread, prune, batch)
127127
// "protectedTools": ["bash"]
128128
}
129129
`

lib/core/strategies/deduplication.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,8 @@ export const deduplicationStrategy: PruningStrategy = {
1717

1818
const deduplicatableIds = unprunedIds.filter(id => {
1919
const metadata = toolMetadata.get(id)
20-
return !metadata || !protectedTools.includes(metadata.tool)
20+
const protectedToolsLower = protectedTools.map(t => t.toLowerCase())
21+
return !metadata || !protectedToolsLower.includes(metadata.tool.toLowerCase())
2122
})
2223

2324
for (const id of deduplicatableIds) {

lib/fetch-wrapper/index.ts

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import { handleGemini } from "./gemini"
88
import { handleOpenAIResponses } from "./openai-responses"
99
import { runStrategies } from "../core/strategies"
1010
import { accumulateGCStats } from "./gc-tracker"
11+
import { trimToolParametersCache } from "../state/tool-cache"
1112

1213
export type { FetchHandlerContext, FetchHandlerResult, SynthPrompts } from "./types"
1314

@@ -55,6 +56,9 @@ export function installFetchWrapper(
5556
const inputUrl = typeof input === 'string' ? input : 'URL object'
5657
let modified = false
5758

59+
// Capture tool IDs before handlers run to track what gets cached this request
60+
const toolIdsBefore = new Set(state.toolParameters.keys())
61+
5862
// Try each format handler in order
5963
// OpenAI Chat Completions & Anthropic style (body.messages)
6064
if (body.messages && Array.isArray(body.messages)) {
@@ -80,9 +84,14 @@ export function installFetchWrapper(
8084
}
8185
}
8286

83-
// Run strategies after handlers have populated toolParameters cache
87+
// Run strategies when new tools are cached
88+
// We use all tool IDs for deduplication detection (to find duplicates across requests)
89+
// but pruning is session-scoped via state.prunedIds
8490
const sessionId = state.lastSeenSessionId
85-
if (sessionId && state.toolParameters.size > 0) {
91+
const toolIdsAfter = Array.from(state.toolParameters.keys())
92+
const newToolsCached = toolIdsAfter.filter(id => !toolIdsBefore.has(id)).length > 0
93+
94+
if (sessionId && newToolsCached && state.toolParameters.size > 0) {
8695
const toolIds = Array.from(state.toolParameters.keys())
8796
const alreadyPruned = state.prunedIds.get(sessionId) ?? []
8897
const alreadyPrunedLower = new Set(alreadyPruned.map(id => id.toLowerCase()))
@@ -102,6 +111,9 @@ export function installFetchWrapper(
102111
accumulateGCStats(state, sessionId, result.prunedIds, body, logger)
103112
}
104113
}
114+
115+
// Trim cache to prevent unbounded memory growth
116+
trimToolParametersCache(state)
105117
}
106118

107119
if (modified) {

lib/state/tool-cache.ts

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,3 +59,23 @@ export function cacheToolParametersFromInput(
5959
}
6060
}
6161
}
62+
63+
/** Maximum number of entries to keep in the tool parameters cache */
64+
const MAX_TOOL_CACHE_SIZE = 500
65+
66+
/**
67+
* Trim the tool parameters cache to prevent unbounded memory growth.
68+
* Uses FIFO eviction - removes oldest entries first.
69+
*/
70+
export function trimToolParametersCache(state: PluginState): void {
71+
if (state.toolParameters.size <= MAX_TOOL_CACHE_SIZE) {
72+
return
73+
}
74+
75+
const keysToRemove = Array.from(state.toolParameters.keys())
76+
.slice(0, state.toolParameters.size - MAX_TOOL_CACHE_SIZE)
77+
78+
for (const key of keysToRemove) {
79+
state.toolParameters.delete(key)
80+
}
81+
}

lib/tokenizer.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ export async function estimateTokensBatch(texts: string[]): Promise<number[]> {
99

1010
export function formatTokenCount(tokens: number): string {
1111
if (tokens >= 1000) {
12-
return `${(tokens / 1000).toFixed(1)}K`.replace('.0K', 'K')
12+
return `${(tokens / 1000).toFixed(1)}K`.replace('.0K', 'K') + ' tokens'
1313
}
14-
return tokens.toString()
14+
return tokens.toString() + ' tokens'
1515
}

0 commit comments

Comments
 (0)