@@ -4,35 +4,16 @@ import { getConfig } from "./lib/config"
 import { Logger } from "./lib/logger"
 import { StateManager } from "./lib/state"
 import { Janitor } from "./lib/janitor"
-import { join } from "path"
-import { homedir } from "os"
 
 /**
  * Checks if a session is a subagent (child session)
  * Subagent sessions should skip pruning operations
  */
-async function isSubagentSession(
-  client: any,
-  sessionID: string,
-  logger: Logger
-): Promise<boolean> {
+async function isSubagentSession(client: any, sessionID: string): Promise<boolean> {
   try {
     const result = await client.session.get({ path: { id: sessionID } })
-
-    if (result.data?.parentID) {
-      logger.debug("subagent-check", "Detected subagent session, skipping pruning", {
-        sessionID,
-        parentID: result.data.parentID
-      })
-      return true
-    }
-
-    return false
+    return !!result.data?.parentID
   } catch (error: any) {
-    logger.error("subagent-check", "Failed to check if session is subagent", {
-      sessionID,
-      error: error.message
-    })
     // On error, assume it's not a subagent and continue (fail open)
     return false
   }
@@ -58,7 +39,7 @@ const plugin: Plugin = (async (ctx) => {
   const modelCache = new Map<string, { providerID: string; modelID: string }>() // sessionID -> model info
   const janitor = new Janitor(ctx.client, stateManager, logger, toolParametersCache, config.protectedTools, modelCache, config.model, config.showModelErrorToasts, config.pruningMode, config.pruning_summary, ctx.directory)
 
-  const cacheToolParameters = (messages: any[], component: string) => {
+  const cacheToolParameters = (messages: any[]) => {
     for (const message of messages) {
       if (message.role !== 'assistant' || !Array.isArray(message.tool_calls)) {
         continue
@@ -77,11 +58,6 @@ const plugin: Plugin = (async (ctx) => {
             tool: toolCall.function.name,
             parameters: params
           })
-          logger.debug(component, "Cached tool parameters", {
-            callID: toolCall.id,
-            tool: toolCall.function.name,
-            hasParams: !!params
-          })
         } catch (error) {
           // Ignore JSON parse errors for individual tool calls
         }
@@ -97,138 +73,63 @@ const plugin: Plugin = (async (ctx) => {
       try {
         const body = JSON.parse(init.body)
         if (body.messages && Array.isArray(body.messages)) {
-          logger.info("global-fetch", "🔥 AI REQUEST INTERCEPTED via global fetch!", {
-            url: typeof input === 'string' ? input.substring(0, 80) : 'URL object',
-            messageCount: body.messages.length
-          })
-
           // Cache tool parameters for janitor metadata
-          cacheToolParameters(body.messages, "global-fetch")
-
-          // Always save wrapped context if debug is enabled (even when no tool messages)
-          // This captures janitor's AI inference which has messageCount=1 (just prompt)
-          const shouldLogAllRequests = logger.enabled
-
+          cacheToolParameters(body.messages)
+
           // Check for tool messages that might need pruning
           const toolMessages = body.messages.filter((m: any) => m.role === 'tool')
-
+
           // Collect all pruned IDs across all sessions (excluding subagents)
           // This is safe because tool_call_ids are globally unique
           const allSessions = await ctx.client.session.list()
           const allPrunedIds = new Set<string>()
 
           if (allSessions.data) {
             for (const session of allSessions.data) {
-              // Skip subagent sessions (don't log - it's normal and would spam logs)
-              if (session.parentID) {
-                continue
-              }
-
+              if (session.parentID) continue // Skip subagent sessions
               const prunedIds = await stateManager.get(session.id)
               prunedIds.forEach(id => allPrunedIds.add(id))
             }
           }
 
-          // Only process tool message replacement if there are tool messages
-          if (toolMessages.length > 0) {
-            logger.debug("global-fetch", "Found tool messages in request", {
-              toolMessageCount: toolMessages.length,
-              toolCallIds: toolMessages.map((m: any) => m.tool_call_id).slice(0, 5)
-            })
-
-            if (allPrunedIds.size > 0) {
-              let replacedCount = 0
-              const originalMessages = JSON.parse(JSON.stringify(body.messages)) // Deep copy for logging
-
-              body.messages = body.messages.map((m: any) => {
-                // Normalize ID to lowercase for case-insensitive matching
-                if (m.role === 'tool' && allPrunedIds.has(m.tool_call_id?.toLowerCase())) {
-                  replacedCount++
-                  return {
-                    ...m,
-                    content: '[Output removed to save context - information superseded or no longer needed]'
-                  }
+          // Only process tool message replacement if there are tool messages and pruned IDs
+          if (toolMessages.length > 0 && allPrunedIds.size > 0) {
+            let replacedCount = 0
+
+            body.messages = body.messages.map((m: any) => {
+              // Normalize ID to lowercase for case-insensitive matching
+              if (m.role === 'tool' && allPrunedIds.has(m.tool_call_id?.toLowerCase())) {
+                replacedCount++
+                return {
+                  ...m,
+                  content: '[Output removed to save context - information superseded or no longer needed]'
                 }
-                return m
-              })
-
-              if (replacedCount > 0) {
-                logger.info("global-fetch", "✂️ Replaced pruned tool messages", {
-                  totalPrunedIds: allPrunedIds.size,
-                  replacedCount: replacedCount,
-                  totalMessages: body.messages.length
-                })
+              }
+              return m
+            })
 
-                // Save wrapped context to file if debug is enabled
-                await logger.saveWrappedContext(
-                  "global", // Use "global" as session ID since we don't know which session this is
-                  body.messages,
-                  {
-                    url: typeof input === 'string' ? input : 'URL object',
-                    totalPrunedIds: allPrunedIds.size,
-                    replacedCount,
-                    totalMessages: body.messages.length,
-                    originalMessageCount: originalMessages.length
-                  }
-                )
+            if (replacedCount > 0) {
+              logger.info("fetch", "Replaced pruned tool outputs", {
+                replaced: replacedCount,
+                total: toolMessages.length
+              })
 
-                // Update the request body with modified messages
-                init.body = JSON.stringify(body)
-              } else if (shouldLogAllRequests) {
-                // Log even when no replacements occurred (tool messages exist but none were pruned)
+              // Save wrapped context to file if debug is enabled
+              if (logger.enabled) {
                 await logger.saveWrappedContext(
                   "global",
                   body.messages,
                   {
                     url: typeof input === 'string' ? input : 'URL object',
-                    totalPrunedIds: allPrunedIds.size,
-                    replacedCount: 0,
-                    totalMessages: body.messages.length,
-                    toolMessageCount: toolMessages.length,
-                    note: "Tool messages exist but none were replaced"
+                    replacedCount,
+                    totalMessages: body.messages.length
                   }
                 )
               }
-            } else if (shouldLogAllRequests) {
-              // Log when tool messages exist but no pruned IDs exist yet
-              await logger.saveWrappedContext(
-                "global",
-                body.messages,
-                {
-                  url: typeof input === 'string' ? input : 'URL object',
-                  totalPrunedIds: 0,
-                  replacedCount: 0,
-                  totalMessages: body.messages.length,
-                  toolMessageCount: toolMessages.length,
-                  note: "No pruned IDs exist yet"
-                }
-              )
+
+              // Update the request body with modified messages
+              init.body = JSON.stringify(body)
             }
-          } else if (shouldLogAllRequests) {
-            // Log requests with NO tool messages (e.g., janitor's shadow inference)
-            // Detect if this is a janitor request by checking any message for the janitor prompt
-            // Note: AI SDK may add system messages for JSON schema, so we check all messages
-            const isJanitorRequest = body.messages.some((m: any) =>
-              typeof m.content === 'string' &&
-              m.content.includes('conversation analyzer that identifies obsolete tool outputs')
-            )
-
-            const sessionId = isJanitorRequest ? "janitor-shadow" : "global"
-
-            await logger.saveWrappedContext(
-              sessionId,
-              body.messages,
-              {
-                url: typeof input === 'string' ? input : 'URL object',
-                totalPrunedIds: allPrunedIds.size,
-                replacedCount: 0,
-                totalMessages: body.messages.length,
-                toolMessageCount: 0,
-                note: isJanitorRequest
-                  ? "Janitor shadow inference with embedded session history in prompt"
-                  : "No tool messages in request (likely title generation or other inference)"
-              }
-            )
           }
         }
       } catch (e) {
@@ -239,17 +140,9 @@ const plugin: Plugin = (async (ctx) => {
     return originalGlobalFetch(input, init)
   }
 
-  logger.info("plugin", "Dynamic Context Pruning plugin initialized", {
-    enabled: config.enabled,
-    debug: config.debug,
-    protectedTools: config.protectedTools,
-    model: config.model,
-    pruningMode: config.pruningMode,
-    pruning_summary: config.pruning_summary,
-    globalConfigFile: join(homedir(), ".config", "opencode", "dcp.jsonc"),
-    projectConfigFile: ctx.directory ? join(ctx.directory, ".opencode", "dcp.jsonc") : "N/A",
-    logDirectory: join(homedir(), ".config", "opencode", "logs", "dcp"),
-    globalFetchWrapped: true
+  logger.info("plugin", "DCP initialized", {
+    mode: config.pruningMode,
+    model: config.model || "auto"
   })
 
   return {
@@ -259,82 +152,37 @@ const plugin: Plugin = (async (ctx) => {
     event: async ({ event }) => {
       if (event.type === "session.status" && event.properties.status.type === "idle") {
         // Skip pruning for subagent sessions
-        if (await isSubagentSession(ctx.client, event.properties.sessionID, logger)) return
-
-        logger.debug("event", "Session became idle, triggering janitor", {
-          sessionID: event.properties.sessionID
-        })
+        if (await isSubagentSession(ctx.client, event.properties.sessionID)) return
 
         // Fire and forget the janitor - don't block the event handler
         janitor.run(event.properties.sessionID).catch(err => {
-          logger.error("event", "Janitor failed", {
-            sessionID: event.properties.sessionID,
-            error: err.message,
-            stack: err.stack
-          })
+          logger.error("janitor", "Failed", { error: err.message })
         })
       }
     },
 
     /**
-     * Chat Params Hook: Wraps fetch function to filter pruned tool responses
+     * Chat Params Hook: Caches model info for janitor
      */
     "chat.params": async (input, output) => {
       const sessionId = input.sessionID
 
-      // Debug: Log the entire input structure to see what we're getting
-      logger.debug("chat.params", "Hook input structure", {
-        sessionID: sessionId,
-        hasProvider: !!input.provider,
-        hasModel: !!input.model,
-        providerKeys: input.provider ? Object.keys(input.provider) : [],
-        provider: input.provider,
-        modelKeys: input.model ? Object.keys(input.model) : [],
-        model: input.model
-      })
-
       // Cache model information for this session so janitor can access it
       // The provider.id is actually nested at provider.info.id (not in SDK types)
       let providerID = (input.provider as any)?.info?.id || input.provider?.id
       const modelID = input.model?.id
-
+
       // If provider.id is not available, try to get it from the message
       if (!providerID && input.message?.model?.providerID) {
         providerID = input.message.model.providerID
-        logger.debug("chat.params", "Got providerID from message instead of provider object", {
-          sessionID: sessionId,
-          providerID: providerID
-        })
       }
-
+
       if (providerID && modelID) {
         modelCache.set(sessionId, {
          providerID: providerID,
          modelID: modelID
        })
-        logger.debug("chat.params", "Cached model info for session", {
-          sessionID: sessionId,
-          providerID: providerID,
-          modelID: modelID
-        })
-      } else {
-        logger.warn("chat.params", "Missing provider or model info in hook input", {
-          sessionID: sessionId,
-          hasProvider: !!input.provider,
-          hasModel: !!input.model,
-          providerID: providerID,
-          modelID: modelID,
-          inputKeys: Object.keys(input),
-          messageModel: input.message?.model
-        })
       }
-
-      // Skip pruning for subagent sessions
-      if (await isSubagentSession(ctx.client, sessionId, logger)) return
-
-      // Note: Pruning is handled by the global fetch wrapper (lines 95-239)
-      // which intercepts all AI requests and replaces pruned tool outputs.
-      // The global wrapper uses case-insensitive matching and queries all sessions.
     },
   }
}) satisfies Plugin