Commit 4e2530c

feat: add reasoning block extraction and improve onTool strategy
- Extract and log reasoning blocks from session messages for debugging
- Include encrypted content size from OpenAI, Anthropic, and Google providers
- Enable ai-analysis strategy by default on context_pruning tool calls
- Handle reasoning type parts in message minimization
- Fix README.md config key typo (plugins -> plugin)
- Add comments explaining provider workarounds
1 parent a70d7c5 commit 4e2530c

File tree: 6 files changed (+97 −7 lines)

README.md

Lines changed: 1 addition & 1 deletion
````diff
@@ -13,7 +13,7 @@ Add to your OpenCode config:
 ```jsonc
 // opencode.jsonc
 {
-  "plugins": ["@tarquinen/[email protected]"]
+  "plugin": ["@tarquinen/[email protected]"]
 }
 ```

````

index.ts

Lines changed: 20 additions & 1 deletion
```diff
@@ -100,14 +100,33 @@ const plugin: Plugin = (async (ctx) => {
       })

       if (logger.enabled) {
+        // Fetch session messages to extract reasoning blocks
+        let sessionMessages: any[] | undefined
+        try {
+          const activeSessions = allSessions.data?.filter(s => !s.parentID) || []
+          if (activeSessions.length > 0) {
+            const mostRecentSession = activeSessions[0]
+            const messagesResponse = await ctx.client.session.messages({
+              path: { id: mostRecentSession.id },
+              query: { limit: 100 }
+            })
+            sessionMessages = Array.isArray(messagesResponse.data)
+              ? messagesResponse.data
+              : Array.isArray(messagesResponse) ? messagesResponse : undefined
+          }
+        } catch (e) {
+          // Silently continue without session messages
+        }
+
         await logger.saveWrappedContext(
           "global",
           body.messages,
           {
             url: typeof input === 'string' ? input : 'URL object',
             replacedCount,
             totalMessages: body.messages.length
-          }
+          },
+          sessionMessages
         )
       }

```

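The fetch above tolerates two response shapes from `ctx.client.session.messages`: an envelope with a `data` array, or a bare array. A minimal standalone sketch of that normalization — the helper name `normalizeMessages` is illustrative, not part of the plugin:

```ts
// Sketch of the normalization used in the hunk above: the SDK response may be
// an envelope with a `data` array or a bare array; anything else yields undefined.
type MessagesResponse = { data?: unknown } | unknown[]

function normalizeMessages(response: MessagesResponse): any[] | undefined {
  // Envelope shape: { data: [...] } -> unwrap the array
  if (!Array.isArray(response) && Array.isArray(response.data)) {
    return response.data
  }
  // Bare array shape -> use it directly
  if (Array.isArray(response)) {
    return response
  }
  // Anything else -> continue without session messages
  return undefined
}

console.log(normalizeMessages({ data: [{ id: "msg_1" }] })) // [ { id: 'msg_1' } ]
console.log(normalizeMessages([{ id: "msg_1" }]))           // [ { id: 'msg_1' } ]
console.log(normalizeMessages({}))                          // undefined
```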
lib/config.ts

Lines changed: 2 additions & 2 deletions
```diff
@@ -35,7 +35,7 @@ const defaultConfig: PluginConfig = {
   pruning_summary: 'detailed',
   strategies: {
     onIdle: ['deduplication', 'ai-analysis'],
-    onTool: ['deduplication']
+    onTool: ['deduplication', 'ai-analysis']
   }
 }

@@ -114,7 +114,7 @@ function createDefaultConfig(): void {
     // Strategies to run when session goes idle
     "onIdle": ["deduplication", "ai-analysis"],
     // Strategies to run when AI calls context_pruning tool
-    "onTool": ["deduplication"]
+    "onTool": ["deduplication", "ai-analysis"]
   },
   // Summary display: "off", "minimal", or "detailed"
   "pruning_summary": "detailed",
```

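Since `ai-analysis` now also runs on the `context_pruning` tool trigger by default, a user who prefers the old behaviour can scope it back to idle only. A sketch of such an override using the same keys as the generated config above — whether a user config accepts exactly this shape is an assumption here:

```ts
// Sketch of a per-user override: keep the heavier ai-analysis pass for idle only,
// run just deduplication when the context_pruning tool fires.
const overrideConfig = {
  strategies: {
    onIdle: ['deduplication', 'ai-analysis'],
    onTool: ['deduplication']
  },
  pruning_summary: 'detailed'
}

console.log(JSON.stringify(overrideConfig, null, 2))
```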
lib/logger.ts

Lines changed: 50 additions & 2 deletions
```diff
@@ -147,7 +147,42 @@ export class Logger {
     return result
   }

-  async saveWrappedContext(sessionID: string, messages: any[], metadata: any) {
+  private extractReasoningBlocks(sessionMessages: any[]): any[] {
+    const reasoningBlocks: any[] = []
+
+    for (const msg of sessionMessages) {
+      if (!msg.parts) continue
+
+      for (const part of msg.parts) {
+        if (part.type === "reasoning") {
+          // Calculate encrypted content size for different providers
+          let encryptedContentLength = 0
+          if (part.metadata?.openai?.reasoningEncryptedContent) {
+            encryptedContentLength = part.metadata.openai.reasoningEncryptedContent.length
+          } else if (part.metadata?.anthropic?.signature) {
+            encryptedContentLength = part.metadata.anthropic.signature.length
+          } else if (part.metadata?.google?.thoughtSignature) {
+            encryptedContentLength = part.metadata.google.thoughtSignature.length
+          }
+
+          reasoningBlocks.push({
+            messageId: msg.id,
+            messageRole: msg.role,
+            text: part.text,
+            textLength: part.text?.length || 0,
+            encryptedContentLength,
+            time: part.time,
+            hasMetadata: !!part.metadata,
+            metadataKeys: part.metadata ? Object.keys(part.metadata) : []
+          })
+        }
+      }
+    }
+
+    return reasoningBlocks
+  }
+
+  async saveWrappedContext(sessionID: string, messages: any[], metadata: any, sessionMessages?: any[]) {
     if (!this.enabled) return

     try {
@@ -197,11 +232,24 @@ export class Logger {
         }
       }
     } else {
+      // Extract reasoning blocks from session messages if available
+      const reasoningBlocks = sessionMessages
+        ? this.extractReasoningBlocks(sessionMessages)
+        : []
+
       content = {
         timestamp: new Date().toISOString(),
         sessionID,
         metadata,
-        messages
+        messages,
+        ...(reasoningBlocks.length > 0 && {
+          reasoning: {
+            count: reasoningBlocks.length,
+            totalTextCharacters: reasoningBlocks.reduce((sum, b) => sum + b.textLength, 0),
+            totalEncryptedCharacters: reasoningBlocks.reduce((sum, b) => sum + b.encryptedContentLength, 0),
+            blocks: reasoningBlocks
+          }
+        })
       }
     }

```

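When reasoning blocks are found, the logged entry gains a `reasoning` summary alongside the messages. A sketch of how the totals are aggregated, with made-up block values:

```ts
// Illustrative values only — two fake reasoning blocks, aggregated the same way
// saveWrappedContext builds its `reasoning` summary in the hunk above.
const blocks = [
  { messageId: 'msg_1', messageRole: 'assistant', textLength: 420, encryptedContentLength: 1800 },
  { messageId: 'msg_2', messageRole: 'assistant', textLength: 0, encryptedContentLength: 2400 }
]

const reasoning = {
  count: blocks.length,                                                                   // 2
  totalTextCharacters: blocks.reduce((sum, b) => sum + b.textLength, 0),                  // 420
  totalEncryptedCharacters: blocks.reduce((sum, b) => sum + b.encryptedContentLength, 0), // 4200
  blocks
}

console.log(JSON.stringify(reasoning, null, 2))
```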
lib/model-selector.ts

Lines changed: 2 additions & 1 deletion
```diff
@@ -8,7 +8,7 @@ export interface ModelInfo {

 export const FALLBACK_MODELS: Record<string, string> = {
   openai: 'gpt-5-mini',
-  anthropic: 'claude-haiku-4-5',
+  anthropic: 'claude-haiku-4-5', // This model isn't broken in opencode-auth-provider
   google: 'gemini-2.5-flash',
   deepseek: 'deepseek-chat',
   xai: 'grok-4-fast',
@@ -28,6 +28,7 @@ const PROVIDER_PRIORITY = [
   'opencode'
 ];

+// TODO: some Anthropic-provided models aren't supported by the opencode-auth-provider package, so this provides a temporary workaround
 const SKIP_PROVIDERS = ['github-copilot', 'anthropic'];

 export interface ModelSelectionResult {
```

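The diff only hints at how the skip list and fallback map interact. A rough, assumed illustration — the priority order and selection logic below are invented for the example, not the plugin's actual implementation in lib/model-selector.ts:

```ts
// Assumed illustration: pick the first prioritized provider that isn't skipped,
// then use its fallback model. Skipping 'anthropic' routes selection elsewhere.
const FALLBACK_MODELS: Record<string, string> = {
  openai: 'gpt-5-mini',
  anthropic: 'claude-haiku-4-5',
  google: 'gemini-2.5-flash'
}
const PROVIDER_PRIORITY = ['anthropic', 'openai', 'google'] // order assumed for illustration
const SKIP_PROVIDERS = ['github-copilot', 'anthropic']

const provider = PROVIDER_PRIORITY.find(p => !SKIP_PROVIDERS.includes(p))
console.log(provider, provider && FALLBACK_MODELS[provider]) // openai gpt-5-mini
```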
lib/prompt.ts

Lines changed: 22 additions & 0 deletions
```diff
@@ -26,6 +26,28 @@ function minimizeMessages(messages: any[], alreadyPrunedIds?: string[], protecte
       }
     }

+    // TODO: This should use the opencode normalized system instead of per-provider settings
+    if (part.type === 'reasoning') {
+      // Calculate encrypted content size if present
+      let encryptedContentLength = 0
+      if (part.metadata?.openai?.reasoningEncryptedContent) {
+        encryptedContentLength = part.metadata.openai.reasoningEncryptedContent.length
+      } else if (part.metadata?.anthropic?.signature) {
+        encryptedContentLength = part.metadata.anthropic.signature.length
+      } else if (part.metadata?.google?.thoughtSignature) {
+        encryptedContentLength = part.metadata.google.thoughtSignature.length
+      }
+
+      return {
+        type: 'reasoning',
+        text: part.text,
+        textLength: part.text?.length || 0,
+        encryptedContentLength,
+        ...(part.time && { time: part.time }),
+        ...(part.metadata && { metadataKeys: Object.keys(part.metadata) })
+      }
+    }
+
     if (part.type === 'tool') {
       const callIDLower = part.callID?.toLowerCase()
       const isAlreadyPruned = prunedIdsSet.has(callIDLower)
```

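Applied to an OpenAI-style reasoning part, the new branch keeps the visible text and the lengths but drops the encrypted payload itself. A sketch with illustrative values, not taken from a real session:

```ts
// Illustrative OpenAI-style reasoning part (all values made up).
const part = {
  type: 'reasoning',
  text: 'Considered two pruning strategies before answering.',
  time: { start: 1700000000000, end: 1700000000500 },
  metadata: { openai: { reasoningEncryptedContent: 'x'.repeat(2048) } }
}

// Roughly what the new branch in minimizeMessages returns for it: visible text
// and size information survive, the encrypted content is replaced by its length.
const minimized = {
  type: 'reasoning',
  text: part.text,
  textLength: part.text?.length || 0,
  encryptedContentLength: part.metadata?.openai?.reasoningEncryptedContent?.length ?? 0, // 2048
  ...(part.time && { time: part.time }),
  ...(part.metadata && { metadataKeys: Object.keys(part.metadata) }) // ['openai']
}

console.log(minimized)
```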