Skip to content

Commit fb78038

Browse files
committed
init
1 parent 7034350 commit fb78038

File tree

5 files changed

+88
-4
lines changed

5 files changed

+88
-4
lines changed

.claude/settings.local.json

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
{
2+
"permissions": {
3+
"allow": [
4+
"Bash(cat:*)",
5+
"Bash(for f in ~/.local/share/opencode/storage/part/*/*)",
6+
"Bash(do grep -l \"\"type\"\":\"\"reasoning\"\" $f)",
7+
"Bash(done)",
8+
"WebSearch",
9+
"WebFetch(domain:ai-sdk.dev)"
10+
],
11+
"deny": [],
12+
"ask": []
13+
}
14+
}

CLAUDE.md

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
# CLAUDE.md
2+
3+
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
4+
5+
## Build Commands
6+
7+
```bash
8+
npm run build # Clean and compile TypeScript
9+
npm run typecheck # Type check without emitting
10+
npm run dev # Run in OpenCode plugin dev mode
11+
npm run test # Run tests (node --import tsx --test tests/*.test.ts)
12+
```
13+
14+
## Architecture
15+
16+
This is an OpenCode plugin that optimizes token usage by pruning obsolete tool outputs from conversation context. The plugin is non-destructive—pruning state is kept in memory only, with original session data remaining intact.
17+
18+
### Core Components
19+
20+
**index.ts** - Plugin entry point. Registers:
21+
- Global fetch wrapper that intercepts LLM requests and replaces pruned tool outputs with placeholder text
22+
- Event handler for `session.status` idle events triggering automatic pruning
23+
- `chat.params` hook to cache session model info
24+
- `context_pruning` tool for AI-initiated pruning
25+
26+
**lib/janitor.ts** - Orchestrates the two-phase pruning process:
27+
1. Deduplication phase: Fast, zero-cost detection of repeated tool calls (keeps most recent)
28+
2. AI analysis phase: Uses LLM to semantically identify obsolete outputs
29+
30+
**lib/deduplicator.ts** - Implements duplicate detection by creating normalized signatures from tool name + parameters
31+
32+
**lib/model-selector.ts** - Model selection cascade: config model → session model → fallback models (with provider priority order)
33+
34+
**lib/config.ts** - Config loading with precedence: defaults → global (~/.config/opencode/dcp.jsonc) → project (.opencode/dcp.jsonc)
35+
36+
**lib/prompt.ts** - Builds the analysis prompt with a minimized message history for LLM evaluation
37+
38+
### Key Concepts
39+
40+
- **Tool call IDs**: Normalized to lowercase for consistent matching
41+
- **Protected tools**: Never pruned (default: task, todowrite, todoread, context_pruning)
42+
- **Batch tool expansion**: When a batch tool is pruned, its child tool calls are also pruned
43+
- **Strategies**: `deduplication` (fast) and `ai-analysis` (thorough), configurable per trigger (`onIdle`, `onTool`)
44+
45+
### State Management
46+
47+
Plugin maintains in-memory state per session:
48+
- `prunedIdsState`: Map of session ID → array of pruned tool call IDs
49+
- `statsState`: Map of session ID → cumulative pruning statistics
50+
- `toolParametersCache`: Cached tool parameters extracted from LLM request bodies
51+
- `modelCache`: Cached provider/model info from chat.params hook

index.ts

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,10 +63,21 @@ const plugin: Plugin = (async (ctx) => {
6363
if (init?.body && typeof init.body === 'string') {
6464
try {
6565
const body = JSON.parse(init.body)
66+
6667
if (body.messages && Array.isArray(body.messages)) {
6768
cacheToolParameters(body.messages)
6869

69-
const toolMessages = body.messages.filter((m: any) => m.role === 'tool')
70+
const toolMessages = body.messages.filter((m: any) => {
71+
if (m.role === 'tool') return true;
72+
if (m.role === 'assistant') {
73+
if (Array.isArray(m.content)) {
74+
for (const part of m.content) {
75+
if (part.type === 'tool_use') return true;
76+
}
77+
}
78+
}
79+
return false;
80+
});
7081

7182
const allSessions = await ctx.client.session.list()
7283
const allPrunedIds = new Set<string>()
@@ -93,6 +104,7 @@ const plugin: Plugin = (async (ctx) => {
93104
return m
94105
})
95106

107+
console.log(replacedCount);
96108
if (replacedCount > 0) {
97109
logger.info("fetch", "Replaced pruned tool outputs", {
98110
replaced: replacedCount,

lib/config.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ import { parse } from 'jsonc-parser'
55
import { Logger } from './logger'
66
import type { PluginInput } from '@opencode-ai/plugin'
77

8-
export type PruningStrategy = "deduplication" | "ai-analysis"
8+
export type PruningStrategy = "deduplication" | "ai-analysis" | "strip-reasoning"
99

1010
export interface PluginConfig {
1111
enabled: boolean
@@ -34,8 +34,8 @@ const defaultConfig: PluginConfig = {
3434
strictModelSelection: false,
3535
pruning_summary: 'detailed',
3636
strategies: {
37-
onIdle: ['deduplication', 'ai-analysis'],
38-
onTool: ['deduplication', 'ai-analysis']
37+
onIdle: ['deduplication', 'ai-analysis', "strip-reasoning"],
38+
onTool: ['deduplication', 'ai-analysis', "strip-reasoning"]
3939
}
4040
}
4141

lib/janitor.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ export interface SessionStats {
1515
export interface PruningResult {
1616
prunedCount: number
1717
tokensSaved: number
18+
thinkingIds: string[]
1819
deduplicatedIds: string[]
1920
llmPrunedIds: string[]
2021
deduplicationDetails: Map<string, any>
@@ -155,6 +156,12 @@ export class Janitor {
155156
return !metadata || !this.protectedTools.includes(metadata.tool)
156157
}).length
157158

159+
// PHASE 1.5: STRIP-REASONING
160+
let reasoningPrunedIds: string[] = []
161+
162+
if (strategies.includes('strip-reasoning')) {
163+
}
164+
158165
// PHASE 2: LLM ANALYSIS
159166
let llmPrunedIds: string[] = []
160167

0 commit comments

Comments (0)