diff --git a/README.md b/README.md
index 59b222a..a87fbcd 100644
--- a/README.md
+++ b/README.md
@@ -27,10 +27,10 @@ DCP uses multiple strategies to reduce context size:
 
 **Deduplication** — Identifies repeated tool calls (e.g., reading the same file multiple times) and keeps only the most recent output. Runs automatically on every request with zero LLM cost.
 
-**On Idle Analysis** — Uses a language model to semantically analyze conversation context during idle periods and identify tool outputs that are no longer relevant.
-
 **Prune Tool** — Exposes a `prune` tool that the AI can call to manually trigger pruning when it determines context cleanup is needed.
 
+**On Idle Analysis** — Uses a language model to semantically analyze conversation context during idle periods and identify tool outputs that are no longer relevant.
+
 *More strategies coming soon.*
 
 Your session history is never modified. DCP replaces pruned outputs with a placeholder before sending requests to your LLM.
@@ -84,14 +84,14 @@ DCP uses its own config file:
     // (Legacy) Run an LLM to analyze what tool calls are no longer relevant on idle
     "onIdle": {
       "enabled": false,
+      // Additional tools to protect from pruning
+      "protectedTools": [],
       // Override model for analysis (format: "provider/model")
       // "model": "anthropic/claude-haiku-4-5",
       // Show toast notifications when model selection fails
       "showModelErrorToasts": true,
       // When true, fallback models are not permitted
-      "strictModelSelection": false,
-      // Additional tools to protect from pruning
-      "protectedTools": []
+      "strictModelSelection": false
     }
   }
 }
diff --git a/lib/config.ts b/lib/config.ts
index eb90adc..2eef0c9 100644
--- a/lib/config.ts
+++ b/lib/config.ts
@@ -234,9 +234,9 @@ const defaultConfig: PluginConfig = {
     },
     onIdle: {
       enabled: false,
+      protectedTools: [...DEFAULT_PROTECTED_TOOLS],
       showModelErrorToasts: true,
-      strictModelSelection: false,
-      protectedTools: [...DEFAULT_PROTECTED_TOOLS]
+      strictModelSelection: false
     }
   }
 }
@@ -336,14 +336,14 @@ function createDefaultConfig(): void {
     // (Legacy) Run an LLM to analyze what tool calls are no longer relevant on idle
     "onIdle": {
       "enabled": false,
+      // Additional tools to protect from pruning
+      "protectedTools": [],
       // Override model for analysis (format: "provider/model")
       // "model": "anthropic/claude-haiku-4-5",
       // Show toast notifications when model selection fails
       "showModelErrorToasts": true,
       // When true, fallback models are not permitted
-      "strictModelSelection": false,
-      // Additional tools to protect from pruning
-      "protectedTools": []
+      "strictModelSelection": false
     }
   }
 }