Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
364 changes: 41 additions & 323 deletions index.ts

Large diffs are not rendered by default.

2 changes: 0 additions & 2 deletions lib/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -135,8 +135,6 @@ function loadConfigFile(configPath: string): Partial<PluginConfig> | null {
const fileContent = readFileSync(configPath, 'utf-8')
return parse(fileContent) as Partial<PluginConfig>
} catch (error: any) {
const logger = new Logger(true)
logger.error('config', `Failed to read config from ${configPath}: ${error.message}`)
return null
}
}
Expand Down
312 changes: 82 additions & 230 deletions lib/janitor.ts

Large diffs are not rendered by default.

54 changes: 35 additions & 19 deletions lib/logger.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,29 +23,55 @@ export class Logger {
}
}

/**
 * Renders a data object as a compact `key=value` string for log lines,
 * e.g. {saved: "~4.1K", pruned: 4, duplicates: 0} -> "saved=~4.1K pruned=4 duplicates=0".
 * Null/undefined entries and empty arrays are dropped; arrays show at most
 * their first three elements (with a "...+N" overflow suffix); nested objects
 * are inlined only when their JSON form is short (< 50 chars), otherwise skipped.
 */
private formatData(data?: any): string {
  if (!data) return ""

  const rendered: string[] = []
  for (const [key, value] of Object.entries(data)) {
    // `== null` matches both null and undefined
    if (value == null) continue

    if (Array.isArray(value)) {
      if (value.length === 0) continue
      const head = value.slice(0, 3).join(",")
      const overflow = value.length > 3 ? `...+${value.length - 3}` : ""
      rendered.push(`${key}=[${head}${overflow}]`)
    } else if (typeof value === 'object') {
      // Inline small objects; large ones would bloat the log line, so omit them.
      const json = JSON.stringify(value)
      if (json.length < 50) {
        rendered.push(`${key}=${json}`)
      }
    } else {
      // Primitives are stringified directly via template interpolation.
      rendered.push(`${key}=${value}`)
    }
  }

  return rendered.join(" ")
}

private async write(level: string, component: string, message: string, data?: any) {
if (!this.enabled) return

try {
await this.ensureLogDir()

const timestamp = new Date().toISOString()
const logEntry = {
timestamp,
level,
component,
message,
...(data && { data })
}
const dataStr = this.formatData(data)

// Simple, readable format: TIMESTAMP LEVEL component: message | key=value key=value
const logLine = `${timestamp} ${level.padEnd(5)} ${component}: ${message}${dataStr ? " | " + dataStr : ""}\n`

const dailyLogDir = join(this.logDir, "daily")
if (!existsSync(dailyLogDir)) {
await mkdir(dailyLogDir, { recursive: true })
}

const logFile = join(dailyLogDir, `${new Date().toISOString().split('T')[0]}.log`)
const logLine = JSON.stringify(logEntry) + "\n"

await writeFile(logFile, logLine, { flag: "a" })
} catch (error) {
// Silently fail - don't break the plugin if logging fails
Expand Down Expand Up @@ -140,7 +166,6 @@ export class Logger {
// We detect being "inside a string" by tracking quotes
let result = ''
let inString = false
let escaped = false

for (let i = 0; i < jsonText.length; i++) {
const char = jsonText[i]
Expand Down Expand Up @@ -237,15 +262,6 @@ export class Logger {
const jsonString = JSON.stringify(content, null, 2)

await writeFile(filepath, jsonString)

// Log that we saved it
await this.debug("logger", "Saved AI context", {
sessionID,
filepath,
messageCount: messages.length,
isJanitorShadow,
parsed: isJanitorShadow
})
} catch (error) {
// Silently fail - don't break the plugin if logging fails
}
Expand Down
82 changes: 4 additions & 78 deletions lib/model-selector.ts
Original file line number Diff line number Diff line change
Expand Up @@ -120,8 +120,6 @@ export async function selectModel(
configModel?: string,
workspaceDir?: string
): Promise<ModelSelectionResult> {
logger?.info('model-selector', 'Model selection started', { currentModel, configModel, workspaceDir });

// Lazy import with retry logic - handles plugin initialization timing issues
// Some providers (like openai via @openhax/codex) may not be ready on first attempt
// Pass workspaceDir so OpencodeAI can find project-level config and plugins
Expand All @@ -133,32 +131,20 @@ export async function selectModel(
if (configModel) {
const parts = configModel.split('/');
if (parts.length !== 2) {
logger?.warn('model-selector', '✗ Invalid config model format, expected "provider/model"', {
configModel
});
logger?.warn('model-selector', 'Invalid config model format', { configModel });
} else {
const [providerID, modelID] = parts;
logger?.debug('model-selector', 'Attempting to use config-specified model', {
providerID,
modelID
});

try {
const model = await opencodeAI.getLanguageModel(providerID, modelID);
logger?.info('model-selector', '✓ Successfully using config-specified model', {
providerID,
modelID
});
return {
model,
modelInfo: { providerID, modelID },
source: 'config',
reason: 'Using model specified in dcp.jsonc config'
};
} catch (error: any) {
logger?.warn('model-selector', '✗ Failed to use config-specified model, falling back', {
providerID,
modelID,
logger?.warn('model-selector', `Config model failed: ${providerID}/${modelID}`, {
error: error.message
});
failedModelInfo = { providerID, modelID };
Expand All @@ -169,39 +155,20 @@ export async function selectModel(
// Step 2: Try user's current model (if not skipped provider)
if (currentModel) {
if (shouldSkipProvider(currentModel.providerID)) {
logger?.info('model-selector', 'Skipping user model (provider not suitable for background tasks)', {
providerID: currentModel.providerID,
modelID: currentModel.modelID,
reason: 'github-copilot and anthropic are skipped for analysis'
});
// Track as failed so we can show toast
if (!failedModelInfo) {
failedModelInfo = currentModel;
}
} else {
logger?.debug('model-selector', 'Attempting to use user\'s current model', {
providerID: currentModel.providerID,
modelID: currentModel.modelID
});

try {
const model = await opencodeAI.getLanguageModel(currentModel.providerID, currentModel.modelID);
logger?.info('model-selector', '✓ Successfully using user\'s current model', {
providerID: currentModel.providerID,
modelID: currentModel.modelID
});
return {
model,
modelInfo: currentModel,
source: 'user-model',
reason: 'Using current session model'
};
} catch (error: any) {
logger?.warn('model-selector', '✗ Failed to use user\'s current model', {
providerID: currentModel.providerID,
modelID: currentModel.modelID,
error: error.message
});
if (!failedModelInfo) {
failedModelInfo = currentModel;
}
Expand All @@ -210,43 +177,16 @@ export async function selectModel(
}

// Step 3: Try fallback models from authenticated providers
logger?.debug('model-selector', 'Fetching available authenticated providers');
const providers = await opencodeAI.listProviders();
const availableProviderIDs = Object.keys(providers);
logger?.info('model-selector', 'Available authenticated providers', {
providerCount: availableProviderIDs.length,
providerIDs: availableProviderIDs,
providers: Object.entries(providers).map(([id, info]: [string, any]) => ({
id,
source: info.source,
name: info.info?.name
}))
});

logger?.debug('model-selector', 'Attempting fallback models from providers', {
priorityOrder: PROVIDER_PRIORITY
});

for (const providerID of PROVIDER_PRIORITY) {
if (!providers[providerID]) {
logger?.debug('model-selector', `Skipping ${providerID} (not authenticated)`);
continue;
}
if (!providers[providerID]) continue;

const fallbackModelID = FALLBACK_MODELS[providerID];
if (!fallbackModelID) {
logger?.debug('model-selector', `Skipping ${providerID} (no fallback model configured)`);
continue;
}

logger?.debug('model-selector', `Attempting ${providerID}/${fallbackModelID}`);
if (!fallbackModelID) continue;

try {
const model = await opencodeAI.getLanguageModel(providerID, fallbackModelID);
logger?.info('model-selector', `✓ Successfully using fallback model`, {
providerID,
modelID: fallbackModelID
});
return {
model,
modelInfo: { providerID, modelID: fallbackModelID },
Expand All @@ -255,9 +195,6 @@ export async function selectModel(
failedModel: failedModelInfo
};
} catch (error: any) {
logger?.warn('model-selector', `✗ Failed to use ${providerID}/${fallbackModelID}`, {
error: error.message
});
continue;
}
}
Expand All @@ -270,14 +207,8 @@ export async function selectModel(
* This can be used by the plugin to get the current session's model
*/
export function extractModelFromSession(sessionState: any, logger?: Logger): ModelInfo | undefined {
logger?.debug('model-selector', 'Extracting model from session state');

// Try to get from ACP session state
if (sessionState?.model?.providerID && sessionState?.model?.modelID) {
logger?.info('model-selector', 'Found model in ACP session state', {
providerID: sessionState.model.providerID,
modelID: sessionState.model.modelID
});
return {
providerID: sessionState.model.providerID,
modelID: sessionState.model.modelID
Expand All @@ -288,17 +219,12 @@ export function extractModelFromSession(sessionState: any, logger?: Logger): Mod
if (sessionState?.messages && Array.isArray(sessionState.messages)) {
const lastMessage = sessionState.messages[sessionState.messages.length - 1];
if (lastMessage?.model?.providerID && lastMessage?.model?.modelID) {
logger?.info('model-selector', 'Found model in last message', {
providerID: lastMessage.model.providerID,
modelID: lastMessage.model.modelID
});
return {
providerID: lastMessage.model.providerID,
modelID: lastMessage.model.modelID
};
}
}

logger?.warn('model-selector', 'Could not extract model from session state');
return undefined;
}
20 changes: 20 additions & 0 deletions lib/state.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,13 @@
// lib/state.ts

/** Cumulative pruning statistics accumulated per session (see StateManager.addStats). */
export interface SessionStats {
  // Running count of tool outputs pruned from the session's context.
  totalToolsPruned: number
  // Running sum of tokens reported as saved by pruning.
  totalTokensSaved: number
}

export class StateManager {
private state: Map<string, string[]> = new Map()
private stats: Map<string, SessionStats> = new Map()

async get(sessionID: string): Promise<string[]> {
return this.state.get(sessionID) ?? []
Expand All @@ -10,4 +16,18 @@ export class StateManager {
async set(sessionID: string, prunedIds: string[]): Promise<void> {
this.state.set(sessionID, prunedIds)
}

async getStats(sessionID: string): Promise<SessionStats> {
return this.stats.get(sessionID) ?? { totalToolsPruned: 0, totalTokensSaved: 0 }
}

/**
 * Folds one pruning event's counts into the session's running totals,
 * stores the result, and returns the updated totals.
 */
async addStats(sessionID: string, toolsPruned: number, tokensSaved: number): Promise<SessionStats> {
  const prior = await this.getStats(sessionID)
  const updatedStats: SessionStats = {
    totalToolsPruned: prior.totalToolsPruned + toolsPruned,
    totalTokensSaved: prior.totalTokensSaved + tokensSaved,
  }
  this.stats.set(sessionID, updatedStats)
  return updatedStats
}
}
30 changes: 4 additions & 26 deletions lib/tokenizer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,41 +10,19 @@
* is actually needed.
*/

import type { Logger } from './logger'

/**
* Batch estimates tokens for multiple text samples
*
* @param texts - Array of text strings to tokenize
* @param logger - Optional logger instance
* @returns Array of token counts
*/
export async function estimateTokensBatch(
texts: string[],
logger?: Logger
): Promise<number[]> {
export async function estimateTokensBatch(texts: string[]): Promise<number[]> {
try {
// Lazy import - only load the 53MB gpt-tokenizer package when actually needed
const { encode } = await import('gpt-tokenizer')

const results = texts.map(text => {
const tokens = encode(text)
return tokens.length
})

logger?.debug('tokenizer', 'Batch token estimation complete', {
batchSize: texts.length,
totalTokens: results.reduce((sum, count) => sum + count, 0),
avgTokensPerText: Math.round(results.reduce((sum, count) => sum + count, 0) / results.length)
})

return results
} catch (error: any) {
logger?.warn('tokenizer', 'Batch tokenization failed, using fallback', {
error: error.message
})

// Fallback to character-based estimation
return texts.map(text => encode(text).length)
} catch {
// Fallback to character-based estimation if tokenizer fails
return texts.map(text => Math.round(text.length / 4))
}
}
Expand Down
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@tarquinen/opencode-dcp",
"version": "0.3.9",
"version": "0.3.10",
"type": "module",
"description": "OpenCode plugin that optimizes token usage by pruning obsolete tool outputs from conversation context",
"main": "./dist/index.js",
Expand Down