diff --git a/.gitignore b/.gitignore
index e88b29a..ec8b0b2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,3 +24,6 @@ Thumbs.db
 *.swp
 *.swo
 *~
+
+# OpenCode
+.opencode/
diff --git a/.npmignore b/.npmignore
index 331144c..cb4b50d 100644
--- a/.npmignore
+++ b/.npmignore
@@ -18,3 +18,6 @@ lib/
 # Git
 .git/
 .gitignore
+
+# OpenCode
+.opencode/
diff --git a/lib/janitor.ts b/lib/janitor.ts
index f41f04f..7b184f9 100644
--- a/lib/janitor.ts
+++ b/lib/janitor.ts
@@ -261,7 +261,7 @@ export class Janitor {
       })
     }
 
-    const modelSelection = await selectModel(currentModelInfo, this.logger, this.configModel)
+    const modelSelection = await selectModel(currentModelInfo, this.logger, this.configModel, this.workingDirectory)
 
     this.logger.info("janitor", "Model selected for analysis", {
       sessionID,
diff --git a/lib/model-selector.ts b/lib/model-selector.ts
index 70d4986..7c2dd7f 100644
--- a/lib/model-selector.ts
+++ b/lib/model-selector.ts
@@ -69,13 +69,13 @@ function shouldSkipProvider(providerID: string): boolean {
  * Attempts to import OpencodeAI with retry logic to handle plugin initialization timing issues.
  * Some providers (like openai via @openhax/codex) may not be fully initialized on first attempt.
  */
-async function importOpencodeAI(logger?: Logger, maxRetries: number = 3, delayMs: number = 100): Promise {
+async function importOpencodeAI(logger?: Logger, maxRetries: number = 3, delayMs: number = 100, workspaceDir?: string): Promise {
   let lastError: Error | undefined;
 
   for (let attempt = 1; attempt <= maxRetries; attempt++) {
     try {
       const { OpencodeAI } = await import('@tarquinen/opencode-auth-provider');
-      return new OpencodeAI();
+      return new OpencodeAI({ workspaceDir });
     } catch (error: any) {
       lastError = error;
 
@@ -117,13 +117,15 @@ async function importOpencodeAI(logger?: Logger, maxRetries: number = 3, delayMs
 export async function selectModel(
   currentModel?: ModelInfo,
   logger?: Logger,
-  configModel?: string
+  configModel?: string,
+  workspaceDir?: string
 ): Promise {
-  logger?.info('model-selector', 'Model selection started', { currentModel, configModel });
+  logger?.info('model-selector', 'Model selection started', { currentModel, configModel, workspaceDir });
 
   // Lazy import with retry logic - handles plugin initialization timing issues
   // Some providers (like openai via @openhax/codex) may not be ready on first attempt
-  const opencodeAI = await importOpencodeAI(logger);
+  // Pass workspaceDir so OpencodeAI can find project-level config and plugins
+  const opencodeAI = await importOpencodeAI(logger, 3, 100, workspaceDir);
 
   let failedModelInfo: ModelInfo | undefined;
 
diff --git a/package-lock.json b/package-lock.json
index aa494e0..003b2e8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@tarquinen/opencode-dcp",
-  "version": "0.3.6",
+  "version": "0.3.7",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@tarquinen/opencode-dcp",
-      "version": "0.3.6",
+      "version": "0.3.7",
       "license": "MIT",
       "dependencies": {
         "@ai-sdk/openai-compatible": "^1.0.27",
diff --git a/package.json b/package.json
index 939957b..a53009c 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
   "$schema": "https://json.schemastore.org/package.json",
   "name": "@tarquinen/opencode-dcp",
-  "version": "0.3.6",
+  "version": "0.3.7",
   "type": "module",
   "description": "OpenCode plugin that optimizes token usage by pruning obsolete tool outputs from conversation context",
   "main": "./dist/index.js",
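
For reviewers, the sketch below condenses the call chain this diff establishes: the janitor's working directory is passed to selectModel, which forwards it through importOpencodeAI into the OpencodeAI constructor so project-level config and plugins resolve against the right root. It is a minimal sketch under stated assumptions, not the plugin's real code: the Logger interface and resolveWorkspaceDir helper are hypothetical, and the signatures are simplified (the real selectModel also takes currentModel and configModel, and importOpencodeAI keeps its retry parameters).

```ts
// Minimal sketch of the workspaceDir plumbing added in this change.
// Assumptions: a hypothetical Logger shape and resolveWorkspaceDir() helper;
// the OpencodeAI constructor option mirrors the `new OpencodeAI({ workspaceDir })`
// call shown in the diff.

interface Logger {
  info(scope: string, message: string, data?: Record<string, unknown>): void;
}

// Hypothetical stand-in for however the host resolves the project root
// (Janitor passes this.workingDirectory in the real code).
function resolveWorkspaceDir(): string {
  return process.cwd();
}

async function importOpencodeAI(logger?: Logger, workspaceDir?: string) {
  // Lazy import, as in the plugin; the workspace directory is forwarded so
  // the auth provider can locate project-level config and plugins.
  const { OpencodeAI } = await import('@tarquinen/opencode-auth-provider');
  logger?.info('model-selector', 'OpencodeAI imported', { workspaceDir });
  return new OpencodeAI({ workspaceDir });
}

export async function selectModel(logger?: Logger, workspaceDir?: string) {
  logger?.info('model-selector', 'Model selection started', { workspaceDir });
  return importOpencodeAI(logger, workspaceDir);
}

// Caller side, mirroring Janitor:
// const opencodeAI = await selectModel(logger, resolveWorkspaceDir());
```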