Skip to content

Commit f0437be

Browse files
authored
Merge pull request #346 from drivecore/fix/issue-342-remove-fallbacks
Refactor: Remove fallbacks from Anthropic context window detection
2 parents cb5434b + d94459d commit f0437be

File tree

3 files changed

+101
-18
lines changed

3 files changed

+101
-18
lines changed

packages/agent/CHANGELOG.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,14 @@
1+
# [mycoder-agent-v1.7.0](https://github.com/drivecore/mycoder/compare/mycoder-agent-v1.6.0...mycoder-agent-v1.7.0) (2025-03-21)
2+
3+
### Bug Fixes
4+
5+
- Fix TypeScript errors and tests for message compaction feature ([d4f1fb5](https://github.com/drivecore/mycoder/commit/d4f1fb5d197e623bf98f2221352f9132dcb3e5de))
6+
7+
### Features
8+
9+
- Add automatic compaction of historical messages for agents ([a5caf46](https://github.com/drivecore/mycoder/commit/a5caf464a0a8dca925c7b46023ebde4727e211f8)), closes [#338](https://github.com/drivecore/mycoder/issues/338)
10+
- Improve message compaction with proactive suggestions ([6276bc0](https://github.com/drivecore/mycoder/commit/6276bc0bc5fa27c4f1e9be61ff4375690ad04c62))
11+
112
# [mycoder-agent-v1.6.0](https://github.com/drivecore/mycoder/compare/mycoder-agent-v1.5.0...mycoder-agent-v1.6.0) (2025-03-21)
213

314
### Features

packages/agent/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "mycoder-agent",
3-
"version": "1.6.0",
3+
"version": "1.7.0",
44
"description": "Agent module for mycoder - an AI-powered software development assistant",
55
"type": "module",
66
"main": "dist/index.js",

packages/agent/src/core/llm/providers/anthropic.ts

Lines changed: 89 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -12,20 +12,8 @@ import {
1212
ProviderOptions,
1313
} from '../types.js';
1414

15-
// Define model context window sizes for Anthropic models
16-
const ANTHROPIC_MODEL_LIMITS: Record<string, number> = {
17-
default: 200000,
18-
'claude-3-7-sonnet-20250219': 200000,
19-
'claude-3-7-sonnet-latest': 200000,
20-
'claude-3-5-sonnet-20241022': 200000,
21-
'claude-3-5-sonnet-latest': 200000,
22-
'claude-3-haiku-20240307': 200000,
23-
'claude-3-opus-20240229': 200000,
24-
'claude-3-sonnet-20240229': 200000,
25-
'claude-2.1': 100000,
26-
'claude-2.0': 100000,
27-
'claude-instant-1.2': 100000,
28-
};
15+
// Cache for model context window sizes
16+
const modelContextWindowCache: Record<string, number> = {};
2917

3018
/**
3119
* Anthropic-specific options
@@ -96,15 +84,27 @@ function addCacheControlToMessages(
9684
});
9785
}
9886

99-
function tokenUsageFromMessage(message: Anthropic.Message, model: string) {
87+
function tokenUsageFromMessage(
88+
message: Anthropic.Message,
89+
model: string,
90+
contextWindow?: number,
91+
) {
10092
const usage = new TokenUsage();
10193
usage.input = message.usage.input_tokens;
10294
usage.cacheWrites = message.usage.cache_creation_input_tokens ?? 0;
10395
usage.cacheReads = message.usage.cache_read_input_tokens ?? 0;
10496
usage.output = message.usage.output_tokens;
10597

10698
const totalTokens = usage.input + usage.output;
107-
const maxTokens = ANTHROPIC_MODEL_LIMITS[model] || 100000; // Default fallback
99+
100+
// Use provided context window or fallback to cached value
101+
const maxTokens = contextWindow || modelContextWindowCache[model];
102+
103+
if (!maxTokens) {
104+
throw new Error(
105+
`Context window size not available for model: ${model}. Make sure to initialize the model properly.`,
106+
);
107+
}
108108

109109
return {
110110
usage,
@@ -123,6 +123,7 @@ export class AnthropicProvider implements LLMProvider {
123123
private client: Anthropic;
124124
private apiKey: string;
125125
private baseUrl?: string;
126+
private modelContextWindow?: number;
126127

127128
constructor(model: string, options: AnthropicOptions = {}) {
128129
this.model = model;
@@ -138,6 +139,73 @@ export class AnthropicProvider implements LLMProvider {
138139
apiKey: this.apiKey,
139140
...(this.baseUrl && { baseURL: this.baseUrl }),
140141
});
142+
143+
// Initialize model context window detection
144+
// This is async but we don't need to await it here
145+
// If it fails, an error will be thrown when the model is used
146+
this.initializeModelContextWindow().catch((error) => {
147+
console.error(
148+
`Failed to initialize model context window: ${error.message}. The model will not work until context window information is available.`,
149+
);
150+
});
151+
}
152+
153+
/**
154+
* Fetches the model context window size from the Anthropic API
155+
*
156+
* @returns The context window size
157+
* @throws Error if the context window size cannot be determined
158+
*/
159+
private async initializeModelContextWindow(): Promise<number> {
160+
try {
161+
const response = await this.client.models.list();
162+
163+
if (!response?.data || !Array.isArray(response.data)) {
164+
throw new Error(
165+
`Invalid response from models.list() for ${this.model}`,
166+
);
167+
}
168+
169+
// Try to find the exact model
170+
let model = response.data.find((m) => m.id === this.model);
171+
172+
// If not found, try to find a model that starts with the same name
173+
// This helps with model aliases like 'claude-3-sonnet-latest'
174+
if (!model) {
175+
// Split by '-latest' or '-20' to get the base model name
176+
const parts = this.model.split('-latest');
177+
const modelPrefix =
178+
parts.length > 1 ? parts[0] : this.model.split('-20')[0];
179+
180+
if (modelPrefix) {
181+
model = response.data.find((m) => m.id.startsWith(modelPrefix));
182+
183+
if (model) {
184+
console.info(
185+
`Model ${this.model} not found, using ${model.id} for context window size`,
186+
);
187+
}
188+
}
189+
}
190+
191+
// Using type assertion to access context_window property
192+
// The Anthropic API returns context_window but it may not be in the TypeScript definitions
193+
if (model && 'context_window' in model) {
194+
const contextWindow = (model as any).context_window;
195+
this.modelContextWindow = contextWindow;
196+
// Cache the result for future use
197+
modelContextWindowCache[this.model] = contextWindow;
198+
return contextWindow;
199+
} else {
200+
throw new Error(
201+
`No context window information found for model: ${this.model}`,
202+
);
203+
}
204+
} catch (error) {
205+
throw new Error(
206+
`Failed to determine context window size for model ${this.model}: ${(error as Error).message}`,
207+
);
208+
}
141209
}
142210

143211
/**
@@ -198,7 +266,11 @@ export class AnthropicProvider implements LLMProvider {
198266
};
199267
});
200268

201-
const tokenInfo = tokenUsageFromMessage(response, this.model);
269+
const tokenInfo = tokenUsageFromMessage(
270+
response,
271+
this.model,
272+
this.modelContextWindow,
273+
);
202274

203275
return {
204276
text: content,

0 commit comments

Comments (0)