Skip to content

Commit 74a1d9c

Browse files
committed
feat: add custom model context window override for all providers
- Add modelContextWindow field to base provider settings schema
- Implement applyModelOverrides method in BaseProvider class
- Update all provider implementations to apply context window override
- Add comprehensive tests for context window override functionality

This allows users to customize the context window size for any provider to work around corporate upload limits or other restrictions.

Fixes #8397
1 parent 702b269 commit 74a1d9c

File tree

12 files changed

+186
-6
lines changed

12 files changed

+186
-6
lines changed

.tmp/Roo-Code

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Subproject commit 86debeef43acbea9bdc1aa4b38d514541e164c91

.tmp/pr-8396

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Subproject commit a18538995de0f7d9cfc4a40d31380fb141d5604e

packages/types/src/provider-settings.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,6 +179,9 @@ const baseProviderSettingsSchema = z.object({
179179
modelMaxTokens: z.number().optional(),
180180
modelMaxThinkingTokens: z.number().optional(),
181181

182+
// Model context window override.
183+
modelContextWindow: z.number().optional(),
184+
182185
// Model verbosity.
183186
verbosity: verbosityLevelsSchema.optional(),
184187
})
Lines changed: 126 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,126 @@
1+
import { describe, it, expect, beforeEach } from "vitest"
2+
import { AnthropicHandler } from "../anthropic"
3+
import { OpenRouterHandler } from "../openrouter"
4+
import { OpenAiHandler } from "../openai"
5+
import { GeminiHandler } from "../gemini"
6+
import type { ApiHandlerOptions } from "../../../shared/api"
7+
8+
describe("Context Window Override", () => {
9+
describe("AnthropicHandler", () => {
10+
it("should apply modelContextWindow override", () => {
11+
const options: ApiHandlerOptions = {
12+
apiKey: "test-key",
13+
apiModelId: "claude-3-5-sonnet-20241022",
14+
modelContextWindow: 50000, // Custom context window
15+
}
16+
17+
const handler = new AnthropicHandler(options)
18+
const model = handler.getModel()
19+
20+
expect(model.info.contextWindow).toBe(50000)
21+
})
22+
23+
it("should use default context window when no override is provided", () => {
24+
const options: ApiHandlerOptions = {
25+
apiKey: "test-key",
26+
apiModelId: "claude-3-5-sonnet-20241022",
27+
}
28+
29+
const handler = new AnthropicHandler(options)
30+
const model = handler.getModel()
31+
32+
// Should use the default context window for this model
33+
expect(model.info.contextWindow).toBe(200000)
34+
})
35+
})
36+
37+
describe("OpenRouterHandler", () => {
38+
it("should apply modelContextWindow override", async () => {
39+
const options: ApiHandlerOptions = {
40+
openRouterApiKey: "test-key",
41+
openRouterModelId: "anthropic/claude-3.5-sonnet",
42+
modelContextWindow: 75000, // Custom context window
43+
}
44+
45+
const handler = new OpenRouterHandler(options)
46+
// Mock the models to avoid actual API calls
47+
;(handler as any).models = {
48+
"anthropic/claude-3.5-sonnet": {
49+
contextWindow: 200000,
50+
maxTokens: 8192,
51+
supportsPromptCache: true,
52+
supportsImages: true,
53+
},
54+
}
55+
56+
const model = handler.getModel()
57+
expect(model.info.contextWindow).toBe(75000)
58+
})
59+
})
60+
61+
describe("OpenAiHandler", () => {
62+
it("should apply modelContextWindow override to custom model info", () => {
63+
const options: ApiHandlerOptions = {
64+
openAiApiKey: "test-key",
65+
openAiModelId: "gpt-4",
66+
openAiCustomModelInfo: {
67+
contextWindow: 128000,
68+
maxTokens: 4096,
69+
supportsPromptCache: false,
70+
supportsImages: true,
71+
},
72+
modelContextWindow: 60000, // Custom context window
73+
}
74+
75+
const handler = new OpenAiHandler(options)
76+
const model = handler.getModel()
77+
78+
expect(model.info.contextWindow).toBe(60000)
79+
})
80+
})
81+
82+
describe("GeminiHandler", () => {
83+
it("should apply modelContextWindow override", () => {
84+
const options: ApiHandlerOptions = {
85+
geminiApiKey: "test-key",
86+
apiModelId: "gemini-1.5-pro-latest",
87+
modelContextWindow: 100000, // Custom context window
88+
}
89+
90+
const handler = new GeminiHandler(options)
91+
const model = handler.getModel()
92+
93+
expect(model.info.contextWindow).toBe(100000)
94+
})
95+
})
96+
97+
describe("Edge cases", () => {
98+
it("should not apply override when modelContextWindow is 0", () => {
99+
const options: ApiHandlerOptions = {
100+
apiKey: "test-key",
101+
apiModelId: "claude-3-5-sonnet-20241022",
102+
modelContextWindow: 0, // Zero should not override
103+
}
104+
105+
const handler = new AnthropicHandler(options)
106+
const model = handler.getModel()
107+
108+
// Should use the default context window
109+
expect(model.info.contextWindow).toBe(200000)
110+
})
111+
112+
it("should not apply override when modelContextWindow is negative", () => {
113+
const options: ApiHandlerOptions = {
114+
apiKey: "test-key",
115+
apiModelId: "claude-3-5-sonnet-20241022",
116+
modelContextWindow: -1000, // Negative should not override
117+
}
118+
119+
const handler = new AnthropicHandler(options)
120+
const model = handler.getModel()
121+
122+
// Should use the default context window
123+
expect(model.info.contextWindow).toBe(200000)
124+
})
125+
})
126+
})

src/api/providers/anthropic.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -264,6 +264,9 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
264264
}
265265
}
266266

267+
// Apply user-configured overrides (e.g., custom context window)
268+
info = this.applyModelOverrides(info, this.options)
269+
267270
const params = getModelParams({
268271
format: "anthropic",
269272
modelId: id,

src/api/providers/base-openai-compatible-provider.ts

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,6 +140,9 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
140140
? (this.options.apiModelId as ModelName)
141141
: this.defaultProviderModelId
142142

143-
return { id, info: this.providerModels[id] }
143+
// Apply user-configured overrides (e.g., custom context window)
144+
const info = this.applyModelOverrides(this.providerModels[id], this.options)
145+
146+
return { id, info }
144147
}
145148
}

src/api/providers/base-provider.ts

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ import { Anthropic } from "@anthropic-ai/sdk"
33
import type { ModelInfo } from "@roo-code/types"
44

55
import type { ApiHandler, ApiHandlerCreateMessageMetadata } from "../index"
6+
import type { ApiHandlerOptions } from "../../shared/api"
67
import { ApiStream } from "../transform/stream"
78
import { countTokens } from "../../utils/countTokens"
89

@@ -18,6 +19,26 @@ export abstract class BaseProvider implements ApiHandler {
1819

1920
abstract getModel(): { id: string; info: ModelInfo }
2021

22+
/**
23+
* Applies user-configured overrides to model info.
24+
* This allows users to customize model parameters like context window size
25+
* to work around corporate restrictions or other limitations.
26+
*
27+
* @param info The original model info
28+
* @param options The API handler options containing user overrides
29+
* @returns The model info with overrides applied
30+
*/
31+
protected applyModelOverrides(info: ModelInfo, options: ApiHandlerOptions): ModelInfo {
32+
const overriddenInfo = { ...info }
33+
34+
// Apply context window override if specified
35+
if (options.modelContextWindow && options.modelContextWindow > 0) {
36+
overriddenInfo.contextWindow = options.modelContextWindow
37+
}
38+
39+
return overriddenInfo
40+
}
41+
2142
/**
2243
* Default token counting implementation using tiktoken.
2344
* Providers can override this to use their native token counting endpoints.

src/api/providers/bedrock.ts

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -373,7 +373,7 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
373373
maxTokens: modelConfig.maxTokens || (modelConfig.info.maxTokens as number),
374374
temperature: modelConfig.temperature ?? (this.options.modelTemperature as number),
375375
}
376-
376+
377377
// Check if 1M context is enabled for Claude Sonnet 4
378378
// Use parseBaseModelId to handle cross-region inference prefixes
379379
const baseModelId = this.parseBaseModelId(modelConfig.id)
@@ -922,10 +922,14 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
922922
if (this.options.modelMaxTokens && this.options.modelMaxTokens > 0) {
923923
model.info.maxTokens = this.options.modelMaxTokens
924924
}
925+
// Support both awsModelContextWindow (for backward compatibility) and modelContextWindow
925926
if (this.options.awsModelContextWindow && this.options.awsModelContextWindow > 0) {
926927
model.info.contextWindow = this.options.awsModelContextWindow
927928
}
928929

930+
// Apply general model overrides (including modelContextWindow)
931+
model.info = this.applyModelOverrides(model.info, this.options)
932+
929933
return model
930934
}
931935

@@ -983,6 +987,9 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
983987
}
984988
}
985989

990+
// Apply general model overrides (including modelContextWindow) after all specific logic
991+
modelConfig.info = this.applyModelOverrides(modelConfig.info, this.options)
992+
986993
// Get model params including reasoning configuration
987994
const params = getModelParams({
988995
format: "anthropic",

src/api/providers/gemini.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -166,6 +166,10 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl
166166
const modelId = this.options.apiModelId
167167
let id = modelId && modelId in geminiModels ? (modelId as GeminiModelId) : geminiDefaultModelId
168168
let info: ModelInfo = geminiModels[id]
169+
170+
// Apply user-configured overrides (e.g., custom context window)
171+
info = this.applyModelOverrides(info, this.options)
172+
169173
const params = getModelParams({ format: "gemini", modelId: id, model: info, settings: this.options })
170174

171175
// The `:thinking` suffix indicates that the model is a "Hybrid"

src/api/providers/openai.ts

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -267,7 +267,11 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
267267

268268
override getModel() {
	const id = this.options.openAiModelId ?? ""

	// Start from user-supplied custom model info, falling back to sane defaults,
	// then apply user-configured overrides (e.g., custom context window).
	const baseInfo = this.options.openAiCustomModelInfo ?? openAiModelInfoSaneDefaults
	const info = this.applyModelOverrides(baseInfo, this.options)

	const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
	return { id, info, ...params }
}
}

0 commit comments

Comments
 (0)