
Commit 8fae1ca

Merge pull request RooCodeInc#1257 from RooVetGit/cte/unify-thinking-budget-setting
2 parents 54c6874 + 8cbce2d commit 8fae1ca

11 files changed, +28 -58 lines changed

src/api/providers/__tests__/vertex.test.ts
Lines changed: 3 additions & 15 deletions

@@ -641,7 +641,7 @@ describe("VertexHandler", () => {
 				vertexProjectId: "test-project",
 				vertexRegion: "us-central1",
 				modelMaxTokens: 16384,
-				vertexThinking: 4096,
+				modelMaxThinkingTokens: 4096,
 			})

 			const modelInfo = thinkingHandler.getModel()
@@ -662,7 +662,7 @@ describe("VertexHandler", () => {
 				vertexProjectId: "test-project",
 				vertexRegion: "us-central1",
 				modelMaxTokens: 16384,
-				vertexThinking: 5000,
+				modelMaxThinkingTokens: 5000,
 			})

 			expect((handlerWithBudget.getModel().thinking as any).budget_tokens).toBe(5000)
@@ -688,25 +688,13 @@ describe("VertexHandler", () => {
 			expect((handlerWithSmallMaxTokens.getModel().thinking as any).budget_tokens).toBe(1024)
 		})

-		it("should use anthropicThinking value if vertexThinking is not provided", () => {
-			const handler = new VertexHandler({
-				apiModelId: "claude-3-7-sonnet@20250219:thinking",
-				vertexProjectId: "test-project",
-				vertexRegion: "us-central1",
-				modelMaxTokens: 16384,
-				anthropicThinking: 6000, // Should be used as fallback
-			})
-
-			expect((handler.getModel().thinking as any).budget_tokens).toBe(6000)
-		})
-
 		it("should pass thinking configuration to API", async () => {
 			const thinkingHandler = new VertexHandler({
 				apiModelId: "claude-3-7-sonnet@20250219:thinking",
 				vertexProjectId: "test-project",
 				vertexRegion: "us-central1",
 				modelMaxTokens: 16384,
-				vertexThinking: 4096,
+				modelMaxThinkingTokens: 4096,
 			})

 			const mockCreate = jest.fn().mockImplementation(async (options) => {

src/api/providers/anthropic.ts
Lines changed: 1 addition & 1 deletion

@@ -206,7 +206,7 @@ export class AnthropicHandler implements ApiHandler, SingleCompletionHandler {
 		// least 1024 tokens.
 		const maxBudgetTokens = Math.floor(maxTokens * 0.8)
 		const budgetTokens = Math.max(
-			Math.min(this.options.anthropicThinking ?? maxBudgetTokens, maxBudgetTokens),
+			Math.min(this.options.modelMaxThinkingTokens ?? maxBudgetTokens, maxBudgetTokens),
 			1024,
 		)

src/api/providers/openrouter.ts
Lines changed: 1 addition & 1 deletion

@@ -117,7 +117,7 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 		// least 1024 tokens.
 		const maxBudgetTokens = Math.floor((maxTokens || 8192) * 0.8)
 		const budgetTokens = Math.max(
-			Math.min(this.options.anthropicThinking ?? maxBudgetTokens, maxBudgetTokens),
+			Math.min(this.options.modelMaxThinkingTokens ?? maxBudgetTokens, maxBudgetTokens),
 			1024,
 		)
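
The Anthropic and OpenRouter handlers above (and the Vertex handler below) now read the same option and apply the same rule: the budget is the configured modelMaxThinkingTokens, capped at 80% of the max output tokens (OpenRouter defaults maxTokens to 8192 when unset) and floored at 1024. A minimal standalone sketch of that clamp — the function name and example numbers are illustrative, not part of the codebase:

// Sketch of the shared clamping rule; not an exported function in the repo.
function clampThinkingBudget(modelMaxThinkingTokens: number | undefined, maxTokens: number): number {
	const maxBudgetTokens = Math.floor(maxTokens * 0.8)
	return Math.max(Math.min(modelMaxThinkingTokens ?? maxBudgetTokens, maxBudgetTokens), 1024)
}

// With maxTokens = 16384, maxBudgetTokens = 13107:
//   clampThinkingBudget(4096, 16384)      === 4096  (within range, used as-is)
//   clampThinkingBudget(20000, 16384)     === 13107 (capped at 80% of maxTokens)
//   clampThinkingBudget(undefined, 16384) === 13107 (defaults to the cap)
//   clampThinkingBudget(100, 16384)       === 1024  (floored at the minimum)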

src/api/providers/vertex.ts
Lines changed: 1 addition & 4 deletions

@@ -300,10 +300,7 @@ export class VertexHandler implements ApiHandler, SingleCompletionHandler {
 			temperature = 1.0 // Thinking requires temperature 1.0
 			const maxBudgetTokens = Math.floor(maxTokens * 0.8)
 			const budgetTokens = Math.max(
-				Math.min(
-					this.options.vertexThinking ?? this.options.anthropicThinking ?? maxBudgetTokens,
-					maxBudgetTokens,
-				),
+				Math.min(this.options.modelMaxThinkingTokens ?? maxBudgetTokens, maxBudgetTokens),
 				1024,
 			)
 			thinking = { type: "enabled", budget_tokens: budgetTokens }
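
For thinking-enabled Vertex models the handler also forces the temperature to 1.0 and wraps the clamped budget in a thinking block, so the request ends up carrying values like the following sketch (numbers are illustrative, matching the test configuration above):

// Illustrative result for modelMaxTokens: 16384, modelMaxThinkingTokens: 4096.
const temperature = 1.0 // thinking requires temperature 1.0
const thinking: { type: "enabled"; budget_tokens: number } = {
	type: "enabled",
	budget_tokens: 4096, // already clamped to [1024, floor(0.8 * 16384)]
}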

src/core/webview/ClineProvider.ts
Lines changed: 5 additions & 10 deletions

@@ -1651,8 +1651,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			lmStudioModelId,
 			lmStudioBaseUrl,
 			anthropicBaseUrl,
-			anthropicThinking,
-			vertexThinking,
 			geminiApiKey,
 			openAiNativeApiKey,
 			deepSeekApiKey,
@@ -1673,6 +1671,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			requestyModelInfo,
 			modelTemperature,
 			modelMaxTokens,
+			modelMaxThinkingTokens,
 		} = apiConfiguration
 		await Promise.all([
 			this.updateGlobalState("apiProvider", apiProvider),
@@ -1701,8 +1700,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			this.updateGlobalState("lmStudioModelId", lmStudioModelId),
 			this.updateGlobalState("lmStudioBaseUrl", lmStudioBaseUrl),
 			this.updateGlobalState("anthropicBaseUrl", anthropicBaseUrl),
-			this.updateGlobalState("anthropicThinking", anthropicThinking),
-			this.updateGlobalState("vertexThinking", vertexThinking),
 			this.storeSecret("geminiApiKey", geminiApiKey),
 			this.storeSecret("openAiNativeApiKey", openAiNativeApiKey),
 			this.storeSecret("deepSeekApiKey", deepSeekApiKey),
@@ -1723,6 +1720,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			this.updateGlobalState("requestyModelInfo", requestyModelInfo),
 			this.updateGlobalState("modelTemperature", modelTemperature),
 			this.updateGlobalState("modelMaxTokens", modelMaxTokens),
+			this.updateGlobalState("anthropicThinking", modelMaxThinkingTokens),
 		])
 		if (this.cline) {
 			this.cline.api = buildApiHandler(apiConfiguration)
@@ -2159,8 +2157,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			lmStudioModelId,
 			lmStudioBaseUrl,
 			anthropicBaseUrl,
-			anthropicThinking,
-			vertexThinking,
 			geminiApiKey,
 			openAiNativeApiKey,
 			deepSeekApiKey,
@@ -2216,6 +2212,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			requestyModelInfo,
 			modelTemperature,
 			modelMaxTokens,
+			modelMaxThinkingTokens,
 			maxOpenTabsContext,
 		] = await Promise.all([
 			this.getGlobalState("apiProvider") as Promise<ApiProvider | undefined>,
@@ -2244,8 +2241,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			this.getGlobalState("lmStudioModelId") as Promise<string | undefined>,
 			this.getGlobalState("lmStudioBaseUrl") as Promise<string | undefined>,
 			this.getGlobalState("anthropicBaseUrl") as Promise<string | undefined>,
-			this.getGlobalState("anthropicThinking") as Promise<number | undefined>,
-			this.getGlobalState("vertexThinking") as Promise<number | undefined>,
 			this.getSecret("geminiApiKey") as Promise<string | undefined>,
 			this.getSecret("openAiNativeApiKey") as Promise<string | undefined>,
 			this.getSecret("deepSeekApiKey") as Promise<string | undefined>,
@@ -2301,6 +2296,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			this.getGlobalState("requestyModelInfo") as Promise<ModelInfo | undefined>,
 			this.getGlobalState("modelTemperature") as Promise<number | undefined>,
 			this.getGlobalState("modelMaxTokens") as Promise<number | undefined>,
+			this.getGlobalState("anthropicThinking") as Promise<number | undefined>,
 			this.getGlobalState("maxOpenTabsContext") as Promise<number | undefined>,
 		])

@@ -2346,8 +2342,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			lmStudioModelId,
 			lmStudioBaseUrl,
 			anthropicBaseUrl,
-			anthropicThinking,
-			vertexThinking,
 			geminiApiKey,
 			openAiNativeApiKey,
 			deepSeekApiKey,
@@ -2368,6 +2362,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 				requestyModelInfo,
 				modelTemperature,
 				modelMaxTokens,
+				modelMaxThinkingTokens,
 			},
 			lastShownAnnouncementId,
 			customInstructions,
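
Note that while the configuration field is renamed, it is still persisted under the existing "anthropicThinking" global-state key (see the TODO added in src/shared/globalState.ts below), so values saved before this change keep loading. A simplified sketch of that mapping, using a plain Map in place of the provider's VS Code global state:

// Sketch only; ClineProvider uses VS Code global state rather than a Map.
const globalState = new Map<string, unknown>()

function saveApiConfiguration(config: { modelMaxThinkingTokens?: number }): void {
	// The renamed field is written under the legacy key.
	globalState.set("anthropicThinking", config.modelMaxThinkingTokens)
}

function loadApiConfiguration(): { modelMaxThinkingTokens?: number } {
	// ...and read back from the same key into the new field name.
	return { modelMaxThinkingTokens: globalState.get("anthropicThinking") as number | undefined }
}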

src/shared/__tests__/checkExistApiConfig.test.ts
Lines changed: 1 addition & 1 deletion

@@ -32,7 +32,7 @@ describe("checkExistKey", () => {
 			apiKey: "test-key",
 			apiProvider: undefined,
 			anthropicBaseUrl: undefined,
-			anthropicThinking: undefined,
+			modelMaxThinkingTokens: undefined,
 		}
 		expect(checkExistKey(config)).toBe(true)
 	})

src/shared/api.ts
Lines changed: 1 addition & 2 deletions

@@ -21,7 +21,6 @@ export interface ApiHandlerOptions {
 	apiModelId?: string
 	apiKey?: string // anthropic
 	anthropicBaseUrl?: string
-	anthropicThinking?: number
 	vsCodeLmModelSelector?: vscode.LanguageModelChatSelector
 	glamaModelId?: string
 	glamaModelInfo?: ModelInfo
@@ -41,7 +40,6 @@ export interface ApiHandlerOptions {
 	awsUseProfile?: boolean
 	vertexProjectId?: string
 	vertexRegion?: string
-	vertexThinking?: number
 	openAiBaseUrl?: string
 	openAiApiKey?: string
 	openAiModelId?: string
@@ -70,6 +68,7 @@ export interface ApiHandlerOptions {
 	requestyModelInfo?: ModelInfo
 	modelTemperature?: number
 	modelMaxTokens?: number
+	modelMaxThinkingTokens?: number
 }

 export type ApiConfiguration = ApiHandlerOptions & {
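
With the per-provider fields removed from ApiHandlerOptions, a single modelMaxThinkingTokens entry configures the thinking budget regardless of provider. A hedged usage sketch, limited to fields that appear in the diffs above (values are the ones used in the tests):

// Illustrative configuration object; other provider fields omitted.
const options = {
	apiModelId: "claude-3-7-sonnet@20250219:thinking",
	vertexProjectId: "test-project",
	vertexRegion: "us-central1",
	modelMaxTokens: 16384,
	modelMaxThinkingTokens: 4096, // replaces anthropicThinking / vertexThinking
}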

src/shared/globalState.ts
Lines changed: 1 addition & 2 deletions

@@ -24,7 +24,6 @@ export type GlobalStateKey =
 	| "awsUseProfile"
 	| "vertexProjectId"
 	| "vertexRegion"
-	| "vertexThinking"
 	| "lastShownAnnouncementId"
 	| "customInstructions"
 	| "alwaysAllowReadOnly"
@@ -43,7 +42,6 @@ export type GlobalStateKey =
 	| "lmStudioModelId"
 	| "lmStudioBaseUrl"
 	| "anthropicBaseUrl"
-	| "anthropicThinking"
 	| "azureApiVersion"
 	| "openAiStreamingEnabled"
 	| "openRouterModelId"
@@ -83,5 +81,6 @@ export type GlobalStateKey =
 	| "unboundModelInfo"
 	| "modelTemperature"
 	| "modelMaxTokens"
+	| "anthropicThinking" // TODO: Rename to `modelMaxThinkingTokens`.
 	| "mistralCodestralUrl"
 	| "maxOpenTabsContext"

webview-ui/src/components/settings/ThinkingBudget.tsx
Lines changed: 5 additions & 8 deletions

@@ -17,27 +17,24 @@ export const ThinkingBudget = ({
 	modelInfo,
 	provider,
 }: ThinkingBudgetProps) => {
-	const isVertexProvider = provider === "vertex"
-	const budgetField = isVertexProvider ? "vertexThinking" : "anthropicThinking"
-
 	const tokens = apiConfiguration?.modelMaxTokens || modelInfo?.maxTokens || 64_000
 	const tokensMin = 8192
 	const tokensMax = modelInfo?.maxTokens || 64_000

 	// Get the appropriate thinking tokens based on provider
 	const thinkingTokens = useMemo(() => {
-		const value = isVertexProvider ? apiConfiguration?.vertexThinking : apiConfiguration?.anthropicThinking
+		const value = apiConfiguration?.modelMaxThinkingTokens
 		return value || Math.min(Math.floor(0.8 * tokens), 8192)
-	}, [apiConfiguration, isVertexProvider, tokens])
+	}, [apiConfiguration, tokens])

 	const thinkingTokensMin = 1024
 	const thinkingTokensMax = Math.floor(0.8 * tokens)

 	useEffect(() => {
 		if (thinkingTokens > thinkingTokensMax) {
-			setApiConfigurationField(budgetField, thinkingTokensMax)
+			setApiConfigurationField("modelMaxThinkingTokens", thinkingTokensMax)
 		}
-	}, [thinkingTokens, thinkingTokensMax, setApiConfigurationField, budgetField])
+	}, [thinkingTokens, thinkingTokensMax, setApiConfigurationField])

 	if (!modelInfo?.thinking) {
 		return null
@@ -66,7 +63,7 @@ export const ThinkingBudget = ({
 					max={thinkingTokensMax}
 					step={1024}
 					value={[thinkingTokens]}
-					onValueChange={([value]) => setApiConfigurationField(budgetField, value)}
+					onValueChange={([value]) => setApiConfigurationField("modelMaxThinkingTokens", value)}
 				/>
 				<div className="w-12 text-sm text-center">{thinkingTokens}</div>
 			</div>
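
The slider's behavior amounts to a default plus a clamp: if no modelMaxThinkingTokens is stored, it falls back to 80% of the available tokens capped at 8192; if the stored value exceeds 80% of the available tokens, the effect above writes the clamped value back. A hedged sketch that folds the useMemo default and the useEffect clamp into one pure function (the component itself keeps them separate):

// Sketch of the default-and-clamp behavior; not the component's actual code.
function effectiveThinkingTokens(stored: number | undefined, tokens: number): number {
	const thinkingTokensMax = Math.floor(0.8 * tokens)
	const fallback = Math.min(thinkingTokensMax, 8192)
	return Math.min(stored || fallback, thinkingTokensMax)
}

// e.g. tokens = 16384: effectiveThinkingTokens(undefined, 16384) === 8192,
//      effectiveThinkingTokens(20000, 16384) === 13107 (clamped to 80% of tokens).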

webview-ui/src/components/settings/__tests__/ApiOptions.test.tsx
Lines changed: 1 addition & 6 deletions

@@ -51,12 +51,7 @@ jest.mock("../ThinkingBudget", () => ({
 	ThinkingBudget: ({ apiConfiguration, setApiConfigurationField, modelInfo, provider }: any) =>
 		modelInfo?.thinking ? (
 			<div data-testid="thinking-budget" data-provider={provider}>
-				<input
-					data-testid="thinking-tokens"
-					value={
-						provider === "vertex" ? apiConfiguration?.vertexThinking : apiConfiguration?.anthropicThinking
-					}
-				/>
+				<input data-testid="thinking-tokens" value={apiConfiguration?.modelMaxThinkingTokens} />
 			</div>
 		) : null,
 }))
