Skip to content

Commit 04f5332

Browse files
committed
feat: Add reasoning effort setting for OpenAI compatible provider
1 parent 42c1f5f commit 04f5332

File tree

31 files changed

+323
-31
lines changed

31 files changed

+323
-31
lines changed

.changeset/odd-ligers-press.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"roo-cline": minor
3+
---
4+
5+
Add Reasoning Effort setting for OpenAI Compatible provider

package-lock.json

Lines changed: 11 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -469,6 +469,7 @@
469469
"@types/jest": "^29.5.14",
470470
"@types/mocha": "^10.0.10",
471471
"@types/node": "20.x",
472+
"@types/node-cache": "^4.1.3",
472473
"@types/node-ipc": "^9.2.3",
473474
"@types/string-similarity": "^4.0.2",
474475
"@typescript-eslint/eslint-plugin": "^7.14.1",

package.nls.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,5 +29,6 @@
2929
"settings.vsCodeLmModelSelector.description": "Settings for VSCode Language Model API",
3030
"settings.vsCodeLmModelSelector.vendor.description": "The vendor of the language model (e.g. copilot)",
3131
"settings.vsCodeLmModelSelector.family.description": "The family of the language model (e.g. gpt-4)",
32-
"settings.customStoragePath.description": "Custom storage path. Leave empty to use the default location. Supports absolute paths (e.g. 'D:\\RooCodeStorage')"
32+
"settings.customStoragePath.description": "Custom storage path. Leave empty to use the default location. Supports absolute paths (e.g. 'D:\\RooCodeStorage')",
33+
"settings:providers.setReasoningLevel": "Enable Reasoning Effort"
3334
}

src/api/providers/__tests__/openai.test.ts

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -155,6 +155,39 @@ describe("OpenAiHandler", () => {
155155
expect(textChunks).toHaveLength(1)
156156
expect(textChunks[0].text).toBe("Test response")
157157
})
158+
it("should include reasoning_effort when reasoning effort is enabled", async () => {
159+
const reasoningOptions: ApiHandlerOptions = {
160+
...mockOptions,
161+
enableReasoningEffort: true,
162+
openAiCustomModelInfo: { contextWindow: 128_000, supportsPromptCache: false, reasoningEffort: "high" },
163+
}
164+
const reasoningHandler = new OpenAiHandler(reasoningOptions)
165+
const stream = reasoningHandler.createMessage(systemPrompt, messages)
166+
// Consume the stream to trigger the API call
167+
for await (const _chunk of stream) {
168+
}
169+
// Assert the mockCreate was called with reasoning_effort
170+
expect(mockCreate).toHaveBeenCalled()
171+
const callArgs = mockCreate.mock.calls[0][0]
172+
expect(callArgs.reasoning_effort).toBe("high")
173+
})
174+
175+
it("should not include reasoning_effort when reasoning effort is disabled", async () => {
176+
const noReasoningOptions: ApiHandlerOptions = {
177+
...mockOptions,
178+
enableReasoningEffort: false,
179+
openAiCustomModelInfo: { contextWindow: 128_000, supportsPromptCache: false },
180+
}
181+
const noReasoningHandler = new OpenAiHandler(noReasoningOptions)
182+
const stream = noReasoningHandler.createMessage(systemPrompt, messages)
183+
// Consume the stream to trigger the API call
184+
for await (const _chunk of stream) {
185+
}
186+
// Assert the mockCreate was called without reasoning_effort
187+
expect(mockCreate).toHaveBeenCalled()
188+
const callArgs = mockCreate.mock.calls[0][0]
189+
expect(callArgs.reasoning_effort).toBeUndefined()
190+
})
158191
})
159192

160193
describe("error handling", () => {

src/api/providers/__tests__/unbound.test.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import { Anthropic } from "@anthropic-ai/sdk"
44

5-
import { ApiHandlerOptions } from "../../../shared/api"
5+
import { ApiHandlerOptions, unboundDefaultModelId } from "../../../shared/api"
66

77
import { UnboundHandler } from "../unbound"
88

@@ -70,7 +70,7 @@ describe("UnboundHandler", () => {
7070
beforeEach(() => {
7171
mockOptions = {
7272
unboundApiKey: "test-api-key",
73-
unboundModelId: "anthropic/claude-3-5-sonnet-20241022",
73+
unboundModelId: unboundDefaultModelId,
7474
}
7575

7676
handler = new UnboundHandler(mockOptions)
@@ -133,7 +133,7 @@ describe("UnboundHandler", () => {
133133

134134
expect(mockCreate).toHaveBeenCalledWith(
135135
expect.objectContaining({
136-
model: "claude-3-5-sonnet-20241022",
136+
model: "claude-3-7-sonnet-20250219",
137137
messages: expect.any(Array),
138138
stream: true,
139139
}),
@@ -174,7 +174,7 @@ describe("UnboundHandler", () => {
174174

175175
expect(mockCreate).toHaveBeenCalledWith(
176176
expect.objectContaining({
177-
model: "claude-3-5-sonnet-20241022",
177+
model: "claude-3-7-sonnet-20250219",
178178
messages: [{ role: "user", content: "Test prompt" }],
179179
temperature: 0,
180180
max_tokens: 8192,

src/core/__tests__/Cline.test.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -238,6 +238,7 @@ describe("Cline", () => {
238238
apiProvider: "anthropic",
239239
apiModelId: "claude-3-5-sonnet-20241022",
240240
apiKey: "test-api-key", // Add API key to mock config
241+
enableReasoningEffort: false,
241242
}
242243

243244
// Mock provider methods
@@ -475,10 +476,12 @@ describe("Cline", () => {
475476
const configWithImages = {
476477
...mockApiConfig,
477478
apiModelId: "claude-3-sonnet",
479+
enableReasoningEffort: false,
478480
}
479481
const configWithoutImages = {
480482
...mockApiConfig,
481483
apiModelId: "gpt-3.5-turbo",
484+
enableReasoningEffort: false,
482485
}
483486

484487
// Create test conversation history with mixed content

src/exports/roo-code.d.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,7 @@ type ProviderSettings = {
8787
openAiUseAzure?: boolean | undefined
8888
azureApiVersion?: string | undefined
8989
openAiStreamingEnabled?: boolean | undefined
90+
enableReasoningEffort?: boolean | undefined
9091
ollamaModelId?: string | undefined
9192
ollamaBaseUrl?: string | undefined
9293
vsCodeLmModelSelector?:

src/exports/types.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,7 @@ type ProviderSettings = {
8888
openAiUseAzure?: boolean | undefined
8989
azureApiVersion?: string | undefined
9090
openAiStreamingEnabled?: boolean | undefined
91+
enableReasoningEffort?: boolean | undefined
9192
ollamaModelId?: string | undefined
9293
ollamaBaseUrl?: string | undefined
9394
vsCodeLmModelSelector?:

src/schemas/index.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -355,6 +355,7 @@ export const providerSettingsSchema = z.object({
355355
openAiUseAzure: z.boolean().optional(),
356356
azureApiVersion: z.string().optional(),
357357
openAiStreamingEnabled: z.boolean().optional(),
358+
enableReasoningEffort: z.boolean().optional(),
358359
// Ollama
359360
ollamaModelId: z.string().optional(),
360361
ollamaBaseUrl: z.string().optional(),
@@ -453,6 +454,7 @@ const providerSettingsRecord: ProviderSettingsRecord = {
453454
openAiUseAzure: undefined,
454455
azureApiVersion: undefined,
455456
openAiStreamingEnabled: undefined,
457+
enableReasoningEffort: undefined,
456458
// Ollama
457459
ollamaModelId: undefined,
458460
ollamaBaseUrl: undefined,

0 commit comments

Comments (0)