Commit 43668e0

Add support for different reasoning effort (RooCodeInc#2692)
1 parent 250ea68 commit 43668e0

2 files changed: +35 −3

src/api/providers/openai-native.ts
Lines changed: 14 additions & 3 deletions

@@ -41,7 +41,17 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
 		}
 
 		if (model.id.startsWith("o3-mini")) {
-			yield* this.handleO3FamilyMessage(model, systemPrompt, messages)
+			yield* this.handleReasonerMessage(model, "o3-mini", systemPrompt, messages)
+			return
+		}
+
+		if (model.id.startsWith("o3")) {
+			yield* this.handleReasonerMessage(model, "o3", systemPrompt, messages)
+			return
+		}
+
+		if (model.id.startsWith("o4-mini")) {
+			yield* this.handleReasonerMessage(model, "o4-mini", systemPrompt, messages)
 			return
 		}
 
@@ -72,13 +82,14 @@
 		yield* this.handleStreamResponse(response, model)
 	}
 
-	private async *handleO3FamilyMessage(
+	private async *handleReasonerMessage(
 		model: OpenAiNativeModel,
+		family: "o3-mini" | "o3" | "o4-mini",
 		systemPrompt: string,
 		messages: Anthropic.Messages.MessageParam[],
 	): ApiStream {
 		const stream = await this.client.chat.completions.create({
-			model: "o3-mini",
+			model: family,
 			messages: [
 				{
 					role: "developer",

src/shared/api.ts
Lines changed: 21 additions & 0 deletions

@@ -786,6 +786,27 @@ export const openAiNativeModels = {
 		inputPrice: 10.0,
 		outputPrice: 40.0,
 		cacheReadsPrice: 2.5,
+		reasoningEffort: "medium",
+	},
+	"o3-high": {
+		maxTokens: 100_000,
+		contextWindow: 200_000,
+		supportsImages: true,
+		supportsPromptCache: true,
+		inputPrice: 10.0,
+		outputPrice: 40.0,
+		cacheReadsPrice: 2.5,
+		reasoningEffort: "high",
+	},
+	"o3-low": {
+		maxTokens: 100_000,
+		contextWindow: 200_000,
+		supportsImages: true,
+		supportsPromptCache: true,
+		inputPrice: 10.0,
+		outputPrice: 40.0,
+		cacheReadsPrice: 2.5,
+		reasoningEffort: "low",
 	},
 	"o4-mini": {
 		maxTokens: 100_000,
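With these entries, "o3-high" and "o3-low" act as virtual model ids: the prefix routing in openai-native.ts sends both to the "o3" family, and the reasoningEffort field is what distinguishes them. Below is a rough sketch of how such a field could be forwarded to the API; it is not the commit's code, and it assumes an openai SDK version whose chat.completions.create accepts the reasoning_effort parameter for o-series models. The createReasonerStream and ModelInfo names are made up for the example.

// Hypothetical sketch: forwarding a per-model reasoningEffort setting to the OpenAI SDK.
// Assumes a recent "openai" package where chat.completions.create accepts reasoning_effort.
import OpenAI from "openai"

type ReasoningEffort = "low" | "medium" | "high"

interface ModelInfo {
	maxTokens: number
	contextWindow: number
	reasoningEffort?: ReasoningEffort
}

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })

async function createReasonerStream(
	family: "o3-mini" | "o3" | "o4-mini",
	info: ModelInfo,
	systemPrompt: string,
) {
	return client.chat.completions.create({
		model: family,
		// Only send the parameter when the model entry defines an effort level.
		...(info.reasoningEffort ? { reasoning_effort: info.reasoningEffort } : {}),
		messages: [{ role: "developer", content: systemPrompt }],
		stream: true,
	})
}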
