Skip to content

Commit e4c398d

Browse files
Discards temperature setting on o3-mini for Unbound (#1836)
* Discards the temperature setting on o3-mini for Unbound
* Adds a changeset

Co-authored-by: Pugazhendhi <[email protected]>
1 parent 9fadd3d commit e4c398d

File tree

3 files changed

+69
-21
lines changed

3 files changed

+69
-21
lines changed

.changeset/twenty-spoons-shout.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
---
2+
"roo-cline": patch
3+
---
4+
5+
Adds a function that conditionally applies the temperature setting based on the model ID.
6+
This is needed because openai/o3-mini does not support the temperature parameter, which causes requests that include it to fail.
7+
This update will allow users to use o3-mini on Unbound without facing any issues.

src/api/providers/__tests__/unbound.test.ts

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -246,6 +246,38 @@ describe("UnboundHandler", () => {
246246
)
247247
expect(mockCreate.mock.calls[0][0]).not.toHaveProperty("max_tokens")
248248
})
249+
250+
it("should not set temperature for openai/o3-mini", async () => {
251+
mockCreate.mockClear()
252+
253+
const openaiOptions = {
254+
apiModelId: "openai/o3-mini",
255+
unboundApiKey: "test-key",
256+
unboundModelId: "openai/o3-mini",
257+
unboundModelInfo: {
258+
maxTokens: undefined,
259+
contextWindow: 128000,
260+
supportsPromptCache: true,
261+
inputPrice: 0.01,
262+
outputPrice: 0.03,
263+
},
264+
}
265+
const openaiHandler = new UnboundHandler(openaiOptions)
266+
267+
await openaiHandler.completePrompt("Test prompt")
268+
expect(mockCreate).toHaveBeenCalledWith(
269+
expect.objectContaining({
270+
model: "o3-mini",
271+
messages: [{ role: "user", content: "Test prompt" }],
272+
}),
273+
expect.objectContaining({
274+
headers: expect.objectContaining({
275+
"X-Unbound-Metadata": expect.stringContaining("roo-code"),
276+
}),
277+
}),
278+
)
279+
expect(mockCreate.mock.calls[0][0]).not.toHaveProperty("temperature")
280+
})
249281
})
250282

251283
describe("getModel", () => {

src/api/providers/unbound.ts

Lines changed: 30 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,10 @@ export class UnboundHandler extends BaseProvider implements SingleCompletionHand
2525
this.client = new OpenAI({ baseURL, apiKey })
2626
}
2727

28+
private supportsTemperature(): boolean {
29+
return !this.getModel().id.startsWith("openai/o3-mini")
30+
}
31+
2832
override async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
2933
// Convert Anthropic messages to OpenAI format
3034
const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
@@ -78,28 +82,30 @@ export class UnboundHandler extends BaseProvider implements SingleCompletionHand
7882
maxTokens = this.getModel().info.maxTokens
7983
}
8084

85+
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
86+
model: this.getModel().id.split("/")[1],
87+
max_tokens: maxTokens,
88+
messages: openAiMessages,
89+
stream: true,
90+
}
91+
92+
if (this.supportsTemperature()) {
93+
requestOptions.temperature = this.options.modelTemperature ?? 0
94+
}
95+
8196
const { data: completion, response } = await this.client.chat.completions
82-
.create(
83-
{
84-
model: this.getModel().id.split("/")[1],
85-
max_tokens: maxTokens,
86-
temperature: this.options.modelTemperature ?? 0,
87-
messages: openAiMessages,
88-
stream: true,
89-
},
90-
{
91-
headers: {
92-
"X-Unbound-Metadata": JSON.stringify({
93-
labels: [
94-
{
95-
key: "app",
96-
value: "roo-code",
97-
},
98-
],
99-
}),
100-
},
97+
.create(requestOptions, {
98+
headers: {
99+
"X-Unbound-Metadata": JSON.stringify({
100+
labels: [
101+
{
102+
key: "app",
103+
value: "roo-code",
104+
},
105+
],
106+
}),
101107
},
102-
)
108+
})
103109
.withResponse()
104110

105111
for await (const chunk of completion) {
@@ -150,7 +156,10 @@ export class UnboundHandler extends BaseProvider implements SingleCompletionHand
150156
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
151157
model: this.getModel().id.split("/")[1],
152158
messages: [{ role: "user", content: prompt }],
153-
temperature: this.options.modelTemperature ?? 0,
159+
}
160+
161+
if (this.supportsTemperature()) {
162+
requestOptions.temperature = this.options.modelTemperature ?? 0
154163
}
155164

156165
if (this.getModel().id.startsWith("anthropic/")) {

0 commit comments

Comments
 (0)