Commit 3d67133

Merge pull request #698 from RooVetGit/more_o3_mini_fixes
Make o3-mini work in glama
2 parents 11fd360 + 0cd1642 commit 3d67133

1 file changed: +30 −21 lines changed

src/api/providers/glama.ts

Lines changed: 30 additions & 21 deletions
@@ -72,28 +72,30 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 			maxTokens = 8_192
 		}

+		const requestOptions: OpenAI.Chat.ChatCompletionCreateParams = {
+			model: this.getModel().id,
+			max_tokens: maxTokens,
+			messages: openAiMessages,
+			stream: true,
+		}
+
+		if (this.supportsTemperature()) {
+			requestOptions.temperature = 0
+		}
+
 		const { data: completion, response } = await this.client.chat.completions
-			.create(
-				{
-					model: this.getModel().id,
-					max_tokens: maxTokens,
-					temperature: 0,
-					messages: openAiMessages,
-					stream: true,
-				},
-				{
-					headers: {
-						"X-Glama-Metadata": JSON.stringify({
-							labels: [
-								{
-									key: "app",
-									value: "vscode.rooveterinaryinc.roo-cline",
-								},
-							],
-						}),
-					},
+			.create(requestOptions, {
+				headers: {
+					"X-Glama-Metadata": JSON.stringify({
+						labels: [
+							{
+								key: "app",
+								value: "vscode.rooveterinaryinc.roo-cline",
+							},
+						],
+					}),
 				},
-			)
+			})
 			.withResponse()

 		const completionRequestId = response.headers.get("x-completion-request-id")
@@ -148,6 +150,10 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 		}
 	}

+	private supportsTemperature(): boolean {
+		return !this.getModel().id.startsWith("openai/o3-mini")
+	}
+
 	getModel(): { id: string; info: ModelInfo } {
 		const modelId = this.options.glamaModelId
 		const modelInfo = this.options.glamaModelInfo
@@ -164,7 +170,10 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 			model: this.getModel().id,
 			messages: [{ role: "user", content: prompt }],
-			temperature: 0,
+		}
+
+		if (this.supportsTemperature()) {
+			requestOptions.temperature = 0
 		}

 		if (this.getModel().id.startsWith("anthropic/")) {
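For context: o3-mini does not accept a temperature parameter, so the handler now builds the request options object first and only adds temperature: 0 when the model supports it, via the new supportsTemperature() check. Below is a minimal, self-contained sketch of this pattern, assuming the openai Node SDK; the class and field names are simplified stand-ins for the real handler in src/api/providers/glama.ts, not its actual code.

// Sketch of the commit's pattern (non-streaming path), not the full handler.
import OpenAI from "openai"

class GlamaCompletionSketch {
	// `modelId` stands in for this.getModel().id in the real handler.
	constructor(
		private client: OpenAI,
		private modelId: string,
	) {}

	// o3-mini rejects a `temperature` parameter, so gate it behind a
	// capability check (same logic as the diff above).
	private supportsTemperature(): boolean {
		return !this.modelId.startsWith("openai/o3-mini")
	}

	async completePrompt(prompt: string): Promise<string> {
		// Build the base options first...
		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
			model: this.modelId,
			messages: [{ role: "user", content: prompt }],
		}

		// ...then add temperature only for models that accept it.
		if (this.supportsTemperature()) {
			requestOptions.temperature = 0
		}

		const completion = await this.client.chat.completions.create(requestOptions)
		return completion.choices[0]?.message?.content ?? ""
	}
}

The streaming path in the first hunk follows the same shape: the options object (with stream: true) is assembled up front, temperature is added conditionally, and the object is then passed to .create() alongside the metadata headers.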
