
Commit 25cf5b3

fix implementation to pass tests

1 parent 7718a81

1 file changed: src/api/providers/openai.ts (20 additions, 5 deletions)
@@ -124,7 +124,12 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 
 		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
 			model: modelId,
-			temperature: this.options.modelTemperature ?? (deepseekReasoner ? DEEP_SEEK_DEFAULT_TEMPERATURE : 0),
+			temperature:
+				this.options.modelTemperature !== undefined && this.options.modelTemperature !== null
+					? this.options.modelTemperature
+					: deepseekReasoner
+						? DEEP_SEEK_DEFAULT_TEMPERATURE
+						: 0,
 			messages: convertedMessages,
 			stream: true as const,
 			stream_options: { include_usage: true },
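Note on the first hunk: temperature stays present on the streaming request options, and the spelled-out undefined/null check selects the same value the previous ?? expression did; the rewrite simply matches the conditional pattern used in the hunks below. A minimal TypeScript sketch of that equivalence (helper names here are illustrative only):

	const viaNullish = (t: number | null | undefined, fallback: number) => t ?? fallback
	const viaExplicitCheck = (t: number | null | undefined, fallback: number) =>
		t !== undefined && t !== null ? t : fallback

	viaNullish(0, 0.6) === viaExplicitCheck(0, 0.6) // true, an explicit 0 is kept
	viaNullish(undefined, 0.6) === viaExplicitCheck(undefined, 0.6) // true, both fall back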
@@ -205,7 +210,11 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 			messages: deepseekReasoner
 				? convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
 				: [systemMessage, ...convertToOpenAiMessages(messages)],
-			temperature: this.options.modelTemperature ?? (deepseekReasoner ? DEEP_SEEK_DEFAULT_TEMPERATURE : 0),
+			...(this.options.modelTemperature !== undefined && this.options.modelTemperature !== null
+				? { temperature: this.options.modelTemperature }
+				: deepseekReasoner
+					? { temperature: DEEP_SEEK_DEFAULT_TEMPERATURE }
+					: {}),
 		}
 
 		// Add min_p if set
@@ -258,7 +267,9 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 			model: this.getModel().id,
 			messages: [{ role: "user", content: prompt }],
-			temperature: this.options.modelTemperature ?? 0,
+			...(this.options.modelTemperature !== undefined && this.options.modelTemperature !== null
+				? { temperature: this.options.modelTemperature }
+				: {}),
 		}
 
 		// Add min_p if set
@@ -309,7 +320,9 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 			stream: true as const,
 			stream_options: { include_usage: true },
 			reasoning_effort: this.getModel().info.reasoningEffort,
-			temperature: this.options.modelTemperature ?? 0,
+			...(this.options.modelTemperature !== undefined && this.options.modelTemperature !== null
+				? { temperature: this.options.modelTemperature }
+				: {}),
 		} as any
 
 		// Add min_p if set
@@ -345,7 +358,9 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 				},
 				...convertToOpenAiMessages(messages),
 			],
-			temperature: this.options.modelTemperature ?? 0,
+			...(this.options.modelTemperature !== undefined && this.options.modelTemperature !== null
+				? { temperature: this.options.modelTemperature }
+				: {}),
 		}
 
 		// Add min_p if set
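The remaining hunks do change behavior: when modelTemperature is not configured, the conditional spread omits the temperature field from the request entirely instead of sending temperature: 0, so the provider's own default applies (the second hunk still falls back to DEEP_SEEK_DEFAULT_TEMPERATURE for DeepSeek reasoner models). A small TypeScript sketch of the pattern, with illustrative names and an assumed constant value:

	type Options = { modelTemperature?: number | null }

	// Assumed value, shown here only to make the sketch self-contained.
	const DEEP_SEEK_DEFAULT_TEMPERATURE = 0.6

	// Returns { temperature: ... } or {} so callers can spread it into request options.
	function temperatureFields(options: Options, deepseekReasoner: boolean) {
		return options.modelTemperature !== undefined && options.modelTemperature !== null
			? { temperature: options.modelTemperature }
			: deepseekReasoner
				? { temperature: DEEP_SEEK_DEFAULT_TEMPERATURE }
				: {}
	}

	console.log({ model: "m", ...temperatureFields({}, false) })
	// { model: "m" }  (no temperature key, the provider default applies)
	console.log({ model: "m", ...temperatureFields({ modelTemperature: 0 }, false) })
	// { model: "m", temperature: 0 }  (an explicit 0 is still forwarded)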
