Skip to content

Commit 5c17f79

Browse files
authored
Merge pull request #633 from Szpadel/r1-perplexity-support
Add support for perplexity/sonar-reasoning
2 parents 4026a87 + 1534a9c commit 5c17f79

File tree

1 file changed

+13
-2
lines changed

1 file changed

+13
-2
lines changed

src/api/providers/openrouter.ts

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -114,11 +114,21 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
114114
}
115115

116116
let temperature = 0
117-
if (this.getModel().id === "deepseek/deepseek-r1" || this.getModel().id.startsWith("deepseek/deepseek-r1:")) {
117+
let topP: number | undefined = undefined
118+
119+
// Handle models based on deepseek-r1
120+
if (
121+
this.getModel().id === "deepseek/deepseek-r1" ||
122+
this.getModel().id.startsWith("deepseek/deepseek-r1:") ||
123+
this.getModel().id === "perplexity/sonar-reasoning"
124+
) {
118125
// Recommended temperature for DeepSeek reasoning models
119126
temperature = 0.6
120-
// DeepSeek highly recommends using user instead of system role
127+
// DeepSeek highly recommends using user instead of system
128+
// role
121129
openAiMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
130+
// Some providers support topP; 0.95 is the value that DeepSeek used in their benchmarks
131+
topP = 0.95
122132
}
123133

124134
// https://openrouter.ai/docs/transforms
@@ -127,6 +137,7 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
127137
model: this.getModel().id,
128138
max_tokens: maxTokens,
129139
temperature: temperature,
140+
top_p: topP,
130141
messages: openAiMessages,
131142
stream: true,
132143
include_reasoning: true,

0 commit comments

Comments
 (0)