From 1534a9c04771f23349311ca0066bcca4b4c07f9e Mon Sep 17 00:00:00 2001
From: Piotr Rogowski
Date: Wed, 29 Jan 2025 09:54:19 +0100
Subject: [PATCH] Add support for perplexity/sonar-reasoning

---
 src/api/providers/openrouter.ts | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/src/api/providers/openrouter.ts b/src/api/providers/openrouter.ts
index e5390cb75c3..b6f15d2caf5 100644
--- a/src/api/providers/openrouter.ts
+++ b/src/api/providers/openrouter.ts
@@ -114,11 +114,21 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 		}
 
 		let temperature = 0
-		if (this.getModel().id === "deepseek/deepseek-r1" || this.getModel().id.startsWith("deepseek/deepseek-r1:")) {
+		let topP: number | undefined = undefined
+
+		// Handle models based on deepseek-r1
+		if (
+			this.getModel().id === "deepseek/deepseek-r1" ||
+			this.getModel().id.startsWith("deepseek/deepseek-r1:") ||
+			this.getModel().id === "perplexity/sonar-reasoning"
+		) {
 			// Recommended temperature for DeepSeek reasoning models
 			temperature = 0.6
-			// DeepSeek highly recommends using user instead of system role
+			// DeepSeek highly recommends using the user role instead of
+			// the system role
 			openAiMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
+			// Some providers support topP; 0.95 is the value DeepSeek used in their benchmarks
+			topP = 0.95
 		}
 
 		// https://openrouter.ai/docs/transforms
@@ -127,6 +137,7 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 			model: this.getModel().id,
 			max_tokens: maxTokens,
 			temperature: temperature,
+			top_p: topP,
 			messages: openAiMessages,
 			stream: true,
 			include_reasoning: true,
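
The reasoning-model branch above relabels the system prompt as a user message and passes everything through convertToR1Format, since DeepSeek R1 style models (and now perplexity/sonar-reasoning) work best with a single conversation that has no separate system turn. As a rough illustration only, the TypeScript sketch below shows one way such a conversion could work; the helper name convertToR1FormatSketch, the SimpleMessage type, and the merging behavior are assumptions made for this example, not the repository's actual convertToR1Format, which may handle additional cases such as image content blocks.

	// Hedged sketch of an R1-style message conversion; not the repository's helper.
	type SimpleMessage = { role: "user" | "assistant"; content: string }

	function convertToR1FormatSketch(messages: SimpleMessage[]): SimpleMessage[] {
		const merged: SimpleMessage[] = []
		for (const message of messages) {
			const last = merged[merged.length - 1]
			if (last && last.role === message.role) {
				// Fold neighboring turns that share a role into one turn,
				// e.g. the relabeled system prompt plus the first real user message.
				last.content = `${last.content}\n\n${message.content}`
			} else {
				merged.push({ ...message })
			}
		}
		return merged
	}

	// Example:
	//   convertToR1FormatSketch([
	//     { role: "user", content: "You are a helpful assistant." }, // former system prompt
	//     { role: "user", content: "Hello" },
	//   ])
	//   returns [{ role: "user", content: "You are a helpful assistant.\n\nHello" }]

One design note on top_p: for models that never enter the reasoning branch, topP stays undefined, and assuming the client serializes the request body with JSON.stringify semantics (properties whose value is undefined are dropped), top_p is simply omitted from those requests rather than being sent as null.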