
Commit e8a2e88

feat: qwen platform adds deepseek-r1/v3 support (RooCodeInc#1729)

Co-authored-by: fine <[email protected]>

Parent: 7b6a3d2

File tree

.changeset/stale-lizards-poke.md
src/api/providers/qwen.ts
src/shared/api.ts

3 files changed: +31 −1 lines

.changeset/stale-lizards-poke.md

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+"claude-dev": minor
+---
+
+qwen platform adds deepseek-r1/v3 support

src/api/providers/qwen.ts

Lines changed: 6 additions & 1 deletion
@@ -4,6 +4,7 @@ import { ApiHandler } from "../"
 import { ApiHandlerOptions, QwenModelId, ModelInfo, qwenDefaultModelId, qwenModels } from "../../shared/api"
 import { convertToOpenAiMessages } from "../transform/openai-format"
 import { ApiStream } from "../transform/stream"
+import { convertToR1Format } from "../transform/r1-format"

 export class QwenHandler implements ApiHandler {
 	private options: ApiHandlerOptions

@@ -34,17 +35,21 @@ export class QwenHandler implements ApiHandler {

 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		const model = this.getModel()
+		const isDeepseekReasoner = model.id.includes("deepseek-r1")
 		let openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
 			...convertToOpenAiMessages(messages),
 		]
-
+		if (isDeepseekReasoner) {
+			openAiMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
+		}
 		const stream = await this.client.chat.completions.create({
 			model: model.id,
 			max_completion_tokens: model.info.maxTokens,
 			messages: openAiMessages,
 			stream: true,
 			stream_options: { include_usage: true },
+			...(model.id === "deepseek-r1" ? {} : { temperature: 0 }),
 		})

 		for await (const chunk of stream) {
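
For context, a minimal TypeScript sketch of what an R1-format transform like the imported convertToR1Format generally needs to do: drop the separate system role by folding the system prompt into the first user turn, and merge consecutive same-role messages, since reasoner-style endpoints typically expect strictly alternating user/assistant turns. This is an assumption about the helper's behavior, not the repository's actual src/api/transform/r1-format.ts implementation.

import { Anthropic } from "@anthropic-ai/sdk"

type R1Message = { role: "user" | "assistant"; content: string }

// Hypothetical stand-in for convertToR1Format: flattens content to plain text and
// merges adjacent messages that share a role so turns strictly alternate.
function convertToR1FormatSketch(
	messages: ({ role: "user" | "assistant"; content: string } | Anthropic.Messages.MessageParam)[],
): R1Message[] {
	const result: R1Message[] = []
	for (const msg of messages) {
		// Flatten Anthropic content blocks to text; image blocks are ignored in this sketch.
		const text =
			typeof msg.content === "string"
				? msg.content
				: msg.content
						.filter((block): block is Anthropic.Messages.TextBlockParam => block.type === "text")
						.map((block) => block.text)
						.join("\n")
		const last = result[result.length - 1]
		if (last && last.role === msg.role) {
			// Same role twice in a row: merge into one turn instead of emitting both.
			last.content += "\n" + text
		} else {
			result.push({ role: msg.role, content: text })
		}
	}
	return result
}

Under that assumption, createMessage() above replaces the system-message array entirely when deepseek-r1 is selected, which also lines up with skipping the temperature: 0 override for that model, since reasoner endpoints commonly ignore or reject a temperature setting.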

src/shared/api.ts

Lines changed: 20 additions & 0 deletions
@@ -529,6 +529,26 @@ export const qwenModels = {
 		cacheWritesPrice: 0.0056,
 		cacheReadsPrice: 0.0224,
 	},
+	"deepseek-v3": {
+		maxTokens: 8_000,
+		contextWindow: 64_000,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0,
+		outputPrice: 0.28,
+		cacheWritesPrice: 0.14,
+		cacheReadsPrice: 0.014,
+	},
+	"deepseek-r1": {
+		maxTokens: 8_000,
+		contextWindow: 64_000,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0,
+		outputPrice: 2.19,
+		cacheWritesPrice: 0.55,
+		cacheReadsPrice: 0.14,
+	},
 } as const satisfies Record<string, ModelInfo>

 // Mistral
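
Assuming QwenModelId is derived from the keys of qwenModels (which the `satisfies Record<string, ModelInfo>` pattern and the import in qwen.ts suggest), the two new entries become valid model IDs for QwenHandler without further wiring. A small usage sketch, with field values taken from the diff above and the pricing unit presumed to be per million tokens:

import { qwenModels, type QwenModelId } from "../../shared/api"

// The new IDs type-check as QwenModelId, and their metadata drives request limits:
const id: QwenModelId = "deepseek-r1"
const info = qwenModels[id]

console.log(info.contextWindow) // 64_000 tokens of context
console.log(info.maxTokens)     // 8_000, passed as max_completion_tokens in createMessage()
console.log(info.outputPrice)   // 2.19, presumably per million output tokens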
