
Commit 6037140

Merge pull request #808 from sammcj/deepseek_r1_ollama
fix(r1): Ensure the DeepSeek r1 model works with Ollama
2 parents 517b545 + a87502e

File tree

1 file changed: +7 -2 lines changed

src/api/providers/ollama.ts

Lines changed: 7 additions & 2 deletions
@@ -3,6 +3,7 @@ import OpenAI from "openai"
 import { ApiHandler, SingleCompletionHandler } from "../"
 import { ApiHandlerOptions, ModelInfo, openAiModelInfoSaneDefaults } from "../../shared/api"
 import { convertToOpenAiMessages } from "../transform/openai-format"
+import { convertToR1Format } from "../transform/r1-format"
 import { ApiStream } from "../transform/stream"
 
 export class OllamaHandler implements ApiHandler, SingleCompletionHandler {
@@ -18,9 +19,11 @@ export class OllamaHandler implements ApiHandler, SingleCompletionHandler {
 	}
 
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
+		const modelId = this.getModel().id
+		const useR1Format = modelId.toLowerCase().includes('deepseek-r1')
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
-			...convertToOpenAiMessages(messages),
+			...(useR1Format ? convertToR1Format(messages) : convertToOpenAiMessages(messages)),
 		]
 
 		const stream = await this.client.chat.completions.create({
@@ -49,9 +52,11 @@ export class OllamaHandler implements ApiHandler, SingleCompletionHandler {
 
 	async completePrompt(prompt: string): Promise<string> {
 		try {
+			const modelId = this.getModel().id
+			const useR1Format = modelId.toLowerCase().includes('deepseek-r1')
 			const response = await this.client.chat.completions.create({
 				model: this.getModel().id,
-				messages: [{ role: "user", content: prompt }],
+				messages: useR1Format ? convertToR1Format([{ role: "user", content: prompt }]) : [{ role: "user", content: prompt }],
 				temperature: 0,
 				stream: false,
 			})
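
The change hinges on convertToR1Format from ../transform/r1-format, whose body is not part of this diff. As a rough, hypothetical sketch of what such a conversion typically needs to do: DeepSeek R1 served through OpenAI-compatible endpoints is commonly strict about role alternation and rejects consecutive messages with the same role, so same-role neighbors get merged. The R1Message type, the function name convertToR1FormatSketch, and the merging behavior below are assumptions for illustration, not the repository's actual implementation.

// Hypothetical sketch of an R1-style conversion (assumed behavior, not the
// actual convertToR1Format in ../transform/r1-format).
import { Anthropic } from "@anthropic-ai/sdk"

// OpenAI-style chat message limited to the roles an R1 conversation uses.
type R1Message = { role: "user" | "assistant"; content: string }

export function convertToR1FormatSketch(messages: Anthropic.Messages.MessageParam[]): R1Message[] {
	const merged: R1Message[] = []
	for (const message of messages) {
		// Flatten Anthropic content blocks to plain text; image blocks would
		// need separate handling and are skipped in this sketch.
		const text =
			typeof message.content === "string"
				? message.content
				: message.content
						.filter((block) => block.type === "text")
						.map((block) => (block as Anthropic.Messages.TextBlockParam).text)
						.join("\n")
		const previous = merged[merged.length - 1]
		if (previous && previous.role === message.role) {
			// Merge into the previous message so roles strictly alternate.
			previous.content += "\n" + text
		} else {
			merged.push({ role: message.role, content: text })
		}
	}
	return merged
}

Note that in createMessage above, the system prompt is still prepended as a separate system-role message ahead of the converted list; whether R1 builds on Ollama accept that role, or whether the real convertToR1Format folds it into the first user turn, is not visible from this diff.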
