@@ -3,6 +3,7 @@ import OpenAI from "openai"
33import { ApiHandler , SingleCompletionHandler } from "../"
44import { ApiHandlerOptions , ModelInfo , openAiModelInfoSaneDefaults } from "../../shared/api"
55import { convertToOpenAiMessages } from "../transform/openai-format"
6+ import { convertToR1Format } from "../transform/r1-format"
67import { ApiStream } from "../transform/stream"
78
89export class OllamaHandler implements ApiHandler , SingleCompletionHandler {
@@ -18,9 +19,11 @@ export class OllamaHandler implements ApiHandler, SingleCompletionHandler {
1819 }
1920
2021 async * createMessage ( systemPrompt : string , messages : Anthropic . Messages . MessageParam [ ] ) : ApiStream {
22+ const modelId = this . getModel ( ) . id
23+ const useR1Format = modelId . toLowerCase ( ) . includes ( 'deepseek-r1' )
2124 const openAiMessages : OpenAI . Chat . ChatCompletionMessageParam [ ] = [
2225 { role : "system" , content : systemPrompt } ,
23- ...convertToOpenAiMessages ( messages ) ,
26+ ...( useR1Format ? convertToR1Format ( messages ) : convertToOpenAiMessages ( messages ) ) ,
2427 ]
2528
2629 const stream = await this . client . chat . completions . create ( {
@@ -49,9 +52,11 @@ export class OllamaHandler implements ApiHandler, SingleCompletionHandler {
4952
5053 async completePrompt ( prompt : string ) : Promise < string > {
5154 try {
55+ const modelId = this . getModel ( ) . id
56+ const useR1Format = modelId . toLowerCase ( ) . includes ( 'deepseek-r1' )
5257 const response = await this . client . chat . completions . create ( {
5358 model : this . getModel ( ) . id ,
54- messages : [ { role : "user" , content : prompt } ] ,
59+ messages : useR1Format ? convertToR1Format ( [ { role : "user" , content : prompt } ] ) : [ { role : "user" , content : prompt } ] ,
5560 temperature : 0 ,
5661 stream : false ,
5762 } )