 import { streamSse } from "@continuedev/fetch";
-import { CompletionOptions, LLMOptions } from "../../index.js";
+import {
+  AssistantChatMessage,
+  ChatMessage,
+  CompletionOptions,
+  LLMOptions,
+  ThinkingChatMessage,
+} from "../../index.js";
 import { osModelsEditPrompt } from "../templates/edit.js";
 
+import { LlmApiRequestType } from "../openaiTypeConverters";
 import OpenAI from "./OpenAI.js";
 
 class Deepseek extends OpenAI {
@@ -16,6 +23,8 @@ class Deepseek extends OpenAI {
   };
   maxStopWords: number | undefined = 16;
 
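+  // Empty list: no request types route through the shared OpenAI adapter,
+  // so chat requests fall through to the custom _streamChat override below.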
+  protected useOpenAIAdapterFor: (LlmApiRequestType | "*")[] = [];
+
   supportsFim(): boolean {
     return true;
   }
@@ -52,6 +61,110 @@ class Deepseek extends OpenAI {
       yield chunk.choices[0].text;
     }
   }
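+
+  // Stream chat completions ourselves so DeepSeek's reasoning_content deltas
+  // can be emitted as "thinking" messages alongside regular assistant output.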
+  protected async *_streamChat(
+    messages: ChatMessage[],
+    signal: AbortSignal,
+    options: CompletionOptions,
+  ): AsyncGenerator<ChatMessage, any, any> {
+    const body = this._convertArgs(options, messages);
+
+    const response = await this.fetch(this._getEndpoint("chat/completions"), {
+      method: "POST",
+      headers: this._getHeaders(),
+      body: JSON.stringify({
+        ...body,
+        ...this.extraBodyProperties(),
+      }),
+      signal,
+    });
+
+    // Handle non-streaming response
+    if (body.stream === false) {
+      if (response.status === 499) {
+        return; // Aborted by user
+      }
+      const data = await response.json();
+      yield data.choices[0].message;
+      return;
+    }
+
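+    // Streaming state: `message` buffers an in-flight tool call,
+    // `myArguments` accumulates its argument fragments across chunks, and
+    // `lastMessageRole` tracks the role of the last emitted delta.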
+    let message: AssistantChatMessage | ThinkingChatMessage | undefined;
+    let myArguments: string | undefined;
+    let lastMessageRole: "assistant" | "thinking" | undefined;
+
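+    // Map a raw SSE chunk to a ChatMessage, or return undefined while a
+    // tool call is still being accumulated.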
+    function fromChatCompletionChunk(chunk: any): ChatMessage | undefined {
+      const delta = chunk.choices?.[0]?.delta;
+
+      if (delta?.content) {
+        lastMessageRole = "assistant";
+        return {
+          role: "assistant",
+          content: delta.content,
+        };
+      } else if (delta?.reasoning_content) {
+        lastMessageRole = "thinking";
+        return {
+          role: "thinking",
+          content: delta.reasoning_content,
+        };
+      } else if (delta?.tool_calls) {
+        if (!message) {
+          message = {
+            role: "assistant",
+            content: "",
+            toolCalls: delta.tool_calls.map((toolCall: any) => ({
+              id: toolCall.id,
+              type: toolCall.type,
+              function: {
+                name: toolCall.function?.name,
+                arguments: toolCall.function?.arguments,
+              },
+            })),
+          };
+          myArguments = "";
+          return message;
+        } else {
+          // Later chunks carry only argument fragments; append them safely
+          myArguments += delta.tool_calls[0]?.function?.arguments ?? "";
+        }
+        return undefined;
+      }
+
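+      // finish_reason === "tool_calls" marks the end of the buffered call:
+      // emit it once with the fully accumulated arguments.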
+      if (chunk.choices?.[0]?.finish_reason === "tool_calls") {
+        if (message) {
+          message = {
+            role: message.role,
+            content: message.content,
+            toolCalls: [
+              {
+                id: message.toolCalls?.[0].id,
+                type: message.toolCalls?.[0].type,
+                function: {
+                  name: message.toolCalls?.[0].function?.name,
+                  arguments: myArguments,
+                },
+              },
+            ],
+          };
+          const tempMessage = message;
+          message = undefined;
+          return tempMessage;
+        } else {
+          return undefined;
+        }
+      } else {
+        return undefined;
+      }
+    }
+
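+    // Drain the SSE stream, forwarding only chunks that resolve to a message.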
+    for await (const value of streamSse(response)) {
+      const chunk = fromChatCompletionChunk(value);
+      if (chunk) {
+        yield chunk;
+      }
+    }
+  }
 }
 
 export default Deepseek;