
Commit 64dbf42

Add tool support for deepseek-chat and deepseek-reasoner (only for the DeepSeek official API)
1 parent e6406de commit 64dbf42

File tree

3 files changed: +121, -4 lines changed


core/llm/autodetect.ts

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,6 @@ import {
   chatmlTemplateMessages,
   codeLlama70bTemplateMessages,
   codestralTemplateMessages,
-  deepseekTemplateMessages,
   gemmaTemplateMessage,
   graniteTemplateMessages,
   llama2TemplateMessages,
@@ -293,7 +292,8 @@ function autodetectTemplateFunction(
     zephyr: zephyrTemplateMessages,
     anthropic: anthropicTemplateMessages,
     chatml: chatmlTemplateMessages,
-    deepseek: deepseekTemplateMessages,
+    // deepseek: deepseekTemplateMessages,
+    deepseek: null,
     openchat: openchatTemplateMessages,
     "xwin-coder": xWinCoderTemplateMessages,
     "neural-chat": neuralChatTemplateMessages,

core/llm/llms/Deepseek.ts

Lines changed: 114 additions & 1 deletion
@@ -1,7 +1,14 @@
 import { streamSse } from "@continuedev/fetch";
-import { CompletionOptions, LLMOptions } from "../../index.js";
+import {
+  AssistantChatMessage,
+  ChatMessage,
+  CompletionOptions,
+  LLMOptions,
+  ThinkingChatMessage,
+} from "../../index.js";
 import { osModelsEditPrompt } from "../templates/edit.js";
 
+import { LlmApiRequestType } from "../openaiTypeConverters";
 import OpenAI from "./OpenAI.js";
 
 class Deepseek extends OpenAI {
@@ -16,6 +23,8 @@ class Deepseek extends OpenAI {
   };
   maxStopWords: number | undefined = 16;
 
+  protected useOpenAIAdapterFor: (LlmApiRequestType | "*")[] = [];
+
   supportsFim(): boolean {
     return true;
   }
@@ -52,6 +61,110 @@ class Deepseek extends OpenAI {
       yield chunk.choices[0].text;
     }
   }
+
+  protected async *_streamChat(
+    messages: ChatMessage[],
+    signal: AbortSignal,
+    options: CompletionOptions,
+  ): AsyncGenerator<ChatMessage, any, any> {
+    const body = this._convertArgs(options, messages);
+
+    const response = await this.fetch(this._getEndpoint("chat/completions"), {
+      method: "POST",
+      headers: this._getHeaders(),
+      body: JSON.stringify({
+        ...body,
+        ...this.extraBodyProperties(),
+      }),
+      signal,
+    });
+
+    // Handle non-streaming response
+    if (body.stream === false) {
+      if (response.status === 499) {
+        return; // Aborted by user
+      }
+      const data = await response.json();
+      yield data.choices[0].message;
+      return;
+    }
+
+    let message: AssistantChatMessage | ThinkingChatMessage | undefined;
+    let myArguments: string | undefined;
+    let lastMessageRole: "assistant" | "thinking" | undefined;
+
+    function fromChatCompletionChunk(chunk: any): ChatMessage | undefined {
+      const delta = chunk.choices?.[0]?.delta;
+
+      if (delta?.content) {
+        lastMessageRole = "assistant";
+        return {
+          role: "assistant",
+          content: delta.content,
+        };
+      } else if (delta?.reasoning_content) {
+        lastMessageRole = "thinking";
+        return {
+          role: "thinking",
+          content: delta.reasoning_content,
+        };
+      } else if (delta?.tool_calls) {
+        if (!message) {
+          message = {
+            role: "assistant",
+            content: "",
+            toolCalls: delta?.tool_calls.map((tool_call: any) => ({
+              id: tool_call.id,
+              type: tool_call.type,
+              function: {
+                name: tool_call.function?.name,
+                arguments: tool_call.function?.arguments,
+              },
+            })),
+          };
+          myArguments = "";
+          return message;
+        } else {
+          // @ts-ignore
+          myArguments += delta?.tool_calls[0].function.arguments;
+        }
+        return undefined;
+      }
+
+      if (chunk.choices?.[0]?.finish_reason === "tool_calls") {
+        if (message) {
+          message = {
+            role: message.role,
+            content: message.content,
+            toolCalls: [
+              {
+                id: message.toolCalls?.[0].id,
+                type: message.toolCalls?.[0].type,
+                function: {
+                  name: message.toolCalls?.[0].function?.name,
+                  arguments: myArguments,
+                },
+              },
+            ],
+          };
+          const tempMessage = message;
+          message = undefined;
+          return tempMessage;
+        } else {
+          return undefined;
+        }
+      } else {
+        return undefined;
+      }
+    }
+
+    for await (const value of streamSse(response)) {
+      const chunk = fromChatCompletionChunk(value);
+      if (chunk) {
+        yield chunk;
+      }
+    }
+  }
 }
 
 export default Deepseek;
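
Note on the changes above: setting useOpenAIAdapterFor to an empty array appears to route no request types through the shared OpenAI adapter, so the new _streamChat generator handles all chat traffic for this provider. Inside it, fromChatCompletionChunk maps DeepSeek's streaming deltas onto Continue's ChatMessage roles. A sketch of the four chunk shapes it distinguishes; field names follow the code above, and all concrete values are invented:

// Illustrative SSE chunks (values invented; shapes per fromChatCompletionChunk).

// 1. Plain assistant text -> yielded as { role: "assistant", content }.
const contentChunk = {
  choices: [{ delta: { content: "Hello" }, finish_reason: null }],
};

// 2. deepseek-reasoner thinking tokens -> { role: "thinking", content }.
const thinkingChunk = {
  choices: [
    { delta: { reasoning_content: "The user wants…" }, finish_reason: null },
  ],
};

// 3. The first tool-call delta carries id/type/name; subsequent deltas
//    stream only function.arguments fragments, which the code accumulates
//    in `myArguments`. Tool name and arguments here are hypothetical.
const toolCallStart = {
  choices: [
    {
      delta: {
        tool_calls: [
          {
            id: "call_0",
            type: "function",
            function: { name: "read_file", arguments: '{"filepath":' },
          },
        ],
      },
      finish_reason: null,
    },
  ],
};

// 4. The closing chunk has an empty delta and finish_reason "tool_calls";
//    only then is the fully assembled tool call yielded downstream.
const finishChunk = { choices: [{ delta: {}, finish_reason: "tool_calls" }] };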

core/llm/toolSupport.ts

Lines changed: 5 additions & 1 deletion
@@ -179,7 +179,11 @@ export const PROVIDER_TOOL_SUPPORT: Record<string, (model: string) => boolean> =
     return false;
   },
   deepseek: (model) => {
-    if (model !== "deepseek-reasoner") {
+    if (
+      model == "deepseek-reasoner" ||
+      model == "deepseek-chat" ||
+      model == "deepseek-coder"
+    ) {
       return true;
     }
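
Note on the change above: the deepseek predicate now allow-lists deepseek-reasoner, deepseek-chat, and deepseek-coder instead of excluding only deepseek-reasoner. A self-contained sketch of how such a map might be consulted; the call site below is hypothetical, and only the predicate mirrors the hunk:

// Shape per the hunk header: Record<string, (model: string) => boolean>.
const PROVIDER_TOOL_SUPPORT: Record<string, (model: string) => boolean> = {
  deepseek: (model) =>
    model === "deepseek-reasoner" ||
    model === "deepseek-chat" ||
    model === "deepseek-coder",
};

// Hypothetical call site: decide whether to include `tools` in a request.
const canUseTools = PROVIDER_TOOL_SUPPORT["deepseek"]?.("deepseek-chat") ?? false;
console.log(canUseTools); // -> true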
