Commit 8b0f32d

Currently working fine, so rolling back.
1 parent 5648c21 · commit 8b0f32d

1 file changed: +1 −114 lines

core/llm/llms/Deepseek.ts

Lines changed: 1 addition & 114 deletions
@@ -1,14 +1,7 @@
 import { streamSse } from "@continuedev/fetch";
-import {
-  AssistantChatMessage,
-  ChatMessage,
-  CompletionOptions,
-  LLMOptions,
-  ThinkingChatMessage,
-} from "../../index.js";
+import { CompletionOptions, LLMOptions } from "../../index.js";
 import { osModelsEditPrompt } from "../templates/edit.js";

-import { LlmApiRequestType } from "../openaiTypeConverters";
 import OpenAI from "./OpenAI.js";

 class Deepseek extends OpenAI {
@@ -23,8 +16,6 @@ class Deepseek extends OpenAI {
   };
   maxStopWords: number | undefined = 16;

-  protected useOpenAIAdapterFor: (LlmApiRequestType | "*")[] = [];
-
   supportsFim(): boolean {
     return true;
   }
@@ -61,110 +52,6 @@ class Deepseek extends OpenAI {
       yield chunk.choices[0].text;
     }
   }
-
-  protected async *_streamChat(
-    messages: ChatMessage[],
-    signal: AbortSignal,
-    options: CompletionOptions,
-  ): AsyncGenerator<ChatMessage, any, any> {
-    const body = this._convertArgs(options, messages);
-
-    const response = await this.fetch(this._getEndpoint("chat/completions"), {
-      method: "POST",
-      headers: this._getHeaders(),
-      body: JSON.stringify({
-        ...body,
-        ...this.extraBodyProperties(),
-      }),
-      signal,
-    });
-
-    // Handle non-streaming response
-    if (body.stream === false) {
-      if (response.status === 499) {
-        return; // Aborted by user
-      }
-      const data = await response.json();
-      yield data.choices[0].message;
-      return;
-    }
-
-    let message: AssistantChatMessage | ThinkingChatMessage | undefined;
-    let myArguments: string | undefined;
-    let lastMessageRole: "assistant" | "thinking" | undefined;
-
-    function fromChatCompletionChunk(chunk: any): ChatMessage | undefined {
-      const delta = chunk.choices?.[0]?.delta;
-
-      if (delta?.content) {
-        lastMessageRole = "assistant";
-        return {
-          role: "assistant",
-          content: delta.content,
-        };
-      } else if (delta?.reasoning_content) {
-        lastMessageRole = "thinking";
-        return {
-          role: "thinking",
-          content: delta.reasoning_content,
-        };
-      } else if (delta?.tool_calls) {
-        if (!message) {
-          message = {
-            role: "assistant",
-            content: "",
-            toolCalls: delta?.tool_calls.map((tool_call: any) => ({
-              id: tool_call.id,
-              type: tool_call.type,
-              function: {
-                name: tool_call.function?.name,
-                arguments: tool_call.function?.arguments,
-              },
-            })),
-          };
-          myArguments = "";
-          return message;
-        } else {
-          // @ts-ignore
-          myArguments += delta?.tool_calls[0].function.arguments;
-        }
-        return undefined;
-      }
-
-      if (chunk.choices?.[0]?.finish_reason === "tool_calls") {
-        if (message) {
-          message = {
-            role: message.role,
-            content: message.content,
-            toolCalls: [
-              {
-                id: message.toolCalls?.[0].id,
-                type: message.toolCalls?.[0].type,
-                function: {
-                  name: message.toolCalls?.[0].function?.name,
-                  arguments: myArguments,
-                },
-              },
-            ],
-          };
-          const tempMessage = message;
-          message = undefined;
-          return tempMessage;
-        } else {
-          return undefined;
-        }
-      } else {
-        return undefined;
-      }
-    }
-
-    for await (const value of streamSse(response)) {
-      const chunk = fromChatCompletionChunk(value);
-      if (chunk) {
-        yield chunk;
-      }
-    }
-  }
 }

 export default Deepseek;
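
For context on what the rollback removes: the deleted _streamChat override mapped DeepSeek's streaming deltas onto chat messages, routing reasoning_content tokens to a separate "thinking" role and buffering tool-call argument fragments across chunks. Below is a minimal standalone sketch of just the content/reasoning mapping; the helper name fromDelta and the reduced SketchMessage/Delta types are illustrative stand-ins, not the real ChatMessage union from ../../index.js.

// Simplified stand-ins for the two roles this sketch needs; the real
// ChatMessage union lives in ../../index.js.
type SketchMessage =
  | { role: "assistant"; content: string }
  | { role: "thinking"; content: string };

// One streaming delta from an OpenAI-style chat completion chunk.
// DeepSeek reasoner models emit chain-of-thought tokens under
// `reasoning_content`, separate from the user-visible `content`.
interface Delta {
  content?: string;
  reasoning_content?: string;
}

function fromDelta(delta: Delta): SketchMessage | undefined {
  if (delta.content) {
    return { role: "assistant", content: delta.content };
  }
  if (delta.reasoning_content) {
    return { role: "thinking", content: delta.reasoning_content };
  }
  // Tool-call fragments and finish_reason handling are omitted here;
  // the removed code above accumulated those across chunks before yielding.
  return undefined;
}

With the override gone, Deepseek falls back to the streaming path inherited from the OpenAI base class.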
