Skip to content

Commit a13da25

Browse files
committed
Deepseek fixes
1 parent eff636a commit a13da25

File tree

5 files changed

+25
-16
lines changed

5 files changed

+25
-16
lines changed

src/api/providers/__tests__/deepseek.test.ts

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -71,7 +71,7 @@ describe("DeepSeekHandler", () => {
7171
beforeEach(() => {
7272
mockOptions = {
7373
deepSeekApiKey: "test-api-key",
74-
deepSeekModelId: "deepseek-chat",
74+
apiModelId: "deepseek-chat",
7575
deepSeekBaseUrl: "https://api.deepseek.com/v1",
7676
}
7777
handler = new DeepSeekHandler(mockOptions)
@@ -81,7 +81,7 @@ describe("DeepSeekHandler", () => {
8181
describe("constructor", () => {
8282
it("should initialize with provided options", () => {
8383
expect(handler).toBeInstanceOf(DeepSeekHandler)
84-
expect(handler.getModel().id).toBe(mockOptions.deepSeekModelId)
84+
expect(handler.getModel().id).toBe(mockOptions.apiModelId)
8585
})
8686

8787
it("should throw error if API key is missing", () => {
@@ -96,7 +96,7 @@ describe("DeepSeekHandler", () => {
9696
it("should use default model ID if not provided", () => {
9797
const handlerWithoutModel = new DeepSeekHandler({
9898
...mockOptions,
99-
deepSeekModelId: undefined,
99+
apiModelId: undefined,
100100
})
101101
expect(handlerWithoutModel.getModel().id).toBe(deepSeekDefaultModelId)
102102
})
@@ -144,7 +144,7 @@ describe("DeepSeekHandler", () => {
144144
describe("getModel", () => {
145145
it("should return model info for valid model ID", () => {
146146
const model = handler.getModel()
147-
expect(model.id).toBe(mockOptions.deepSeekModelId)
147+
expect(model.id).toBe(mockOptions.apiModelId)
148148
expect(model.info).toBeDefined()
149149
expect(model.info.maxTokens).toBe(8192)
150150
expect(model.info.contextWindow).toBe(64_000)
@@ -155,7 +155,7 @@ describe("DeepSeekHandler", () => {
155155
it("should return provided model ID with default model info if model does not exist", () => {
156156
const handlerWithInvalidModel = new DeepSeekHandler({
157157
...mockOptions,
158-
deepSeekModelId: "invalid-model",
158+
apiModelId: "invalid-model",
159159
})
160160
const model = handlerWithInvalidModel.getModel()
161161
expect(model.id).toBe("invalid-model") // Returns provided ID
@@ -166,7 +166,7 @@ describe("DeepSeekHandler", () => {
166166
it("should return default model if no model ID is provided", () => {
167167
const handlerWithoutModel = new DeepSeekHandler({
168168
...mockOptions,
169-
deepSeekModelId: undefined,
169+
apiModelId: undefined,
170170
})
171171
const model = handlerWithoutModel.getModel()
172172
expect(model.id).toBe(deepSeekDefaultModelId)

src/api/providers/__tests__/openai.test.ts

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -193,7 +193,6 @@ describe("OpenAiHandler", () => {
193193
expect(mockCreate).toHaveBeenCalledWith({
194194
model: mockOptions.openAiModelId,
195195
messages: [{ role: "user", content: "Test prompt" }],
196-
temperature: 0,
197196
})
198197
})
199198

src/api/providers/deepseek.ts

Lines changed: 3 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -10,14 +10,15 @@ export class DeepSeekHandler extends OpenAiHandler {
1010
super({
1111
...options,
1212
openAiApiKey: options.deepSeekApiKey,
13-
openAiModelId: options.deepSeekModelId ?? deepSeekDefaultModelId,
13+
openAiModelId: options.apiModelId ?? deepSeekDefaultModelId,
1414
openAiBaseUrl: options.deepSeekBaseUrl ?? "https://api.deepseek.com/v1",
15+
openAiStreamingEnabled: true,
1516
includeMaxTokens: true,
1617
})
1718
}
1819

1920
override getModel(): { id: string; info: ModelInfo } {
20-
const modelId = this.options.deepSeekModelId ?? deepSeekDefaultModelId
21+
const modelId = this.options.apiModelId ?? deepSeekDefaultModelId
2122
return {
2223
id: modelId,
2324
info: deepSeekModels[modelId as keyof typeof deepSeekModels] || deepSeekModels[deepSeekDefaultModelId],

src/api/providers/openai.ts

Lines changed: 16 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -36,7 +36,9 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
3636
const modelInfo = this.getModel().info
3737
const modelId = this.options.openAiModelId ?? ""
3838

39-
if (this.options.openAiStreamingEnabled ?? true) {
39+
const deepseekReasoner = modelId.includes("deepseek-reasoner")
40+
41+
if (!deepseekReasoner && (this.options.openAiStreamingEnabled ?? true)) {
4042
const systemMessage: OpenAI.Chat.ChatCompletionSystemMessageParam = {
4143
role: "system",
4244
content: systemPrompt,
@@ -71,11 +73,20 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
7173
}
7274
}
7375
} else {
76+
let systemMessage: OpenAI.Chat.ChatCompletionUserMessageParam | OpenAI.Chat.ChatCompletionSystemMessageParam
77+
7478
// o1 for instance doesnt support streaming, non-1 temp, or system prompt
75-
const systemMessage: OpenAI.Chat.ChatCompletionUserMessageParam = {
76-
role: "user",
77-
content: systemPrompt,
78-
}
79+
// deepseek reasoner supports system prompt
80+
systemMessage = deepseekReasoner
81+
? {
82+
role: "system",
83+
content: systemPrompt,
84+
}
85+
: {
86+
role: "user",
87+
content: systemPrompt,
88+
}
89+
7990
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
8091
model: modelId,
8192
messages: [systemMessage, ...convertToOpenAiMessages(messages)],
@@ -106,7 +117,6 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
106117
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
107118
model: this.getModel().id,
108119
messages: [{ role: "user", content: prompt }],
109-
temperature: 0,
110120
}
111121

112122
const response = await this.client.chat.completions.create(requestOptions)

src/shared/api.ts

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -51,7 +51,6 @@ export interface ApiHandlerOptions {
5151
setAzureApiVersion?: boolean
5252
deepSeekBaseUrl?: string
5353
deepSeekApiKey?: string
54-
deepSeekModelId?: string
5554
includeMaxTokens?: boolean
5655
}
5756

0 commit comments

Comments (0)