
Commit 46ca1de

feat: update deepseek-reasoner maxTokens to 64K based on official documentation
1 parent e2cd924

2 files changed (+17 −3 lines)

packages/types/src/providers/deepseek.ts (2 additions, 2 deletions)

@@ -18,15 +18,15 @@ export const deepSeekModels = {
 		description: `DeepSeek-V3 achieves a significant breakthrough in inference speed over previous models. It tops the leaderboard among open-source models and rivals the most advanced closed-source models globally.`,
 	},
 	"deepseek-reasoner": {
-		maxTokens: 8192,
+		maxTokens: 65536, // 64K max output for reasoning mode
 		contextWindow: 128_000,
 		supportsImages: false,
 		supportsPromptCache: true,
 		inputPrice: 0.55, // $0.55 per million tokens (cache miss)
 		outputPrice: 2.19, // $2.19 per million tokens
 		cacheWritesPrice: 0.55, // $0.55 per million tokens (cache miss)
 		cacheReadsPrice: 0.14, // $0.14 per million tokens (cache hit)
-		description: `DeepSeek-R1 achieves performance comparable to OpenAI-o1 across math, code, and reasoning tasks. Supports Chain of Thought reasoning with up to 32K tokens.`,
+		description: `DeepSeek-R1 achieves performance comparable to OpenAI-o1 across math, code, and reasoning tasks. Supports Chain of Thought reasoning with up to 64K output tokens.`,
 	},
 } as const satisfies Record<string, ModelInfo>
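For orientation, the deepSeekModels record above is what the handler's getModel() lookup draws from. The snippet below is only a sketch of that lookup, assuming a hypothetical import path and a deepSeekDefaultModelId that is not part of this commit; the repository's real handler may resolve models differently.

import { deepSeekModels, type ModelInfo } from "@roo-code/types" // import path is an assumption

const deepSeekDefaultModelId = "deepseek-chat" // assumed default id, not taken from this commit

// Resolve an API model id to its ModelInfo entry, falling back to the default
// model's info when the id is not in the record (the behaviour the existing
// "model does not exist" test describes).
function getModel(apiModelId?: string): { id: string; info: ModelInfo } {
	const id = apiModelId ?? deepSeekDefaultModelId
	const info =
		id in deepSeekModels
			? deepSeekModels[id as keyof typeof deepSeekModels]
			: deepSeekModels[deepSeekDefaultModelId]
	return { id, info }
}

Under this reading, raising maxTokens for "deepseek-reasoner" changes only the ceiling reported in ModelInfo; callers that clamp their requests to it automatically pick up the new 64K limit.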

src/api/providers/__tests__/deepseek.spec.ts (15 additions, 1 deletion)

@@ -154,12 +154,26 @@ describe("DeepSeekHandler", () => {
 		const model = handler.getModel()
 		expect(model.id).toBe(mockOptions.apiModelId)
 		expect(model.info).toBeDefined()
-		expect(model.info.maxTokens).toBe(8192)
+		expect(model.info.maxTokens).toBe(8192) // deepseek-chat has 8K max
 		expect(model.info.contextWindow).toBe(128_000)
 		expect(model.info.supportsImages).toBe(false)
 		expect(model.info.supportsPromptCache).toBe(true) // Should be true now
 	})
 
+	it("should return correct model info for deepseek-reasoner", () => {
+		const handlerWithReasoner = new DeepSeekHandler({
+			...mockOptions,
+			apiModelId: "deepseek-reasoner",
+		})
+		const model = handlerWithReasoner.getModel()
+		expect(model.id).toBe("deepseek-reasoner")
+		expect(model.info).toBeDefined()
+		expect(model.info.maxTokens).toBe(65536) // deepseek-reasoner has 64K max
+		expect(model.info.contextWindow).toBe(128_000)
+		expect(model.info.supportsImages).toBe(false)
+		expect(model.info.supportsPromptCache).toBe(true)
+	})
+
 	it("should return provided model ID with default model info if model does not exist", () => {
 		const handlerWithInvalidModel = new DeepSeekHandler({
 			...mockOptions,