
Commit e2cd924

fix: update DeepSeek models context window to 128k
- Updated deepseek-chat and deepseek-reasoner models from 64k to 128k context window
- Updated corresponding test expectations
- Aligns with DeepSeek API documentation at https://api-docs.deepseek.com/quick_start/pricing/

Fixes #7268
1 parent 241df17 commit e2cd924

File tree

2 files changed (+3, -3 lines)


packages/types/src/providers/deepseek.ts

Lines changed: 2 additions & 2 deletions
@@ -8,7 +8,7 @@ export const deepSeekDefaultModelId: DeepSeekModelId = "deepseek-chat"
 export const deepSeekModels = {
 	"deepseek-chat": {
 		maxTokens: 8192,
-		contextWindow: 64_000,
+		contextWindow: 128_000,
 		supportsImages: false,
 		supportsPromptCache: true,
 		inputPrice: 0.27, // $0.27 per million tokens (cache miss)
@@ -19,7 +19,7 @@ export const deepSeekModels = {
 	},
 	"deepseek-reasoner": {
 		maxTokens: 8192,
-		contextWindow: 64_000,
+		contextWindow: 128_000,
 		supportsImages: false,
 		supportsPromptCache: true,
 		inputPrice: 0.55, // $0.55 per million tokens (cache miss)
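
For context, a minimal sketch of how a consumer of deepSeekModels might budget prompt tokens against the enlarged contextWindow; the helper and import path below are illustrative, not part of this commit:

import { deepSeekModels, type DeepSeekModelId } from "./packages/types/src/providers/deepseek" // illustrative path

// Hypothetical helper: how many prompt tokens fit once the completion
// budget (maxTokens) is reserved out of the model's context window.
function maxPromptTokens(modelId: DeepSeekModelId): number {
	const info = deepSeekModels[modelId]
	return info.contextWindow - info.maxTokens
}

// After this commit: maxPromptTokens("deepseek-chat") === 128_000 - 8192 === 119_808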

src/api/providers/__tests__/deepseek.spec.ts

Lines changed: 1 addition & 1 deletion
@@ -155,7 +155,7 @@ describe("DeepSeekHandler", () => {
 			expect(model.id).toBe(mockOptions.apiModelId)
 			expect(model.info).toBeDefined()
 			expect(model.info.maxTokens).toBe(8192)
-			expect(model.info.contextWindow).toBe(64_000)
+			expect(model.info.contextWindow).toBe(128_000)
 			expect(model.info.supportsImages).toBe(false)
 			expect(model.info.supportsPromptCache).toBe(true) // Should be true now
 		})
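
A possible follow-up, not part of this commit: parameterize the expectation over both model ids so the spec cannot drift from the shared 128k window. A rough sketch, assuming a Vitest-style runner (the describe/expect API above also matches Jest) and an illustrative import path:

import { describe, it, expect } from "vitest" // assumption: Vitest runner
import { deepSeekModels } from "../../../packages/types/src/providers/deepseek" // illustrative path

describe("deepSeekModels context windows", () => {
	it.each(["deepseek-chat", "deepseek-reasoner"] as const)("%s exposes a 128k context window", (id) => {
		expect(deepSeekModels[id].contextWindow).toBe(128_000)
		expect(deepSeekModels[id].maxTokens).toBe(8192)
	})
})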
