import { DeepSeekHandler } from '../deepseek'
import { ApiHandlerOptions } from '../../../shared/api'
import OpenAI from 'openai'
import { Anthropic } from '@anthropic-ai/sdk'

// Mock dependencies
jest.mock('openai')
jest.mock('../../../shared/api', () => ({
	...jest.requireActual('../../../shared/api'),
	deepSeekModels: {
		'deepseek-chat': {
			maxTokens: 1000,
			contextWindow: 2000,
			supportsImages: false,
			supportsPromptCache: false,
			inputPrice: 0.014,
			outputPrice: 0.28,
		},
	},
}))

describe('DeepSeekHandler', () => {

	const mockOptions: ApiHandlerOptions = {
		deepSeekApiKey: 'test-key',
		deepSeekModelId: 'deepseek-chat',
	}

	beforeEach(() => {
		jest.clearAllMocks()
	})

	test('constructor initializes with correct options', () => {
		const handler = new DeepSeekHandler(mockOptions)
		expect(handler).toBeInstanceOf(DeepSeekHandler)
		expect(OpenAI).toHaveBeenCalledWith({
			baseURL: 'https://api.deepseek.com/v1',
			apiKey: mockOptions.deepSeekApiKey,
		})
	})

	test('getModel returns correct model info', () => {
		const handler = new DeepSeekHandler(mockOptions)
		const result = handler.getModel()

		expect(result).toEqual({
			id: mockOptions.deepSeekModelId,
			info: expect.objectContaining({
				maxTokens: 1000,
				contextWindow: 2000,
				supportsPromptCache: false,
				supportsImages: false,
				inputPrice: 0.014,
				outputPrice: 0.28,
			}),
		})
	})

	test('getModel returns default model info when no model specified', () => {
		const handler = new DeepSeekHandler({ deepSeekApiKey: 'test-key' })
		const result = handler.getModel()

		expect(result.id).toBe('deepseek-chat')
		expect(result.info.maxTokens).toBe(1000)
	})

	test('createMessage handles string content correctly', async () => {
		const handler = new DeepSeekHandler(mockOptions)
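		// Minimal async-iterable stub standing in for the OpenAI streaming response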
		const mockStream = {
			async *[Symbol.asyncIterator]() {
				yield {
					choices: [{
						delta: {
							content: 'test response',
						},
					}],
				}
			},
		}

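		// Patch chat.completions.create on the mocked OpenAI prototype so the handler receives the stub stream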
		const mockCreate = jest.fn().mockResolvedValue(mockStream)
		;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
			completions: { create: mockCreate },
		} as any

		const systemPrompt = 'test system prompt'
		const messages: Anthropic.Messages.MessageParam[] = [
			{ role: 'user', content: 'test message' },
		]

		const generator = handler.createMessage(systemPrompt, messages)
		const chunks = []

		for await (const chunk of generator) {
			chunks.push(chunk)
		}

		expect(chunks).toHaveLength(1)
		expect(chunks[0]).toEqual({
			type: 'text',
			text: 'test response',
		})

		expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({
			model: mockOptions.deepSeekModelId,
			messages: [
				{ role: 'system', content: systemPrompt },
				{ role: 'user', content: 'test message' },
			],
			temperature: 0,
			stream: true,
			max_tokens: 1000,
			stream_options: { include_usage: true },
		}))
	})

	test('createMessage handles complex content correctly', async () => {
		const handler = new DeepSeekHandler(mockOptions)
		const mockStream = {
			async *[Symbol.asyncIterator]() {
				yield {
					choices: [{
						delta: {
							content: 'test response',
						},
					}],
				}
			},
		}

		const mockCreate = jest.fn().mockResolvedValue(mockStream)
		;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
			completions: { create: mockCreate },
		} as any

		const systemPrompt = 'test system prompt'
		const messages: Anthropic.Messages.MessageParam[] = [
			{
				role: 'user',
				content: [
					{ type: 'text', text: 'part 1' },
					{ type: 'text', text: 'part 2' },
				],
			},
		]

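		// Consuming a single chunk is enough to trigger the underlying API call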
		const generator = handler.createMessage(systemPrompt, messages)
		await generator.next()

		expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({
			messages: [
				{ role: 'system', content: systemPrompt },
				{ role: 'user', content: 'part 1part 2' },
			],
		}))
	})

	test('createMessage truncates messages when exceeding context window', async () => {
		const handler = new DeepSeekHandler(mockOptions)
		const longString = 'a'.repeat(1000) // ~300 tokens
		const shortString = 'b'.repeat(100) // ~30 tokens

		const systemPrompt = 'test system prompt'
		const messages: Anthropic.Messages.MessageParam[] = [
			{ role: 'user', content: longString }, // Old message
			{ role: 'assistant', content: 'short response' },
			{ role: 'user', content: shortString }, // Recent message
		]

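		// Stub stream that yields a truncation notice first, then the actual response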
		const mockStream = {
			async *[Symbol.asyncIterator]() {
				yield {
					choices: [{
						delta: {
							content: '(Note: Some earlier messages were truncated to fit within the model\'s context window)\n\n',
						},
					}],
				}
				yield {
					choices: [{
						delta: {
							content: 'test response',
						},
					}],
				}
			},
		}

		const mockCreate = jest.fn().mockResolvedValue(mockStream)
		;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
			completions: { create: mockCreate },
		} as any

		const generator = handler.createMessage(systemPrompt, messages)
		const chunks = []
		for await (const chunk of generator) {
			chunks.push(chunk)
		}

		// Should get two chunks: truncation notice and response
		expect(chunks).toHaveLength(2)
		expect(chunks[0]).toEqual({
			type: 'text',
			text: expect.stringContaining('truncated'),
		})
		expect(chunks[1]).toEqual({
			type: 'text',
			text: 'test response',
		})

		// Verify the API call includes the system prompt and the most recent messages
		expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({
			messages: expect.arrayContaining([
				{ role: 'system', content: systemPrompt },
				{ role: 'assistant', content: 'short response' },
				{ role: 'user', content: shortString },
			]),
		}))

		// Verify the messages array contains the expected messages
		const calledMessages = mockCreate.mock.calls[0][0].messages
		expect(calledMessages).toHaveLength(4)
		expect(calledMessages[0]).toEqual({ role: 'system', content: systemPrompt })
		expect(calledMessages[1]).toEqual({ role: 'user', content: longString })
		expect(calledMessages[2]).toEqual({ role: 'assistant', content: 'short response' })
		expect(calledMessages[3]).toEqual({ role: 'user', content: shortString })
	})

	test('createMessage handles API errors', async () => {
		const handler = new DeepSeekHandler(mockOptions)
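		// Stub stream whose iterator throws immediately to simulate a failure while streaming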
		const mockStream = {
			async *[Symbol.asyncIterator]() {
				throw new Error('API Error')
			},
		}

		const mockCreate = jest.fn().mockResolvedValue(mockStream)
		;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
			completions: { create: mockCreate },
		} as any

		const generator = handler.createMessage('test', [])
		await expect(generator.next()).rejects.toThrow('API Error')
	})
})