
Commit f89d98a

test: add tests
1 parent a8cc9c0 commit f89d98a

2 files changed: +637 -0 lines changed
Lines changed: 293 additions & 0 deletions
@@ -0,0 +1,293 @@
import {describe, expect, test} from "vitest";
import {ChatHistoryItem, ChatModelFunctions, DeepSeekChatWrapper} from "../../../src/index.js";
import {defaultChatSystemPrompt} from "../../../src/config.js";


describe("DeepSeekChatWrapper", () => {
    const conversationHistory: ChatHistoryItem[] = [{
        type: "system",
        text: defaultChatSystemPrompt
    }, {
        type: "user",
        text: "Hi there!"
    }, {
        type: "model",
        response: ["Hello!"]
    }];
    const conversationHistory2: ChatHistoryItem[] = [
        ...(new DeepSeekChatWrapper()).generateInitialChatHistory({systemPrompt: defaultChatSystemPrompt}), {
            type: "user",
            text: "Hi there!"
        }, {
            type: "model",
            response: ["Hello!"]
        }, {
            type: "user",
            text: "What is the time?"
        }, {
            type: "model",
            response: [{
                type: "functionCall",
                name: "getTime",
                description: "Retrieve the current time",
                params: {
                    hours: "24",
                    seconds: true
                },
                result: "22:00:00"
            }, "I'm good, how are you?"]
        }
    ];
    const conversationHistory2Functions: ChatModelFunctions = {
        getTime: {
            description: "Retrieve the current time",
            params: {
                type: "object",
                properties: {
                    hours: {
                        enum: ["24", "12"]
                    },
                    seconds: {
                        type: "boolean"
                    }
                }
            }
        }
    };
    const conversationHistory3: ChatHistoryItem[] = [
        ...(new DeepSeekChatWrapper()).generateInitialChatHistory({systemPrompt: defaultChatSystemPrompt}), {
            type: "user",
            text: "Hi there!"
        }, {
            type: "model",
            response: ["Hello!"]
        }, {
            type: "user",
            text: "What is the time?"
        }, {
            type: "model",
            response: ["I'll fetch some information for you", {
                type: "functionCall",
                name: "getTime",
                description: "Retrieve the current time",
                params: {
                    hours: "24",
                    seconds: true
                },
                result: "22:00:00",
                startsNewChunk: true
            }, {
                type: "functionCall",
                name: "getDate",
                description: "Retrieve the current date",
                params: {
                    timezone: 0
                },
                result: "2025-03-20T00:00:00Z",
                startsNewChunk: false
            }, "I'm good, how are you?"]
        }
    ];
    const conversationHistory3Functions: ChatModelFunctions = {
        getTime: {
            description: "Retrieve the current time",
            params: {
                type: "object",
                properties: {
                    hours: {
                        enum: ["24", "12"]
                    },
                    seconds: {
                        type: "boolean"
                    }
                }
            }
        },
        getDate: {
            description: "Retrieve the current date",
            params: {
                type: "object",
                properties: {
                    timezone: {
                        type: "integer"
                    }
                }
            }
        }
    };

    test("should generate valid context text", () => {
        const chatWrapper = new DeepSeekChatWrapper();
        const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory});

        expect(contextText).toMatchInlineSnapshot(`
            LlamaText([
                new SpecialToken("BOS"),
                "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
            If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.",
                new SpecialTokensText("<|User|>"),
                "Hi there!",
                new SpecialTokensText("<|Assistant|>"),
                "Hello!",
            ])
        `);

        const chatWrapper2 = new DeepSeekChatWrapper();
        const {contextText: contextText2} = chatWrapper2.generateContextState({
            chatHistory: conversationHistory2,
            availableFunctions: conversationHistory2Functions
        });

        expect(contextText2).toMatchInlineSnapshot(`
            LlamaText([
                new SpecialToken("BOS"),
                "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
            If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.

            The assistant calls the provided functions as needed to retrieve information instead of relying on existing knowledge.
            To fulfill a request, the assistant calls relevant functions in advance when needed before responding to the request, and does not tell the user prior to calling a function.
            If the result of function calls from previous turns might be stale, the assistant will call the functions again if needed.
            Provided functions:
            {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}

            Calling any of the provided functions can be done like this:
            ",
                new SpecialTokensText("<function="),
                "getSomeInfo",
                new SpecialTokensText(">"),
                "{"someKey": "someValue"}",
                new SpecialTokensText("</function>"),
                "

            Note that the verbatim ",
                new SpecialTokensText("<function="),
                " prefix is mandatory.

            The assistant never assumes the results of function calls, and instead uses the raw results directly for processing.
            The assistant does not inform the user about using functions and does not explain anything before calling a function.
            After calling a function, the raw result appears afterwards and is not part of the conversation.
            To make information be part of the conversation, the assistant paraphrases and repeats the information without the function syntax.
            The assistant never repeats itself unless necessary.",
                new SpecialTokensText("<|User|>"),
                "Hi there!",
                new SpecialTokensText("<|Assistant|>"),
                "Hello!",
                new SpecialTokensText("<|end▁of▁sentence|><|User|>"),
                "What is the time?",
                new SpecialTokensText("<|Assistant|><function="),
                "getTime",
                new SpecialTokensText(">"),
                "{"hours": "24", "seconds": true}",
                new SpecialTokensText("</function><|tool▁output▁begin|>"),
                ""22:00:00"",
                new SpecialTokensText("<|tool▁output▁end|>
            "),
                "I'm good, how are you?",
            ])
        `);

        const chatWrapper3 = new DeepSeekChatWrapper();
        const {contextText: contextText3} = chatWrapper3.generateContextState({chatHistory: conversationHistory});
        const {contextText: contextText3WithOpenModelResponse} = chatWrapper3.generateContextState({
            chatHistory: [
                ...conversationHistory,
                {
                    type: "model",
                    response: []
                }
            ]
        });

        expect(contextText3).toMatchInlineSnapshot(`
            LlamaText([
                new SpecialToken("BOS"),
                "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
            If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.",
                new SpecialTokensText("<|User|>"),
                "Hi there!",
                new SpecialTokensText("<|Assistant|>"),
                "Hello!",
            ])
        `);

        expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(`
            LlamaText([
                new SpecialToken("BOS"),
                "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
            If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.",
                new SpecialTokensText("<|User|>"),
                "Hi there!",
                new SpecialTokensText("<|Assistant|>"),
                "Hello!",
                new SpecialTokensText("<|end▁of▁sentence|><|Assistant|>"),
            ])
        `);
    });

    test("should generate valid context text for 2 sequential function calls", () => {
        const chatWrapper = new DeepSeekChatWrapper();
        const {contextText} = chatWrapper.generateContextState({
            chatHistory: conversationHistory3,
            availableFunctions: conversationHistory3Functions
        });

        expect(contextText).toMatchInlineSnapshot(`
            LlamaText([
                new SpecialToken("BOS"),
                "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
            If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.

            The assistant calls the provided functions as needed to retrieve information instead of relying on existing knowledge.
            To fulfill a request, the assistant calls relevant functions in advance when needed before responding to the request, and does not tell the user prior to calling a function.
            If the result of function calls from previous turns might be stale, the assistant will call the functions again if needed.
            Provided functions:
            {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}

            {"name": "getDate", "description": "Retrieve the current date", "parameters": {"type": "object", "properties": {"timezone": {"type": "integer"}}}}

            Calling any of the provided functions can be done like this:
            ",
                new SpecialTokensText("<function="),
                "getSomeInfo",
                new SpecialTokensText(">"),
                "{"someKey": "someValue"}",
                new SpecialTokensText("</function>"),
                "

            Note that the verbatim ",
                new SpecialTokensText("<function="),
                " prefix is mandatory.

            The assistant never assumes the results of function calls, and instead uses the raw results directly for processing.
            The assistant does not inform the user about using functions and does not explain anything before calling a function.
            After calling a function, the raw result appears afterwards and is not part of the conversation.
            To make information be part of the conversation, the assistant paraphrases and repeats the information without the function syntax.
            The assistant never repeats itself unless necessary.",
                new SpecialTokensText("<|User|>"),
                "Hi there!",
                new SpecialTokensText("<|Assistant|>"),
                "Hello!",
                new SpecialTokensText("<|end▁of▁sentence|><|User|>"),
                "What is the time?",
                new SpecialTokensText("<|Assistant|>"),
                "I'll fetch some information for you",
                new SpecialTokensText("<function="),
                "getTime",
                new SpecialTokensText(">"),
                "{"hours": "24", "seconds": true}",
                new SpecialTokensText("</function><|tool▁output▁begin|>"),
                ""22:00:00"",
                new SpecialTokensText("<|tool▁output▁end|>
            <function="),
                "getDate",
                new SpecialTokensText(">"),
                "{"timezone": 0}",
                new SpecialTokensText("</function><|tool▁output▁begin|>"),
                ""2025-03-20T00:00:00Z"",
                new SpecialTokensText("<|tool▁output▁end|>
            "),
                "I'm good, how are you?",
            ])
        `);
    });
});
