Commit a8cc9c0

test: make LlamaText snapshots more readable
1 parent b2bdacc commit a8cc9c0

16 files changed: +1012 −2443 lines
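
In short, every inline snapshot switches from asserting on contextText.values (printed as an array of plain {"type", "value"} objects) to asserting on contextText itself, which now prints as a LlamaText([...]) constructor call made of new SpecialToken(...) and new SpecialTokensText(...) entries. A representative before/after excerpt from the diffs below:

    // before: snapshot of the raw values array
    expect(contextText.values).toMatchInlineSnapshot(`
      [
        {
          "type": "specialToken",
          "value": "BOS",
        },
        ...
      ]
    `);

    // after: snapshot of the LlamaText value itself
    expect(contextText).toMatchInlineSnapshot(`
      LlamaText([
        new SpecialToken("BOS"),
        ...
      ])
    `);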

test/standalone/chatWrappers/ChatMLChatWrapper.test.ts

Lines changed: 46 additions & 103 deletions
@@ -35,81 +35,51 @@ describe("ChatMLChatWrapper", () => {
        const chatWrapper = new ChatMLChatWrapper();
        const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory});

-        expect(contextText.values).toMatchInlineSnapshot(`
-          [
-            {
-              "type": "specialToken",
-              "value": "BOS",
-            },
-            {
-              "type": "specialTokensText",
-              "value": "<|im_start|>system
-          ",
-            },
+        expect(contextText).toMatchInlineSnapshot(`
+          LlamaText([
+            new SpecialToken("BOS"),
+            new SpecialTokensText("<|im_start|>system
+          "),
          "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
          If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>user
-          ",
-            },
+          "),
          "Hi there!",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>assistant
-          ",
-            },
+          "),
          "Hello!",
-          ]
+          ])
        `);

        const chatWrapper2 = new ChatMLChatWrapper();
        const {contextText: contextText2} = chatWrapper2.generateContextState({chatHistory: conversationHistory2});

-        expect(contextText2.values).toMatchInlineSnapshot(`
-          [
-            {
-              "type": "specialToken",
-              "value": "BOS",
-            },
-            {
-              "type": "specialTokensText",
-              "value": "<|im_start|>system
-          ",
-            },
+        expect(contextText2).toMatchInlineSnapshot(`
+          LlamaText([
+            new SpecialToken("BOS"),
+            new SpecialTokensText("<|im_start|>system
+          "),
          "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
          If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>user
-          ",
-            },
+          "),
          "Hi there!",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>assistant
-          ",
-            },
+          "),
          "Hello!",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>user
-          ",
-            },
+          "),
          "How are you?",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>assistant
-          ",
-            },
+          "),
          "I'm good, how are you?",
-          ]
+          ])
        `);

        const chatWrapper3 = new ChatMLChatWrapper();
@@ -124,70 +94,43 @@ describe("ChatMLChatWrapper", () => {
            ]
        });

-        expect(contextText3.values).toMatchInlineSnapshot(`
-          [
-            {
-              "type": "specialToken",
-              "value": "BOS",
-            },
-            {
-              "type": "specialTokensText",
-              "value": "<|im_start|>system
-          ",
-            },
+        expect(contextText3).toMatchInlineSnapshot(`
+          LlamaText([
+            new SpecialToken("BOS"),
+            new SpecialTokensText("<|im_start|>system
+          "),
          "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
          If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>user
-          ",
-            },
+          "),
          "Hi there!",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>assistant
-          ",
-            },
+          "),
          "Hello!",
-          ]
+          ])
        `);

-        expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(`
-          [
-            {
-              "type": "specialToken",
-              "value": "BOS",
-            },
-            {
-              "type": "specialTokensText",
-              "value": "<|im_start|>system
-          ",
-            },
+        expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(`
+          LlamaText([
+            new SpecialToken("BOS"),
+            new SpecialTokensText("<|im_start|>system
+          "),
          "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
          If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>user
-          ",
-            },
+          "),
          "Hi there!",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>assistant
-          ",
-            },
+          "),
          "Hello!",
-            {
-              "type": "specialTokensText",
-              "value": "<|im_end|>
+            new SpecialTokensText("<|im_end|>
          <|im_start|>assistant
-          ",
-            },
-          ]
+          "),
+          ])
        `);
    });
});

test/standalone/chatWrappers/FalconChatWrapper.test.ts

Lines changed: 16 additions & 28 deletions
@@ -35,30 +35,24 @@ describe("FalconChatWrapper", () => {
        const chatWrapper = new FalconChatWrapper();
        const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory});

-        expect(contextText.values).toMatchInlineSnapshot(`
-          [
-            {
-              "type": "specialToken",
-              "value": "BOS",
-            },
+        expect(contextText).toMatchInlineSnapshot(`
+          LlamaText([
+            new SpecialToken("BOS"),
          "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
          If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.

          User: Hi there!

          Assistant: Hello!",
-          ]
+          ])
        `);

        const chatWrapper2 = new FalconChatWrapper();
        const {contextText: contextText2} = chatWrapper2.generateContextState({chatHistory: conversationHistory2});

-        expect(contextText2.values).toMatchInlineSnapshot(`
-          [
-            {
-              "type": "specialToken",
-              "value": "BOS",
-            },
+        expect(contextText2).toMatchInlineSnapshot(`
+          LlamaText([
+            new SpecialToken("BOS"),
          "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
          If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.

@@ -69,7 +63,7 @@ describe("FalconChatWrapper", () => {
          User: How are you?

          Assistant: I'm good, how are you?",
-          ]
+          ])
        `);

        const chatWrapper3 = new FalconChatWrapper();
@@ -84,27 +78,21 @@ describe("FalconChatWrapper", () => {
            ]
        });

-        expect(contextText3.values).toMatchInlineSnapshot(`
-          [
-            {
-              "type": "specialToken",
-              "value": "BOS",
-            },
+        expect(contextText3).toMatchInlineSnapshot(`
+          LlamaText([
+            new SpecialToken("BOS"),
          "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
          If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.

          User: Hi there!

          Assistant: Hello!",
-          ]
+          ])
        `);

-        expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(`
-          [
-            {
-              "type": "specialToken",
-              "value": "BOS",
-            },
+        expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(`
+          LlamaText([
+            new SpecialToken("BOS"),
          "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.
          If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.

@@ -113,7 +101,7 @@ describe("FalconChatWrapper", () => {
          Assistant: Hello!

          Assistant: ",
-          ]
+          ])
        `);
    });
});
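
For expect(contextText) to print as LlamaText([...]) rather than the default object dump, the test setup presumably registers a custom snapshot serializer for LlamaText values. The sketch below is a minimal illustration using Vitest's standard expect.addSnapshotSerializer API; the import path, the instanceof checks, and the .values/.value shapes are assumptions inferred from the snapshots above, not necessarily how this repository implements it.

    // vitest setup file (hypothetical sketch, not this repository's actual setup)
    // Registers a snapshot serializer so LlamaText values print as readable
    // LlamaText([...]) constructor calls in toMatchInlineSnapshot() output.
    import {expect} from "vitest";
    // Assumed exports and shapes; the real import path and class internals may differ.
    import {LlamaText, SpecialToken, SpecialTokensText} from "node-llama-cpp";

    expect.addSnapshotSerializer({
        // Only handle LlamaText instances; everything else keeps the default printer.
        test(value) {
            return value instanceof LlamaText;
        },
        serialize(value, config, indentation, depth, refs, printer) {
            const itemIndentation = indentation + config.indent;

            const items = (value as LlamaText).values.map((item) => {
                // SpecialToken/SpecialTokensText are assumed to expose a `value` field,
                // mirroring the {"type", "value"} objects the old snapshots printed.
                // A real implementation would also print multi-line text across lines,
                // as seen in the diffs above, rather than escaping newlines.
                if (item instanceof SpecialToken)
                    return `new SpecialToken(${JSON.stringify(item.value)})`;
                else if (item instanceof SpecialTokensText)
                    return `new SpecialTokensText(${JSON.stringify(item.value)})`;

                // Plain strings keep the default pretty-format rendering.
                return printer(item, config, itemIndentation, depth + 1, refs);
            });

            return [
                "LlamaText([",
                ...items.map((item) => itemIndentation + item + ","),
                indentation + "])"
            ].join("\n");
        }
    });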

0 commit comments
