Commit bb5bfb8

test: improve lexer test case
1 parent 4abc54b commit bb5bfb8

17 files changed (+1199, -986 lines)

lexer/lib.rs

Lines changed: 16 additions & 33 deletions
@@ -165,7 +165,7 @@ mod tests {
     use crate::Lexer;
     use crate::token::TokenKind;
     use super::*;
-    use insta::assert_debug_snapshot;
+    use insta::*;

     fn test_token_set(l: &mut Lexer) -> Vec<Token> {
         let mut token_vs: Vec<Token> = vec![];
@@ -181,65 +181,51 @@ mod tests {
         token_vs
     }

-    #[test]
-    fn test_lexer_simple() {
-        let mut l = Lexer::new("=+(){},:;");
+    pub fn test_lexer_common(name: &str, input: &str) {
+        let mut l = Lexer::new(input);
         let token_vs = test_token_set(&mut l);

-        assert_debug_snapshot!(token_vs)
+        assert_snapshot!(name, serde_json::to_string_pretty(&token_vs).unwrap(), input);
     }

     #[test]
-    fn test_lexer_let() {
-        let mut l = Lexer::new("let x=5");
-        let token_vs = test_token_set(&mut l);
+    fn test_lexer_simple() {
+        test_lexer_common("simple", "=+(){},:;");
+    }

-        assert_debug_snapshot!(token_vs)
+    #[test]
+    fn test_lexer_let() {
+        test_lexer_common("let", "let x=5");
     }

     #[test]
     fn test_lexer_let_with_space() {
-        let mut l = Lexer::new("let x = 5");
-        let token_vs = test_token_set(&mut l);
-
-        assert_debug_snapshot!(token_vs)
+        test_lexer_common("let_with_space", "let x = 5");
     }

     #[test]
     fn test_lexer_string() {
-        let mut l = Lexer::new(r#""a""#);
-        let token_vs = test_token_set(&mut l);
-
-        assert_debug_snapshot!(token_vs)
+        test_lexer_common("string", r#""a""#);
     }

     #[test]
     fn test_lexer_array() {
-        let mut l = Lexer::new("[3]");
-        let token_vs = test_token_set(&mut l);
-
-        assert_debug_snapshot!(token_vs)
+        test_lexer_common("array", "[3]");
     }

     #[test]
     fn test_lexer_hash() {
-        let mut l = Lexer::new(r#"{"one": 1, "two": 2, "three": 3}"#);
-        let token_vs = test_token_set(&mut l);
-
-        assert_debug_snapshot!(token_vs)
+        test_lexer_common("hash", r#"{"one": 1, "two": 2, "three": 3}"#);
     }

     #[test]
     fn test_lexer_bool() {
-        let mut l = Lexer::new("let y=true");
-        let token_vs = test_token_set(&mut l);
-
-        assert_debug_snapshot!(token_vs)
+        test_lexer_common("bool", "let y=true");
     }

     #[test]
     fn test_lexer_complex() {
-        let mut l = Lexer::new("let five = 5;
+        test_lexer_common("complex", "let five = 5;
 let ten = 10;

 let add = fn(x, y) {
@@ -258,8 +244,5 @@ if (5 < 10) {

 10 == 10;
 10 != 9;");
-        let token_vs = test_token_set(&mut l);
-
-        assert_debug_snapshot!(token_vs)
     }
 }
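
With the shared `test_lexer_common` helper in place, adding coverage for another input is a single named call; insta writes the named snapshot on the first run, and the pending snapshot can then be accepted with `cargo insta review`. A hypothetical extra case (not part of this commit; the name and input below are made up and use only constructs the existing tests already exercise) might look like:

```rust
// Hypothetical example, not part of this commit: a new case reuses the
// test_lexer_common helper, so the only artifact to review is the named
// snapshot that insta generates on the first run.
#[test]
fn test_lexer_let_array() {
    test_lexer_common("let_array", "let z = [1, 2];");
}
```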
New snapshot file (43 additions & 0 deletions)
@@ -0,0 +1,43 @@
+---
+source: lexer/lib.rs
+expression: "[3]"
+---
+[
+  {
+    "kind": {
+      "type": "LBRACKET"
+    },
+    "span": {
+      "start": 0,
+      "end": 1
+    }
+  },
+  {
+    "kind": {
+      "type": "INT",
+      "value": 3
+    },
+    "span": {
+      "start": 1,
+      "end": 2
+    }
+  },
+  {
+    "kind": {
+      "type": "RBRACKET"
+    },
+    "span": {
+      "start": 2,
+      "end": 3
+    }
+  },
+  {
+    "kind": {
+      "type": "EOF"
+    },
+    "span": {
+      "start": 3,
+      "end": 4
+    }
+  }
+]
New snapshot file (54 additions & 0 deletions)
@@ -0,0 +1,54 @@
+---
+source: lexer/lib.rs
+expression: let y=true
+---
+[
+  {
+    "kind": {
+      "type": "LET"
+    },
+    "span": {
+      "start": 0,
+      "end": 3
+    }
+  },
+  {
+    "kind": {
+      "type": "IDENTIFIER",
+      "value": {
+        "name": "y"
+      }
+    },
+    "span": {
+      "start": 4,
+      "end": 5
+    }
+  },
+  {
+    "kind": {
+      "type": "ASSIGN"
+    },
+    "span": {
+      "start": 5,
+      "end": 6
+    }
+  },
+  {
+    "kind": {
+      "type": "TRUE"
+    },
+    "span": {
+      "start": 6,
+      "end": 10
+    }
+  },
+  {
+    "kind": {
+      "type": "EOF"
+    },
+    "span": {
+      "start": 10,
+      "end": 11
+    }
+  }
+]
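
The new assertion serializes the token stream with `serde_json::to_string_pretty`, and both snapshots tag each `kind` with a `"type"` field. The crate's actual `Token`, `TokenKind`, and `Span` definitions are not part of this diff, so the following is only a sketch, assuming serde derives with an internally tagged enum, that would produce JSON of the shape shown above:

```rust
// Sketch only: the real definitions in the lexer crate are not shown in this
// commit. These derives are an assumption chosen to reproduce the snapshot
// shape above, where each token kind carries a "type" tag and spans are byte
// offsets into the input.
use serde::Serialize;

#[derive(Debug, Serialize)]
pub struct Token {
    pub kind: TokenKind,
    pub span: Span,
}

#[derive(Debug, Serialize)]
pub struct Span {
    pub start: usize,
    pub end: usize,
}

#[derive(Debug, Serialize)]
pub struct Identifier {
    pub name: String,
}

#[allow(non_camel_case_types)]
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
pub enum TokenKind {
    LET,
    ASSIGN,
    TRUE,
    LBRACKET,
    RBRACKET,
    EOF,
    INT { value: i64 },
    IDENTIFIER { value: Identifier },
}
```

Under these assumptions, `serde_json::to_string_pretty(&token_vs).unwrap()` produces output of the shape stored in the snapshot files, which diffs more readably than the previous `assert_debug_snapshot!` output.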
