Skip to content

Commit 14283d2

Browse files
committed
merge: 'fix-dots' -> 'dev' (fixes: #2)
2 parents 2cefb9b + 26737d2 commit 14283d2

File tree

1 file changed

+45
-1
lines changed

1 file changed

+45
-1
lines changed

src/tokenizer.rs

Lines changed: 45 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -182,7 +182,13 @@ pub fn tokenize(content: &str) -> Result<Vec<Token>, TokenizationError> {
182182
iter = r_iter;
183183
continue;
184184
}
185-
_ => buffers.0.push(c),
185+
_ => {
186+
if !buffers.1.is_empty() {
187+
tokens.push(Token::Number(buffers.1.clone(), i - buffers.1.len()));
188+
buffers.1.clear();
189+
}
190+
buffers.0.push(c);
191+
}
186192
}
187193
} else {
188194
buffers.0.push(c);
@@ -356,4 +362,42 @@ mod tests {
356362
])
357363
);
358364
}
365+
366+
#[test]
fn test_dots() {
    // Two consecutive dots between numbers inside braces tokenize as a
    // Range token between the two Number tokens.
    assert_eq!(
        tokenize("{1..3}"),
        Ok(vec![
            Token::OBra(0),
            Token::Number("1".to_owned(), 1),
            Token::Range(2),
            Token::Number("3".to_owned(), 4),
            Token::CBra(5),
        ])
    );

    // A single dot between numbers is not a range: it becomes a Text token,
    // and the digits on either side stay separate Number tokens.
    assert_eq!(
        tokenize("{1.2.3,b}"),
        Ok(vec![
            Token::OBra(0),
            Token::Number("1".to_owned(), 1),
            Token::Text(".".to_owned(), 2),
            Token::Number("2".to_owned(), 3),
            Token::Text(".".to_owned(), 4),
            Token::Number("3".to_owned(), 5),
            Token::Comma(6),
            Token::Text("b".to_owned(), 7),
            Token::CBra(8),
        ])
    );

    // Dots between non-digit characters stay embedded in a single Text token.
    assert_eq!(
        tokenize("{a.b.c,d}"),
        Ok(vec![
            Token::OBra(0),
            Token::Text("a.b.c".to_owned(), 1),
            Token::Comma(6),
            Token::Text("d".to_owned(), 7),
            Token::CBra(8),
        ])
    );
}
359403
}

0 commit comments

Comments (0)