@@ -1,13 +1,14 @@
 import unittest
 
 from parser.lexer import Tokenizer
-from parser.lexer.tokens import WhitespaceToken, StringToken, EofToken, NumberToken
+from parser.lexer.tokens import (
+    WhitespaceToken, StringToken, EofToken, NumberToken, SemicolonToken)
 from parser.common import StrRegion
 from parser.tokens import IdentNameToken, DotToken, AttrNameToken, OpToken
 from test.common import CommonTestCase, TokenStreamFlag
 
 
-class MyTestCase(CommonTestCase):
+class TestInternalFuncs(CommonTestCase):
     def test__t_ident_name__at_end(self):
         t = Tokenizer('abc')
         end = t._t_ident_name(0)
@@ -63,6 +64,8 @@ def test__t_number(self): |
         self.assertTokensEqual(t, [NumberToken(StrRegion(0, 2))])
         self.assertEqual(end, 2)
 
+
+class TestFullTokenizer(CommonTestCase):
     def test_mod_supported(self):
         t = Tokenizer('a+b%2')
         t.tokenize()
@@ -93,6 +96,21 @@ def test_tokenize_concat_works(self):
             EofToken(StrRegion(9, 9)),
         ], TokenStreamFlag.CONTENT)
 
+    def test_ws_at_end(self):
+        t = Tokenizer('let a =1; \n')
+        t.tokenize()
+        self.assertTokensEqual(t, [
+            IdentNameToken(),
+            WhitespaceToken(),
+            IdentNameToken(),
+            WhitespaceToken(),
+            OpToken(op_str='='),
+            NumberToken(),
+            SemicolonToken(),
+            WhitespaceToken(),
+            EofToken(),
+        ], TokenStreamFlag.FULL, check_regions=False)
+
 
 if __name__ == '__main__':
     unittest.main()
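
The new `test_ws_at_end` case targets a classic scanner edge case: a token run that extends all the way to end of input. A scanner that only closes a token when it reads the *next* non-matching character can silently drop the final `WhitespaceToken` before `EofToken`. A minimal sketch of the correct bounds check, using a made-up helper name rather than the project's actual `Tokenizer` internals:

```python
def scan_whitespace(src: str, start: int) -> int:
    """Return the end index (exclusive) of a whitespace run at `start`.

    Checking `end < len(src)` before reading src[end] ensures a run that
    reaches end-of-input is still fully consumed, so the caller can emit
    a WhitespaceToken even when the source ends in whitespace.
    """
    end = start
    while end < len(src) and src[end] in ' \t\r\n':
        end += 1
    return end

# The run starting at index 9 of 'let a =1; \n' spans ' \n' to index 11.
assert scan_whitespace('let a =1; \n', 9) == 11
```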
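The `check_regions=False` flag lets the expected tokens above be written without `StrRegion` arguments. The real comparison lives in `CommonTestCase.assertTokensEqual` in `test/common.py`; the following is only an assumed sketch of what a region-agnostic check could look like, not the project's implementation:

```python
# Hypothetical sketch: compare two token streams by token type (plus
# non-positional attributes such as OpToken.op_str), consulting each
# token's assumed `region` attribute only when check_regions is True.
def tokens_match(actual, expected, check_regions=True):
    if len(actual) != len(expected):
        return False
    for got, want in zip(actual, expected):
        if type(got) is not type(want):
            return False
        if getattr(want, 'op_str', None) is not None:
            if got.op_str != want.op_str:
                return False
        if check_regions and got.region != want.region:
            return False
    return True
```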