/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
|
5 | 5 | import * as assert from 'assert';
|
6 |
| -import { Scanner, TokenType } from 'vs/platform/contextkey/common/scanner'; |
| 6 | +import { Scanner, Token, TokenType } from 'vs/platform/contextkey/common/scanner'; |
7 | 7 |
|
8 | 8 | suite('Context Key Scanner', () => {
|
9 |
| - function scan(input: string) { |
10 |
| - return (new Scanner()).reset(input).scan(); |
11 |
| - } |
12 |
| - |
13 |
| - suite('scanning a single token', () => { |
14 |
| - function assertTokenTypes(str: string, ...expected: TokenType[]) { |
15 |
| - const tokens = scan(str); |
16 |
| - expected.push(TokenType.EOF); |
17 |
| - assert.deepStrictEqual(tokens.length, expected.length, 'len: ' + str); |
18 |
| - tokens.forEach((token, i) => { |
19 |
| - assert.deepStrictEqual(token.type, expected[i], token.lexeme ? token.lexeme : token.type); |
20 |
| - }); |
| 9 | + function tokenTypeToStr(token: Token) { |
| 10 | + switch (token.type) { |
| 11 | + case TokenType.LParen: |
| 12 | + return '('; |
| 13 | + case TokenType.RParen: |
| 14 | + return ')'; |
| 15 | + case TokenType.Neg: |
| 16 | + return '!'; |
| 17 | + case TokenType.Eq: |
| 18 | + return '=='; |
| 19 | + case TokenType.NotEq: |
| 20 | + return '!='; |
| 21 | + case TokenType.Lt: |
| 22 | + return '<'; |
| 23 | + case TokenType.LtEq: |
| 24 | + return '<='; |
| 25 | + case TokenType.Gt: |
| 26 | + return '>'; |
| 27 | + case TokenType.GtEq: |
| 28 | + return '>='; |
| 29 | + case TokenType.RegexOp: |
| 30 | + return '=~'; |
| 31 | + case TokenType.RegexStr: |
| 32 | + return 'RegexStr'; |
| 33 | + case TokenType.True: |
| 34 | + return 'true'; |
| 35 | + case TokenType.False: |
| 36 | + return 'false'; |
| 37 | + case TokenType.In: |
| 38 | + return 'in'; |
| 39 | + case TokenType.Not: |
| 40 | + return 'not'; |
| 41 | + case TokenType.And: |
| 42 | + return '&&'; |
| 43 | + case TokenType.Or: |
| 44 | + return '||'; |
| 45 | + case TokenType.Str: |
| 46 | + return 'Str'; |
| 47 | + case TokenType.QuotedStr: |
| 48 | + return 'QuotedStr'; |
| 49 | + case TokenType.Error: |
| 50 | + return 'ErrorToken'; |
| 51 | + case TokenType.EOF: |
| 52 | + return 'EOF'; |
21 | 53 | }
|
22 | 54 |
|
23 |
| - test('single', () => { |
24 |
| - assertTokenTypes('(', TokenType.LParen); |
25 |
| - assertTokenTypes(')', TokenType.RParen); |
26 |
| - |
27 |
| - assertTokenTypes('!', TokenType.Neg); |
28 |
| - |
29 |
| - assertTokenTypes('==', TokenType.Eq); |
30 |
| - assertTokenTypes('!=', TokenType.NotEq); |
31 |
| - |
32 |
| - assertTokenTypes('<', TokenType.Lt); |
33 |
| - assertTokenTypes('<=', TokenType.LtEq); |
34 |
| - assertTokenTypes('>', TokenType.Gt); |
35 |
| - assertTokenTypes('>=', TokenType.GtEq); |
36 |
| - |
37 |
| - assertTokenTypes('=~', TokenType.RegexOp); |
38 |
| - |
39 |
| - assertTokenTypes('=~', TokenType.RegexOp); |
40 |
| - |
41 |
| - assertTokenTypes('/foo/', TokenType.RegexStr); |
42 |
| - assertTokenTypes('/foo/i', TokenType.RegexStr); |
43 |
| - assertTokenTypes('/foo/gm', TokenType.RegexStr); |
44 |
| - |
45 |
| - assertTokenTypes('true', TokenType.True); |
46 |
| - assertTokenTypes('false', TokenType.False); |
47 |
| - |
48 |
| - assertTokenTypes('in', TokenType.In); |
49 |
| - assertTokenTypes('not', TokenType.Not); |
50 |
| - assertTokenTypes('not in', TokenType.Not, TokenType.In); |
51 |
| - |
52 |
| - assertTokenTypes('&&', TokenType.And); |
53 |
| - assertTokenTypes('||', TokenType.Or); |
54 |
| - |
55 |
| - assertTokenTypes('a', TokenType.Str); |
56 |
| - assertTokenTypes('a.b', TokenType.Str); |
57 |
| - assertTokenTypes('.b.c', TokenType.Str); |
58 |
| - assertTokenTypes('Foo<C-r>', TokenType.Str); |
59 |
| - assertTokenTypes('foo.bar<C-shift+2>', TokenType.Str); |
60 |
| - assertTokenTypes('foo.bar:zee', TokenType.Str); |
61 |
| - |
62 |
| - assertTokenTypes('\'hello world\'', TokenType.QuotedStr); |
63 |
| - |
64 |
| - assertTokenTypes(' '); |
65 |
| - assertTokenTypes('\n'); |
66 |
| - assertTokenTypes(' '); |
67 |
| - assertTokenTypes(' \n '); |
| 55 | + } |
| 56 | + function scan(input: string) { |
| 57 | + return (new Scanner()).reset(input).scan().map((token: Token) => { |
| 58 | + return 'lexeme' in token |
| 59 | + ? { |
| 60 | + type: tokenTypeToStr(token), |
| 61 | + offset: token.offset, |
| 62 | + lexeme: token.lexeme |
| 63 | + } : { |
| 64 | + type: tokenTypeToStr(token), |
| 65 | + offset: token.offset |
| 66 | + }; |
68 | 67 | });
|
69 |
| - }); |
| 68 | + } |
70 | 69 |
|
71 | 70 | suite('scanning various cases of context keys', () => {
|
72 | 71 |
|
|
0 commit comments