Skip to content

Commit df2db87

Browse files
committed
context keys: scanner: remove hand-written enum values
and remove hard-to-maintain, hard-to-debug tests
1 parent b6605d3 commit df2db87

File tree

2 files changed

+80
-93
lines changed

2 files changed

+80
-93
lines changed

src/vs/platform/contextkey/common/scanner.ts

Lines changed: 22 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -6,40 +6,28 @@
66
import { CharCode } from 'vs/base/common/charCode';
77
import { illegalArgument, illegalState } from 'vs/base/common/errors';
88

9-
export enum TokenType {
10-
LParen = '(',
11-
RParen = ')',
12-
13-
Neg = '!',
14-
15-
Eq = '==',
16-
NotEq = '!=',
17-
18-
Lt = '<',
19-
LtEq = '<=',
20-
Gt = '>',
21-
GtEq = '>=',
22-
23-
RegexOp = '=~',
24-
25-
RegexStr = 'RegexStr',
26-
27-
True = 'true',
28-
False = 'false',
29-
30-
In = 'in',
31-
Not = 'not',
32-
33-
And = '&&',
34-
Or = '||',
35-
36-
Str = 'Str',
37-
38-
QuotedStr = 'QuotedStr',
39-
40-
Error = 'ErrorToken',
41-
42-
EOF = 'EOF'
9+
export const enum TokenType {
10+
LParen,
11+
RParen,
12+
Neg,
13+
Eq,
14+
NotEq,
15+
Lt,
16+
LtEq,
17+
Gt,
18+
GtEq,
19+
RegexOp,
20+
RegexStr,
21+
True,
22+
False,
23+
In,
24+
Not,
25+
And,
26+
Or,
27+
Str,
28+
QuotedStr,
29+
Error,
30+
EOF,
4331
}
4432

4533
export type Token = {

src/vs/platform/contextkey/test/common/scanner.test.ts

Lines changed: 58 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -3,70 +3,69 @@
33
* Licensed under the MIT License. See License.txt in the project root for license information.
44
*--------------------------------------------------------------------------------------------*/
55
import * as assert from 'assert';
6-
import { Scanner, TokenType } from 'vs/platform/contextkey/common/scanner';
6+
import { Scanner, Token, TokenType } from 'vs/platform/contextkey/common/scanner';
77

88
suite('Context Key Scanner', () => {
9-
function scan(input: string) {
10-
return (new Scanner()).reset(input).scan();
11-
}
12-
13-
suite('scanning a single token', () => {
14-
function assertTokenTypes(str: string, ...expected: TokenType[]) {
15-
const tokens = scan(str);
16-
expected.push(TokenType.EOF);
17-
assert.deepStrictEqual(tokens.length, expected.length, 'len: ' + str);
18-
tokens.forEach((token, i) => {
19-
assert.deepStrictEqual(token.type, expected[i], token.lexeme ? token.lexeme : token.type);
20-
});
9+
function tokenTypeToStr(token: Token) {
10+
switch (token.type) {
11+
case TokenType.LParen:
12+
return '(';
13+
case TokenType.RParen:
14+
return ')';
15+
case TokenType.Neg:
16+
return '!';
17+
case TokenType.Eq:
18+
return '==';
19+
case TokenType.NotEq:
20+
return '!=';
21+
case TokenType.Lt:
22+
return '<';
23+
case TokenType.LtEq:
24+
return '<=';
25+
case TokenType.Gt:
26+
return '>';
27+
case TokenType.GtEq:
28+
return '>=';
29+
case TokenType.RegexOp:
30+
return '=~';
31+
case TokenType.RegexStr:
32+
return 'RegexStr';
33+
case TokenType.True:
34+
return 'true';
35+
case TokenType.False:
36+
return 'false';
37+
case TokenType.In:
38+
return 'in';
39+
case TokenType.Not:
40+
return 'not';
41+
case TokenType.And:
42+
return '&&';
43+
case TokenType.Or:
44+
return '||';
45+
case TokenType.Str:
46+
return 'Str';
47+
case TokenType.QuotedStr:
48+
return 'QuotedStr';
49+
case TokenType.Error:
50+
return 'ErrorToken';
51+
case TokenType.EOF:
52+
return 'EOF';
2153
}
2254

23-
test('single', () => {
24-
assertTokenTypes('(', TokenType.LParen);
25-
assertTokenTypes(')', TokenType.RParen);
26-
27-
assertTokenTypes('!', TokenType.Neg);
28-
29-
assertTokenTypes('==', TokenType.Eq);
30-
assertTokenTypes('!=', TokenType.NotEq);
31-
32-
assertTokenTypes('<', TokenType.Lt);
33-
assertTokenTypes('<=', TokenType.LtEq);
34-
assertTokenTypes('>', TokenType.Gt);
35-
assertTokenTypes('>=', TokenType.GtEq);
36-
37-
assertTokenTypes('=~', TokenType.RegexOp);
38-
39-
assertTokenTypes('=~', TokenType.RegexOp);
40-
41-
assertTokenTypes('/foo/', TokenType.RegexStr);
42-
assertTokenTypes('/foo/i', TokenType.RegexStr);
43-
assertTokenTypes('/foo/gm', TokenType.RegexStr);
44-
45-
assertTokenTypes('true', TokenType.True);
46-
assertTokenTypes('false', TokenType.False);
47-
48-
assertTokenTypes('in', TokenType.In);
49-
assertTokenTypes('not', TokenType.Not);
50-
assertTokenTypes('not in', TokenType.Not, TokenType.In);
51-
52-
assertTokenTypes('&&', TokenType.And);
53-
assertTokenTypes('||', TokenType.Or);
54-
55-
assertTokenTypes('a', TokenType.Str);
56-
assertTokenTypes('a.b', TokenType.Str);
57-
assertTokenTypes('.b.c', TokenType.Str);
58-
assertTokenTypes('Foo<C-r>', TokenType.Str);
59-
assertTokenTypes('foo.bar<C-shift+2>', TokenType.Str);
60-
assertTokenTypes('foo.bar:zee', TokenType.Str);
61-
62-
assertTokenTypes('\'hello world\'', TokenType.QuotedStr);
63-
64-
assertTokenTypes(' ');
65-
assertTokenTypes('\n');
66-
assertTokenTypes(' ');
67-
assertTokenTypes(' \n ');
55+
}
56+
function scan(input: string) {
57+
return (new Scanner()).reset(input).scan().map((token: Token) => {
58+
return 'lexeme' in token
59+
? {
60+
type: tokenTypeToStr(token),
61+
offset: token.offset,
62+
lexeme: token.lexeme
63+
} : {
64+
type: tokenTypeToStr(token),
65+
offset: token.offset
66+
};
6867
});
69-
});
68+
}
7069

7170
suite('scanning various cases of context keys', () => {
7271

0 commit comments

Comments
 (0)