@@ -18,11 +18,10 @@ class Lexer(object):
         ',': 'comma',
         ':': 'colon',
         '@': 'current',
-        '&': 'expref',
         '(': 'lparen',
         ')': 'rparen',
         '{': 'lbrace',
-        '}': 'rbrace'
+        '}': 'rbrace',
     }

     def tokenize(self, expression):
@@ -60,6 +59,8 @@ def tokenize(self, expression):
             yield self._consume_raw_string_literal()
         elif self._current == '|':
             yield self._match_or_else('|', 'or', 'pipe')
+        elif self._current == '&':
+            yield self._match_or_else('&', 'and', 'expref')
         elif self._current == '`':
             yield self._consume_literal()
         elif self._current in self.START_NUMBER:
@@ -76,7 +77,7 @@ def tokenize(self, expression):
         elif self._current == '>':
             yield self._match_or_else('=', 'gte', 'gt')
         elif self._current == '!':
-            yield self._match_or_else('=', 'ne', 'unknown')
+            yield self._match_or_else('=', 'ne', 'not')
         elif self._current == '=':
             yield self._match_or_else('=', 'eq', 'unknown')
         else:
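The change routes '&' through _match_or_else instead of treating it as a simple single-character token, so '&&' can be emitted as an 'and' token while a lone '&' still yields 'expref'; the later hunk likewise makes a lone '!' yield 'not' rather than 'unknown'. The sketch below illustrates that one-character lookahead pattern. It is an illustrative stand-in, not the library's actual code: TinyLexer, its token dict shape, and everything other than the _match_or_else name and argument order are assumptions.

# Minimal, self-contained sketch of the lookahead pattern, under the
# assumptions stated above. Not jmespath's implementation.
class TinyLexer:
    def __init__(self, expression):
        self._chars = iter(expression)
        self._current = next(self._chars, None)

    def _next(self):
        # Advance to the next character, or None at end of input.
        self._current = next(self._chars, None)

    def _match_or_else(self, expected, match_type, else_type):
        # Consume the current character, then peek at the next one:
        # if it equals `expected`, emit the two-character token type,
        # otherwise fall back to the single-character token type.
        first = self._current
        self._next()
        if self._current == expected:
            value = first + self._current
            self._next()
            return {'type': match_type, 'value': value}
        return {'type': else_type, 'value': first}


print(TinyLexer('&&')._match_or_else('&', 'and', 'expref'))  # {'type': 'and', 'value': '&&'}
print(TinyLexer('&')._match_or_else('&', 'and', 'expref'))   # {'type': 'expref', 'value': '&'}
print(TinyLexer('!=')._match_or_else('=', 'ne', 'not'))      # {'type': 'ne', 'value': '!='}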