
Commit f7c553b

conditional syntax
1 parent 002464c commit f7c553b

File tree: 2 files changed, +86 -30 lines

pkg/token.go

Lines changed: 85 additions & 29 deletions
@@ -38,36 +38,56 @@ const (
 	DOT
 	PIPE
 	QUESTION
+	EQUALITY
+	INEQUALITY
+	GREATER_THAN
+	GREATER_THAN_OR_EQUAL
+	LESS_THAN
+	LESS_THAN_OR_EQUAL
+	AND
+	OR
+	NOT
+	MATCHES_REGEX
 )

 var tokens = [...]string{
-	ILLEGAL:       "ILLEGAL",
-	EOF:           "EOF",
-	LITERAL:       "LITERAL",
-	NUMBER:        "NUMBER",
-	STRING:        "STRING",
-	BOOLEAN:       "BOOLEAN",
-	NULL:          "NULL",
-	ROOT:          "$",
-	CURRENT:       "@",
-	WILDCARD:      "*",
-	RECURSIVE:     "..",
-	UNION:         ",",
-	CHILD:         ".",
-	SUBSCRIPT:     "[]",
-	SLICE:         ":",
-	FILTER:        "?",
-	PAREN_LEFT:    "(",
-	PAREN_RIGHT:   ")",
-	BRACKET_LEFT:  "[",
-	BRACKET_RIGHT: "]",
-	BRACE_LEFT:    "{",
-	BRACE_RIGHT:   "}",
-	COLON:         ":",
-	COMMA:         ",",
-	DOT:           ".",
-	PIPE:          "|",
-	QUESTION:      "?",
+	ILLEGAL:               "ILLEGAL",
+	EOF:                   "EOF",
+	LITERAL:               "LITERAL",
+	NUMBER:                "NUMBER",
+	STRING:                "STRING",
+	BOOLEAN:               "BOOLEAN",
+	NULL:                  "NULL",
+	ROOT:                  "$",
+	CURRENT:               "@",
+	WILDCARD:              "*",
+	RECURSIVE:             "..",
+	UNION:                 ",",
+	CHILD:                 ".",
+	SUBSCRIPT:             "[]",
+	SLICE:                 ":",
+	FILTER:                "?",
+	PAREN_LEFT:            "(",
+	PAREN_RIGHT:           ")",
+	BRACKET_LEFT:          "[",
+	BRACKET_RIGHT:         "]",
+	BRACE_LEFT:            "{",
+	BRACE_RIGHT:           "}",
+	COLON:                 ":",
+	COMMA:                 ",",
+	DOT:                   ".",
+	PIPE:                  "|",
+	QUESTION:              "?",
+	EQUALITY:              "==",
+	INEQUALITY:            "!=",
+	GREATER_THAN:          ">",
+	GREATER_THAN_OR_EQUAL: ">=",
+	LESS_THAN:             "<",
+	LESS_THAN_OR_EQUAL:    "<=",
+	AND:                   "&&",
+	OR:                    "||",
+	NOT:                   "!",
+	MATCHES_REGEX:         "=~",
 }

 // String returns the string representation of the token.
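Two notes on this hunk. First, the pre-existing entries are presumably removed and re-added only because the longer new keys (GREATER_THAN_OR_EQUAL and friends) force gofmt to realign the value column. Second, the `String` method referenced by the trailing comment is not changed by this commit; since `tokens` is indexed by the token constants, it most likely just looks up the corresponding entry. A minimal, self-contained sketch of that relationship, with only a handful of constants reproduced and the lookup logic assumed rather than taken from the diff:

```go
package main

import "fmt"

// Token stands in for the package's token type; the constants mirror the
// const block in pkg/token.go, but only a few are reproduced here.
type Token int

const (
	ILLEGAL Token = iota
	LESS_THAN
	MATCHES_REGEX
)

// tokens maps each constant to its display string, in the same shape as the
// array in the diff above (abbreviated).
var tokens = [...]string{
	ILLEGAL:       "ILLEGAL",
	LESS_THAN:     "<",
	MATCHES_REGEX: "=~",
}

// String is a sketch of the method the comment refers to: an index into the
// tokens array. The bounds check is an assumption, not part of this commit.
func (t Token) String() string {
	if int(t) >= 0 && int(t) < len(tokens) {
		return tokens[t]
	}
	return "ILLEGAL"
}

func main() {
	fmt.Println(LESS_THAN, MATCHES_REGEX) // prints: < =~
}
```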
@@ -178,9 +198,46 @@ func (t *Tokenizer) Tokenize() []TokenInfo {
 		t.addToken(BRACE_RIGHT, "")
 	case ch == '|':
 		t.addToken(PIPE, "")
+	case ch == '=':
+		if t.peek() == '=' {
+			t.addToken(EQUALITY, "")
+		} else if t.peek() == '~' {
+			t.addToken(MATCHES_REGEX, "")
+		} else {
+			t.addToken(ILLEGAL, string(ch))
+		}
+	case ch == '!':
+		if t.peek() == '=' {
+			t.addToken(INEQUALITY, "")
+		} else {
+			t.addToken(NOT, "")
+		}
+	case ch == '>':
+		if t.peek() == '=' {
+			t.addToken(GREATER_THAN_OR_EQUAL, "")
+		} else {
+			t.addToken(GREATER_THAN, "")
+		}
+	case ch == '<':
+		if t.peek() == '=' {
+			t.addToken(LESS_THAN_OR_EQUAL, "")
+		} else {
+			t.addToken(LESS_THAN, "")
+		}
+	case ch == '&':
+		if t.peek() == '&' {
+			t.addToken(AND, "")
+		} else {
+			t.addToken(ILLEGAL, string(ch))
+		}
+	case ch == '|':
+		if t.peek() == '|' {
+			t.addToken(OR, "")
+		} else {
+			t.addToken(ILLEGAL, string(ch))
+		}
 	case ch == '"' || ch == '\'':
 		t.scanString(rune(ch))
-
 	case isDigit(ch):
 		t.scanNumber()
 	case isLetter(ch):
@@ -264,7 +321,6 @@ func (t *Tokenizer) scanLiteral() {
 	}
 	t.pos = len(t.input) - 1
 	t.column = len(t.input) - 1
-
 }

 func (t *Tokenizer) skipWhitespace() {
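Each of the new cases in Tokenize follows the same single-character lookahead pattern: peek at the next character and emit either the two-character operator or its one-character fallback. One wrinkle worth noting is that a Go expression switch evaluates its cases top to bottom, so the pre-existing `case ch == '|':` (PIPE) will always match before the newly added `case ch == '|':` that checks for OR. A standalone sketch of the lookahead pattern, with the two pipe forms folded into one decision; the names and the width bookkeeping are illustrative, not the package's API:

```go
package main

import "fmt"

// matchOperator sketches the lookahead used in Tokenize: given the current
// character and the one after it, it picks the operator token and reports
// how many characters the operator spans. Illustrative names only.
func matchOperator(ch, next byte) (token string, width int) {
	switch ch {
	case '=':
		if next == '=' {
			return "EQUALITY", 2
		}
		if next == '~' {
			return "MATCHES_REGEX", 2
		}
		return "ILLEGAL", 1
	case '!':
		if next == '=' {
			return "INEQUALITY", 2
		}
		return "NOT", 1
	case '>':
		if next == '=' {
			return "GREATER_THAN_OR_EQUAL", 2
		}
		return "GREATER_THAN", 1
	case '<':
		if next == '=' {
			return "LESS_THAN_OR_EQUAL", 2
		}
		return "LESS_THAN", 1
	case '&':
		if next == '&' {
			return "AND", 2
		}
		return "ILLEGAL", 1
	case '|':
		// Folding PIPE and OR into one case avoids the duplicate
		// `case ch == '|'` seen in the diff, where the second branch
		// can never be reached.
		if next == '|' {
			return "OR", 2
		}
		return "PIPE", 1
	}
	return "ILLEGAL", 1
}

func main() {
	fmt.Println(matchOperator('<', '=')) // LESS_THAN_OR_EQUAL 2
	fmt.Println(matchOperator('|', '|')) // OR 2
	fmt.Println(matchOperator('!', 'x')) // NOT 1
}
```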

pkg/token_test.go

Lines changed: 1 addition & 1 deletion
@@ -99,7 +99,7 @@ func TestTokenizer(t *testing.T) {
 		{Token: CURRENT, Line: 1, Column: 15, Literal: ""},
 		{Token: CHILD, Line: 1, Column: 16, Literal: ""},
 		{Token: LITERAL, Line: 1, Column: 17, Literal: "price"},
-		{Token: LITERAL, Line: 1, Column: 23, Literal: "<"},
+		{Token: LESS_THAN, Line: 1, Column: 23, Literal: ""},
 		{Token: NUMBER, Line: 1, Column: 25, Literal: "10"},
 		{Token: PAREN_RIGHT, Line: 1, Column: 27, Literal: ""},
 		{Token: BRACKET_RIGHT, Line: 1, Column: 28, Literal: ""},
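The corrected expectation matches how the other punctuation tokens are represented: operator tokens carry their meaning in the token kind with an empty Literal, while only identifiers, numbers, and strings keep literal text. Against an input whose tail looks like `@.price < 10)]` (the full test input is not shown in this diff), the fragment now lexes as sketched below:

```go
package main

import "fmt"

func main() {
	// Token stream for the tail "@.price < 10)]" as the updated test expects
	// it; kinds are written as plain strings here since this snippet does not
	// import the package under test.
	stream := []struct{ Kind, Literal string }{
		{"CURRENT", ""},      // @
		{"CHILD", ""},        // .
		{"LITERAL", "price"}, // identifier keeps its text
		{"LESS_THAN", ""},    // previously a LITERAL carrying "<"
		{"NUMBER", "10"},
		{"PAREN_RIGHT", ""},
		{"BRACKET_RIGHT", ""},
	}
	for _, tok := range stream {
		fmt.Printf("%s %q\n", tok.Kind, tok.Literal)
	}
}
```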
