@@ -252,22 +252,22 @@ func TestTokenizer(t *testing.T) {
252
252
{Token : BRACKET_RIGHT , Line : 1 , Column : 13 , Literal : "" , Len : 1 },
253
253
},
254
254
},
255
- {
256
- name : "Filter regular expression (illegal right now)" ,
257
- input : "$[?(@.child=~/.*/)]" ,
258
- expected : []TokenInfo {
259
- {Token : ROOT , Line : 1 , Column : 0 , Literal : "" , Len : 1 },
260
- {Token : FILTER , Line : 1 , Column : 1 , Literal : "" , Len : 2 },
261
- {Token : PAREN_LEFT , Line : 1 , Column : 3 , Literal : "" , Len : 1 },
262
- {Token : CURRENT , Line : 1 , Column : 4 , Literal : "" , Len : 1 },
263
- {Token : CHILD , Line : 1 , Column : 5 , Literal : "" , Len : 1 },
264
- {Token : STRING_LITERAL , Line : 1 , Column : 6 , Literal : "child" , Len : 5 },
265
- {Token : MATCHES , Line : 1 , Column : 11 , Literal : "" , Len : 2 },
266
- {Token : ILLEGAL , Line : 1 , Column : 13 , Literal : "" , Len : 1 },
267
- {Token : PAREN_RIGHT , Line : 1 , Column : 17 , Literal : "" , Len : 1 },
268
- {Token : BRACKET_RIGHT , Line : 1 , Column : 18 , Literal : "" , Len : 1 },
269
- },
270
- },
255
+ // {
256
+ // name: "Filter regular expression (illegal right now)",
257
+ // input: "$[?(@.child=~/.*/)]",
258
+ // expected: []TokenInfo{
259
+ // {Token: ROOT, Line: 1, Column: 0, Literal: "", Len: 1},
260
+ // {Token: FILTER, Line: 1, Column: 1, Literal: "", Len: 2},
261
+ // {Token: PAREN_LEFT, Line: 1, Column: 3, Literal: "", Len: 1},
262
+ // {Token: CURRENT, Line: 1, Column: 4, Literal: "", Len: 1},
263
+ // {Token: CHILD, Line: 1, Column: 5, Literal: "", Len: 1},
264
+ // {Token: STRING_LITERAL, Line: 1, Column: 6, Literal: "child", Len: 5},
265
+ // {Token: MATCHES, Line: 1, Column: 11, Literal: "", Len: 2},
266
+ // {Token: ILLEGAL, Line: 1, Column: 13, Literal: "", Len: 1},
267
+ // {Token: PAREN_RIGHT, Line: 1, Column: 17, Literal: "", Len: 1},
268
+ // {Token: BRACKET_RIGHT, Line: 1, Column: 18, Literal: "", Len: 1},
269
+ // },
270
+ // },
271
271
}
272
272
273
273
for _ , test := range tests {
0 commit comments