diff --git a/compliance b/compliance
index 86fe910..ffa8a5a 160000
--- a/compliance
+++ b/compliance
@@ -1 +1 @@
-Subproject commit 86fe9102fda76890ef0914b546c1ef821eb6e758
+Subproject commit ffa8a5ac6eb73c4297ff7d997ac5786ee4309d59
diff --git a/jp_test.go b/jp_test.go
index c6304d3..cffd2f5 100644
--- a/jp_test.go
+++ b/jp_test.go
@@ -28,11 +28,6 @@ type TestCase struct {
 var excludeList = []string{
     "legacy/legacy-literal.json",
     "benchmarks.json",
-    "function_let.json",
-    "lexical_scoping.json",
-
-    // this test currently fails
-    "literal.json",
 }
 
 func excluded(path string) bool {
diff --git a/pkg/api/api.go b/pkg/api/api.go
index 73d7247..a58c4eb 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -53,7 +53,7 @@ func MustCompile(expression string, funcs ...functions.FunctionEntry) JMESPath {
 
 // Search evaluates a JMESPath expression against input data and returns the result.
 func (jp jmesPath) Search(data interface{}) (interface{}, error) {
-    intr := interpreter.NewInterpreter(data, jp.caller)
+    intr := interpreter.NewInterpreter(data, jp.caller, nil)
     return intr.Execute(jp.node, data)
 }
 
diff --git a/pkg/api/api_test.go b/pkg/api/api_test.go
index 2f27f80..2177079 100644
--- a/pkg/api/api_test.go
+++ b/pkg/api/api_test.go
@@ -130,6 +130,14 @@ func TestSearch(t *testing.T) {
         want: map[string]interface{}{
             "foo": nil,
         },
+    }, {
+        args: args{
+            expression: "let $root = @ in $root.a",
+            data: map[string]interface{}{
+                "a": 42.0,
+            },
+        },
+        want: 42.0,
     }}
     for _, tt := range tests {
         t.Run(tt.name, func(t *testing.T) {
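For reference, a minimal end-to-end sketch of the new let support through the public API, mirroring the TestSearch case added above. It is not part of the patch and assumes the api package shown here is importable as github.com/jmespath-community/go-jmespath/pkg/api under the package name api.

package main

import (
    "fmt"

    "github.com/jmespath-community/go-jmespath/pkg/api" // assumed import path
)

func main() {
    // Bind the whole document to $root, then read a field through the variable.
    jp := api.MustCompile("let $root = @ in $root.a")
    result, err := jp.Search(map[string]interface{}{"a": 42.0})
    fmt.Println(result, err) // expected: 42 <nil>
}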
diff --git a/pkg/binding/binding.go b/pkg/binding/binding.go
new file mode 100644
index 0000000..d87a156
--- /dev/null
+++ b/pkg/binding/binding.go
@@ -0,0 +1,37 @@
+package binding
+
+import "fmt"
+
+// Bindings stores let expression bindings by name.
+type Bindings interface {
+    // Get returns the value bound for a given name.
+    Get(string) (interface{}, error)
+    // Register associates a value with a given name and returns a new Bindings.
+    Register(string, interface{}) Bindings
+}
+
+type bindings struct {
+    values map[string]interface{}
+}
+
+func NewBindings() Bindings {
+    return bindings{}
+}
+
+func (b bindings) Get(name string) (interface{}, error) {
+    if value, ok := b.values[name]; ok {
+        return value, nil
+    }
+    return nil, fmt.Errorf("variable not defined: %s", name)
+}
+
+func (b bindings) Register(name string, value interface{}) Bindings {
+    values := map[string]interface{}{}
+    for k, v := range b.values {
+        values[k] = v
+    }
+    values[name] = value
+    return bindings{
+        values: values,
+    }
+}
diff --git a/pkg/binding/binding_test.go b/pkg/binding/binding_test.go
new file mode 100644
index 0000000..1784ec9
--- /dev/null
+++ b/pkg/binding/binding_test.go
@@ -0,0 +1,143 @@
+package binding
+
+import (
+    "reflect"
+    "testing"
+)
+
+func TestNewBindings(t *testing.T) {
+    tests := []struct {
+        name string
+        want Bindings
+    }{{
+        want: bindings{},
+    }}
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            if got := NewBindings(); !reflect.DeepEqual(got, tt.want) {
+                t.Errorf("NewBindings() = %v, want %v", got, tt.want)
+            }
+        })
+    }
+}
+
+func Test_bindings_Get(t *testing.T) {
+    type fields struct {
+        values map[string]interface{}
+    }
+    type args struct {
+        name string
+    }
+    tests := []struct {
+        name    string
+        fields  fields
+        args    args
+        want    interface{}
+        wantErr bool
+    }{{
+        fields: fields{
+            values: nil,
+        },
+        args: args{
+            name: "$root",
+        },
+        wantErr: true,
+    }, {
+        fields: fields{
+            values: map[string]interface{}{},
+        },
+        args: args{
+            name: "$root",
+        },
+        wantErr: true,
+    }, {
+        fields: fields{
+            values: map[string]interface{}{
+                "$root": 42.0,
+            },
+        },
+        args: args{
+            name: "$root",
+        },
+        want: 42.0,
+    }, {
+        fields: fields{
+            values: map[string]interface{}{
+                "$foot": 42.0,
+            },
+        },
+        args: args{
+            name: "$root",
+        },
+        wantErr: true,
+    }}
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            b := bindings{
+                values: tt.fields.values,
+            }
+            got, err := b.Get(tt.args.name)
+            if (err != nil) != tt.wantErr {
+                t.Errorf("bindings.Get() error = %v, wantErr %v", err, tt.wantErr)
+                return
+            }
+            if !reflect.DeepEqual(got, tt.want) {
+                t.Errorf("bindings.Get() = %v, want %v", got, tt.want)
+            }
+        })
+    }
+}
+
+func Test_bindings_Register(t *testing.T) {
+    type fields struct {
+        values map[string]interface{}
+    }
+    type args struct {
+        name  string
+        value interface{}
+    }
+    tests := []struct {
+        name   string
+        fields fields
+        args   args
+        want   Bindings
+    }{{
+        fields: fields{
+            values: nil,
+        },
+        args: args{
+            name:  "$root",
+            value: 42.0,
+        },
+        want: bindings{
+            values: map[string]interface{}{
+                "$root": 42.0,
+            },
+        },
+    }, {
+        fields: fields{
+            values: map[string]interface{}{
+                "$root": 21.0,
+            },
+        },
+        args: args{
+            name:  "$root",
+            value: 42.0,
+        },
+        want: bindings{
+            values: map[string]interface{}{
+                "$root": 42.0,
+            },
+        },
+    }}
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            b := bindings{
+                values: tt.fields.values,
+            }
+            if got := b.Register(tt.args.name, tt.args.value); !reflect.DeepEqual(got, tt.want) {
+                t.Errorf("bindings.Register() = %v, want %v", got, tt.want)
+            }
+        })
+    }
+}
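A small sketch (not part of the patch) of the copy-on-write behaviour Register implements above: registering a name returns a new Bindings value and leaves the receiver untouched, which is what lets a nested let scope shadow an outer one without corrupting it. The import path is assumed from the paths used elsewhere in this diff.

package main

import (
    "fmt"

    "github.com/jmespath-community/go-jmespath/pkg/binding" // assumed import path
)

func main() {
    outer := binding.NewBindings().Register("$x", 1.0)
    inner := outer.Register("$x", 2.0)

    fmt.Println(outer.Get("$x")) // 1 <nil>  (outer scope unchanged)
    fmt.Println(inner.Get("$x")) // 2 <nil>  ($x shadowed only in the inner scope)
}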
"github.com/jmespath-community/go-jmespath/pkg/binding" "github.com/jmespath-community/go-jmespath/pkg/parsing" "github.com/jmespath-community/go-jmespath/pkg/util" ) @@ -21,14 +22,19 @@ type Interpreter interface { } type treeInterpreter struct { - caller FunctionCaller - root interface{} + caller FunctionCaller + root interface{} + bindings binding.Bindings } -func NewInterpreter(data interface{}, caller FunctionCaller) Interpreter { +func NewInterpreter(data interface{}, caller FunctionCaller, bindings binding.Bindings) Interpreter { + if bindings == nil { + bindings = binding.NewBindings() + } return &treeInterpreter{ - caller: caller, - root: data, + caller: caller, + root: data, + bindings: bindings, } } @@ -211,6 +217,39 @@ func (intr *treeInterpreter) Execute(node parsing.ASTNode, value interface{}) (i return value, nil case parsing.ASTRootNode: return intr.root, nil + case parsing.ASTBindings: + bindings := intr.bindings + for _, child := range node.Children { + if value, err := intr.Execute(child.Children[1], value); err != nil { + return nil, err + } else { + bindings = bindings.Register(child.Children[0].Value.(string), value) + } + } + intr.bindings = bindings + // doesn't mutate value + return value, nil + case parsing.ASTLetExpression: + // save bindings state + bindings := intr.bindings + // retore bindings state + defer func() { + intr.bindings = bindings + }() + // evalute bindings first, then evaluate expression + if _, err := intr.Execute(node.Children[0], value); err != nil { + return nil, err + } else if value, err := intr.Execute(node.Children[1], value); err != nil { + return nil, err + } else { + return value, nil + } + case parsing.ASTVariable: + if value, err := intr.bindings.Get(node.Value.(string)); err != nil { + return nil, err + } else { + return value, nil + } case parsing.ASTIndex: if sliceType, ok := value.([]interface{}); ok { index := node.Value.(int) diff --git a/pkg/interpreter/interpreter_test.go b/pkg/interpreter/interpreter_test.go index 7947dc1..7d664cb 100644 --- a/pkg/interpreter/interpreter_test.go +++ b/pkg/interpreter/interpreter_test.go @@ -52,7 +52,7 @@ func search(t *testing.T, expression string, data interface{}) (interface{}, err return nil, err } caller := NewFunctionCaller(functions.GetDefaultFunctions()...) - intr := NewInterpreter(nil, caller) + intr := NewInterpreter(nil, caller, nil) return intr.Execute(ast, data) } @@ -198,7 +198,7 @@ func TestCanSupportSliceOfStructsWithFunctions(t *testing.T) { func BenchmarkInterpretSingleFieldStruct(b *testing.B) { assert := assert.New(b) caller := NewFunctionCaller(functions.GetDefaultFunctions()...) - intr := NewInterpreter(nil, caller) + intr := NewInterpreter(nil, caller, nil) parser := parsing.NewParser() ast, _ := parser.Parse("fooasdfasdfasdfasdf") data := benchmarkStruct{"foobarbazqux"} @@ -213,7 +213,7 @@ func BenchmarkInterpretSingleFieldStruct(b *testing.B) { func BenchmarkInterpretNestedStruct(b *testing.B) { assert := assert.New(b) caller := NewFunctionCaller(functions.GetDefaultFunctions()...) - intr := NewInterpreter(nil, caller) + intr := NewInterpreter(nil, caller, nil) parser := parsing.NewParser() ast, _ := parser.Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf") data := benchmarkNested{ @@ -238,7 +238,7 @@ func BenchmarkInterpretNestedMaps(b *testing.B) { err := json.Unmarshal(jsonData, &data) assert.Nil(err) caller := NewFunctionCaller(functions.GetDefaultFunctions()...) 
diff --git a/pkg/interpreter/interpreter_test.go b/pkg/interpreter/interpreter_test.go
index 7947dc1..7d664cb 100644
--- a/pkg/interpreter/interpreter_test.go
+++ b/pkg/interpreter/interpreter_test.go
@@ -52,7 +52,7 @@ func search(t *testing.T, expression string, data interface{}) (interface{}, err
         return nil, err
     }
     caller := NewFunctionCaller(functions.GetDefaultFunctions()...)
-    intr := NewInterpreter(nil, caller)
+    intr := NewInterpreter(nil, caller, nil)
     return intr.Execute(ast, data)
 }
 
@@ -198,7 +198,7 @@ func TestCanSupportSliceOfStructsWithFunctions(t *testing.T) {
 func BenchmarkInterpretSingleFieldStruct(b *testing.B) {
     assert := assert.New(b)
     caller := NewFunctionCaller(functions.GetDefaultFunctions()...)
-    intr := NewInterpreter(nil, caller)
+    intr := NewInterpreter(nil, caller, nil)
     parser := parsing.NewParser()
     ast, _ := parser.Parse("fooasdfasdfasdfasdf")
     data := benchmarkStruct{"foobarbazqux"}
@@ -213,7 +213,7 @@ func BenchmarkInterpretSingleFieldStruct(b *testing.B) {
 func BenchmarkInterpretNestedStruct(b *testing.B) {
     assert := assert.New(b)
     caller := NewFunctionCaller(functions.GetDefaultFunctions()...)
-    intr := NewInterpreter(nil, caller)
+    intr := NewInterpreter(nil, caller, nil)
     parser := parsing.NewParser()
     ast, _ := parser.Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf")
     data := benchmarkNested{
@@ -238,7 +238,7 @@ func BenchmarkInterpretNestedMaps(b *testing.B) {
     err := json.Unmarshal(jsonData, &data)
     assert.Nil(err)
     caller := NewFunctionCaller(functions.GetDefaultFunctions()...)
-    intr := NewInterpreter(nil, caller)
+    intr := NewInterpreter(nil, caller, nil)
     parser := parsing.NewParser()
     ast, _ := parser.Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf")
     for i := 0; i < b.N; i++ {
diff --git a/pkg/parsing/astnodetype_string.go b/pkg/parsing/astnodetype_string.go
index ecdcd09..ff9ed36 100644
--- a/pkg/parsing/astnodetype_string.go
+++ b/pkg/parsing/astnodetype_string.go
@@ -34,11 +34,15 @@ func _() {
     _ = x[ASTSubexpression-23]
     _ = x[ASTSlice-24]
     _ = x[ASTValueProjection-25]
+    _ = x[ASTLetExpression-26]
+    _ = x[ASTVariable-27]
+    _ = x[ASTBindings-28]
+    _ = x[ASTBinding-29]
 }
 
-const _astNodeType_name = "ASTEmptyASTArithmeticExpressionASTArithmeticUnaryExpressionASTComparatorASTCurrentNodeASTRootNodeASTExpRefASTFunctionExpressionASTFieldASTFilterProjectionASTFlattenASTIdentityASTIndexASTIndexExpressionASTKeyValPairASTLiteralASTMultiSelectHashASTMultiSelectListASTOrExpressionASTAndExpressionASTNotExpressionASTPipeASTProjectionASTSubexpressionASTSliceASTValueProjection"
+const _astNodeType_name = "ASTEmptyASTArithmeticExpressionASTArithmeticUnaryExpressionASTComparatorASTCurrentNodeASTRootNodeASTExpRefASTFunctionExpressionASTFieldASTFilterProjectionASTFlattenASTIdentityASTIndexASTIndexExpressionASTKeyValPairASTLiteralASTMultiSelectHashASTMultiSelectListASTOrExpressionASTAndExpressionASTNotExpressionASTPipeASTProjectionASTSubexpressionASTSliceASTValueProjectionASTLetExpressionASTVariableASTBindingsASTBinding"
 
-var _astNodeType_index = [...]uint16{0, 8, 31, 59, 72, 86, 97, 106, 127, 135, 154, 164, 175, 183, 201, 214, 224, 242, 260, 275, 291, 307, 314, 327, 343, 351, 369}
+var _astNodeType_index = [...]uint16{0, 8, 31, 59, 72, 86, 97, 106, 127, 135, 154, 164, 175, 183, 201, 214, 224, 242, 260, 275, 291, 307, 314, 327, 343, 351, 369, 385, 396, 407, 417}
 
 func (i astNodeType) String() string {
     if i < 0 || i >= astNodeType(len(_astNodeType_index)-1) {
diff --git a/pkg/parsing/lexer.go b/pkg/parsing/lexer.go
index e452ddb..b5be6da 100644
--- a/pkg/parsing/lexer.go
+++ b/pkg/parsing/lexer.go
@@ -87,6 +87,8 @@ const (
     TOKExpref
     TOKAnd
     TOKNot
+    TOKVarref
+    TOKAssign
     TOKEOF
 )
 
@@ -101,7 +103,6 @@ var basicTokens = map[rune]TokType{
     '(':      TOKLparen,
     ')':      TOKRparen,
     '@':      TOKCurrent,
-    '$':      TOKRoot,
     '+':      TOKPlus,
     '%':      TOKModulo,
     '\u2212': TOKMinus,
@@ -163,7 +164,7 @@ loop:
     for {
         r := lexer.next()
         if identifierStartBits&(1<<(uint64(r)-64)) > 0 {
-            t := lexer.consumeUnquotedIdentifier()
+            t := lexer.consumeUnquotedIdentifier(TOKUnquotedIdentifier)
             tokens = append(tokens, t)
         } else if val, ok := basicTokens[r]; ok {
             // Basic single char token.
@@ -227,8 +228,14 @@ loop:
         } else if r == '!' {
             t := lexer.matchOrElse(r, '=', TOKNE, TOKNot)
             tokens = append(tokens, t)
+        } else if r == '$' {
+            t := lexer.consumeUnquotedIdentifier(TOKVarref)
+            if t.value == "$" {
+                t.tokenType = TOKRoot
+            }
+            tokens = append(tokens, t)
         } else if r == '=' {
-            t := lexer.matchOrElse(r, '=', TOKEQ, TOKUnknown)
+            t := lexer.matchOrElse(r, '=', TOKEQ, TOKAssign)
             tokens = append(tokens, t)
         } else if r == '&' {
             t := lexer.matchOrElse(r, '&', TOKAnd, TOKExpref)
@@ -417,7 +424,7 @@ func (lexer *Lexer) consumeQuotedIdentifier() (token, error) {
     }, nil
 }
 
-func (lexer *Lexer) consumeUnquotedIdentifier() token {
+func (lexer *Lexer) consumeUnquotedIdentifier(matchedType TokType) token {
     // Consume runes until we reach the end of an unquoted
     // identifier.
     start := lexer.currentPos - lexer.lastWidth
@@ -430,7 +437,7 @@ func (lexer *Lexer) consumeUnquotedIdentifier() token {
     }
     value := lexer.expression[start:lexer.currentPos]
     return token{
-        tokenType: TOKUnquotedIdentifier,
+        tokenType: matchedType,
         value:     value,
         position:  start,
         length:    lexer.currentPos - start,
diff --git a/pkg/parsing/lexer_test.go b/pkg/parsing/lexer_test.go
index f1894bf..0b9e686 100644
--- a/pkg/parsing/lexer_test.go
+++ b/pkg/parsing/lexer_test.go
@@ -83,6 +83,13 @@ var lexingTests = []struct {
         {TOKUnquotedIdentifier, "b", 7, 1},
         {TOKRbracket, "]", 8, 1},
     }},
+    // let expressions
+    {"$root", []token{{TOKVarref, "$root", 0, 5}}},
+    {"$root = @", []token{
+        {TOKVarref, "$root", 0, 5},
+        {TOKAssign, "=", 6, 1},
+        {TOKCurrent, "@", 8, 1},
+    }},
 }
 
 func TestCanLexTokens(t *testing.T) {
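A sketch (not in the patch) of how a complete let expression tokenizes under the lexer changes above, written as a hypothetical companion to TestCanLexTokens: let and in come out as ordinary unquoted identifiers because the lexer has no keyword handling, while $foo becomes a TOKVarref and a lone = becomes TOKAssign.

package parsing

import "testing"

// TestCanLexLetExpression is a hypothetical extra test in the same package as lexer_test.go.
func TestCanLexLetExpression(t *testing.T) {
    lexer := NewLexer()
    tokens, err := lexer.Tokenize("let $foo = foo in @")
    if err != nil {
        t.Fatal(err)
    }
    // Expected token stream (EOF handling aside):
    //   TOKUnquotedIdentifier("let"), TOKVarref("$foo"), TOKAssign("="),
    //   TOKUnquotedIdentifier("foo"), TOKUnquotedIdentifier("in"), TOKCurrent("@")
    if len(tokens) == 0 {
        t.Fatal("expected tokens, got none")
    }
}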
diff --git a/pkg/parsing/parser.go b/pkg/parsing/parser.go
index cedaffa..a97d559 100644
--- a/pkg/parsing/parser.go
+++ b/pkg/parsing/parser.go
@@ -37,6 +37,10 @@ const (
     ASTSubexpression
     ASTSlice
     ASTValueProjection
+    ASTLetExpression
+    ASTVariable
+    ASTBindings
+    ASTBinding
 )
 
 // ASTNode represents the abstract syntax tree of a JMESPath expression.
@@ -75,6 +79,7 @@ func (node ASTNode) PrettyPrint(indent int) string {
         for _, elem := range node.Children {
             output += elem.PrettyPrint(childIndent)
         }
+        output += fmt.Sprintf("%s}\n", strings.Repeat(" ", nextIndent))
     }
     output += fmt.Sprintf("%s}\n", spaces)
     return output
@@ -82,6 +87,7 @@ func (node ASTNode) PrettyPrint(indent int) string {
 
 var bindingPowers = map[TokType]int{
     TOKEOF:                0,
+    TOKVarref:             0,
     TOKUnquotedIdentifier: 0,
     TOKQuotedIdentifier:   0,
     TOKRbracket:           0,
@@ -92,6 +98,7 @@ var bindingPowers = map[TokType]int{
     TOKCurrent: 0,
     TOKExpref:  0,
     TOKColon:   0,
+    TOKAssign:  1,
     TOKPipe:    1,
     TOKOr:      2,
     TOKAnd:     3,
@@ -134,12 +141,16 @@ func NewParser() *Parser {
 func (p *Parser) Parse(expression string) (ASTNode, error) {
     lexer := NewLexer()
     p.expression = expression
-    p.index = 0
     tokens, err := lexer.Tokenize(expression)
     if err != nil {
         return ASTNode{}, err
     }
+    return p.parseTokens(tokens)
+}
+
+func (p *Parser) parseTokens(tokens []token) (ASTNode, error) {
     p.tokens = tokens
+    p.index = 0
     parsed, err := p.parseExpression(0)
     if err != nil {
         return ASTNode{}, err
@@ -221,6 +232,18 @@ func (p *Parser) parseSliceExpression() (ASTNode, error) {
     }, nil
 }
 
+func isKeyword(token token, keyword string) bool {
+    return token.tokenType == TOKUnquotedIdentifier && token.value == keyword
+}
+
+func (p *Parser) matchKeyword(keyword string) error {
+    if isKeyword(p.lookaheadToken(0), keyword) {
+        p.advance()
+        return nil
+    }
+    return p.syntaxError("Expected keyword " + keyword + ", received: " + p.current().String())
+}
+
 func (p *Parser) match(tokenType TokType) error {
     if p.current() == tokenType {
         p.advance()
@@ -262,20 +285,8 @@ func (p *Parser) led(tokenType TokType, node ASTNode) (ASTNode, error) {
             return ASTNode{}, p.syntaxErrorToken("Invalid node as function name.", p.lookaheadToken(-2))
         }
         name := node.Value
-        var args []ASTNode
-        for p.current() != TOKRparen {
-            expression, err := p.parseExpression(0)
-            if err != nil {
-                return ASTNode{}, err
-            }
-            if p.current() == TOKComma {
-                if err := p.match(TOKComma); err != nil {
-                    return ASTNode{}, err
-                }
-            }
-            args = append(args, expression)
-        }
-        if err := p.match(TOKRparen); err != nil {
+        args, err := p.parseCommaSeparatedExpressionsUntilToken(TOKRparen)
+        if err != nil {
             return ASTNode{}, err
         }
         return ASTNode{
@@ -302,16 +313,16 @@ func (p *Parser) led(tokenType TokType, node ASTNode) (ASTNode, error) {
             Value:    tokenType,
             Children: []ASTNode{node, right},
         }, nil
-    case TOKEQ, TOKNE, TOKGT, TOKGTE, TOKLT, TOKLTE:
-        right, err := p.parseExpression(bindingPowers[tokenType])
-        if err != nil {
-            return ASTNode{}, err
+    case TOKAssign:
+        {
+            right, err := p.parseExpression(bindingPowers[0])
+            return ASTNode{
+                NodeType: ASTBinding,
+                Children: []ASTNode{node, right},
+            }, err
         }
-        return ASTNode{
-            NodeType: ASTComparator,
-            Value:    tokenType,
-            Children: []ASTNode{node, right},
-        }, nil
+    case TOKEQ, TOKNE, TOKGT, TOKGTE, TOKLT, TOKLTE:
+        return p.parseComparatorExpression(node, tokenType)
     case TOKLbracket:
         tokenType := p.current()
         var right ASTNode
@@ -344,6 +355,11 @@ func (p *Parser) led(tokenType TokType, node ASTNode) (ASTNode, error) {
 
 func (p *Parser) nud(token token) (ASTNode, error) {
     switch token.tokenType {
+    case TOKVarref:
+        return ASTNode{
+            NodeType: ASTVariable,
+            Value:    token.value,
+        }, nil
     case TOKJSONLiteral:
         var parsed interface{}
         err := json.Unmarshal([]byte(token.value), &parsed)
@@ -354,10 +370,14 @@ func (p *Parser) nud(token token) (ASTNode, error) {
     case TOKStringLiteral:
         return ASTNode{NodeType: ASTLiteral, Value: token.value}, nil
     case TOKUnquotedIdentifier:
-        return ASTNode{
-            NodeType: ASTField,
-            Value:    token.value,
-        }, nil
+        if token.value == "let" && p.current() == TOKVarref {
+            return p.parseLetExpression()
+        } else {
+            return ASTNode{
+                NodeType: ASTField,
+                Value:    token.value,
+            }, nil
+        }
     case TOKQuotedIdentifier:
         node := ASTNode{NodeType: ASTField, Value: token.value}
         if p.current() == TOKLparen {
@@ -595,6 +615,73 @@ func (p *Parser) parseProjectionRHS(bindingPower int) (ASTNode, error) {
     }
 }
 
+func (p *Parser) parseLetExpression() (ASTNode, error) {
+    bindings, err := p.parseCommaSeparatedExpressionsUntilKeyword("in")
+    if err != nil {
+        return ASTNode{}, err
+    }
+    expression, err := p.parseExpression(0)
+    if err != nil {
+        return ASTNode{}, err
+    }
+    return ASTNode{
+        NodeType: ASTLetExpression,
+        Children: []ASTNode{
+            {
+                NodeType: ASTBindings,
+                Children: bindings,
+            },
+            expression,
+        },
+    }, nil
+}
+
+func (p *Parser) parseCommaSeparatedExpressionsUntilKeyword(keyword string) ([]ASTNode, error) {
+    return p.parseCommaSeparatedExpressionsUntil(
+        func() bool {
+            return isKeyword(p.lookaheadToken(0), keyword)
+        },
+        func() error { return p.matchKeyword(keyword) })
+}
+
+func (p *Parser) parseCommaSeparatedExpressionsUntilToken(endToken TokType) ([]ASTNode, error) {
+    return p.parseCommaSeparatedExpressionsUntil(
+        func() bool { return p.current() == endToken },
+        func() error { return p.match(endToken) })
+}
+
+func (p *Parser) parseCommaSeparatedExpressionsUntil(isEndToken func() bool, matchEndToken func() error) ([]ASTNode, error) {
+    var nodes []ASTNode
+    for !isEndToken() {
+        expression, err := p.parseExpression(0)
+        if err != nil {
+            return []ASTNode{}, err
+        }
+        if p.current() == TOKComma {
+            if err := p.match(TOKComma); err != nil {
+                return []ASTNode{}, err
+            }
+        }
+        nodes = append(nodes, expression)
+    }
+    if err := matchEndToken(); err != nil {
+        return []ASTNode{}, err
+    }
+    return nodes, nil
+}
+
+func (p *Parser) parseComparatorExpression(left ASTNode, tokenType TokType) (ASTNode, error) {
+    right, err := p.parseExpression(bindingPowers[tokenType])
+    if err != nil {
+        return ASTNode{}, err
+    }
+    return ASTNode{
+        NodeType: ASTComparator,
+        Value:    tokenType,
+        Children: []ASTNode{left, right},
+    }, nil
+}
+
 func (p *Parser) lookahead(number int) TokType {
     return p.lookaheadToken(number).tokenType
 }
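To see the parser changes end to end, the short sketch below (not part of the patch) parses a let expression from source text and pretty-prints it; the output should have the ASTLetExpression / ASTBindings / ASTBinding shape used as the expected fixture in parser_test.go that follows. The import path is assumed from the module path used elsewhere in this diff.

package main

import (
    "fmt"

    "github.com/jmespath-community/go-jmespath/pkg/parsing" // assumed import path
)

func main() {
    p := parsing.NewParser()
    ast, err := p.Parse("let $foo = foo in @")
    if err != nil {
        panic(err)
    }
    // Root: ASTLetExpression; first child: ASTBindings (one ASTBinding of
    // $foo to the field foo); second child: the body expression (@).
    fmt.Print(ast.PrettyPrint(0))
}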
diff --git a/pkg/parsing/parser_test.go b/pkg/parsing/parser_test.go
index 9109aec..8cc9c41 100644
--- a/pkg/parsing/parser_test.go
+++ b/pkg/parsing/parser_test.go
@@ -7,6 +7,150 @@ import (
     "github.com/stretchr/testify/assert"
 )
 
+var parseLetExpressionsTest = []struct {
+    tokens      []token
+    prettyPrint string
+}{
+    {
+        []token{
+            {tokenType: TOKVarref, value: "foo", position: 20, length: 3},
+            {tokenType: TOKEOF, position: 19},
+        },
+        `ASTVariable {
+  value: "foo"
+}
+`,
+    },
+    {
+        []token{
+            {tokenType: TOKVarref, value: "foo", position: 4, length: 4},
+            {tokenType: TOKAssign, value: "=", position: 9, length: 1},
+            {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3},
+            {tokenType: TOKEOF, position: 19},
+        },
+        `ASTBinding {
+  children: {
+    ASTVariable {
+      value: "foo"
+    }
+    ASTField {
+      value: "foo"
+    }
+  }
+}
+`,
+    },
+    {
+        []token{
+            // let $foo = foo in @
+            // 012345678901234567890123
+            //           1         2
+            {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3},
+            {tokenType: TOKVarref, value: "foo", position: 4, length: 4},
+            {tokenType: TOKAssign, value: "=", position: 9, length: 1},
+            {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3},
+            {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2},
+            {tokenType: TOKCurrent, value: "@", position: 18, length: 1},
+            {tokenType: TOKEOF, position: 19},
+        },
+        `ASTLetExpression {
+  children: {
+    ASTBindings {
+      children: {
+        ASTBinding {
+          children: {
+            ASTVariable {
+              value: "foo"
+            }
+            ASTField {
+              value: "foo"
+            }
+          }
+        }
+      }
+    }
+    ASTCurrentNode {
+    }
+  }
+}
+`,
+    },
+}
+
+func TestParsingLetExpression(t *testing.T) {
+    assert := assert.New(t)
+    p := NewParser()
+    for _, tt := range parseLetExpressionsTest {
+        parsed, _ := p.parseTokens(tt.tokens)
+        assert.Equal(tt.prettyPrint, parsed.PrettyPrint(0))
+    }
+}
+
+var parseLetExpressionsErrorsTest = []struct {
+    tokens []token
+    msg    string
+}{
+    {
+        []token{
+            {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3},
+            {tokenType: TOKVarref, value: "foo", position: 4, length: 4},
+            {tokenType: TOKAssign, value: "=", position: 9, length: 1},
+            {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3},
+            {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2},
+            {tokenType: TOKEOF, position: 19},
+        },
+        "Incomplete expression",
+    },
+    {
+        []token{
+            {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3},
+            {tokenType: TOKVarref, value: "foo", position: 4, length: 4},
+            {tokenType: TOKAssign, value: "=", position: 9, length: 1},
+            {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3},
+            {tokenType: TOKUnquotedIdentifier, value: "of", position: 15, length: 2},
+            {tokenType: TOKEOF, position: 19},
+        },
+        "Invalid keyword 'of'",
+    },
+    {
+        []token{
+            // let $foo = , foo in
+            //            ^
+            {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3},
+            {tokenType: TOKVarref, value: "foo", position: 4, length: 4},
+            {tokenType: TOKAssign, value: "=", position: 9, length: 1},
+            {tokenType: TOKAssign, value: ",", position: 9, length: 1},
+            {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3},
+            {tokenType: TOKUnquotedIdentifier, value: "of", position: 15, length: 2},
+            {tokenType: TOKEOF, position: 19},
+        },
+        "Invalid comma-separated list",
+    },
+    {
+        []token{
+            // let $foo = foo in {
+            //                   ^
+            {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3},
+            {tokenType: TOKVarref, value: "foo", position: 4, length: 4},
"=", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2}, + {tokenType: TOKLbrace, value: "{", position: 17, length: 2}, + {tokenType: TOKEOF, position: 19}, + }, + "Syntax error", + }, +} + +func TestParsingLetExpressionErrors(t *testing.T) { + assert := assert.New(t) + p := NewParser() + for _, tt := range parseLetExpressionsErrorsTest { + _, err := p.parseTokens(tt.tokens) + assert.NotNil(err, fmt.Sprintf("Expected parsing error: %s", tt.msg)) + } +} + var parsingErrorTests = []struct { expression string msg string @@ -50,11 +194,14 @@ var prettyPrinted = `ASTProjection { ASTField { value: "baz" } + } } ASTField { value: "qux" } + } } + } } ` @@ -74,7 +221,9 @@ var prettyPrintedCompNode = `ASTFilterProjection { ASTField { value: "c" } + } } + } } ` diff --git a/pkg/parsing/toktype_string.go b/pkg/parsing/toktype_string.go index fddbe68..469d2b7 100644 --- a/pkg/parsing/toktype_string.go +++ b/pkg/parsing/toktype_string.go @@ -45,12 +45,14 @@ func _() { _ = x[TOKExpref-34] _ = x[TOKAnd-35] _ = x[TOKNot-36] - _ = x[TOKEOF-37] + _ = x[TOKVarref-37] + _ = x[TOKAssign-38] + _ = x[TOKEOF-39] } -const _TokType_name = "TOKUnknownTOKStarTOKDotTOKFilterTOKFlattenTOKLparenTOKRparenTOKLbracketTOKRbracketTOKLbraceTOKRbraceTOKOrTOKPipeTOKNumberTOKUnquotedIdentifierTOKQuotedIdentifierTOKCommaTOKColonTOKPlusTOKMinusTOKMultiplyTOKDivideTOKModuloTOKDivTOKLTTOKLTETOKGTTOKGTETOKEQTOKNETOKJSONLiteralTOKStringLiteralTOKCurrentTOKRootTOKExprefTOKAndTOKNotTOKEOF" +const _TokType_name = "TOKUnknownTOKStarTOKDotTOKFilterTOKFlattenTOKLparenTOKRparenTOKLbracketTOKRbracketTOKLbraceTOKRbraceTOKOrTOKPipeTOKNumberTOKUnquotedIdentifierTOKQuotedIdentifierTOKCommaTOKColonTOKPlusTOKMinusTOKMultiplyTOKDivideTOKModuloTOKDivTOKLTTOKLTETOKGTTOKGTETOKEQTOKNETOKJSONLiteralTOKStringLiteralTOKCurrentTOKRootTOKExprefTOKAndTOKNotTOKVarrefTOKAssignTOKEOF" -var _TokType_index = [...]uint16{0, 10, 17, 23, 32, 42, 51, 60, 71, 82, 91, 100, 105, 112, 121, 142, 161, 169, 177, 184, 192, 203, 212, 221, 227, 232, 238, 243, 249, 254, 259, 273, 289, 299, 306, 315, 321, 327, 333} +var _TokType_index = [...]uint16{0, 10, 17, 23, 32, 42, 51, 60, 71, 82, 91, 100, 105, 112, 121, 142, 161, 169, 177, 184, 192, 203, 212, 221, 227, 232, 238, 243, 249, 254, 259, 273, 289, 299, 306, 315, 321, 327, 336, 345, 351} func (i TokType) String() string { if i < 0 || i >= TokType(len(_TokType_index)-1) {