From 178a640d27485cf932363c8745f2d6ebbd3aeabb Mon Sep 17 00:00:00 2001 From: Springcomp Date: Thu, 23 Mar 2023 12:13:09 +0100 Subject: [PATCH 01/15] Fixed mismatched pretty printed parens Signed-off-by: Springcomp --- pkg/parsing/parser.go | 1 + pkg/parsing/parser_test.go | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/pkg/parsing/parser.go b/pkg/parsing/parser.go index cedaffa..5b5bae4 100644 --- a/pkg/parsing/parser.go +++ b/pkg/parsing/parser.go @@ -75,6 +75,7 @@ func (node ASTNode) PrettyPrint(indent int) string { for _, elem := range node.Children { output += elem.PrettyPrint(childIndent) } + output += fmt.Sprintf("%s}\n", strings.Repeat(" ", nextIndent)) } output += fmt.Sprintf("%s}\n", spaces) return output diff --git a/pkg/parsing/parser_test.go b/pkg/parsing/parser_test.go index 9109aec..1d12e0d 100644 --- a/pkg/parsing/parser_test.go +++ b/pkg/parsing/parser_test.go @@ -50,11 +50,14 @@ var prettyPrinted = `ASTProjection { ASTField { value: "baz" } + } } ASTField { value: "qux" } + } } + } } ` @@ -74,7 +77,9 @@ var prettyPrintedCompNode = `ASTFilterProjection { ASTField { value: "c" } + } } + } } ` From e30f785af438e5e64bf4107e802ca9b8fca4d95c Mon Sep 17 00:00:00 2001 From: Springcomp Date: Thu, 23 Mar 2023 14:17:37 +0100 Subject: [PATCH 02/15] [lexical-scoping] Parsing let-expression. Signed-off-by: Springcomp --- pkg/parsing/astnodetype_string.go | 8 ++- pkg/parsing/lexer.go | 6 ++- pkg/parsing/parser.go | 78 +++++++++++++++++++++++++---- pkg/parsing/parser_test.go | 82 +++++++++++++++++++++++++++++++ pkg/parsing/toktype_string.go | 10 ++-- 5 files changed, 169 insertions(+), 15 deletions(-) diff --git a/pkg/parsing/astnodetype_string.go b/pkg/parsing/astnodetype_string.go index ecdcd09..ff9ed36 100644 --- a/pkg/parsing/astnodetype_string.go +++ b/pkg/parsing/astnodetype_string.go @@ -34,11 +34,15 @@ func _() { _ = x[ASTSubexpression-23] _ = x[ASTSlice-24] _ = x[ASTValueProjection-25] + _ = x[ASTLetExpression-26] + _ = x[ASTVariable-27] + _ = x[ASTBindings-28] + _ = x[ASTBinding-29] } -const _astNodeType_name = "ASTEmptyASTArithmeticExpressionASTArithmeticUnaryExpressionASTComparatorASTCurrentNodeASTRootNodeASTExpRefASTFunctionExpressionASTFieldASTFilterProjectionASTFlattenASTIdentityASTIndexASTIndexExpressionASTKeyValPairASTLiteralASTMultiSelectHashASTMultiSelectListASTOrExpressionASTAndExpressionASTNotExpressionASTPipeASTProjectionASTSubexpressionASTSliceASTValueProjection" +const _astNodeType_name = "ASTEmptyASTArithmeticExpressionASTArithmeticUnaryExpressionASTComparatorASTCurrentNodeASTRootNodeASTExpRefASTFunctionExpressionASTFieldASTFilterProjectionASTFlattenASTIdentityASTIndexASTIndexExpressionASTKeyValPairASTLiteralASTMultiSelectHashASTMultiSelectListASTOrExpressionASTAndExpressionASTNotExpressionASTPipeASTProjectionASTSubexpressionASTSliceASTValueProjectionASTLetExpressionASTVariableASTBindingsASTBinding" -var _astNodeType_index = [...]uint16{0, 8, 31, 59, 72, 86, 97, 106, 127, 135, 154, 164, 175, 183, 201, 214, 224, 242, 260, 275, 291, 307, 314, 327, 343, 351, 369} +var _astNodeType_index = [...]uint16{0, 8, 31, 59, 72, 86, 97, 106, 127, 135, 154, 164, 175, 183, 201, 214, 224, 242, 260, 275, 291, 307, 314, 327, 343, 351, 369, 385, 396, 407, 417} func (i astNodeType) String() string { if i < 0 || i >= astNodeType(len(_astNodeType_index)-1) { diff --git a/pkg/parsing/lexer.go b/pkg/parsing/lexer.go index e452ddb..d36c78e 100644 --- a/pkg/parsing/lexer.go +++ b/pkg/parsing/lexer.go @@ -87,6 +87,10 @@ const ( TOKExpref TOKAnd TOKNot + TOKLet + TOKIn + TOKVarref + 
TOKAssign TOKEOF ) @@ -228,7 +232,7 @@ loop: t := lexer.matchOrElse(r, '=', TOKNE, TOKNot) tokens = append(tokens, t) } else if r == '=' { - t := lexer.matchOrElse(r, '=', TOKEQ, TOKUnknown) + t := lexer.matchOrElse(r, '=', TOKEQ, TOKAssign) tokens = append(tokens, t) } else if r == '&' { t := lexer.matchOrElse(r, '&', TOKAnd, TOKExpref) diff --git a/pkg/parsing/parser.go b/pkg/parsing/parser.go index 5b5bae4..11bb7bc 100644 --- a/pkg/parsing/parser.go +++ b/pkg/parsing/parser.go @@ -37,6 +37,10 @@ const ( ASTSubexpression ASTSlice ASTValueProjection + ASTLetExpression + ASTVariable + ASTBindings + ASTBinding ) // ASTNode represents the abstract syntax tree of a JMESPath expression. @@ -83,6 +87,7 @@ func (node ASTNode) PrettyPrint(indent int) string { var bindingPowers = map[TokType]int{ TOKEOF: 0, + TOKVarref: 0, TOKUnquotedIdentifier: 0, TOKQuotedIdentifier: 0, TOKRbracket: 0, @@ -93,6 +98,7 @@ var bindingPowers = map[TokType]int{ TOKCurrent: 0, TOKExpref: 0, TOKColon: 0, + TOKAssign: 1, TOKPipe: 1, TOKOr: 2, TOKAnd: 3, @@ -140,6 +146,10 @@ func (p *Parser) Parse(expression string) (ASTNode, error) { if err != nil { return ASTNode{}, err } + return p.parseTokens(tokens) +} + +func (p *Parser) parseTokens(tokens []token) (ASTNode, error) { p.tokens = tokens parsed, err := p.parseExpression(0) if err != nil { @@ -303,16 +313,16 @@ func (p *Parser) led(tokenType TokType, node ASTNode) (ASTNode, error) { Value: tokenType, Children: []ASTNode{node, right}, }, nil - case TOKEQ, TOKNE, TOKGT, TOKGTE, TOKLT, TOKLTE: - right, err := p.parseExpression(bindingPowers[tokenType]) - if err != nil { - return ASTNode{}, err + case TOKAssign: + { + right, err := p.parseExpression(bindingPowers[0]) + return ASTNode{ + NodeType: ASTBinding, + Children: []ASTNode{node, right}, + }, err } - return ASTNode{ - NodeType: ASTComparator, - Value: tokenType, - Children: []ASTNode{node, right}, - }, nil + case TOKEQ, TOKNE, TOKGT, TOKGTE, TOKLT, TOKLTE: + return p.parseComparatorExpression(node, tokenType) case TOKLbracket: tokenType := p.current() var right ASTNode @@ -345,6 +355,44 @@ func (p *Parser) led(tokenType TokType, node ASTNode) (ASTNode, error) { func (p *Parser) nud(token token) (ASTNode, error) { switch token.tokenType { + case TOKLet: + { + var bindings []ASTNode + for p.current() != TOKIn { + binding, err := p.parseExpression(0) + if err != nil { + return ASTNode{}, err + } + if p.current() == TOKComma { + if err := p.match(TOKComma); err != nil { + return ASTNode{}, err + } + } + bindings = append(bindings, binding) + } + if err := p.match(TOKIn); err != nil { + return ASTNode{}, err + } + expression, err := p.parseExpression(0) + if err != nil { + return ASTNode{}, err + } + return ASTNode{ + NodeType: ASTLetExpression, + Children: []ASTNode{ + { + NodeType: ASTBindings, + Children: bindings, + }, + expression, + }, + }, nil + } + case TOKVarref: + return ASTNode{ + NodeType: ASTVariable, + Value: token.value, + }, nil case TOKJSONLiteral: var parsed interface{} err := json.Unmarshal([]byte(token.value), &parsed) @@ -596,6 +644,18 @@ func (p *Parser) parseProjectionRHS(bindingPower int) (ASTNode, error) { } } +func (p *Parser) parseComparatorExpression(left ASTNode, tokenType TokType) (ASTNode, error) { + right, err := p.parseExpression(bindingPowers[tokenType]) + if err != nil { + return ASTNode{}, err + } + return ASTNode{ + NodeType: ASTComparator, + Value: tokenType, + Children: []ASTNode{left, right}, + }, nil +} + func (p *Parser) lookahead(number int) TokType { return 
p.lookaheadToken(number).tokenType } diff --git a/pkg/parsing/parser_test.go b/pkg/parsing/parser_test.go index 1d12e0d..372b6f3 100644 --- a/pkg/parsing/parser_test.go +++ b/pkg/parsing/parser_test.go @@ -7,6 +7,88 @@ import ( "github.com/stretchr/testify/assert" ) +func TestParsingVariable(t *testing.T) { + assert := assert.New(t) + tokens := []token{ + {tokenType: TOKVarref, value: "foo", position: 20, length: 3}, + {tokenType: TOKEOF, position: 19}, + } + + var prettyPrintedLookup = `ASTVariable { + value: "foo" +} +` + p := NewParser() + parsed, _ := p.parseTokens(tokens) + assert.Equal(prettyPrintedLookup, parsed.PrettyPrint(0)) +} + +func TestParsingVariableBinding(t *testing.T) { + assert := assert.New(t) + tokens := []token{ + {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, + {tokenType: TOKAssign, value: "=", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKEOF, position: 19}, + } + + var prettyPrintedLookup = `ASTBinding { + children: { + ASTVariable { + value: "foo" + } + ASTField { + value: "foo" + } + } +} +` + p := NewParser() + parsed, _ := p.parseTokens(tokens) + assert.Equal(prettyPrintedLookup, parsed.PrettyPrint(0)) +} + +func TestParsingLetExpression(t *testing.T) { + // let $foo = foo in @ + // 012345678901234567890123 + // 1 2 + assert := assert.New(t) + tokens := []token{ + {tokenType: TOKLet, value: "let", position: 0, length: 3}, + {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, + {tokenType: TOKAssign, value: "=", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKIn, value: "in", position: 15, length: 2}, + {tokenType: TOKCurrent, value: "@", position: 18, length: 1}, + {tokenType: TOKEOF, position: 19}, + } + + expected := `ASTLetExpression { + children: { + ASTBindings { + children: { + ASTBinding { + children: { + ASTVariable { + value: "foo" + } + ASTField { + value: "foo" + } + } + } + } + } + ASTCurrentNode { + } + } +} +` + p := NewParser() + parsed, _ := p.parseTokens(tokens) + assert.Equal(expected, parsed.PrettyPrint(0)) +} + var parsingErrorTests = []struct { expression string msg string diff --git a/pkg/parsing/toktype_string.go b/pkg/parsing/toktype_string.go index fddbe68..3fd6196 100644 --- a/pkg/parsing/toktype_string.go +++ b/pkg/parsing/toktype_string.go @@ -45,12 +45,16 @@ func _() { _ = x[TOKExpref-34] _ = x[TOKAnd-35] _ = x[TOKNot-36] - _ = x[TOKEOF-37] + _ = x[TOKLet-37] + _ = x[TOKIn-38] + _ = x[TOKVarref-39] + _ = x[TOKAssign-40] + _ = x[TOKEOF-41] } -const _TokType_name = "TOKUnknownTOKStarTOKDotTOKFilterTOKFlattenTOKLparenTOKRparenTOKLbracketTOKRbracketTOKLbraceTOKRbraceTOKOrTOKPipeTOKNumberTOKUnquotedIdentifierTOKQuotedIdentifierTOKCommaTOKColonTOKPlusTOKMinusTOKMultiplyTOKDivideTOKModuloTOKDivTOKLTTOKLTETOKGTTOKGTETOKEQTOKNETOKJSONLiteralTOKStringLiteralTOKCurrentTOKRootTOKExprefTOKAndTOKNotTOKEOF" +const _TokType_name = "TOKUnknownTOKStarTOKDotTOKFilterTOKFlattenTOKLparenTOKRparenTOKLbracketTOKRbracketTOKLbraceTOKRbraceTOKOrTOKPipeTOKNumberTOKUnquotedIdentifierTOKQuotedIdentifierTOKCommaTOKColonTOKPlusTOKMinusTOKMultiplyTOKDivideTOKModuloTOKDivTOKLTTOKLTETOKGTTOKGTETOKEQTOKNETOKJSONLiteralTOKStringLiteralTOKCurrentTOKRootTOKExprefTOKAndTOKNotTOKLetTOKInTOKVarrefTOKAssignTOKEOF" -var _TokType_index = [...]uint16{0, 10, 17, 23, 32, 42, 51, 60, 71, 82, 91, 100, 105, 112, 121, 142, 161, 169, 177, 184, 192, 203, 212, 221, 227, 232, 238, 243, 249, 254, 259, 273, 289, 299, 306, 
315, 321, 327, 333} +var _TokType_index = [...]uint16{0, 10, 17, 23, 32, 42, 51, 60, 71, 82, 91, 100, 105, 112, 121, 142, 161, 169, 177, 184, 192, 203, 212, 221, 227, 232, 238, 243, 249, 254, 259, 273, 289, 299, 306, 315, 321, 327, 333, 338, 347, 356, 362} func (i TokType) String() string { if i < 0 || i >= TokType(len(_TokType_index)-1) { From f6fe63958ae24d02496e34b46dd9a09050488198 Mon Sep 17 00:00:00 2001 From: Springcomp Date: Thu, 23 Mar 2023 15:01:55 +0100 Subject: [PATCH 03/15] [lexical-scoping] Refactor comma-separated lists. Signed-off-by: Springcomp --- pkg/parsing/parser.go | 52 ++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 28 deletions(-) diff --git a/pkg/parsing/parser.go b/pkg/parsing/parser.go index 11bb7bc..261d422 100644 --- a/pkg/parsing/parser.go +++ b/pkg/parsing/parser.go @@ -273,20 +273,8 @@ func (p *Parser) led(tokenType TokType, node ASTNode) (ASTNode, error) { return ASTNode{}, p.syntaxErrorToken("Invalid node as function name.", p.lookaheadToken(-2)) } name := node.Value - var args []ASTNode - for p.current() != TOKRparen { - expression, err := p.parseExpression(0) - if err != nil { - return ASTNode{}, err - } - if p.current() == TOKComma { - if err := p.match(TOKComma); err != nil { - return ASTNode{}, err - } - } - args = append(args, expression) - } - if err := p.match(TOKRparen); err != nil { + args, err := p.parseCommaSeparatedExpressions(TOKRparen) + if err != nil { return ASTNode{}, err } return ASTNode{ @@ -357,20 +345,8 @@ func (p *Parser) nud(token token) (ASTNode, error) { switch token.tokenType { case TOKLet: { - var bindings []ASTNode - for p.current() != TOKIn { - binding, err := p.parseExpression(0) - if err != nil { - return ASTNode{}, err - } - if p.current() == TOKComma { - if err := p.match(TOKComma); err != nil { - return ASTNode{}, err - } - } - bindings = append(bindings, binding) - } - if err := p.match(TOKIn); err != nil { + bindings, err := p.parseCommaSeparatedExpressions(TOKIn) + if err != nil { return ASTNode{}, err } expression, err := p.parseExpression(0) @@ -644,6 +620,26 @@ func (p *Parser) parseProjectionRHS(bindingPower int) (ASTNode, error) { } } +func (p *Parser) parseCommaSeparatedExpressions(endToken TokType) ([]ASTNode, error) { + var nodes []ASTNode + for p.current() != endToken { + expression, err := p.parseExpression(0) + if err != nil { + return []ASTNode{}, err + } + if p.current() == TOKComma { + if err := p.match(TOKComma); err != nil { + return []ASTNode{}, err + } + } + nodes = append(nodes, expression) + } + if err := p.match(endToken); err != nil { + return []ASTNode{}, err + } + return nodes, nil +} + func (p *Parser) parseComparatorExpression(left ASTNode, tokenType TokType) (ASTNode, error) { right, err := p.parseExpression(bindingPowers[tokenType]) if err != nil { From b02968d1a5dcfe40c2d68cd6d0bb99652c17a74b Mon Sep 17 00:00:00 2001 From: Springcomp Date: Thu, 23 Mar 2023 18:22:45 +0100 Subject: [PATCH 04/15] [lexical-scoping] 'let' and 'in' should be valid identifiers as well. 
Signed-off-by: Springcomp --- pkg/parsing/lexer.go | 2 - pkg/parsing/parser.go | 88 +++++++++++++++++++++++------------ pkg/parsing/parser_test.go | 4 +- pkg/parsing/toktype_string.go | 12 ++--- 4 files changed, 66 insertions(+), 40 deletions(-) diff --git a/pkg/parsing/lexer.go b/pkg/parsing/lexer.go index d36c78e..db496ac 100644 --- a/pkg/parsing/lexer.go +++ b/pkg/parsing/lexer.go @@ -87,8 +87,6 @@ const ( TOKExpref TOKAnd TOKNot - TOKLet - TOKIn TOKVarref TOKAssign TOKEOF diff --git a/pkg/parsing/parser.go b/pkg/parsing/parser.go index 261d422..2f4a035 100644 --- a/pkg/parsing/parser.go +++ b/pkg/parsing/parser.go @@ -232,6 +232,18 @@ func (p *Parser) parseSliceExpression() (ASTNode, error) { }, nil } +func isKeyword(token token, keyword string) bool { + return token.tokenType == TOKUnquotedIdentifier && token.value == keyword +} + +func (p *Parser) matchKeyword(keyword string) error { + if isKeyword(p.lookaheadToken(0), keyword) { + p.advance() + return nil + } + return p.syntaxError("Expected keyword " + keyword + ", received: " + p.current().String()) +} + func (p *Parser) match(tokenType TokType) error { if p.current() == tokenType { p.advance() @@ -273,7 +285,7 @@ func (p *Parser) led(tokenType TokType, node ASTNode) (ASTNode, error) { return ASTNode{}, p.syntaxErrorToken("Invalid node as function name.", p.lookaheadToken(-2)) } name := node.Value - args, err := p.parseCommaSeparatedExpressions(TOKRparen) + args, err := p.parseCommaSeparatedExpressionsUntilToken(TOKRparen) if err != nil { return ASTNode{}, err } @@ -343,27 +355,6 @@ func (p *Parser) led(tokenType TokType, node ASTNode) (ASTNode, error) { func (p *Parser) nud(token token) (ASTNode, error) { switch token.tokenType { - case TOKLet: - { - bindings, err := p.parseCommaSeparatedExpressions(TOKIn) - if err != nil { - return ASTNode{}, err - } - expression, err := p.parseExpression(0) - if err != nil { - return ASTNode{}, err - } - return ASTNode{ - NodeType: ASTLetExpression, - Children: []ASTNode{ - { - NodeType: ASTBindings, - Children: bindings, - }, - expression, - }, - }, nil - } case TOKVarref: return ASTNode{ NodeType: ASTVariable, @@ -379,10 +370,14 @@ func (p *Parser) nud(token token) (ASTNode, error) { case TOKStringLiteral: return ASTNode{NodeType: ASTLiteral, Value: token.value}, nil case TOKUnquotedIdentifier: - return ASTNode{ - NodeType: ASTField, - Value: token.value, - }, nil + if token.value == "let" && p.current() == TOKVarref { + return p.parseLetExpression() + } else { + return ASTNode{ + NodeType: ASTField, + Value: token.value, + }, nil + } case TOKQuotedIdentifier: node := ASTNode{NodeType: ASTField, Value: token.value} if p.current() == TOKLparen { @@ -620,9 +615,44 @@ func (p *Parser) parseProjectionRHS(bindingPower int) (ASTNode, error) { } } -func (p *Parser) parseCommaSeparatedExpressions(endToken TokType) ([]ASTNode, error) { +func (p *Parser) parseLetExpression() (ASTNode, error) { + bindings, err := p.parseCommaSeparatedExpressionsUntilKeyword("in") + if err != nil { + return ASTNode{}, err + } + expression, err := p.parseExpression(0) + if err != nil { + return ASTNode{}, err + } + return ASTNode{ + NodeType: ASTLetExpression, + Children: []ASTNode{ + { + NodeType: ASTBindings, + Children: bindings, + }, + expression, + }, + }, nil +} + +func (p *Parser) parseCommaSeparatedExpressionsUntilKeyword(keyword string) ([]ASTNode, error) { + return p.parseCommaSeparatedExpressionsUntil( + func() bool { + return isKeyword(p.lookaheadToken(0), keyword) + }, + func() error { return 
p.matchKeyword(keyword) }) +} + +func (p *Parser) parseCommaSeparatedExpressionsUntilToken(endToken TokType) ([]ASTNode, error) { + return p.parseCommaSeparatedExpressionsUntil( + func() bool { return p.current() == endToken }, + func() error { return p.match(endToken) }) +} + +func (p *Parser) parseCommaSeparatedExpressionsUntil(isEndToken func() bool, matchEndToken func() error) ([]ASTNode, error) { var nodes []ASTNode - for p.current() != endToken { + for !isEndToken() { expression, err := p.parseExpression(0) if err != nil { return []ASTNode{}, err @@ -634,7 +664,7 @@ func (p *Parser) parseCommaSeparatedExpressions(endToken TokType) ([]ASTNode, er } nodes = append(nodes, expression) } - if err := p.match(endToken); err != nil { + if err := matchEndToken(); err != nil { return []ASTNode{}, err } return nodes, nil diff --git a/pkg/parsing/parser_test.go b/pkg/parsing/parser_test.go index 372b6f3..17cc38a 100644 --- a/pkg/parsing/parser_test.go +++ b/pkg/parsing/parser_test.go @@ -54,11 +54,11 @@ func TestParsingLetExpression(t *testing.T) { // 1 2 assert := assert.New(t) tokens := []token{ - {tokenType: TOKLet, value: "let", position: 0, length: 3}, + {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3}, {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, {tokenType: TOKAssign, value: "=", position: 9, length: 1}, {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, - {tokenType: TOKIn, value: "in", position: 15, length: 2}, + {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2}, {tokenType: TOKCurrent, value: "@", position: 18, length: 1}, {tokenType: TOKEOF, position: 19}, } diff --git a/pkg/parsing/toktype_string.go b/pkg/parsing/toktype_string.go index 3fd6196..469d2b7 100644 --- a/pkg/parsing/toktype_string.go +++ b/pkg/parsing/toktype_string.go @@ -45,16 +45,14 @@ func _() { _ = x[TOKExpref-34] _ = x[TOKAnd-35] _ = x[TOKNot-36] - _ = x[TOKLet-37] - _ = x[TOKIn-38] - _ = x[TOKVarref-39] - _ = x[TOKAssign-40] - _ = x[TOKEOF-41] + _ = x[TOKVarref-37] + _ = x[TOKAssign-38] + _ = x[TOKEOF-39] } -const _TokType_name = "TOKUnknownTOKStarTOKDotTOKFilterTOKFlattenTOKLparenTOKRparenTOKLbracketTOKRbracketTOKLbraceTOKRbraceTOKOrTOKPipeTOKNumberTOKUnquotedIdentifierTOKQuotedIdentifierTOKCommaTOKColonTOKPlusTOKMinusTOKMultiplyTOKDivideTOKModuloTOKDivTOKLTTOKLTETOKGTTOKGTETOKEQTOKNETOKJSONLiteralTOKStringLiteralTOKCurrentTOKRootTOKExprefTOKAndTOKNotTOKLetTOKInTOKVarrefTOKAssignTOKEOF" +const _TokType_name = "TOKUnknownTOKStarTOKDotTOKFilterTOKFlattenTOKLparenTOKRparenTOKLbracketTOKRbracketTOKLbraceTOKRbraceTOKOrTOKPipeTOKNumberTOKUnquotedIdentifierTOKQuotedIdentifierTOKCommaTOKColonTOKPlusTOKMinusTOKMultiplyTOKDivideTOKModuloTOKDivTOKLTTOKLTETOKGTTOKGTETOKEQTOKNETOKJSONLiteralTOKStringLiteralTOKCurrentTOKRootTOKExprefTOKAndTOKNotTOKVarrefTOKAssignTOKEOF" -var _TokType_index = [...]uint16{0, 10, 17, 23, 32, 42, 51, 60, 71, 82, 91, 100, 105, 112, 121, 142, 161, 169, 177, 184, 192, 203, 212, 221, 227, 232, 238, 243, 249, 254, 259, 273, 289, 299, 306, 315, 321, 327, 333, 338, 347, 356, 362} +var _TokType_index = [...]uint16{0, 10, 17, 23, 32, 42, 51, 60, 71, 82, 91, 100, 105, 112, 121, 142, 161, 169, 177, 184, 192, 203, 212, 221, 227, 232, 238, 243, 249, 254, 259, 273, 289, 299, 306, 315, 321, 327, 336, 345, 351} func (i TokType) String() string { if i < 0 || i >= TokType(len(_TokType_index)-1) { From 338298c31026b21755ec30ded989393dd4bde1a5 Mon Sep 17 00:00:00 2001 From: Springcomp Date: Thu, 23 Mar 2023 21:58:33 +0100 Subject: 
[PATCH 05/15] [lexical-scoping] Fixed file is not 'gofumpt' ed Signed-off-by: Springcomp --- pkg/parsing/parser_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/parsing/parser_test.go b/pkg/parsing/parser_test.go index 17cc38a..1479818 100644 --- a/pkg/parsing/parser_test.go +++ b/pkg/parsing/parser_test.go @@ -14,7 +14,7 @@ func TestParsingVariable(t *testing.T) { {tokenType: TOKEOF, position: 19}, } - var prettyPrintedLookup = `ASTVariable { + prettyPrintedLookup := `ASTVariable { value: "foo" } ` @@ -32,7 +32,7 @@ func TestParsingVariableBinding(t *testing.T) { {tokenType: TOKEOF, position: 19}, } - var prettyPrintedLookup = `ASTBinding { + prettyPrintedLookup := `ASTBinding { children: { ASTVariable { value: "foo" From 14d65367ed12708dae47ccb1e6cd7d2c0517a962 Mon Sep 17 00:00:00 2001 From: Springcomp Date: Thu, 23 Mar 2023 22:59:05 +0100 Subject: [PATCH 06/15] [lexical-scoping] Refactored tests. Signed-off-by: Springcomp --- pkg/parsing/parser.go | 2 +- pkg/parsing/parser_test.go | 121 ++++++++++++++++++++++--------------- 2 files changed, 73 insertions(+), 50 deletions(-) diff --git a/pkg/parsing/parser.go b/pkg/parsing/parser.go index 2f4a035..a97d559 100644 --- a/pkg/parsing/parser.go +++ b/pkg/parsing/parser.go @@ -141,7 +141,6 @@ func NewParser() *Parser { func (p *Parser) Parse(expression string) (ASTNode, error) { lexer := NewLexer() p.expression = expression - p.index = 0 tokens, err := lexer.Tokenize(expression) if err != nil { return ASTNode{}, err @@ -151,6 +150,7 @@ func (p *Parser) Parse(expression string) (ASTNode, error) { func (p *Parser) parseTokens(tokens []token) (ASTNode, error) { p.tokens = tokens + p.index = 0 parsed, err := p.parseExpression(0) if err != nil { return ASTNode{}, err diff --git a/pkg/parsing/parser_test.go b/pkg/parsing/parser_test.go index 1479818..ccd98a4 100644 --- a/pkg/parsing/parser_test.go +++ b/pkg/parsing/parser_test.go @@ -7,32 +7,28 @@ import ( "github.com/stretchr/testify/assert" ) -func TestParsingVariable(t *testing.T) { - assert := assert.New(t) - tokens := []token{ - {tokenType: TOKVarref, value: "foo", position: 20, length: 3}, - {tokenType: TOKEOF, position: 19}, - } - - prettyPrintedLookup := `ASTVariable { +var parseLetExpressionsTest = []struct { + tokens []token + prettyPrint string +}{ + { + []token{ + {tokenType: TOKVarref, value: "foo", position: 20, length: 3}, + {tokenType: TOKEOF, position: 19}, + }, + `ASTVariable { value: "foo" } -` - p := NewParser() - parsed, _ := p.parseTokens(tokens) - assert.Equal(prettyPrintedLookup, parsed.PrettyPrint(0)) -} - -func TestParsingVariableBinding(t *testing.T) { - assert := assert.New(t) - tokens := []token{ - {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, - {tokenType: TOKAssign, value: "=", position: 9, length: 1}, - {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, - {tokenType: TOKEOF, position: 19}, - } - - prettyPrintedLookup := `ASTBinding { +`, + }, + { + []token{ + {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, + {tokenType: TOKAssign, value: "=", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKEOF, position: 19}, + }, + `ASTBinding { children: { ASTVariable { value: "foo" @@ -42,28 +38,22 @@ func TestParsingVariableBinding(t *testing.T) { } } } -` - p := NewParser() - parsed, _ := p.parseTokens(tokens) - assert.Equal(prettyPrintedLookup, parsed.PrettyPrint(0)) -} - -func TestParsingLetExpression(t *testing.T) { - // 
let $foo = foo in @ - // 012345678901234567890123 - // 1 2 - assert := assert.New(t) - tokens := []token{ - {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3}, - {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, - {tokenType: TOKAssign, value: "=", position: 9, length: 1}, - {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, - {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2}, - {tokenType: TOKCurrent, value: "@", position: 18, length: 1}, - {tokenType: TOKEOF, position: 19}, - } - - expected := `ASTLetExpression { +`, + }, + { + []token{ + // let $foo = foo in @ + // 012345678901234567890123 + // 1 2 + {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3}, + {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, + {tokenType: TOKAssign, value: "=", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2}, + {tokenType: TOKCurrent, value: "@", position: 18, length: 1}, + {tokenType: TOKEOF, position: 19}, + }, + `ASTLetExpression { children: { ASTBindings { children: { @@ -83,10 +73,43 @@ func TestParsingLetExpression(t *testing.T) { } } } -` +`, + }, +} + +func TestParsingLetExpression(t *testing.T) { + assert := assert.New(t) p := NewParser() - parsed, _ := p.parseTokens(tokens) - assert.Equal(expected, parsed.PrettyPrint(0)) + for _, tt := range parseLetExpressionsTest { + parsed, _ := p.parseTokens(tt.tokens) + assert.Equal(tt.prettyPrint, parsed.PrettyPrint(0)) + } +} + +var parseLetExpressionsErrorsTest = []struct { + tokens []token + msg string +}{ + { + []token{ + {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3}, + {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, + {tokenType: TOKAssign, value: "=", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2}, + {tokenType: TOKEOF, position: 19}, + }, + "", + }, +} + +func TestParsingLetExpressionErrors(t *testing.T) { + assert := assert.New(t) + p := NewParser() + for _, tt := range parseLetExpressionsErrorsTest { + _, err := p.parseTokens(tt.tokens) + assert.NotNil(err, fmt.Sprintf("Expected parsing error: %s", tt.msg)) + } } var parsingErrorTests = []struct { From d66497d29b61eb630ee6ac758635cb591d4e1ed3 Mon Sep 17 00:00:00 2001 From: Springcomp Date: Thu, 23 Mar 2023 23:17:29 +0100 Subject: [PATCH 07/15] [lexical-scoping] Ensure sustained code coverage. 
Signed-off-by: Springcomp --- pkg/parsing/parser_test.go | 41 +++++++++++++++++++++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/pkg/parsing/parser_test.go b/pkg/parsing/parser_test.go index ccd98a4..8cc9c41 100644 --- a/pkg/parsing/parser_test.go +++ b/pkg/parsing/parser_test.go @@ -99,7 +99,46 @@ var parseLetExpressionsErrorsTest = []struct { {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2}, {tokenType: TOKEOF, position: 19}, }, - "", + "Incomplete expression", + }, + { + []token{ + {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3}, + {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, + {tokenType: TOKAssign, value: "=", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKUnquotedIdentifier, value: "of", position: 15, length: 2}, + {tokenType: TOKEOF, position: 19}, + }, + "Invalid keyword 'of'", + }, + { + []token{ + // let $foo = , foo in + // ^ + {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3}, + {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, + {tokenType: TOKAssign, value: "=", position: 9, length: 1}, + {tokenType: TOKAssign, value: ",", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKUnquotedIdentifier, value: "of", position: 15, length: 2}, + {tokenType: TOKEOF, position: 19}, + }, + "Invalid comma-separated list", + }, + { + []token{ + // let $foo = foo in { + // ^ + {tokenType: TOKUnquotedIdentifier, value: "let", position: 0, length: 3}, + {tokenType: TOKVarref, value: "foo", position: 4, length: 4}, + {tokenType: TOKAssign, value: "=", position: 9, length: 1}, + {tokenType: TOKUnquotedIdentifier, value: "foo", position: 11, length: 3}, + {tokenType: TOKUnquotedIdentifier, value: "in", position: 15, length: 2}, + {tokenType: TOKLbrace, value: "{", position: 17, length: 2}, + {tokenType: TOKEOF, position: 19}, + }, + "Syntax error", }, } From 95aa9786b426638e254d8aa0792831eb9b07e709 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Charles-Edouard=20Br=C3=A9t=C3=A9ch=C3=A9?= Date: Mon, 27 Mar 2023 09:24:47 +0200 Subject: [PATCH 08/15] lexing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Charles-Edouard Brétéché --- pkg/parsing/lexer.go | 13 +++++++++---- pkg/parsing/lexer_test.go | 7 +++++++ 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/pkg/parsing/lexer.go b/pkg/parsing/lexer.go index db496ac..b5be6da 100644 --- a/pkg/parsing/lexer.go +++ b/pkg/parsing/lexer.go @@ -103,7 +103,6 @@ var basicTokens = map[rune]TokType{ '(': TOKLparen, ')': TOKRparen, '@': TOKCurrent, - '$': TOKRoot, '+': TOKPlus, '%': TOKModulo, '\u2212': TOKMinus, @@ -165,7 +164,7 @@ loop: for { r := lexer.next() if identifierStartBits&(1<<(uint64(r)-64)) > 0 { - t := lexer.consumeUnquotedIdentifier() + t := lexer.consumeUnquotedIdentifier(TOKUnquotedIdentifier) tokens = append(tokens, t) } else if val, ok := basicTokens[r]; ok { // Basic single char token. @@ -229,6 +228,12 @@ loop: } else if r == '!' 
{ t := lexer.matchOrElse(r, '=', TOKNE, TOKNot) tokens = append(tokens, t) + } else if r == '$' { + t := lexer.consumeUnquotedIdentifier(TOKVarref) + if t.value == "$" { + t.tokenType = TOKRoot + } + tokens = append(tokens, t) } else if r == '=' { t := lexer.matchOrElse(r, '=', TOKEQ, TOKAssign) tokens = append(tokens, t) @@ -419,7 +424,7 @@ func (lexer *Lexer) consumeQuotedIdentifier() (token, error) { }, nil } -func (lexer *Lexer) consumeUnquotedIdentifier() token { +func (lexer *Lexer) consumeUnquotedIdentifier(matchedType TokType) token { // Consume runes until we reach the end of an unquoted // identifier. start := lexer.currentPos - lexer.lastWidth @@ -432,7 +437,7 @@ func (lexer *Lexer) consumeUnquotedIdentifier() token { } value := lexer.expression[start:lexer.currentPos] return token{ - tokenType: TOKUnquotedIdentifier, + tokenType: matchedType, value: value, position: start, length: lexer.currentPos - start, diff --git a/pkg/parsing/lexer_test.go b/pkg/parsing/lexer_test.go index f1894bf..0b9e686 100644 --- a/pkg/parsing/lexer_test.go +++ b/pkg/parsing/lexer_test.go @@ -83,6 +83,13 @@ var lexingTests = []struct { {TOKUnquotedIdentifier, "b", 7, 1}, {TOKRbracket, "]", 8, 1}, }}, + // let expressions + {"$root", []token{{TOKVarref, "$root", 0, 5}}}, + {"$root = @", []token{ + {TOKVarref, "$root", 0, 5}, + {TOKAssign, "=", 6, 1}, + {TOKCurrent, "@", 8, 1}, + }}, } func TestCanLexTokens(t *testing.T) { From 29cf3c0fc8797a7459b7a3cbc7c73d9cd5853cf7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Charles-Edouard=20Br=C3=A9t=C3=A9ch=C3=A9?= Date: Mon, 27 Mar 2023 10:35:05 +0200 Subject: [PATCH 09/15] interpreter MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Charles-Edouard Brétéché --- pkg/api/api.go | 2 +- pkg/api/api_test.go | 8 +++++ pkg/binding/binding.go | 36 ++++++++++++++++++++ pkg/interpreter/interpreter.go | 53 ++++++++++++++++++++++++++--- pkg/interpreter/interpreter_test.go | 8 ++--- 5 files changed, 97 insertions(+), 10 deletions(-) create mode 100644 pkg/binding/binding.go diff --git a/pkg/api/api.go b/pkg/api/api.go index 73d7247..a58c4eb 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -53,7 +53,7 @@ func MustCompile(expression string, funcs ...functions.FunctionEntry) JMESPath { // Search evaluates a JMESPath expression against input data and returns the result. func (jp jmesPath) Search(data interface{}) (interface{}, error) { - intr := interpreter.NewInterpreter(data, jp.caller) + intr := interpreter.NewInterpreter(data, jp.caller, nil) return intr.Execute(jp.node, data) } diff --git a/pkg/api/api_test.go b/pkg/api/api_test.go index 2f27f80..2177079 100644 --- a/pkg/api/api_test.go +++ b/pkg/api/api_test.go @@ -130,6 +130,14 @@ func TestSearch(t *testing.T) { want: map[string]interface{}{ "foo": nil, }, + }, { + args: args{ + expression: "let $root = @ in $root.a", + data: map[string]interface{}{ + "a": 42.0, + }, + }, + want: 42.0, }} for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/binding/binding.go b/pkg/binding/binding.go new file mode 100644 index 0000000..a3ea1e4 --- /dev/null +++ b/pkg/binding/binding.go @@ -0,0 +1,36 @@ +package binding + +// Bindings stores let expression bindings by name. +type Bindings interface { + // Get returns the value bound for a given name. 
+ Get(string) (interface{}, error) + // Register registers a value associated with a given name, it returns a new binding + Register(string, interface{}) Bindings +} + +type bindings struct { + values map[string]interface{} +} + +func NewBindings() Bindings { + return bindings{} +} + +func (b bindings) Get(name string) (interface{}, error) { + if value, ok := b.values[name]; ok { + return value, nil + } + // TODO: should return an error + return nil, nil +} + +func (b bindings) Register(name string, value interface{}) Bindings { + values := map[string]interface{}{} + for k, v := range b.values { + values[k] = v + } + values[name] = value + return bindings{ + values: values, + } +} diff --git a/pkg/interpreter/interpreter.go b/pkg/interpreter/interpreter.go index beacba4..db5cd2d 100644 --- a/pkg/interpreter/interpreter.go +++ b/pkg/interpreter/interpreter.go @@ -7,6 +7,7 @@ import ( "unicode" "unicode/utf8" + "github.com/jmespath-community/go-jmespath/pkg/binding" "github.com/jmespath-community/go-jmespath/pkg/parsing" "github.com/jmespath-community/go-jmespath/pkg/util" ) @@ -21,14 +22,19 @@ type Interpreter interface { } type treeInterpreter struct { - caller FunctionCaller - root interface{} + caller FunctionCaller + root interface{} + bindings binding.Bindings } -func NewInterpreter(data interface{}, caller FunctionCaller) Interpreter { +func NewInterpreter(data interface{}, caller FunctionCaller, bindings binding.Bindings) Interpreter { + if bindings == nil { + bindings = binding.NewBindings() + } return &treeInterpreter{ - caller: caller, - root: data, + caller: caller, + root: data, + bindings: bindings, } } @@ -211,6 +217,43 @@ func (intr *treeInterpreter) Execute(node parsing.ASTNode, value interface{}) (i return value, nil case parsing.ASTRootNode: return intr.root, nil + case parsing.ASTBindings: + for _, child := range node.Children { + if _, err := intr.Execute(child, value); err != nil { + return nil, err + } + } + // doesn't mutate value + return value, nil + case parsing.ASTBinding: + if value, err := intr.Execute(node.Children[1], value); err != nil { + return nil, err + } else { + intr.bindings = intr.bindings.Register(node.Children[0].Value.(string), value) + } + // doesn't mutate value + return value, nil + case parsing.ASTLetExpression: + // save bindings state + bindings := intr.bindings + // retore bindings state + defer func() { + intr.bindings = bindings + }() + // evalute bindings first, then evaluate expression + if _, err := intr.Execute(node.Children[0], value); err != nil { + return nil, err + } else if value, err := intr.Execute(node.Children[1], value); err != nil { + return nil, err + } else { + return value, nil + } + case parsing.ASTVariable: + if value, err := intr.bindings.Get(node.Value.(string)); err != nil { + return nil, err + } else { + return value, nil + } case parsing.ASTIndex: if sliceType, ok := value.([]interface{}); ok { index := node.Value.(int) diff --git a/pkg/interpreter/interpreter_test.go b/pkg/interpreter/interpreter_test.go index 7947dc1..7d664cb 100644 --- a/pkg/interpreter/interpreter_test.go +++ b/pkg/interpreter/interpreter_test.go @@ -52,7 +52,7 @@ func search(t *testing.T, expression string, data interface{}) (interface{}, err return nil, err } caller := NewFunctionCaller(functions.GetDefaultFunctions()...) 
- intr := NewInterpreter(nil, caller) + intr := NewInterpreter(nil, caller, nil) return intr.Execute(ast, data) } @@ -198,7 +198,7 @@ func TestCanSupportSliceOfStructsWithFunctions(t *testing.T) { func BenchmarkInterpretSingleFieldStruct(b *testing.B) { assert := assert.New(b) caller := NewFunctionCaller(functions.GetDefaultFunctions()...) - intr := NewInterpreter(nil, caller) + intr := NewInterpreter(nil, caller, nil) parser := parsing.NewParser() ast, _ := parser.Parse("fooasdfasdfasdfasdf") data := benchmarkStruct{"foobarbazqux"} @@ -213,7 +213,7 @@ func BenchmarkInterpretSingleFieldStruct(b *testing.B) { func BenchmarkInterpretNestedStruct(b *testing.B) { assert := assert.New(b) caller := NewFunctionCaller(functions.GetDefaultFunctions()...) - intr := NewInterpreter(nil, caller) + intr := NewInterpreter(nil, caller, nil) parser := parsing.NewParser() ast, _ := parser.Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf") data := benchmarkNested{ @@ -238,7 +238,7 @@ func BenchmarkInterpretNestedMaps(b *testing.B) { err := json.Unmarshal(jsonData, &data) assert.Nil(err) caller := NewFunctionCaller(functions.GetDefaultFunctions()...) - intr := NewInterpreter(nil, caller) + intr := NewInterpreter(nil, caller, nil) parser := parsing.NewParser() ast, _ := parser.Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf") for i := 0; i < b.N; i++ { From a873c8ec8f7165a0a050482d249ef4df33246dec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Charles-Edouard=20Br=C3=A9t=C3=A9ch=C3=A9?= Date: Mon, 27 Mar 2023 14:27:26 +0200 Subject: [PATCH 10/15] error MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Charles-Edouard Brétéché --- pkg/binding/binding.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pkg/binding/binding.go b/pkg/binding/binding.go index a3ea1e4..d87a156 100644 --- a/pkg/binding/binding.go +++ b/pkg/binding/binding.go @@ -1,5 +1,7 @@ package binding +import "fmt" + // Bindings stores let expression bindings by name. type Bindings interface { // Get returns the value bound for a given name. 
@@ -20,8 +22,7 @@ func (b bindings) Get(name string) (interface{}, error) { if value, ok := b.values[name]; ok { return value, nil } - // TODO: should return an error - return nil, nil + return nil, fmt.Errorf("variable not defined: %s", name) } func (b bindings) Register(name string, value interface{}) Bindings { From 28336df18c11cd17bb8f30e36f2b5eecef33df76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Charles-Edouard=20Br=C3=A9t=C3=A9ch=C3=A9?= Date: Mon, 27 Mar 2023 14:30:22 +0200 Subject: [PATCH 11/15] fix MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Charles-Edouard Brétéché --- pkg/interpreter/interpreter.go | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/pkg/interpreter/interpreter.go b/pkg/interpreter/interpreter.go index db5cd2d..7d5dc95 100644 --- a/pkg/interpreter/interpreter.go +++ b/pkg/interpreter/interpreter.go @@ -218,19 +218,15 @@ func (intr *treeInterpreter) Execute(node parsing.ASTNode, value interface{}) (i case parsing.ASTRootNode: return intr.root, nil case parsing.ASTBindings: + bindings := intr.bindings for _, child := range node.Children { - if _, err := intr.Execute(child, value); err != nil { + if value, err := intr.Execute(child.Children[1], value); err != nil { return nil, err + } else { + bindings = bindings.Register(child.Children[0].Value.(string), value) } } - // doesn't mutate value - return value, nil - case parsing.ASTBinding: - if value, err := intr.Execute(node.Children[1], value); err != nil { - return nil, err - } else { - intr.bindings = intr.bindings.Register(node.Children[0].Value.(string), value) - } + intr.bindings = bindings // doesn't mutate value return value, nil case parsing.ASTLetExpression: From df18b6e48241d33f3b2c699b492b03f5cd8874b4 Mon Sep 17 00:00:00 2001 From: Springcomp Date: Mon, 27 Mar 2023 15:42:24 +0200 Subject: [PATCH 12/15] Re-introduced tests that now succeed. Signed-off-by: Springcomp --- jp_test.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/jp_test.go b/jp_test.go index c6304d3..26027e2 100644 --- a/jp_test.go +++ b/jp_test.go @@ -29,10 +29,6 @@ var excludeList = []string{ "legacy/legacy-literal.json", "benchmarks.json", "function_let.json", - "lexical_scoping.json", - - // this test currently fails - "literal.json", } func excluded(path string) bool { From 3df1ecf2d821c735a2a1203ff721df360e4e55ba Mon Sep 17 00:00:00 2001 From: Springcomp Date: Mon, 27 Mar 2023 22:56:11 +0200 Subject: [PATCH 13/15] Include compliance tests. Signed-off-by: Springcomp --- compliance | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compliance b/compliance index 86fe910..ffa8a5a 160000 --- a/compliance +++ b/compliance @@ -1 +1 @@ -Subproject commit 86fe9102fda76890ef0914b546c1ef821eb6e758 +Subproject commit ffa8a5ac6eb73c4297ff7d997ac5786ee4309d59 From 6a054c7c7c572c864af575ad9437546861a2f076 Mon Sep 17 00:00:00 2001 From: Springcomp Date: Mon, 27 Mar 2023 22:58:28 +0200 Subject: [PATCH 14/15] let() is deprecated. 
Signed-off-by: Springcomp --- jp_test.go | 1 - 1 file changed, 1 deletion(-) diff --git a/jp_test.go b/jp_test.go index 26027e2..cffd2f5 100644 --- a/jp_test.go +++ b/jp_test.go @@ -28,7 +28,6 @@ type TestCase struct { var excludeList = []string{ "legacy/legacy-literal.json", "benchmarks.json", - "function_let.json", } func excluded(path string) bool { From cbc9e5b0274fdc9b5440a186cee127720bcae46c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Charles-Edouard=20Br=C3=A9t=C3=A9ch=C3=A9?= Date: Tue, 28 Mar 2023 11:36:58 +0200 Subject: [PATCH 15/15] unit tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Charles-Edouard Brétéché --- pkg/binding/binding_test.go | 143 ++++++++++++++++++++++++++++++++++++ 1 file changed, 143 insertions(+) create mode 100644 pkg/binding/binding_test.go diff --git a/pkg/binding/binding_test.go b/pkg/binding/binding_test.go new file mode 100644 index 0000000..1784ec9 --- /dev/null +++ b/pkg/binding/binding_test.go @@ -0,0 +1,143 @@ +package binding + +import ( + "reflect" + "testing" +) + +func TestNewBindings(t *testing.T) { + tests := []struct { + name string + want Bindings + }{{ + want: bindings{}, + }} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := NewBindings(); !reflect.DeepEqual(got, tt.want) { + t.Errorf("NewBindings() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_bindings_Get(t *testing.T) { + type fields struct { + values map[string]interface{} + } + type args struct { + name string + } + tests := []struct { + name string + fields fields + args args + want interface{} + wantErr bool + }{{ + fields: fields{ + values: nil, + }, + args: args{ + name: "$root", + }, + wantErr: true, + }, { + fields: fields{ + values: map[string]interface{}{}, + }, + args: args{ + name: "$root", + }, + wantErr: true, + }, { + fields: fields{ + values: map[string]interface{}{ + "$root": 42.0, + }, + }, + args: args{ + name: "$root", + }, + want: 42.0, + }, { + fields: fields{ + values: map[string]interface{}{ + "$foot": 42.0, + }, + }, + args: args{ + name: "$root", + }, + wantErr: true, + }} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + b := bindings{ + values: tt.fields.values, + } + got, err := b.Get(tt.args.name) + if (err != nil) != tt.wantErr { + t.Errorf("bindings.Get() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("bindings.Get() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_bindings_Register(t *testing.T) { + type fields struct { + values map[string]interface{} + } + type args struct { + name string + value interface{} + } + tests := []struct { + name string + fields fields + args args + want Bindings + }{{ + fields: fields{ + values: nil, + }, + args: args{ + name: "$root", + value: 42.0, + }, + want: bindings{ + values: map[string]interface{}{ + "$root": 42.0, + }, + }, + }, { + fields: fields{ + values: map[string]interface{}{ + "$root": 21.0, + }, + }, + args: args{ + name: "$root", + value: 42.0, + }, + want: bindings{ + values: map[string]interface{}{ + "$root": 42.0, + }, + }, + }} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + b := bindings{ + values: tt.fields.values, + } + if got := b.Register(tt.args.name, tt.args.value); !reflect.DeepEqual(got, tt.want) { + t.Errorf("bindings.Register() = %v, want %v", got, tt.want) + } + }) + } +}
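
Taken together, the series wires lexical scoping end to end: the lexer emits TOKVarref and TOKAssign tokens, the parser builds ASTLetExpression / ASTBindings / ASTVariable nodes, and the interpreter resolves variables through the new binding.Bindings store. The following is a minimal usage sketch, not part of the patches themselves; it assumes the pkg/api package is imported under the module path visible in the diffs (github.com/jmespath-community/go-jmespath/pkg/api) and mirrors the new api_test.go case added in patch 09.

package main

import (
	"fmt"

	"github.com/jmespath-community/go-jmespath/pkg/api"
)

func main() {
	// Mirrors the new api_test.go case: bind $root to the current node,
	// then dereference it inside the body of the let expression.
	jp := api.MustCompile("let $root = @ in $root.a")
	result, err := jp.Search(map[string]interface{}{"a": 42.0})
	if err != nil {
		panic(err)
	}
	fmt.Println(result) // 42
}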
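
The binding package introduced in patch 09 and unit-tested in patch 15 can also be exercised on its own: Register never mutates the receiver but returns a fresh Bindings value, and (after patch 10) Get reports an error for names that were never bound. A short sketch, assuming the import path shown in interpreter.go:

package main

import (
	"fmt"

	"github.com/jmespath-community/go-jmespath/pkg/binding"
)

func main() {
	// Register returns a new Bindings value and leaves the receiver untouched,
	// which is what lets the interpreter save and restore scopes around a
	// let expression with a simple defer.
	outer := binding.NewBindings().Register("$root", 21.0)
	inner := outer.Register("$root", 42.0)

	v, _ := outer.Get("$root")
	fmt.Println(v) // 21

	v, _ = inner.Get("$root")
	fmt.Println(v) // 42

	// Unbound names surface an error rather than the earlier nil, nil result.
	if _, err := outer.Get("$other"); err != nil {
		fmt.Println(err) // variable not defined: $other
	}
}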