Skip to content

Commit 96cc10e

Browse files
committed
Merge branch 'more-testing' into develop
* more-testing:
  - Add additional hypothesis tests for functions
  - Assert more properties on parsed AST result
  - Fix lexer error values
2 parents a908dd0 + 491501b commit 96cc10e

File tree

3 files changed

+73
-8
lines changed

3 files changed

+73
-8
lines changed

extra/test_hypothesis.py

Lines changed: 59 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
# via env var for longer runs in travis.
55
import os
66
import sys
7+
import numbers
78

89
from nose.plugins.skip import SkipTest
910
from hypothesis import given, settings, assume, HealthCheck
@@ -12,19 +13,22 @@
1213
from jmespath import lexer
1314
from jmespath import parser
1415
from jmespath import exceptions
16+
from jmespath.functions import Functions
1517

1618

1719
if sys.version_info[:2] == (2, 6):
1820
raise RuntimeError("Hypothesis tests are not supported on python2.6. "
1921
"Use python2.7, or python3.3 and greater.")
2022

2123

24+
JSON_NUMBERS = (st.integers() | st.floats(allow_nan=False,
25+
allow_infinity=False))
26+
2227
RANDOM_JSON = st.recursive(
23-
st.floats() | st.booleans() | st.text() | st.none(),
28+
JSON_NUMBERS | st.booleans() | st.text() | st.none(),
2429
lambda children: st.lists(children) | st.dictionaries(st.text(), children)
2530
)
2631

27-
2832
MAX_EXAMPLES = int(os.environ.get('JP_MAX_EXAMPLES', 1000))
2933
BASE_SETTINGS = {
3034
'max_examples': MAX_EXAMPLES,
@@ -40,11 +44,31 @@
4044
def test_lexer_api(expr):
4145
try:
4246
tokens = list(lexer.Lexer().tokenize(expr))
43-
except exceptions.JMESPathError as e:
47+
except exceptions.EmptyExpressionError:
48+
return
49+
except exceptions.LexerError as e:
50+
assert e.lex_position >= 0, e.lex_position
51+
assert e.lex_position < len(expr), e.lex_position
52+
if expr:
53+
assert expr[e.lex_position] == e.token_value[0], (
54+
"Lex position does not match first token char.\n"
55+
"Expression: %s\n%s != %s" % (expr, expr[e.lex_position],
56+
e.token_value[0])
57+
)
4458
return
4559
except Exception as e:
4660
raise AssertionError("Non JMESPathError raised: %s" % e)
4761
assert isinstance(tokens, list)
62+
# Token starting positions must be unique, can't have two
63+
# tokens with the same start position.
64+
start_locations = [t['start'] for t in tokens]
65+
assert len(set(start_locations)) == len(start_locations), (
66+
"Tokens must have unique starting locations.")
67+
# Starting positions must be increasing (i.e. sorted).
68+
assert sorted(start_locations) == start_locations, (
69+
"Tokens must have increasing start locations.")
70+
# Last token is always EOF.
71+
assert tokens[-1]['type'] == 'eof'
4872

4973

5074
@settings(**BASE_SETTINGS)
@@ -65,6 +89,9 @@ def test_parser_api_from_str(expr):
6589
except Exception as e:
6690
raise AssertionError("Non JMESPathError raised: %s" % e)
6791
assert isinstance(ast.parsed, dict)
92+
assert 'type' in ast.parsed
93+
assert 'children' in ast.parsed
94+
assert isinstance(ast.parsed['children'], list)
6895

6996

7097
@settings(**BASE_SETTINGS)
@@ -82,3 +109,32 @@ def test_search_api(expr, data):
82109
return
83110
except Exception as e:
84111
raise AssertionError("Non JMESPathError raised: %s" % e)
112+
113+
114+
# Additional property tests for functions.
115+
116+
@given(arg=JSON_NUMBERS)
117+
def test_abs(arg):
118+
assert Functions().call_function('abs', [arg]) >= 0
119+
120+
121+
@given(arg=st.lists(JSON_NUMBERS))
122+
def test_avg(arg):
123+
result = Functions().call_function('avg', [arg])
124+
if result is not None:
125+
assert isinstance(result, numbers.Number)
126+
127+
128+
@given(arg=st.lists(st.floats() | st.booleans() | st.text() | st.none(),
129+
min_size=1))
130+
def test_not_null(arg):
131+
result = Functions().call_function('not_null', arg)
132+
if result is not None:
133+
assert result in arg
134+
135+
136+
@given(arg=RANDOM_JSON)
137+
def test_to_number(arg):
138+
result = Functions().call_function('to_number', [arg])
139+
if result is not None:
140+
assert isinstance(result, numbers.Number)

jmespath/functions.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
'str': 'string',
1717
'float': 'number',
1818
'int': 'number',
19+
'long': 'number',
1920
'OrderedDict': 'object',
2021
'_Projection': 'array',
2122
'_Expression': 'expref',
@@ -29,7 +30,7 @@
2930
'object': ('dict', 'OrderedDict',),
3031
'null': ('None',),
3132
'string': ('unicode', 'str'),
32-
'number': ('float', 'int'),
33+
'number': ('float', 'int', 'long'),
3334
'expref': ('_Expression',),
3435
}
3536

jmespath/lexer.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -92,10 +92,17 @@ def tokenize(self, expression):
9292
'start': self._position - 1, 'end': self._position}
9393
self._next()
9494
else:
95+
if self._current is None:
96+
# If we're at the EOF, we never advanced
97+
# the position so we don't need to rewind
98+
# it back one location.
99+
position = self._position
100+
else:
101+
position = self._position - 1
95102
raise LexerError(
96-
lexer_position=self._position - 1,
103+
lexer_position=position,
97104
lexer_value='=',
98-
message="Unknown token =")
105+
message="Unknown token '='")
99106
else:
100107
raise LexerError(lexer_position=self._position,
101108
lexer_value=self._current,
@@ -138,8 +145,9 @@ def _consume_until(self, delimiter):
138145
buff += '\\'
139146
self._next()
140147
if self._current is None:
148+
# We're at the EOF.
141149
raise LexerError(lexer_position=start,
142-
lexer_value=self._expression,
150+
lexer_value=self._expression[start:],
143151
message="Unclosed %s delimiter" % delimiter)
144152
buff += self._current
145153
self._next()
@@ -162,7 +170,7 @@ def _consume_literal(self):
162170
PendingDeprecationWarning)
163171
except ValueError:
164172
raise LexerError(lexer_position=start,
165-
lexer_value=self._expression,
173+
lexer_value=self._expression[start:],
166174
message="Bad token %s" % lexeme)
167175
token_len = self._position - start
168176
return {'type': 'literal', 'value': parsed_json,

0 commit comments

Comments
 (0)