
Commit 248eceb

Refactor parse errors into common functionality
Cuts down duplication on error cases.
1 parent: 9dc6207
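The duplication being removed follows one pattern: unpack the offending token, then raise exceptions.ParseError inline. Below is a minimal standalone sketch of that pattern and of the shared helper that replaces it; the classes are toy stand-ins for illustration only, and only the helper name matches the real parser in the diff.

class ParseError(Exception):
    """Toy stand-in for jmespath.exceptions.ParseError."""
    def __init__(self, lex_position, token_value, token_type, msg):
        super().__init__('Syntax error at column %s (%s %r): %s'
                         % (lex_position, token_type, token_value, msg))


class ToyParser:
    # Before this commit, every error site unpacked the token dict and
    # raised ParseError inline; after it, each site calls one shared helper.
    def _raise_parse_error_for_token(self, token, reason):
        raise ParseError(token['start'], token['value'],
                         token['type'], reason)

    def _parse_number(self, token):
        if token['type'] != 'number':
            # Formerly several duplicated lines of unpack-and-raise here.
            self._raise_parse_error_for_token(token, 'syntax error')
        return int(token['value'])


try:
    ToyParser()._parse_number({'type': 'dot', 'value': '.', 'start': 3})
except ParseError as err:
    print(err)  # Syntax error at column 3 (dot '.'): syntax error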

File tree: 1 file changed (+35, -53)

jmespath/parser.py

Lines changed: 35 additions & 53 deletions
@@ -222,23 +222,15 @@ def _parse_slice_expression(self):
             if current_token == 'colon':
                 index += 1
                 if index == 3:
-                    t = self._lookahead_token(0)
-                    lex_position = t['start']
-                    actual_value = t['value']
-                    actual_type = t['type']
-                    raise exceptions.ParseError(lex_position, actual_value,
-                                                actual_type, 'syntax error')
+                    self._raise_parse_error_for_token(
+                        self._lookahead_token(0), 'syntax error')
                 self._advance()
             elif current_token == 'number':
                 parts[index] = self._lookahead_token(0)['value']
                 self._advance()
             else:
-                t = self._lookahead_token(0)
-                lex_position = t['start']
-                actual_value = t['value']
-                actual_type = t['type']
-                raise exceptions.ParseError(lex_position, actual_value,
-                                            actual_type, 'syntax error')
+                self._raise_parse_error_for_token(
+                    self._lookahead_token(0), 'syntax error')
             current_token = self._current_token()
         self._match('rbracket')
         return ast.slice(*parts)
@@ -408,12 +400,8 @@ def _parse_projection_rhs(self, binding_power):
             self._match('dot')
             right = self._parse_dot_rhs(binding_power)
         else:
-            t = self._lookahead_token(0)
-            lex_position = t['start']
-            actual_value = t['value']
-            actual_type = t['type']
-            raise exceptions.ParseError(lex_position, actual_value,
-                                        actual_type, 'syntax error')
+            self._raise_parse_error_for_token(self._lookahead_token(0),
+                                              'syntax error')
         return right

     def _parse_dot_rhs(self, binding_power):
@@ -439,58 +427,33 @@ def _parse_dot_rhs(self, binding_power):
             t = self._lookahead_token(0)
             allowed = ['quoted_identifier', 'unquoted_identifier',
                        'lbracket', 'lbrace']
-            lex_position = t['start']
-            actual_value = t['value']
-            actual_type = t['type']
-            raise exceptions.ParseError(
-                lex_position, actual_value, actual_type,
-                "Expecting: %s, got: %s" % (allowed,
-                                            actual_type))
+            msg = (
+                "Expecting: %s, got: %s" % (allowed, t['type'])
+            )
+            self._raise_parse_error_for_token(t, msg)

     def _error_nud_token(self, token):
         if token['type'] == 'eof':
             raise exceptions.IncompleteExpressionError(
                 token['start'], token['value'], token['type'])
-        raise exceptions.ParseError(token['start'], token['value'],
-                                    token['type'], 'Invalid token.')
+        self._raise_parse_error_for_token(token, 'invalid token')

     def _error_led_token(self, token):
-        raise exceptions.ParseError(token['start'], token['value'],
-                                    token['type'], 'Invalid token')
+        self._raise_parse_error_for_token(token, 'invalid token')

     def _match(self, token_type=None):
         # inline'd self._current_token()
         if self._current_token() == token_type:
             # inline'd self._advance()
             self._advance()
         else:
-            t = self._lookahead_token(0)
-            lex_position = t['start']
-            actual_value = t['value']
-            actual_type = t['type']
-            if actual_type == 'eof':
-                raise exceptions.IncompleteExpressionError(
-                    lex_position, actual_value, actual_type)
-            else:
-                message = 'Expecting: %s, got: %s' % (token_type,
-                                                      actual_type)
-                raise exceptions.ParseError(
-                    lex_position, actual_value, actual_type, message)
+            self._raise_parse_error_maybe_eof(
+                token_type, self._lookahead_token(0))

     def _match_multiple_tokens(self, token_types):
         if self._current_token() not in token_types:
-            t = self._lookahead_token(0)
-            lex_position = t['start']
-            actual_value = t['value']
-            actual_type = t['type']
-            if actual_type == 'eof':
-                raise exceptions.IncompleteExpressionError(
-                    lex_position, actual_value, actual_type)
-            else:
-                message = 'Expecting: %s, got: %s' % (token_types,
-                                                      actual_type)
-                raise exceptions.ParseError(
-                    lex_position, actual_value, actual_type, message)
+            self._raise_parse_error_maybe_eof(
+                token_types, self._lookahead_token(0))
         self._advance()

     def _advance(self):
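Note that _raise_parse_error_maybe_eof (defined in the final hunk below) keeps the old distinction: an unexpected end-of-input still raises IncompleteExpressionError rather than ParseError. A rough sketch of how that surfaces through the public jmespath API; the triggering expression is an assumption based on this diff, not verified output.

import jmespath
from jmespath import exceptions

# A truncated expression should leave _match('rbracket') looking at an
# 'eof' token, which the helper maps to IncompleteExpressionError.
try:
    jmespath.compile('foo[0')  # missing the closing ']'
except (exceptions.IncompleteExpressionError, exceptions.ParseError) as err:
    print(type(err).__name__, err)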
@@ -505,6 +468,25 @@ def _lookahead(self, number):
     def _lookahead_token(self, number):
         return self._tokens[self._index + number]

+    def _raise_parse_error_for_token(self, token, reason):
+        lex_position = token['start']
+        actual_value = token['value']
+        actual_type = token['type']
+        raise exceptions.ParseError(lex_position, actual_value,
+                                    actual_type, reason)
+
+    def _raise_parse_error_maybe_eof(self, expected_type, token):
+        lex_position = token['start']
+        actual_value = token['value']
+        actual_type = token['type']
+        if actual_type == 'eof':
+            raise exceptions.IncompleteExpressionError(
+                lex_position, actual_value, actual_type)
+        message = 'Expecting: %s, got: %s' % (expected_type,
+                                              actual_type)
+        raise exceptions.ParseError(
+            lex_position, actual_value, actual_type, message)
+
     def _free_cache_entries(self):
         for key in random.sample(self._CACHE.keys(), int(self._MAX_SIZE / 2)):
             del self._CACHE[key]
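With both helpers in place, callers should still see the same exception types and messages as before. A quick illustrative sketch of an ordinary syntax error reaching the public API; the exact message text varies by version, so treat the comment as approximate.

import jmespath
from jmespath import exceptions

# 'foo..bar' fails in _parse_dot_rhs: after the first dot the next token
# is another 'dot', which is not in the allowed set, so the parser reports
# it via _raise_parse_error_for_token rather than an inline raise.
try:
    jmespath.compile('foo..bar')
except exceptions.ParseError as err:
    print(type(err).__name__, err)  # e.g. "... Expecting: [...], got: dot"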
