@@ -221,17 +221,16 @@ def _parse_slice_expression(self):
         while not current_token == 'rbracket' and index < 3:
             if current_token == 'colon':
                 index += 1
+                if index == 3:
+                    self._raise_parse_error_for_token(
+                        self._lookahead_token(0), 'syntax error')
                 self._advance()
             elif current_token == 'number':
                 parts[index] = self._lookahead_token(0)['value']
                 self._advance()
             else:
-                t = self._lookahead_token(0)
-                lex_position = t['start']
-                actual_value = t['value']
-                actual_type = t['type']
-                raise exceptions.ParseError(lex_position, actual_value,
-                                            actual_type, 'syntax error')
+                self._raise_parse_error_for_token(
+                    self._lookahead_token(0), 'syntax error')
             current_token = self._current_token()
         self._match('rbracket')
         return ast.slice(*parts)
@@ -271,6 +270,14 @@ def _token_led_and(self, left):
         return ast.and_expression(left, right)
 
     def _token_led_lparen(self, left):
+        if left['type'] != 'field':
+            #  0 - first func arg or closing paren.
+            # -1 - '(' token
+            # -2 - invalid function "name".
+            prev_t = self._lookahead_token(-2)
+            raise exceptions.ParseError(
+                prev_t['start'], prev_t['value'], prev_t['type'],
+                "Invalid function name '%s'" % prev_t['value'])
         name = left['value']
         args = []
         while not self._current_token() == 'rparen':
@@ -393,12 +400,8 @@ def _parse_projection_rhs(self, binding_power):
             self._match('dot')
             right = self._parse_dot_rhs(binding_power)
         else:
-            t = self._lookahead_token(0)
-            lex_position = t['start']
-            actual_value = t['value']
-            actual_type = t['type']
-            raise exceptions.ParseError(lex_position, actual_value,
-                                        actual_type, 'syntax error')
+            self._raise_parse_error_for_token(self._lookahead_token(0),
+                                              'syntax error')
         return right
 
     def _parse_dot_rhs(self, binding_power):
@@ -424,58 +427,33 @@ def _parse_dot_rhs(self, binding_power):
             t = self._lookahead_token(0)
             allowed = ['quoted_identifier', 'unquoted_identifier',
                        'lbracket', 'lbrace']
-            lex_position = t['start']
-            actual_value = t['value']
-            actual_type = t['type']
-            raise exceptions.ParseError(
-                lex_position, actual_value, actual_type,
-                "Expecting: %s, got: %s" % (allowed,
-                                            actual_type))
+            msg = (
+                "Expecting: %s, got: %s" % (allowed, t['type'])
+            )
+            self._raise_parse_error_for_token(t, msg)
 
     def _error_nud_token(self, token):
         if token['type'] == 'eof':
             raise exceptions.IncompleteExpressionError(
                 token['start'], token['value'], token['type'])
-        raise exceptions.ParseError(token['start'], token['value'],
-                                    token['type'], 'Invalid token.')
+        self._raise_parse_error_for_token(token, 'invalid token')
 
     def _error_led_token(self, token):
-        raise exceptions.ParseError(token['start'], token['value'],
-                                    token['type'], 'Invalid token')
+        self._raise_parse_error_for_token(token, 'invalid token')
 
     def _match(self, token_type=None):
         # inline'd self._current_token()
         if self._current_token() == token_type:
             # inline'd self._advance()
             self._advance()
         else:
-            t = self._lookahead_token(0)
-            lex_position = t['start']
-            actual_value = t['value']
-            actual_type = t['type']
-            if actual_type == 'eof':
-                raise exceptions.IncompleteExpressionError(
-                    lex_position, actual_value, actual_type)
-            else:
-                message = 'Expecting: %s, got: %s' % (token_type,
-                                                      actual_type)
-                raise exceptions.ParseError(
-                    lex_position, actual_value, actual_type, message)
+            self._raise_parse_error_maybe_eof(
+                token_type, self._lookahead_token(0))
 
     def _match_multiple_tokens(self, token_types):
         if self._current_token() not in token_types:
-            t = self._lookahead_token(0)
-            lex_position = t['start']
-            actual_value = t['value']
-            actual_type = t['type']
-            if actual_type == 'eof':
-                raise exceptions.IncompleteExpressionError(
-                    lex_position, actual_value, actual_type)
-            else:
-                message = 'Expecting: %s, got: %s' % (token_types,
-                                                      actual_type)
-                raise exceptions.ParseError(
-                    lex_position, actual_value, actual_type, message)
+            self._raise_parse_error_maybe_eof(
+                token_types, self._lookahead_token(0))
         self._advance()
 
     def _advance(self):
@@ -490,6 +468,25 @@ def _lookahead(self, number):
     def _lookahead_token(self, number):
         return self._tokens[self._index + number]
 
+    def _raise_parse_error_for_token(self, token, reason):
+        lex_position = token['start']
+        actual_value = token['value']
+        actual_type = token['type']
+        raise exceptions.ParseError(lex_position, actual_value,
+                                    actual_type, reason)
+
+    def _raise_parse_error_maybe_eof(self, expected_type, token):
+        lex_position = token['start']
+        actual_value = token['value']
+        actual_type = token['type']
+        if actual_type == 'eof':
+            raise exceptions.IncompleteExpressionError(
+                lex_position, actual_value, actual_type)
+        message = 'Expecting: %s, got: %s' % (expected_type,
+                                              actual_type)
+        raise exceptions.ParseError(
+            lex_position, actual_value, actual_type, message)
+
     def _free_cache_entries(self):
         for key in random.sample(self._CACHE.keys(), int(self._MAX_SIZE / 2)):
             del self._CACHE[key]
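
Most of the change above is a mechanical extraction: every site that used to unpack a token into lex_position/actual_value/actual_type and raise directly now delegates to _raise_parse_error_for_token, and the two eof-aware sites in _match and _match_multiple_tokens delegate to _raise_parse_error_maybe_eof (the _token_led_lparen hunk additionally rejects invalid function names). The standalone Python sketch below only illustrates that pattern; MiniParser, the simplified ParseError/IncompleteExpressionError classes, and the token dicts are stand-ins for illustration, not the library's actual API.

# Standalone sketch of the refactoring pattern shown in the diff: the
# repeated "unpack token fields and raise" sequence is funnelled through
# two shared helpers, one of which special-cases eof. The classes and
# token dicts below are simplified stand-ins, not the real module.


class ParseError(Exception):
    def __init__(self, lex_position, token_value, token_type,
                 msg='Invalid expression'):
        super().__init__('%s: column %s, token %r (%s)'
                         % (msg, lex_position, token_value, token_type))


class IncompleteExpressionError(ParseError):
    """Raised when the token stream ends before parsing is finished."""


class MiniParser:
    def __init__(self, tokens):
        # tokens: list of {'type': ..., 'value': ..., 'start': ...} dicts.
        self._tokens = tokens
        self._index = 0

    def _current_token(self):
        return self._tokens[self._index]['type']

    def _lookahead_token(self, number):
        return self._tokens[self._index + number]

    # Shared error helpers: every error site delegates here instead of
    # repeating the lex_position/actual_value/actual_type unpacking.
    def _raise_parse_error_for_token(self, token, reason):
        raise ParseError(token['start'], token['value'],
                         token['type'], reason)

    def _raise_parse_error_maybe_eof(self, expected_type, token):
        if token['type'] == 'eof':
            raise IncompleteExpressionError(
                token['start'], token['value'], token['type'])
        self._raise_parse_error_for_token(
            token, 'Expecting: %s, got: %s' % (expected_type, token['type']))

    def _match(self, token_type):
        if self._current_token() == token_type:
            self._index += 1
        else:
            self._raise_parse_error_maybe_eof(
                token_type, self._lookahead_token(0))


tokens = [{'type': 'number', 'value': 1, 'start': 0},
          {'type': 'eof', 'value': '', 'start': 1}]
parser = MiniParser(tokens)
parser._match('number')        # consumes the number token
try:
    parser._match('rbracket')  # next token is eof, so the eof-aware path fires
except IncompleteExpressionError as exc:
    print(exc)                 # Invalid expression: column 1, token '' (eof)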