@@ -222,23 +222,15 @@ def _parse_slice_expression(self):
222
222
if current_token == 'colon' :
223
223
index += 1
224
224
if index == 3 :
225
- t = self ._lookahead_token (0 )
226
- lex_position = t ['start' ]
227
- actual_value = t ['value' ]
228
- actual_type = t ['type' ]
229
- raise exceptions .ParseError (lex_position , actual_value ,
230
- actual_type , 'syntax error' )
225
+ self ._raise_parse_error_for_token (
226
+ self ._lookahead_token (0 ), 'syntax error' )
231
227
self ._advance ()
232
228
elif current_token == 'number' :
233
229
parts [index ] = self ._lookahead_token (0 )['value' ]
234
230
self ._advance ()
235
231
else :
236
- t = self ._lookahead_token (0 )
237
- lex_position = t ['start' ]
238
- actual_value = t ['value' ]
239
- actual_type = t ['type' ]
240
- raise exceptions .ParseError (lex_position , actual_value ,
241
- actual_type , 'syntax error' )
232
+ self ._raise_parse_error_for_token (
233
+ self ._lookahead_token (0 ), 'syntax error' )
242
234
current_token = self ._current_token ()
243
235
self ._match ('rbracket' )
244
236
return ast .slice (* parts )
@@ -408,12 +400,8 @@ def _parse_projection_rhs(self, binding_power):
408
400
self ._match ('dot' )
409
401
right = self ._parse_dot_rhs (binding_power )
410
402
else :
411
- t = self ._lookahead_token (0 )
412
- lex_position = t ['start' ]
413
- actual_value = t ['value' ]
414
- actual_type = t ['type' ]
415
- raise exceptions .ParseError (lex_position , actual_value ,
416
- actual_type , 'syntax error' )
403
+ self ._raise_parse_error_for_token (self ._lookahead_token (0 ),
404
+ 'syntax error' )
417
405
return right
418
406
419
407
def _parse_dot_rhs (self , binding_power ):
@@ -439,58 +427,33 @@ def _parse_dot_rhs(self, binding_power):
439
427
t = self ._lookahead_token (0 )
440
428
allowed = ['quoted_identifier' , 'unquoted_identifier' ,
441
429
'lbracket' , 'lbrace' ]
442
- lex_position = t ['start' ]
443
- actual_value = t ['value' ]
444
- actual_type = t ['type' ]
445
- raise exceptions .ParseError (
446
- lex_position , actual_value , actual_type ,
447
- "Expecting: %s, got: %s" % (allowed ,
448
- actual_type ))
430
+ msg = (
431
+ "Expecting: %s, got: %s" % (allowed , t ['type' ])
432
+ )
433
+ self ._raise_parse_error_for_token (t , msg )
449
434
450
435
def _error_nud_token(self, token):
    """Report a token that cannot begin an expression (nud position).

    End-of-input is surfaced as ``IncompleteExpressionError`` so callers
    can distinguish a truncated expression from a malformed one; any
    other token becomes a generic parse error via the shared helper.
    """
    kind = token['type']
    if kind == 'eof':
        raise exceptions.IncompleteExpressionError(
            token['start'], token['value'], kind)
    self._raise_parse_error_for_token(token, 'invalid token')
456
440
457
441
def _error_led_token(self, token):
    """Report a token that is not valid as an infix operator (led position)."""
    self._raise_parse_error_for_token(token, 'invalid token')
460
443
461
444
def _match(self, token_type=None):
    """Consume the current token when its type equals ``token_type``.

    On a mismatch, delegate to ``_raise_parse_error_maybe_eof``, which
    unconditionally raises — ``IncompleteExpressionError`` at end of
    input, ``ParseError`` otherwise — so ``_advance`` only runs on a
    successful match.
    """
    if self._current_token() != token_type:
        self._raise_parse_error_maybe_eof(
            token_type, self._lookahead_token(0))
    self._advance()
479
452
480
453
def _match_multiple_tokens(self, token_types):
    """Consume the current token when its type is one of ``token_types``.

    Mismatches are routed through the shared error helper, which always
    raises, so ``_advance`` is reached only for an accepted token.
    """
    current = self._current_token()
    if current not in token_types:
        self._raise_parse_error_maybe_eof(
            token_types, self._lookahead_token(0))
    self._advance()
495
458
496
459
def _advance (self ):
@@ -505,6 +468,25 @@ def _lookahead(self, number):
505
468
def _lookahead_token (self , number ):
506
469
return self ._tokens [self ._index + number ]
507
470
471
def _raise_parse_error_for_token(self, token, reason):
    """Raise a ``ParseError`` locating *token*, annotated with *reason*."""
    raise exceptions.ParseError(
        token['start'], token['value'], token['type'], reason)
477
+
478
def _raise_parse_error_maybe_eof(self, expected_type, token):
    """Raise the appropriate error for an unexpected *token*.

    End-of-input becomes ``IncompleteExpressionError``; any other token
    becomes a ``ParseError`` whose message names the expected token
    type(s) alongside the one actually found.
    """
    position = token['start']
    value = token['value']
    kind = token['type']
    if kind == 'eof':
        raise exceptions.IncompleteExpressionError(position, value, kind)
    raise exceptions.ParseError(
        position, value, kind,
        'Expecting: %s, got: %s' % (expected_type, kind))
489
+
508
490
def _free_cache_entries (self ):
509
491
for key in random .sample (self ._CACHE .keys (), int (self ._MAX_SIZE / 2 )):
510
492
del self ._CACHE [key ]
0 commit comments