@@ -431,35 +431,39 @@ def customize_grammar_rules(self, tokens, customize):
                 "BUILD_TUPLE",
                 "BUILD_TUPLE_UNPACK",
             ):
-                v = token.attr
+                collection_size = token.attr

                 is_LOAD_CLOSURE = False
                 if opname_base == "BUILD_TUPLE":
                     # If is part of a "load_closure", then it is not part of a
                     # "list".
                     is_LOAD_CLOSURE = True
-                    for j in range(v):
+                    for j in range(collection_size):
                         if tokens[i - j - 1].kind != "LOAD_CLOSURE":
                             is_LOAD_CLOSURE = False
                             break
                     if is_LOAD_CLOSURE:
-                        rule = "load_closure ::= %s%s" % (("LOAD_CLOSURE " * v), opname)
+                        rule = "load_closure ::= %s%s" % (
+                            ("LOAD_CLOSURE " * collection_size),
+                            opname,
+                        )
                         self.add_unique_rule(rule, opname, token.attr, customize)
-                if not is_LOAD_CLOSURE or v == 0:
+                if not is_LOAD_CLOSURE or collection_size == 0:
                     # We do this complicated test to speed up parsing of
                     # pathelogically long literals, especially those over 1024.
-                    build_count = token.attr
-                    thousands = build_count // 1024
-                    thirty32s = (build_count // 32) % 32
+                    thousands = collection_size // 1024
+                    thirty32s = (collection_size // 32) % 32
                     if thirty32s > 0:
                         rule = "expr32 ::=%s" % (" expr" * 32)
-                        self.add_unique_rule(rule, opname_base, build_count, customize)
+                        self.add_unique_rule(
+                            rule, opname_base, collection_size, customize
+                        )
                         pass
                     if thousands > 0:
                         self.add_unique_rule(
                             "expr1024 ::=%s" % (" expr32" * 32),
                             opname_base,
-                            build_count,
+                            collection_size,
                             customize,
                         )
                         pass
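Note on the load_closure handling in the hunk above: when every operand of a BUILD_TUPLE is a LOAD_CLOSURE token, a load_closure grammar rule sized to that tuple is synthesized. A minimal sketch of the string it builds, assuming a hypothetical closure tuple of three cells (the value 3 and the name "BUILD_TUPLE_3" are illustrative, not taken from the diff):

    # Sketch: the rule generated for an assumed BUILD_TUPLE_3 whose three
    # operands are all LOAD_CLOSURE tokens.
    collection_size = 3
    opname = "BUILD_TUPLE_3"
    rule = "load_closure ::= %s%s" % (("LOAD_CLOSURE " * collection_size), opname)
    print(rule)
    # load_closure ::= LOAD_CLOSURE LOAD_CLOSURE LOAD_CLOSURE BUILD_TUPLE_3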
@@ -468,7 +472,7 @@ def customize_grammar_rules(self, tokens, customize):
                         ("%s ::= " % collection)
                         + "expr1024 " * thousands
                         + "expr32 " * thirty32s
-                        + "expr " * (build_count % 32)
+                        + "expr " * (collection_size % 32)
                         + opname
                     )
                     self.add_unique_rules(["expr ::= %s" % collection, rule], customize)
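The expr1024/expr32 grouping that this rule string relies on exists so that a huge literal does not produce a grammar rule with thousands of expr symbols. A worked sketch of the arithmetic, using an assumed collection of 2,100 elements (the size, opname, and collection name are hypothetical):

    # Sketch: how a large BUILD_LIST is batched into expr1024/expr32/expr groups.
    collection_size = 2100
    opname = "BUILD_LIST_2100"
    collection = "list"

    thousands = collection_size // 1024       # 2 groups of 1024
    thirty32s = (collection_size // 32) % 32  # 65 % 32 = 1 group of 32
    remainder = collection_size % 32          # 20 leftover single exprs

    rule = (
        ("%s ::= " % collection)
        + "expr1024 " * thousands
        + "expr32 " * thirty32s
        + "expr " * remainder
        + opname
    )
    print(rule)
    # The rule names 2 + 1 + 20 = 23 symbols before BUILD_LIST_2100,
    # instead of 2100 bare "expr" symbols.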
@@ -478,8 +482,8 @@ def customize_grammar_rules(self, tokens, customize):
                 if token.attr == 2:
                     self.add_unique_rules(
                         [
-                            "expr ::= build_slice2",
-                            "build_slice2 ::= expr expr BUILD_SLICE_2",
+                            "expr ::= slice2",
+                            "slice2 ::= expr expr BUILD_SLICE_2",
                         ],
                         customize,
                     )
@@ -489,8 +493,8 @@ def customize_grammar_rules(self, tokens, customize):
                     )
                     self.add_unique_rules(
                         [
-                            "expr ::= build_slice3",
-                            "build_slice3 ::= expr expr expr BUILD_SLICE_3",
+                            "expr ::= slice3",
+                            "slice3 ::= expr expr expr BUILD_SLICE_3",
                         ],
                         customize,
                     )
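For reference, the slice2 and slice3 rules renamed here describe the BUILD_SLICE opcode with an argument of 2 or 3. A quick, hedged way to see the bytecode shape they match, using CPython's dis module (output varies by interpreter version; CPython 3.12 and later emit BINARY_SLICE for the simple two-argument case instead):

    import dis

    # On CPython versions that compile subscript slices to BUILD_SLICE
    # (roughly 3.0 through 3.11):
    #   x[a:b]   -> two exprs, then BUILD_SLICE 2   ("slice2" above)
    #   x[a:b:c] -> three exprs, then BUILD_SLICE 3 ("slice3" above)
    dis.dis(compile("x[a:b]", "<example>", "eval"))
    dis.dis(compile("x[a:b:c]", "<example>", "eval"))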
@@ -524,6 +528,7 @@ def customize_grammar_rules(self, tokens, customize):

                 if opname == "CALL_FUNCTION" and token.attr == 1:
                     rule = """
+                      expr ::= dict_comp
                      dict_comp ::= LOAD_DICTCOMP LOAD_STR MAKE_FUNCTION_0 expr
                                    GET_ITER CALL_FUNCTION_1
                     classdefdeco1 ::= expr classdefdeco2 CALL_FUNCTION_1
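The new expr ::= dict_comp production lets a dictionary comprehension appear wherever an expression is expected; the dict_comp right-hand side follows the call sequence older CPythons emit for a comprehension. A hedged illustration with dis (LOAD_DICTCOMP and LOAD_STR are this parser's specializations of LOAD_CONST, so raw dis output shows the generic opcode names; the sequence below holds roughly for CPython 3.6 through 3.10):

    import dis

    # {k: v for k, v in items} compiles on those versions roughly to:
    #   LOAD_CONST <code object <dictcomp>>   (specialized as LOAD_DICTCOMP)
    #   LOAD_CONST '<dictcomp>'               (specialized as LOAD_STR)
    #   MAKE_FUNCTION 0
    #   LOAD_NAME items, GET_ITER             (the "expr" plus GET_ITER)
    #   CALL_FUNCTION 1
    dis.dis(compile("{k: v for k, v in items}", "<example>", "eval"))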
@@ -563,6 +568,7 @@ def customize_grammar_rules(self, tokens, customize):
                         + "expr " * nak
                         + opname
                     )
+
                     self.add_unique_rule(rule, opname, token.attr, customize)

                 elif opname == "CONTINUE":
@@ -1252,20 +1258,13 @@ def reduce_is_invalid(self, rule, ast, tokens, first, last):
         try:
             if fn:
                 return fn(self, lhs, n, rule, ast, tokens, first, last)
-        except:
+        except Exception:
             import sys, traceback

             print(
-                ("Exception in %s %s\n"
-                 + "rule: %s\n"
-                 + "offsets %s .. %s")
-                % (
-                    fn.__name__,
-                    sys.exc_info()[1],
-                    rule2str(rule),
-                    tokens[first].offset,
-                    tokens[last].offset,
-                )
+                f"Exception in {fn.__name__} {sys.exc_info()[1]}\n"
+                + f"rule: {rule2str(rule)}\n"
+                + f"offsets {tokens[first].offset} .. {tokens[last].offset}"
             )
             print(traceback.print_tb(sys.exc_info()[2], -1))
         raise ParserError(tokens[last], tokens[last].off2int(), self.debug["rules"])
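Narrowing the bare except: to except Exception: is more than style: a bare except also traps KeyboardInterrupt and SystemExit, which derive from BaseException and should normally propagate out of the parser. A minimal sketch of the difference:

    # Sketch: "except Exception" does not cover BaseException-only subclasses.
    try:
        raise KeyboardInterrupt
    except Exception:
        print("handled")             # not reached
    except BaseException:
        print("left to propagate")   # reached: KeyboardInterrupt lands here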