Skip to content

Commit 41d1ba3

Browse files
committed
Synch with decompyle3 code a little bit
1 parent e03f4cf commit 41d1ba3

File tree

1 file changed

+24
-25
lines changed

1 file changed

+24
-25
lines changed

uncompyle6/parsers/parse37base.py

Lines changed: 24 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -431,35 +431,39 @@ def customize_grammar_rules(self, tokens, customize):
431431
"BUILD_TUPLE",
432432
"BUILD_TUPLE_UNPACK",
433433
):
434-
v = token.attr
434+
collection_size = token.attr
435435

436436
is_LOAD_CLOSURE = False
437437
if opname_base == "BUILD_TUPLE":
438438
# If is part of a "load_closure", then it is not part of a
439439
# "list".
440440
is_LOAD_CLOSURE = True
441-
for j in range(v):
441+
for j in range(collection_size):
442442
if tokens[i - j - 1].kind != "LOAD_CLOSURE":
443443
is_LOAD_CLOSURE = False
444444
break
445445
if is_LOAD_CLOSURE:
446-
rule = "load_closure ::= %s%s" % (("LOAD_CLOSURE " * v), opname)
446+
rule = "load_closure ::= %s%s" % (
447+
("LOAD_CLOSURE " * collection_size),
448+
opname,
449+
)
447450
self.add_unique_rule(rule, opname, token.attr, customize)
448-
if not is_LOAD_CLOSURE or v == 0:
451+
if not is_LOAD_CLOSURE or collection_size == 0:
449452
# We do this complicated test to speed up parsing of
450453
# pathologically long literals, especially those over 1024.
451-
build_count = token.attr
452-
thousands = build_count // 1024
453-
thirty32s = (build_count // 32) % 32
454+
thousands = collection_size // 1024
455+
thirty32s = (collection_size // 32) % 32
454456
if thirty32s > 0:
455457
rule = "expr32 ::=%s" % (" expr" * 32)
456-
self.add_unique_rule(rule, opname_base, build_count, customize)
458+
self.add_unique_rule(
459+
rule, opname_base, collection_size, customize
460+
)
457461
pass
458462
if thousands > 0:
459463
self.add_unique_rule(
460464
"expr1024 ::=%s" % (" expr32" * 32),
461465
opname_base,
462-
build_count,
466+
collection_size,
463467
customize,
464468
)
465469
pass
@@ -468,7 +472,7 @@ def customize_grammar_rules(self, tokens, customize):
468472
("%s ::= " % collection)
469473
+ "expr1024 " * thousands
470474
+ "expr32 " * thirty32s
471-
+ "expr " * (build_count % 32)
475+
+ "expr " * (collection_size % 32)
472476
+ opname
473477
)
474478
self.add_unique_rules(["expr ::= %s" % collection, rule], customize)
@@ -478,8 +482,8 @@ def customize_grammar_rules(self, tokens, customize):
478482
if token.attr == 2:
479483
self.add_unique_rules(
480484
[
481-
"expr ::= build_slice2",
482-
"build_slice2 ::= expr expr BUILD_SLICE_2",
485+
"expr ::= slice2",
486+
"slice2 ::= expr expr BUILD_SLICE_2",
483487
],
484488
customize,
485489
)
@@ -489,8 +493,8 @@ def customize_grammar_rules(self, tokens, customize):
489493
)
490494
self.add_unique_rules(
491495
[
492-
"expr ::= build_slice3",
493-
"build_slice3 ::= expr expr expr BUILD_SLICE_3",
496+
"expr ::= slice3",
497+
"slice3 ::= expr expr expr BUILD_SLICE_3",
494498
],
495499
customize,
496500
)
@@ -524,6 +528,7 @@ def customize_grammar_rules(self, tokens, customize):
524528

525529
if opname == "CALL_FUNCTION" and token.attr == 1:
526530
rule = """
531+
expr ::= dict_comp
527532
dict_comp ::= LOAD_DICTCOMP LOAD_STR MAKE_FUNCTION_0 expr
528533
GET_ITER CALL_FUNCTION_1
529534
classdefdeco1 ::= expr classdefdeco2 CALL_FUNCTION_1
@@ -563,6 +568,7 @@ def customize_grammar_rules(self, tokens, customize):
563568
+ "expr " * nak
564569
+ opname
565570
)
571+
566572
self.add_unique_rule(rule, opname, token.attr, customize)
567573

568574
elif opname == "CONTINUE":
@@ -1252,20 +1258,13 @@ def reduce_is_invalid(self, rule, ast, tokens, first, last):
12521258
try:
12531259
if fn:
12541260
return fn(self, lhs, n, rule, ast, tokens, first, last)
1255-
except:
1261+
except Exception:
12561262
import sys, traceback
12571263

12581264
print(
1259-
("Exception in %s %s\n"
1260-
+ "rule: %s\n"
1261-
+ "offsets %s .. %s")
1262-
% (
1263-
fn.__name__,
1264-
sys.exc_info()[1],
1265-
rule2str(rule),
1266-
tokens[first].offset,
1267-
tokens[last].offset,
1268-
)
1265+
f"Exception in {fn.__name__} {sys.exc_info()[1]}\n"
1266+
+ f"rule: {rule2str(rule)}\n"
1267+
+ f"offsets {tokens[first].offset} .. {tokens[last].offset}"
12691268
)
12701269
print(traceback.print_tb(sys.exc_info()[2], -1))
12711270
raise ParserError(tokens[last], tokens[last].off2int(), self.debug["rules"])

0 commit comments

Comments
 (0)