Skip to content

Commit ebcc12e

Browse files
committed
Misc lint things
1 parent 286bb59 commit ebcc12e

File tree

2 files changed

+20
-18
lines changed

2 files changed

+20
-18
lines changed

uncompyle6/parser.py

Lines changed: 17 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# Copyright (c) 2015-2022 Rocky Bernstein
1+
# Copyright (c) 2015-2023 Rocky Bernstein
22
# Copyright (c) 2005 by Dan Pascu <[email protected]>
33
# Copyright (c) 2000-2002 by hartmut Goebel <[email protected]>
44
# Copyright (c) 1999 John Aycock
@@ -44,8 +44,8 @@ def nop_func(self, args):
4444

4545

4646
class PythonParser(GenericASTBuilder):
47-
def __init__(self, SyntaxTree, start, debug):
48-
super(PythonParser, self).__init__(SyntaxTree, start, debug)
47+
def __init__(self, syntax_tree_class, start, debug):
48+
super(PythonParser, self).__init__(syntax_tree_class, start, debug)
4949
# FIXME: customize per python parser version
5050

5151
# These are the non-terminals we should collect into a list.
@@ -103,6 +103,7 @@ def __init__(self, SyntaxTree, start, debug):
103103
)
104104
# Instructions filled in from scanner
105105
self.insts = []
106+
self.version = tuple()
106107

107108
def ast_first_offset(self, ast):
108109
if hasattr(ast, "offset"):
@@ -151,9 +152,9 @@ def cleanup(self):
151152
Remove recursive references to allow garbage
152153
collector to collect this object.
153154
"""
154-
for dict in (self.rule2func, self.rules, self.rule2name):
155-
for i in list(dict.keys()):
156-
dict[i] = None
155+
for rule_dict in (self.rule2func, self.rules, self.rule2name):
156+
for i in list(rule_dict.keys()):
157+
rule_dict[i] = None
157158
for i in dir(self):
158159
setattr(self, i, None)
159160

@@ -164,11 +165,11 @@ def debug_reduce(self, rule, tokens, parent, last_token_pos):
164165

165166
def fix(c):
166167
s = str(c)
167-
last_token_pos = s.find("_")
168-
if last_token_pos == -1:
168+
token_pos = s.find("_")
169+
if token_pos == -1:
169170
return s
170171
else:
171-
return s[:last_token_pos]
172+
return s[:token_pos]
172173

173174
prefix = ""
174175
if parent and tokens:
@@ -267,13 +268,13 @@ def __ambiguity(self, children):
267268
print(children)
268269
return GenericASTBuilder.ambiguity(self, children)
269270

270-
def resolve(self, list):
271-
if len(list) == 2 and "function_def" in list and "assign" in list:
271+
def resolve(self, rule: list):
272+
if len(rule) == 2 and "function_def" in rule and "assign" in rule:
272273
return "function_def"
273-
if "grammar" in list and "expr" in list:
274+
if "grammar" in rule and "expr" in rule:
274275
return "expr"
275-
# print >> sys.stderr, 'resolve', str(list)
276-
return GenericASTBuilder.resolve(self, list)
276+
# print >> sys.stderr, 'resolve', str(rule)
277+
return GenericASTBuilder.resolve(self, rule)
277278

278279
###############################################
279280
# Common Python 2 and Python 3 grammar rules #
@@ -667,7 +668,7 @@ def get_python_parser(
667668
if compile_mode == "exec":
668669
p = parse10.Python10Parser(debug_parser)
669670
else:
670-
p = parse10.Python01ParserSingle(debug_parser)
671+
p = parse10.Python10ParserSingle(debug_parser)
671672
elif version == (1, 1):
672673
import uncompyle6.parsers.parse11 as parse11
673674

@@ -873,6 +874,7 @@ def python_parser(
873874
:param showasm: Flag which determines whether the disassembled and
874875
ingested code is written to sys.stdout or not.
875876
:param parser_debug: dict containing debug flags for the spark parser.
877+
:param is_pypy: True if we are running PyPy
876878
877879
:return: Abstract syntax tree representation of the code object.
878880
"""

uncompyle6/parsers/parse24.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# Copyright (c) 2016-2018, 2020, 2022 Rocky Bernstein
1+
# Copyright (c) 2016-2018, 2020, 2022-2023 Rocky Bernstein
22
"""
33
spark grammar differences over Python2.5 for Python 2.4.
44
"""
@@ -115,8 +115,8 @@ def reduce_is_invalid(self, rule, ast, tokens, first, last):
115115

116116
lhs = rule[0]
117117
if lhs == "nop_stmt":
118-
l = len(tokens)
119-
if 0 <= l < len(tokens):
118+
token_len = len(tokens)
119+
if 0 <= token_len < len(tokens):
120120
return not int(tokens[first].pattr) == tokens[last].offset
121121
elif lhs == "try_except":
122122
if last == len(tokens):

0 commit comments

Comments (0)