
Commit 9d02634

appease the typechecker
1 parent d3ebf5d commit 9d02634

File tree

1 file changed: +10 -6 lines changed


mypy/fastparse.py

Lines changed: 10 additions & 6 deletions
@@ -151,19 +151,23 @@ def ast3_parse(
     # Note that the functions in this module are only designed to parse syntactically valid Python code (code that does not raise when parsed using ast.parse()). The behavior of the functions in this module is **undefined** when providing invalid Python code and it can change at any point.
     # So, we cannot rely on roundtrip behavior in tokenize iff ast.parse would throw when given `source`.
     # The simplest way to deal with that is just to call ast.parse twice, once before and once after. So, we do that.
-    p = lambda: ast3.parse(
-        source, filename, mode, type_comments=True, feature_version=feature_version
-    )
+    def p() -> AST:
+        return ast3.parse(
+            source, filename, mode, type_comments=True, feature_version=feature_version
+        )
     p()  # Call to assure syntactic validity (will throw an exception otherwise, exiting this function).
     if isinstance(source, str):
         tokens = tokenize.generate_tokens(io.StringIO(source).readline)
         to_find, to_replace = r"#\s*mypy:\s*ignore(?![-_])", "# type: ignore"
+        source = tokenize.untokenize(
+            (t, re.sub(to_find, to_replace, s) if t == tokenize.COMMENT else s) for t, s, *_ in tokens
+        )
     else:
         tokens = tokenize.tokenize(io.BytesIO(source).readline)
         to_find, to_replace = rb"#\s*mypy:\s*ignore(?![-_])", b"# type: ignore"
-    source = tokenize.untokenize(
-        (t, re.sub(to_find, to_replace, s) if t == tokenize.COMMENT else s) for t, s, *_ in tokens
-    )
+        source = tokenize.untokenize(
+            (t, re.sub(to_find, to_replace, s) if t == tokenize.COMMENT else s) for t, s, *_ in tokens
+        )
     return p()
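
In effect, the patch swaps the lambda for an annotated def p() -> AST and duplicates the tokenize.untokenize(...) call into each branch of the isinstance check, so the str path and the bytes path no longer share a single call site. The sketch below is a standalone approximation of the str path, not the project's code: it uses the stdlib ast module rather than the ast3 alias, and the name rewrite_ignore_comments is made up for illustration. It shows the same round-trip: parse once to reject invalid input, rewrite "# mypy: ignore" comments on COMMENT tokens, then parse the rewritten source.

import ast
import io
import re
import tokenize


def rewrite_ignore_comments(source: str) -> ast.AST:
    # Standalone sketch with illustrative names; not mypy's code.
    # Parse once up front: invalid input raises here, mirroring the p() "syntactic validity" call.
    ast.parse(source)
    tokens = tokenize.generate_tokens(io.StringIO(source).readline)
    to_find, to_replace = r"#\s*mypy:\s*ignore(?![-_])", "# type: ignore"
    # Rewrite only COMMENT tokens; every other token is passed through unchanged.
    source = tokenize.untokenize(
        (tok, re.sub(to_find, to_replace, s) if tok == tokenize.COMMENT else s)
        for tok, s, *_ in tokens
    )
    # Reparse the rewritten text; type_comments=True keeps "# type: ..." comments visible to the AST.
    return ast.parse(source, type_comments=True)


# Example: the "# mypy: ignore" comment comes back as a "# type: ignore" entry in the reparsed module.
tree = rewrite_ignore_comments("x = []  # mypy: ignore\n")
print(ast.dump(tree))

Duplicating the untokenize call into both branches is presumably what appeases the checker: inside each branch, tokens, to_find, and to_replace are consistently str-typed or bytes-typed, whereas the old shared call saw them as unions that re.sub's overloads cannot reconcile. The annotated def also gives p an explicit AST return type, where the lambda's type had to be inferred.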