Commit 6a33114

Revert changes in Python-tokenize.c, fix in lexer.c
1 parent: b6e538d

3 files changed, +6 -7 lines changed

Lib/test/test_tokenize.py

Lines changed: 1 addition & 2 deletions
@@ -3183,8 +3183,7 @@ def get_tokens(string):
             f'__{
                 x:d
             }__'""",
-            "def f():\n if x\n\x00"
-            "class C:\n a\n\x00",
+            " a\n\x00",
         ]:
             with self.subTest(case=case):
                 self.assertRaises(tokenize.TokenError, get_tokens, case)

Parser/lexer/lexer.c

Lines changed: 3 additions & 0 deletions
@@ -539,6 +539,9 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
                 return MAKE_TOKEN(ERRORTOKEN);
             }
         }
+        else if (c == EOF && PyErr_Occurred()) {
+            return MAKE_TOKEN(ERRORTOKEN);
+        }
         else {
             break;
         }
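
The added branch covers the case where the lexer's reader reports EOF while a Python exception is already pending, so the tokenizer emits an error token instead of treating it as an ordinary end of input. Below is a minimal sketch of that "EOF plus PyErr_Occurred()" pattern using only the public C API; read_one_char and the sample input are invented for illustration and are not CPython's lexer.

    /* Hedged sketch, not CPython internals: a reader that signals failure by
     * returning EOF with a Python exception set, and a caller that tells that
     * apart from a genuine end of input by checking PyErr_Occurred(). */
    #include <Python.h>
    #include <stdio.h>

    /* Hypothetical reader: returns the next byte, EOF at end of input,
     * or EOF with an exception set when it hits an embedded NUL byte. */
    static int
    read_one_char(const char *buf, size_t len, size_t *pos)
    {
        if (*pos >= len) {
            return EOF;                      /* genuine end of input */
        }
        char c = buf[(*pos)++];
        if (c == '\0') {
            PyErr_SetString(PyExc_SyntaxError,
                            "source code cannot contain null bytes");
            return EOF;                      /* failure reported as EOF */
        }
        return (unsigned char)c;
    }

    int
    main(void)
    {
        Py_Initialize();

        const char src[] = " a\n\x00";       /* mirrors the test case above */
        size_t pos = 0;
        int c;

        while ((c = read_one_char(src, sizeof(src) - 1, &pos)) != EOF) {
            printf("got character %d\n", c);
        }
        if (c == EOF && PyErr_Occurred()) {  /* the check added in lexer.c */
            PyErr_Print();                   /* would become an error token */
        }

        Py_Finalize();
        return 0;
    }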

Python/Python-tokenize.c

Lines changed: 2 additions & 5 deletions
@@ -251,7 +251,7 @@ tokenizeriter_next(PyObject *op)
 
     int type = _PyTokenizer_Get(it->tok, &token);
     if (type == ERRORTOKEN) {
-        if (!PyErr_Occurred()) {
+        if(!PyErr_Occurred()) {
             _tokenizer_error(it);
             assert(PyErr_Occurred());
         }
@@ -337,10 +337,7 @@ tokenizeriter_next(PyObject *op)
         }
     }
 
-    if (!PyErr_Occurred()) {
-        result = Py_BuildValue("(iN(nn)(nn)O)", type, str, lineno, col_offset, end_lineno, end_col_offset, line);
-    }
-
+    result = Py_BuildValue("(iN(nn)(nn)O)", type, str, lineno, col_offset, end_lineno, end_col_offset, line);
 exit:
     _PyToken_Free(&token);
     if (type == ENDMARKER) {
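
With the error now reported by the lexer itself, the extra PyErr_Occurred() guard around Py_BuildValue is reverted and the result tuple is built unconditionally again; Py_BuildValue returns NULL on failure in any case. As a reminder of what the "(iN(nn)(nn)O)" format packs, here is a self-contained sketch under assumed names (build_token_tuple and the literal values are invented): i takes a C int, N takes a PyObject * whose reference the tuple consumes, n takes a Py_ssize_t, and O takes a PyObject * and adds its own reference.

    /* Hedged sketch, not CPython's tokenizer: building a tuple with the same
     * shape as the result above: (type, str, (start), (end), line). */
    #include <Python.h>
    #include <stdio.h>

    static PyObject *
    build_token_tuple(int type, PyObject *str,
                      Py_ssize_t lineno, Py_ssize_t col_offset,
                      Py_ssize_t end_lineno, Py_ssize_t end_col_offset,
                      PyObject *line)
    {
        /* "N" hands ownership of str to the tuple; "O" keeps line shared. */
        return Py_BuildValue("(iN(nn)(nn)O)", type, str,
                             lineno, col_offset,
                             end_lineno, end_col_offset, line);
    }

    int
    main(void)
    {
        Py_Initialize();

        PyObject *str = PyUnicode_FromString("pass");     /* handed over via "N" */
        PyObject *line = PyUnicode_FromString("pass\n");  /* shared via "O" */
        if (str == NULL || line == NULL) {
            Py_XDECREF(str);
            Py_XDECREF(line);
            PyErr_Print();
            Py_Finalize();
            return 1;
        }

        PyObject *tok = build_token_tuple(1, str, 1, 0, 1, 4, line);
        if (tok != NULL) {
            PyObject_Print(tok, stdout, 0);  /* (1, 'pass', (1, 0), (1, 4), 'pass\n') */
            putchar('\n');
            Py_DECREF(tok);                  /* also releases the reference taken via "N" */
        }
        else {
            PyErr_Print();                   /* Py_BuildValue failed */
        }
        Py_DECREF(line);                     /* our own reference; the tuple held another */

        Py_Finalize();
        return 0;
    }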
