
Commit b7bc977

[3.13] gh-139516: Fix lambda colon start format spec in f-string in tokenizer (GH-139657) (#139726)
(cherry picked from commit 539461d)

1 parent 5074feb commit b7bc977

File tree: 5 files changed, +28 -1 lines changed
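
Context for the diffs below: the lexer sets an internal f_string_debug flag when it sees '=' inside an f-string replacement field (the flag backs the self-documenting f"{expr=}" syntax). Before this change the flag was also set for a keyword-argument '=' nested inside a call, and a lambda colon that followed could then be misread as the start of a format spec (gh-139516). A minimal reproducer, adapted from the regression test added to Lib/test/test_fstring.py below; the non-ASCII character and the newline before the closing parenthesis mirror the test case:

# Rejected with a syntax error before this fix (per gh-139516); runs cleanly afterwards.
def f(a):
    pass

f"{f(a=lambda: 'à'
)}"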

Lib/test/test_fstring.py
Lines changed: 7 additions & 0 deletions

@@ -1849,6 +1849,13 @@ def __format__(self, format):
         # Test multiple format specs in same raw f-string
         self.assertEqual(rf"{UnchangedFormat():\xFF} {UnchangedFormat():\n}", '\\xFF \\n')
 
+    def test_gh139516(self):
+        with temp_cwd():
+            script = 'script.py'
+            with open(script, 'wb') as f:
+                f.write('''def f(a): pass\nf"{f(a=lambda: 'à'\n)}"'''.encode())
+            assert_python_ok(script)
+
 
 if __name__ == '__main__':
     unittest.main()

Lib/test/test_tokenize.py
Lines changed: 17 additions & 0 deletions

@@ -1212,6 +1212,23 @@ def test_multiline_non_ascii_fstring_with_expr(self):
     FSTRING_END "\'\'\'" (3, 1) (3, 4)
     """)
 
+        # gh-139516, the '\n' is explicit to ensure no trailing whitespace which would invalidate the test
+        self.check_tokenize('''f"{f(a=lambda: 'à'\n)}"''', """\
+    FSTRING_START \'f"\' (1, 0) (1, 2)
+    OP '{' (1, 2) (1, 3)
+    NAME 'f' (1, 3) (1, 4)
+    OP '(' (1, 4) (1, 5)
+    NAME 'a' (1, 5) (1, 6)
+    OP '=' (1, 6) (1, 7)
+    NAME 'lambda' (1, 7) (1, 13)
+    OP ':' (1, 13) (1, 14)
+    STRING "\'à\'" (1, 15) (1, 18)
+    NL '\\n' (1, 18) (1, 19)
+    OP ')' (2, 0) (2, 1)
+    OP '}' (2, 1) (2, 2)
+    FSTRING_END \'"\' (2, 2) (2, 3)
+    """)
+
 class GenerateTokensTest(TokenizeTest):
     def check_tokenize(self, s, expected):
         # Format the tokens in s in a table format.
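
The token stream expected above can be reproduced with the tokenize module, which in current CPython is backed by the same underlying tokenizer; a quick sketch (tokenize also appends trailing NEWLINE and ENDMARKER tokens that the expected output above does not list):

import io
import tokenize

# Same snippet as the test; the '\n' escape keeps the closing ')' on line 2.
src = '''f"{f(a=lambda: 'à'\n)}"'''
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string), tok.start, tok.end)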
NEWS entry (file name not shown in this view)
Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+Fix lambda colon erroneously start format spec in f-string in tokenizer.

Parser/lexer/lexer.c
Lines changed: 1 addition & 1 deletion

@@ -1291,7 +1291,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
         return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok, "invalid non-printable character U+%04X", c));
     }
 
-    if( c == '=' && INSIDE_FSTRING_EXPR(current_tok)) {
+    if( c == '=' && INSIDE_FSTRING_EXPR_AT_TOP(current_tok)) {
         current_tok->f_string_debug = 1;
     }
 
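
This one-line change is the functional fix: the debug flag that powers f"{expr=}" is now set only when the '=' sits at the top nesting level of the replacement field, so a keyword-argument '=' inside a nested call no longer flips it. At the Python level, the distinction the flag tracks looks like this (an illustrative sketch, not code from the commit):

def f(a):
    return a

x = 1
print(f"{x=}")       # top-level '=': debug syntax, prints x=1
print(f"{f(a=1)}")   # '=' is a keyword argument inside '()': prints 1, no debug text
print(f"{x=:>4}")    # debug syntax followed by a format spec: prints x=   1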

Parser/lexer/state.h
Lines changed: 2 additions & 0 deletions

@@ -10,6 +10,8 @@
 
 #define INSIDE_FSTRING(tok) (tok->tok_mode_stack_index > 0)
 #define INSIDE_FSTRING_EXPR(tok) (tok->curly_bracket_expr_start_depth >= 0)
+#define INSIDE_FSTRING_EXPR_AT_TOP(tok) \
+    (tok->curly_bracket_depth - tok->curly_bracket_expr_start_depth == 1)
 
 enum decoding_state {
     STATE_INIT,
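
The new macro spells out "at the top of the replacement field" as a bracket-depth difference of exactly one: the mode records the depth at which the field's '{' was opened, and any further opening bracket, such as the '(' of the nested call in the test case, pushes the current depth past that (assuming the mode's bracket counters behave as their names suggest). The same notion of "top level" determines whether a colon begins a format spec, which is visible from plain Python (an illustrative sketch):

value = 255
print(f"{value:#06x}")              # top-level colon starts a format spec: prints 0x00ff
print(f"{(lambda: value)():#06x}")  # the lambda colon is nested inside '()', so only the
                                    # final top-level colon is treated as the spec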
