Skip to content

Commit 99a41ef

Browse files
committed
ignore UP031 for Tools/build/generate_token.py
1 parent eae5215 commit 99a41ef

File tree

2 files changed

+16
-13
lines changed

2 files changed

+16
-13
lines changed

Tools/build/.ruff.toml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,9 @@ ignore = [
2828
"stable_abi.py" = "py311" # requires 'tomllib'
2929

3030
[lint.per-file-ignores]
31-
"{check_extension_modules,freeze_modules,generate_re_casefix,generate_sre_constants}.py" = [
31+
"{check_extension_modules,freeze_modules}.py" = [
32+
"UP031", # Use format specifiers instead of percent format
33+
]
34+
"generate_{re_casefix,sre_constants,token}.py" = [
3235
"UP031", # Use format specifiers instead of percent format
3336
]

Tools/build/generate_token.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -107,14 +107,14 @@ def make_h(infile, outfile='Include/internal/pycore_token.h'):
107107

108108
defines = []
109109
for value, name in enumerate(tok_names[:ERRORTOKEN + 1]):
110-
defines.append("#define %-15s %d\n" % (name, value)) # noqa: UP031
110+
defines.append("#define %-15s %d\n" % (name, value))
111111

112112
if update_file(outfile, token_h_template % (
113113
''.join(defines),
114114
len(tok_names),
115115
NT_OFFSET
116116
)):
117-
print(f"{outfile} regenerated from {infile}")
117+
print("%s regenerated from %s" % (outfile, infile))
118118

119119

120120
token_c_template = f"""\
@@ -160,17 +160,17 @@ def generate_chars_to_token(mapping, n=1):
160160
write = result.append
161161
indent = ' ' * n
162162
write(indent)
163-
write('switch (c%d) {\n' % (n,)) # noqa: UP031
163+
write('switch (c%d) {\n' % (n,))
164164
for c in sorted(mapping):
165165
write(indent)
166166
value = mapping[c]
167167
if isinstance(value, dict):
168-
write("case '%s':\n" % (c,)) # noqa: UP031
168+
write("case '%s':\n" % (c,))
169169
write(generate_chars_to_token(value, n + 1))
170170
write(indent)
171171
write(' break;\n')
172172
else:
173-
write("case '%s': return %s;\n" % (c, value)) # noqa: UP031
173+
write("case '%s': return %s;\n" % (c, value))
174174
write(indent)
175175
write('}\n')
176176
return ''.join(result)
@@ -190,8 +190,8 @@ def make_c(infile, outfile='Parser/token.c'):
190190
names = []
191191
for value, name in enumerate(tok_names):
192192
if value >= ERRORTOKEN:
193-
name = '<%s>' % name # noqa: UP031
194-
names.append(' "%s",\n' % name) # noqa: UP031
193+
name = '<%s>' % name
194+
names.append(' "%s",\n' % name)
195195
names.append(' "<N_TOKENS>",\n')
196196

197197
if update_file(outfile, token_c_template % (
@@ -200,7 +200,7 @@ def make_c(infile, outfile='Parser/token.c'):
200200
generate_chars_to_token(chars_to_token[2]),
201201
generate_chars_to_token(chars_to_token[3])
202202
)):
203-
print(f"{outfile} regenerated from {infile}")
203+
print("%s regenerated from %s" % (outfile, infile))
204204

205205

206206
token_inc_template = f"""\
@@ -252,7 +252,7 @@ def make_rst(infile, outfile='Doc/library/token-list.inc',
252252
exit('\n'.join(message_parts))
253253

254254
if update_file(outfile, token_inc_template % '\n'.join(names)):
255-
print(f"{outfile} regenerated from {infile}")
255+
print("%s regenerated from %s" % (outfile, infile))
256256

257257

258258
token_py_template = f'''\
@@ -292,21 +292,21 @@ def make_py(infile, outfile='Lib/token.py'):
292292

293293
constants = []
294294
for value, name in enumerate(tok_names):
295-
constants.append('%s = %d' % (name, value)) # noqa: UP031
295+
constants.append('%s = %d' % (name, value))
296296
constants.insert(ERRORTOKEN,
297297
"# These aren't used by the C tokenizer but are needed for tokenize.py")
298298

299299
token_types = []
300300
for s, value in sorted(string_to_tok.items()):
301-
token_types.append(' %r: %s,' % (s, tok_names[value])) # noqa: UP031
301+
token_types.append(' %r: %s,' % (s, tok_names[value]))
302302

303303
if update_file(outfile, token_py_template % (
304304
'\n'.join(constants),
305305
len(tok_names),
306306
NT_OFFSET,
307307
'\n'.join(token_types),
308308
)):
309-
print(f"{outfile} regenerated from {infile}")
309+
print("%s regenerated from %s" % (outfile, infile))
310310

311311

312312
def main(op, infile='Grammar/Tokens', *args):

0 commit comments

Comments (0)