@@ -107,14 +107,14 @@ def make_h(infile, outfile='Include/internal/pycore_token.h'):
 
     defines = []
     for value, name in enumerate(tok_names[:ERRORTOKEN + 1]):
-        defines.append("#define %-15s %d\n" % (name, value))  # noqa: UP031
+        defines.append("#define %-15s %d\n" % (name, value))
 
     if update_file(outfile, token_h_template % (
             ''.join(defines),
             len(tok_names),
             NT_OFFSET
         )):
-        print(f"{outfile} regenerated from {infile}")
+        print("%s regenerated from %s" % (outfile, infile))
 
 
 token_c_template = f"""\
@@ -160,17 +160,17 @@ def generate_chars_to_token(mapping, n=1):
     write = result.append
     indent = '    ' * n
     write(indent)
-    write('switch (c%d) {\n' % (n,))  # noqa: UP031
+    write('switch (c%d) {\n' % (n,))
     for c in sorted(mapping):
         write(indent)
         value = mapping[c]
         if isinstance(value, dict):
-            write("case '%s':\n" % (c,))  # noqa: UP031
+            write("case '%s':\n" % (c,))
             write(generate_chars_to_token(value, n + 1))
             write(indent)
             write('    break;\n')
         else:
-            write("case '%s': return %s;\n" % (c, value))  # noqa: UP031
+            write("case '%s': return %s;\n" % (c, value))
     write(indent)
     write('}\n')
     return ''.join(result)
@@ -190,8 +190,8 @@ def make_c(infile, outfile='Parser/token.c'):
     names = []
     for value, name in enumerate(tok_names):
         if value >= ERRORTOKEN:
-            name = '<%s>' % name  # noqa: UP031
-        names.append('    "%s",\n' % name)  # noqa: UP031
+            name = '<%s>' % name
+        names.append('    "%s",\n' % name)
     names.append('    "<N_TOKENS>",\n')
 
     if update_file(outfile, token_c_template % (
@@ -200,7 +200,7 @@ def make_c(infile, outfile='Parser/token.c'):
             generate_chars_to_token(chars_to_token[2]),
             generate_chars_to_token(chars_to_token[3])
         )):
-        print(f"{outfile} regenerated from {infile}")
+        print("%s regenerated from %s" % (outfile, infile))
 
 
 token_inc_template = f"""\
@@ -252,7 +252,7 @@ def make_rst(infile, outfile='Doc/library/token-list.inc',
         exit('\n'.join(message_parts))
 
     if update_file(outfile, token_inc_template % '\n'.join(names)):
-        print(f"{outfile} regenerated from {infile}")
+        print("%s regenerated from %s" % (outfile, infile))
 
 
 token_py_template = f'''\
@@ -292,21 +292,21 @@ def make_py(infile, outfile='Lib/token.py'):
 
     constants = []
     for value, name in enumerate(tok_names):
-        constants.append('%s = %d' % (name, value))  # noqa: UP031
+        constants.append('%s = %d' % (name, value))
     constants.insert(ERRORTOKEN,
                      "# These aren't used by the C tokenizer but are needed for tokenize.py")
 
     token_types = []
     for s, value in sorted(string_to_tok.items()):
-        token_types.append('    %r: %s,' % (s, tok_names[value]))  # noqa: UP031
+        token_types.append('    %r: %s,' % (s, tok_names[value]))
 
     if update_file(outfile, token_py_template % (
             '\n'.join(constants),
             len(tok_names),
             NT_OFFSET,
             '\n'.join(token_types),
         )):
-        print(f"{outfile} regenerated from {infile}")
+        print("%s regenerated from %s" % (outfile, infile))
 
 
 def main(op, infile='Grammar/Tokens', *args):