@@ -268,7 +268,7 @@ def new_line(self, tokens: TokenWrapper, line_end: int, line_start: int) -> None
     def process_module(self, node: nodes.Module) -> None:
         pass

-    # pylint: disable-next=too-many-return-statements, too-many-branches
+    # pylint: disable-next=too-many-return-statements
     def _check_keyword_parentheses(
         self, tokens: list[tokenize.TokenInfo], start: int
     ) -> None:
@@ -347,30 +347,31 @@ def _check_keyword_parentheses(
                         )
                     return
             elif depth == 1:
-                # This is a tuple, which is always acceptable.
-                if token[1] == ",":
-                    return
-                # 'and' and 'or' are the only boolean operators with lower precedence
-                # than 'not', so parens are only required when they are found.
-                if token[1] in {"and", "or"}:
-                    found_and_or = True
-                # A yield inside an expression must always be in parentheses,
-                # quit early without error.
-                elif token[1] == "yield":
-                    return
-                # A generator expression always has a 'for' token in it, and
-                # the 'for' token is only legal inside parens when it is in a
-                # generator expression. The parens are necessary here, so bail
-                # without an error.
-                elif token[1] == "for":
-                    return
-                # A generator expression can have an 'else' token in it.
-                # We check the rest of the tokens to see if any problems occur after
-                # the 'else'.
-                elif token[1] == "else":
-                    if "(" in (i.string for i in tokens[i:]):
-                        self._check_keyword_parentheses(tokens[i:], 0)
-                    return
+                match token[1]:
+                    case ",":
+                        # This is a tuple, which is always acceptable.
+                        return
+                    case "and" | "or":
+                        # 'and' and 'or' are the only boolean operators with lower precedence
+                        # than 'not', so parens are only required when they are found.
+                        found_and_or = True
+                    case "yield":
+                        # A yield inside an expression must always be in parentheses,
+                        # quit early without error.
+                        return
+                    case "for":
+                        # A generator expression always has a 'for' token in it, and
+                        # the 'for' token is only legal inside parens when it is in a
+                        # generator expression. The parens are necessary here, so bail
+                        # without an error.
+                        return
+                    case "else":
+                        # A generator expression can have an 'else' token in it.
+                        # We check the rest of the tokens to see if any problems occur after
+                        # the 'else'.
+                        if "(" in (i.string for i in tokens[i:]):
+                            self._check_keyword_parentheses(tokens[i:], 0)
+                        return

     def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
         """Process tokens and search for:
@@ -397,39 +398,42 @@ def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
                 else:
                     self.new_line(TokenWrapper(tokens), idx - 1, idx)

-            if tok_type == tokenize.NEWLINE:
-                # a program statement, or ENDMARKER, will eventually follow,
-                # after some (possibly empty) run of tokens of the form
-                #     (NL | COMMENT)* (INDENT | DEDENT+)?
-                # If an INDENT appears, setting check_equal is wrong, and will
-                # be undone when we see the INDENT.
-                check_equal = True
-                self._check_line_ending(string, line_num)
-            elif tok_type == tokenize.INDENT:
-                check_equal = False
-                self.check_indent_level(string, indents[-1] + 1, line_num)
-                indents.append(indents[-1] + 1)
-            elif tok_type == tokenize.DEDENT:
-                # there's nothing we need to check here! what's important is
-                # that when the run of DEDENTs ends, the indentation of the
-                # program statement (or ENDMARKER) that triggered the run is
-                # equal to what's left at the top of the indents stack
-                check_equal = True
-                if len(indents) > 1:
-                    del indents[-1]
-            elif tok_type == tokenize.NL:
-                if not line.strip("\r\n"):
-                    last_blank_line_num = line_num
-            elif tok_type not in (tokenize.COMMENT, tokenize.ENCODING):
-                # This is the first concrete token following a NEWLINE, so it
-                # must be the first token of the next program statement, or an
-                # ENDMARKER; the "line" argument exposes the leading white-space
-                # for this statement; in the case of ENDMARKER, line is an empty
-                # string, so will properly match the empty string with which the
-                # "indents" stack was seeded
-                if check_equal:
+            match tok_type:
+                case tokenize.NEWLINE:
+                    # a program statement, or ENDMARKER, will eventually follow,
+                    # after some (possibly empty) run of tokens of the form
+                    #     (NL | COMMENT)* (INDENT | DEDENT+)?
+                    # If an INDENT appears, setting check_equal is wrong, and will
+                    # be undone when we see the INDENT.
+                    check_equal = True
+                    self._check_line_ending(string, line_num)
+                case tokenize.INDENT:
                     check_equal = False
-                    self.check_indent_level(line, indents[-1], line_num)
+                    self.check_indent_level(string, indents[-1] + 1, line_num)
+                    indents.append(indents[-1] + 1)
+                case tokenize.DEDENT:
+                    # there's nothing we need to check here! what's important is
+                    # that when the run of DEDENTs ends, the indentation of the
+                    # program statement (or ENDMARKER) that triggered the run is
+                    # equal to what's left at the top of the indents stack
+                    check_equal = True
+                    if len(indents) > 1:
+                        del indents[-1]
+                case tokenize.NL:
+                    if not line.strip("\r\n"):
+                        last_blank_line_num = line_num
+                case tokenize.COMMENT | tokenize.ENCODING:
+                    pass
+                case _:
+                    # This is the first concrete token following a NEWLINE, so it
+                    # must be the first token of the next program statement, or an
+                    # ENDMARKER; the "line" argument exposes the leading white-space
+                    # for this statement; in the case of ENDMARKER, line is an empty
+                    # string, so will properly match the empty string with which the
+                    # "indents" stack was seeded
+                    if check_equal:
+                        check_equal = False
+                        self.check_indent_level(line, indents[-1], line_num)

             if tok_type == tokenize.NUMBER and string.endswith("l"):
                 self.add_message("lowercase-l-suffix", line=line_num)
@@ -546,30 +550,26 @@ def _infer_else_finally_line_number(

     def _check_multi_statement_line(self, node: nodes.NodeNG, line: int) -> None:
         """Check for lines containing multiple statements."""
-        if isinstance(node, nodes.With):
-            # Do not warn about multiple nested context managers in with statements.
-            return
-        if (
-            isinstance(node.parent, nodes.If)
-            and not node.parent.orelse
-            and self.linter.config.single_line_if_stmt
-        ):
-            return
-        if (
-            isinstance(node.parent, nodes.ClassDef)
-            and len(node.parent.body) == 1
-            and self.linter.config.single_line_class_stmt
-        ):
-            return
-
-        # Functions stubs and class with ``Ellipsis`` as body are exempted.
-        if (
-            isinstance(node, nodes.Expr)
-            and isinstance(node.parent, (nodes.FunctionDef, nodes.ClassDef))
-            and isinstance(node.value, nodes.Const)
-            and node.value.value is Ellipsis
-        ):
-            return
+        match node:
+            case nodes.With():
+                # Do not warn about multiple nested context managers in with statements.
+                return
+            case nodes.NodeNG(
+                parent=nodes.If(orelse=[])
+            ) if self.linter.config.single_line_if_stmt:
+                return
+            case nodes.NodeNG(
+                parent=nodes.ClassDef(body=[_])
+            ) if self.linter.config.single_line_class_stmt:
+                return
+            case nodes.Expr(
+                parent=nodes.FunctionDef() | nodes.ClassDef(),
+                value=nodes.Const(value=value),
+            ) if (
+                value is Ellipsis
+            ):
+                # Functions stubs and class with ``Ellipsis`` as body are exempted.
+                return

         self.add_message("multiple-statements", node=node, confidence=HIGH)
         self._visited_lines[line] = 2
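The last hunk relies on class patterns with keyword sub-patterns, captures, and guards. A minimal sketch of those mechanics follows; the If, ClassDef, and Stmt dataclasses and the single_line_if_stmt flag are made-up stand-ins for the real astroid node classes and linter config, not pylint's API.

from dataclasses import dataclass, field

@dataclass
class If:          # stand-in for astroid's nodes.If
    orelse: list = field(default_factory=list)

@dataclass
class ClassDef:    # stand-in for astroid's nodes.ClassDef
    body: list = field(default_factory=list)

@dataclass
class Stmt:        # stand-in for an arbitrary statement node
    parent: object = None

single_line_if_stmt = True  # stands in for self.linter.config.single_line_if_stmt

def is_exempt(node: object) -> bool:
    match node:
        case Stmt(parent=If(orelse=[])) if single_line_if_stmt:
            # Keyword sub-pattern: parent must be an If whose orelse attribute
            # matches the empty-sequence pattern []; the guard must also hold.
            return True
        case Stmt(parent=ClassDef(body=[_])):
            # [_] matches a sequence of exactly one element, the pattern-matching
            # equivalent of the old len(node.parent.body) == 1 check.
            return True
        case _:
            return False

print(is_exempt(Stmt(parent=If())))                      # True: empty orelse, guard holds
print(is_exempt(Stmt(parent=If(orelse=["else body"]))))  # False: orelse not empty
print(is_exempt(Stmt(parent=ClassDef(body=["stmt"]))))   # True: single-statement body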