@@ -51,7 +51,7 @@ def process_module(self, node: nodes.Module) -> None:
51
51
52
52
53
53
class EncodingChecker (BaseTokenChecker , BaseRawFileChecker ):
54
- """BaseChecker for encoding issues.
54
+ """BaseChecker for encoding issues and fixme notes.
55
55
56
56
Checks for:
57
57
* warning notes in the code like FIXME, XXX
@@ -90,18 +90,37 @@ class EncodingChecker(BaseTokenChecker, BaseRawFileChecker):
90
90
"default" : "" ,
91
91
},
92
92
),
93
+ (
94
+ "check-fixme-in-docstring" ,
95
+ {
96
+ "type" : "yn" ,
97
+ "metavar" : "<y or n>" ,
98
+ "default" : False ,
99
+ "help" : "Whether or not to search for fixme's in docstrings." ,
100
+ },
101
+ ),
93
102
)
94
103
95
104
def open(self) -> None:
    """Compile the fixme-detection patterns from the configured note tags.

    Builds three case-insensitive regexes, stored on the instance:
    ``_comment_fixme_pattern`` for ``#`` comments,
    ``_docstring_fixme_pattern`` for single-line docstrings, and
    ``_multiline_docstring_fixme_pattern`` for individual lines of a
    multi-line docstring (which are matched one line at a time).
    """
    super().open()

    # Alternation of every literal note tag (FIXME, XXX, ...), escaped.
    alternatives = "|".join(re.escape(tag) for tag in self.linter.config.notes)
    if self.linter.config.notes_rgx:
        # The user-supplied pattern is appended verbatim as one more
        # alternative, so it may itself contain regex syntax.
        alternatives = f"{alternatives}|{self.linter.config.notes_rgx}"

    # A '#' comment whose text begins with one of the note tags, followed
    # by ':', whitespace, or end of input; 'msg' captures tag + message.
    self._comment_fixme_pattern = re.compile(
        rf"#\s*(?P<msg>({alternatives})(?=(:|\s|\Z)).*?$)", re.I
    )

    # single line docstring like '''this''' or """this"""
    self._docstring_fixme_pattern = re.compile(
        rf"((\"\"\")|(\'\'\'))\s*(?P<msg>({alternatives})(?=(:|\s|\Z)).*?)((\"\"\")|(\'\'\'))",
        re.I,
    )

    # multiline docstrings which will be split into newlines
    # so we do not need to look for quotes/double-quotes
    self._multiline_docstring_fixme_pattern = re.compile(
        rf"^\s*(?P<msg>({alternatives})(?=(:|\s|\Z)).*$)", re.I
    )
105
124
106
125
def _check_encoding (
107
126
self , lineno : int , line : bytes , file_encoding : str
def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
    """Inspect the token stream and emit ``fixme`` messages.

    Comments are always checked against ``_comment_fixme_pattern``.
    When ``check-fixme-in-docstring`` is enabled, string tokens are also
    checked: multi-line docstrings line by line (so each hit is reported
    on its own line), single-line docstrings as a whole.
    """
    if not self.linter.config.notes:
        return
    for token_info in tokens:
        if token_info.type == tokenize.COMMENT:
            match = self._comment_fixme_pattern.match(token_info.string)
            if match is not None:
                self.add_message(
                    "fixme",
                    col_offset=token_info.start[1] + 1,
                    args=match.group("msg"),
                    line=token_info.start[0],
                )
            continue
        if not self.linter.config.check_fixme_in_docstring:
            continue
        if self._is_multiline_docstring(token_info):
            # Report each matching line of the docstring at its real
            # line number (token start + offset within the string).
            for offset, text in enumerate(token_info.string.split("\n")):
                match = self._multiline_docstring_fixme_pattern.match(text)
                if match is not None:
                    self.add_message(
                        "fixme",
                        col_offset=token_info.start[1] + 1,
                        args=match.group("msg"),
                        line=token_info.start[0] + offset,
                    )
        else:
            match = self._docstring_fixme_pattern.match(token_info.string)
            if match is not None:
                self.add_message(
                    "fixme",
                    col_offset=token_info.start[1] + 1,
                    args=match.group("msg"),
                    line=token_info.start[0],
                )
182
+ def _is_multiline_docstring (self , token_info : tokenize .TokenInfo ) -> bool :
183
+ return (
184
+ token_info .type == tokenize .STRING
185
+ and (token_info .line .lstrip ().startswith (('"""' , "'''" )))
186
+ and "\n " in token_info .line .rstrip ()
187
+ )
146
188
147
189
148
190
def register (linter : PyLinter ) -> None :
0 commit comments