Skip to content

Commit af630ab

Browse files
Try each line separately on error, add spans back
1 parent 0156eb7 commit af630ab

File tree

1 file changed

+40
-11
lines changed

1 file changed

+40
-11
lines changed

Lib/idlelib/colorizer.py

Lines changed: 40 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -128,6 +128,12 @@ def is_soft_keyword_used(*tokens: TI | None) -> bool:
128128
TI(T.NAME, string=s)
129129
):
130130
return not keyword.iskeyword(s)
131+
case (
132+
None | TI(T.NEWLINE) | TI(T.INDENT) | TI(T.DEDENT) | TI(string=":"),
133+
TI(string="match" | "case" | "type"),
134+
None | TI(T.ENDMARKER) | TI(T.NEWLINE)
135+
):
136+
return True
131137
case _:
132138
return False
133139

@@ -189,9 +195,9 @@ def gen_colors_from_token_stream(
189195
case T.COMMENT:
190196
span = Span.from_token(token, line_lengths)
191197
yield ColorSpan(span, "COMMENT")
192-
case T.NUMBER:
198+
case T.NEWLINE:
193199
span = Span.from_token(token, line_lengths)
194-
yield ColorSpan(span, "STRING")
200+
yield ColorSpan(span, "SYNC")
195201
case T.OP:
196202
if token.string in "([{":
197203
bracket_level += 1
@@ -243,12 +249,37 @@ def gen_colors(buffer: str) -> Iterator[ColorSpan]:
243249
for color in gen_colors_from_token_stream(gen, line_lengths):
244250
yield color
245251
last_emitted = color
246-
except SyntaxError:
247-
return
248-
except tokenize.TokenError as te:
249-
yield from recover_unterminated_string(
250-
te, line_lengths, last_emitted, buffer
251-
)
252+
except (SyntaxError, tokenize.TokenError) as e:
253+
recovered = False
254+
if isinstance(e, tokenize.TokenError):
255+
for recovered_color in recover_unterminated_string(
256+
e, line_lengths, last_emitted, buffer
257+
):
258+
yield recovered_color
259+
recovered = True
260+
261+
# fall back to trying each line separately
262+
if not recovered:
263+
lines = buffer.split('\n')
264+
current_offset = 0
265+
for i, line in enumerate(lines):
266+
if not line.strip():
267+
current_offset += len(line) + 1
268+
continue
269+
try:
270+
line_sio = StringIO(line + '\n')
271+
line_gen = tokenize.generate_tokens(line_sio.readline)
272+
line_line_lengths = [0, len(line) + 1]
273+
274+
for color in gen_colors_from_token_stream(line_gen, line_line_lengths):
275+
adjusted_span = Span(
276+
color.span.start + current_offset,
277+
color.span.end + current_offset
278+
)
279+
yield ColorSpan(adjusted_span, color.tag)
280+
except Exception:
281+
pass
282+
current_offset += len(line) + 1
252283

253284

254285

@@ -511,9 +542,7 @@ def _add_tags_in_section(self, chars, head):
511542
512543
`head` is the index in the text widget where the text is found.
513544
"""
514-
color_spans = list(gen_colors(chars))
515-
516-
for color_span in color_spans:
545+
for color_span in gen_colors(chars):
517546
start_pos = color_span.span.start
518547
end_pos = color_span.span.end + 1
519548
tag = color_span.tag

0 commit comments

Comments
 (0)