
Commit a373b8d

eregon and Earlopain committed
Handle line continuations for Ripper :on_sp events
Co-authored-by: Earlopain <[email protected]>
1 parent e82f10b · commit a373b8d
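
For context, a line continuation is a trailing backslash that joins two physical source lines, so the lexer sees whitespace before the backslash, the `\` plus newline itself, and then the indentation of the continued line. A minimal, hypothetical snippet of the kind of input involved (not taken from the commit's tests):

```ruby
# Without the trailing backslash, "+ 2" would start a new statement;
# with it, the two physical lines form the single expression "1 + 2".
total = 1 \
        + 2
```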

File tree: 1 file changed (+37, -14 lines)


lib/prism/lex_compat.rb

Lines changed: 37 additions & 14 deletions
```diff
@@ -874,21 +874,44 @@ def add_on_sp_tokens(tokens)
         line, column = token.location
         start_offset = @source.line_to_byte_offset(line) + column
         if start_offset > prev_token_end
-          new_tokens << IgnoreStateToken.new([
-            [
-              @source.line(prev_token_end),
-              @source.column(prev_token_end),
-            ],
-            :on_sp,
-            @source.slice(prev_token_end, start_offset - prev_token_end),
-            prev_token_state
-          ])
+          sp_value = @source.slice(prev_token_end, start_offset - prev_token_end)
+          sp_line = @source.line(prev_token_end)
+          sp_column = @source.column(prev_token_end)
+          continuation_index = sp_value.byteindex("\\")
+          # ripper emits up to three on_sp tokens when line continuations are used
+          if continuation_index
+            next_whitespace_index = continuation_index + 1
+            next_whitespace_index += 1 if sp_value.byteslice(next_whitespace_index) == "\r"
+            first_whitespace = sp_value[0...continuation_index]
+            continuation = sp_value[continuation_index..next_whitespace_index]
+            second_whitespace = sp_value[next_whitespace_index+1..]
+            new_tokens << IgnoreStateToken.new([
+              [sp_line, sp_column],
+              :on_sp,
+              first_whitespace,
+              prev_token_state
+            ]) if first_whitespace != ""
+            new_tokens << IgnoreStateToken.new([
+              [sp_line, sp_column + continuation_index],
+              :on_sp,
+              continuation,
+              prev_token_state
+            ])
+            new_tokens << IgnoreStateToken.new([
+              [sp_line + 1, 0],
+              :on_sp,
+              second_whitespace,
+              prev_token_state
+            ]) if second_whitespace != ""
+          else
+            new_tokens << IgnoreStateToken.new([
+              [sp_line, @source.column(prev_token_end)],
+              :on_sp,
+              sp_value,
+              prev_token_state
+            ])
+          end
         end
-        new_tokens << token
-
-        prev_token_state = token.state
-        prev_token_end = start_offset + token.value.bytesize
-      end

        new_tokens
      end
```
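
One way to observe the behavior this change targets is to compare Ripper's tokens with prism's Ripper-compatible lexer on a snippet that uses a line continuation. A rough sketch, assuming a Ruby with `ripper` available and a prism build that exposes `Prism.lex_compat` (whose result's `value` is a Ripper-style token array); the snippet is illustrative and not taken from the commit's test suite:

```ruby
require "ripper"
require "prism"

# A line continuation: a space before the backslash, the "\" + newline
# itself, and the indentation on the continued line.
source = "foo = 1 \\\n  + 2\n"

# Ripper reports that whitespace as up to three separate :on_sp tokens.
pp Ripper.lex(source).select { |token| token[1] == :on_sp }

# After this change, prism's Ripper-compatible lexer should split its
# synthesized :on_sp tokens the same way rather than emitting one
# combined whitespace token.
pp Prism.lex_compat(source).value.select { |token| token[1] == :on_sp }
```

Before this change, the whole run of whitespace between two tokens became a single :on_sp token; the new branch splits it into the leading whitespace, the `\` plus newline (with an optional `\r`), and the trailing whitespace, emitting the first and last pieces only when they are non-empty.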
