@@ -241,7 +241,14 @@ tokenize([$?, $\\, H | T], Line, Column, Scope, Tokens) ->
   end,

   Token = {char, {Line, Column, [$?, $\\, H]}, Char},
-  tokenize(T, Line, Column + 3, NewScope, [Token | Tokens]);
+  case H of
+    $\n ->
+      %% A real LF was consumed (?\ followed by a literal newline, as opposed to
+      %% the ?\n escape); advance to the next line without emitting an EOL token.
+      tokenize_eol(T, Line, NewScope, [Token | Tokens]);
+    _ ->
+      tokenize(T, Line, Column + 3, NewScope, [Token | Tokens])
+  end;

 tokenize([$?, Char | T], Line, Column, Scope, Tokens) ->
   NewScope = case handle_char(Char) of
@@ -253,7 +260,14 @@ tokenize([$?, Char | T], Line, Column, Scope, Tokens) ->
     Scope
   end,
   Token = {char, {Line, Column, [$?, Char]}, Char},
-  tokenize(T, Line, Column + 2, NewScope, [Token | Tokens]);
+  case Char of
+    $\n ->
+      %% A real LF was consumed as part of the char literal (?<LF>);
+      %% advance to the next line without emitting an EOL token.
+      tokenize_eol(T, Line, NewScope, [Token | Tokens]);
+    _ ->
+      tokenize(T, Line, Column + 2, NewScope, [Token | Tokens])
+  end;


 % Heredocs
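Both hunks apply the same fix: when the character consumed by the ? literal is an actual line feed, the tokenizer must resume on the next line instead of advancing the column on the current one. The new branch delegates to tokenize_eol/4, whose definition is not part of this diff. A minimal sketch of the shape such a helper needs, assuming it does nothing beyond bumping the line counter and resetting the column (the real helper may additionally track indentation or other scope state):

    %% Hedged sketch, not part of this diff: resume tokenization on the next
    %% line. The LF itself was already consumed as part of the char literal,
    %% so nothing remains to skip; only bump Line and restart Column at 1.
    tokenize_eol(Rest, Line, Scope, Tokens) ->
      tokenize(Rest, Line + 1, 1, Scope, Tokens).

For example, source text consisting of ? followed by a raw newline yields the token {char, {Line, Column, [$?, $\n]}, $\n}, and whatever follows is then tokenized from the start of Line + 1 rather than at Column + 2 on the same line.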