Skip to content

Commit 7bef09a

Browse files
committed
Lexer: fix line & column for multiline BLOCK_STRING tokens
Replicates graphql/graphql-js@b262418
1 parent 3eadd19 commit 7bef09a

File tree

2 files changed: +9 additions, −6 deletions

src/graphql/language/lexer.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -380,6 +380,9 @@ def read_block_string(self, start: int) -> Token:
         """Read a block string token from the source file."""
         body = self.source.body
         body_length = len(body)
+        start_line = self.line
+        start_column = 1 + start - self.line_start
+
         position = start + 3
         chunk_start = position
         raw_value = []
@@ -389,10 +392,12 @@ def read_block_string(self, start: int) -> Token:

             if char == '"' and body[position + 1 : position + 3] == '""':
                 raw_value.append(body[chunk_start:position])
-                return self.create_token(
+                return Token(
                     TokenKind.BLOCK_STRING,
                     start,
                     position + 3,
+                    start_line,
+                    start_column,
                     dedent_block_string_value("".join(raw_value)),
                 )

tests/language/test_lexer.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -353,10 +353,10 @@ def lexes_block_strings():
            TokenKind.BLOCK_STRING, 0, 32, 1, 1, 'contains """ triple-quote'
        )
        assert lex_one('"""multi\nline"""') == Token(
-           TokenKind.BLOCK_STRING, 0, 16, 2, -8, "multi\nline"
+           TokenKind.BLOCK_STRING, 0, 16, 1, 1, "multi\nline"
        )
        assert lex_one('"""multi\rline\r\nnormalized"""') == Token(
-           TokenKind.BLOCK_STRING, 0, 28, 3, -14, "multi\nline\nnormalized"
+           TokenKind.BLOCK_STRING, 0, 28, 1, 1, "multi\nline\nnormalized"
        )
        assert lex_one('"""unescaped \\n\\r\\b\\t\\f\\u1234"""') == Token(
            TokenKind.BLOCK_STRING,
@@ -388,9 +388,7 @@ def lexes_block_strings():
        assert lex_one(
            '"""\n\n spans\n multiple\n'
            ' lines\n\n """'
-       ) == Token(
-           TokenKind.BLOCK_STRING, 0, 68, 7, -56, "spans\n multiple\n lines"
-       )
+       ) == Token(TokenKind.BLOCK_STRING, 0, 68, 1, 1, "spans\n multiple\n lines")

def advance_line_after_lexing_multiline_block_string():
    assert (

0 commit comments

Comments
 (0)