Skip to content

Commit b2372b7

Browse files
authored
Merge pull request #101 from JuliaLang/c42f/minor-bugfixes
Some minor fixes
2 parents 72fee06 + 5a86713 commit b2372b7

File tree

3 files changed

+11
-4
lines changed

3 files changed

+11
-4
lines changed

src/hooks.jl

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -46,7 +46,7 @@ end
4646
# https://github.com/JuliaLang/julia/pull/43876
4747
# Prior to this, the following signature was needed:
4848
function core_parser_hook(code, filename, offset, options)
49-
core_parser_hook(code, filename, LineNumberNode(0), offset, options)
49+
core_parser_hook(code, filename, 1, offset, options)
5050
end
5151

5252
# Debug log file for dumping parsed code
@@ -96,6 +96,7 @@ function _core_parser_hook(code, filename, lineno, offset, options)
9696
e = Expr(:error, ParseError(SourceFile(code, filename=filename), stream.diagnostics))
9797
ex = options === :all ? Expr(:toplevel, e) : e
9898
else
99+
# FIXME: Add support to lineno to this tree build (via SourceFile?)
99100
ex = build_tree(Expr, stream, filename=filename, wrap_toplevel_as_kind=K"None")
100101
if Meta.isexpr(ex, :None)
101102
# The None wrapping is only to give somewhere for trivia to be

src/tokenize.jl

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -98,13 +98,13 @@ function Lexer(io::IO)
9898
c2 = read(io, Char)
9999
p2 = position(io)
100100
if eof(io)
101-
c3, p3 = EOF_CHAR, p1
102-
c4, p4 = EOF_CHAR, p1
101+
c3, p3 = EOF_CHAR, p2
102+
c4, p4 = EOF_CHAR, p2
103103
else
104104
c3 = read(io, Char)
105105
p3 = position(io)
106106
if eof(io)
107-
c4, p4 = EOF_CHAR, p1
107+
c4, p4 = EOF_CHAR, p3
108108
else
109109
c4 = read(io, Char)
110110
p4 = position(io)

test/tokenize.jl

Lines changed: 6 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -897,6 +897,12 @@ end
897897
]
898898
end
899899

900+
@testset "lexer initialization" begin
901+
# Ranges of EndMarker
902+
@test (t = last(collect(tokenize("+"))); (t.startbyte, t.endbyte)) == (1,0)
903+
@test (t = last(collect(tokenize("+*"))); (t.startbyte, t.endbyte)) == (2,1)
904+
end
905+
900906
@testset "dotop miscellanea" begin
901907
@test strtok("a .-> b") == ["a", " ", ".-", ">", " ", "b", ""]
902908
@test strtok(".>: b") == [".>:", " ", "b", ""]

0 commit comments

Comments (0)