Commit 529d584
Author: José Valim

Unify error reporting from EEx, closes #2833

Signed-off-by: José Valim <[email protected]>

Conflicts:
    lib/eex/test/eex_test.exs

Parent: c0fb275 · Commit: 529d584

5 files changed: 80 additions, 59 deletions

lib/eex/lib/eex.ex

Lines changed: 5 additions & 1 deletion

@@ -1,5 +1,9 @@
 defmodule EEx.SyntaxError do
-  defexception [:message]
+  defexception [:message, :file, :line]
+
+  def message(exception) do
+    "#{exception.file}:#{exception.line}: #{exception.message}"
+  end
 end
 
 defmodule EEx do
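
With :file and :line on the struct, message/1 renders errors as "file:line: message". A minimal sketch of the resulting behaviour (the field values here are illustrative, matching the defaults exercised in the tests below):

    # Raising with the new fields; message/1 prepends "file:line: "
    raise EEx.SyntaxError, file: "nofile", line: 1, message: "missing token '%>'"
    #=> ** (EEx.SyntaxError) nofile:1: missing token '%>'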

lib/eex/lib/eex/compiler.ex

Lines changed: 13 additions & 8 deletions

@@ -12,10 +12,14 @@ defmodule EEx.Compiler do
   def compile(source, opts) do
     file = opts[:file] || "nofile"
     line = opts[:line] || 1
-    tokens = EEx.Tokenizer.tokenize(source, line)
-    state = %{engine: opts[:engine] || @default_engine,
-              file: file, line: line, quoted: [], start_line: nil}
-    generate_buffer(tokens, "", [], state)
+    case EEx.Tokenizer.tokenize(source, line) do
+      {:ok, tokens} ->
+        state = %{engine: opts[:engine] || @default_engine,
+                  file: file, line: line, quoted: [], start_line: nil}
+        generate_buffer(tokens, "", [], state)
+      {:error, line, message} ->
+        raise EEx.SyntaxError, line: line, file: file, message: message
+    end
   end
 
   # Generates the buffers by handling each expression from the tokenizer
@@ -51,16 +55,17 @@ defmodule EEx.Compiler do
     {buffer, t}
   end
 
-  defp generate_buffer([{:end_expr, line, _, chars}|_], _buffer, [], _state) do
-    raise EEx.SyntaxError, message: "unexpected token: #{inspect chars} at line #{inspect line}"
+  defp generate_buffer([{:end_expr, line, _, chars}|_], _buffer, [], state) do
+    raise EEx.SyntaxError, message: "unexpected token #{inspect chars}", file: state.file, line: line
   end
 
   defp generate_buffer([], buffer, [], state) do
     state.engine.handle_body(buffer)
   end
 
-  defp generate_buffer([], _buffer, _scope, _state) do
-    raise EEx.SyntaxError, message: "unexpected end of string. expecting a closing <% end %>."
+  defp generate_buffer([], _buffer, _scope, state) do
+    raise EEx.SyntaxError, message: "unexpected end of string, expected a closing '<% end %>'",
+                           file: state.file, line: state.line
   end
 
   # Creates a placeholder and wrap it inside the expression block
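
End to end, compile errors now carry the template location. A hedged sketch of what a caller sees (the template string and the "greeting.eex" file name are illustrative; the :file option and message text come from the code and tests in this commit):

    # A tokenizer failure now surfaces as EEx.SyntaxError with file/line prepended
    EEx.compile_string("foo <%= bar", file: "greeting.eex")
    #=> ** (EEx.SyntaxError) greeting.eex:1: missing token '%>'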

lib/eex/lib/eex/tokenizer.ex

Lines changed: 31 additions & 20 deletions

@@ -3,41 +3,52 @@ defmodule EEx.Tokenizer do
 
   @doc """
   Tokenizes the given char list or binary.
-  It returns 4 different types of tokens as result:
+
+  It returns {:ok, list} with the following tokens:
 
     * `{:text, contents}`
     * `{:expr, line, marker, contents}`
     * `{:start_expr, line, marker, contents}`
     * `{:middle_expr, line, marker, contents}`
     * `{:end_expr, line, marker, contents}`
 
+  Or `{:error, line, error}` in case of errors.
   """
   def tokenize(bin, line) when is_binary(bin) do
     tokenize(String.to_char_list(bin), line)
   end
 
   def tokenize(list, line) do
-    Enum.reverse(tokenize(list, line, [], []))
+    tokenize(list, line, [], [])
   end
 
   defp tokenize('<%%' ++ t, line, buffer, acc) do
-    {buffer, new_line, rest} = tokenize_expr t, line, [?%, ?<|buffer]
-    tokenize rest, new_line, [?>, ?%|buffer], acc
+    case expr(t, line, [?%, ?<|buffer]) do
+      {:error, _, _} = error -> error
+      {:ok, buffer, new_line, rest} ->
+        tokenize rest, new_line, [?>, ?%|buffer], acc
+    end
   end
 
   defp tokenize('<%#' ++ t, line, buffer, acc) do
-    {_, new_line, rest} = tokenize_expr t, line, []
-    tokenize rest, new_line, buffer, acc
+    case expr(t, line, []) do
+      {:error, _, _} = error -> error
+      {:ok, _, new_line, rest} ->
+        tokenize rest, new_line, buffer, acc
+    end
   end
 
   defp tokenize('<%' ++ t, line, buffer, acc) do
     {marker, t} = retrieve_marker(t)
-    {expr, new_line, rest} = tokenize_expr t, line, []
 
-    token = token_name(expr)
-    acc = tokenize_text(buffer, acc)
-    final = {token, line, marker, Enum.reverse(expr)}
-    tokenize rest, new_line, [], [final | acc]
+    case expr(t, line, []) do
+      {:error, _, _} = error -> error
+      {:ok, expr, new_line, rest} ->
+        token = token_name(expr)
+        acc = tokenize_text(buffer, acc)
+        final = {token, line, marker, Enum.reverse(expr)}
+        tokenize rest, new_line, [], [final | acc]
+    end
   end
 
   defp tokenize('\n' ++ t, line, buffer, acc) do
@@ -49,7 +60,7 @@ defmodule EEx.Tokenizer do
   end
 
   defp tokenize([], _line, buffer, acc) do
-    tokenize_text(buffer, acc)
+    {:ok, Enum.reverse(tokenize_text(buffer, acc))}
   end
 
   # Retrieve marker for <%
@@ -64,20 +75,20 @@ defmodule EEx.Tokenizer do
 
   # Tokenize an expression until we find %>
 
-  defp tokenize_expr([?%, ?>|t], line, buffer) do
-    {buffer, line, t}
+  defp expr([?%, ?>|t], line, buffer) do
+    {:ok, buffer, line, t}
   end
 
-  defp tokenize_expr('\n' ++ t, line, buffer) do
-    tokenize_expr t, line + 1, [?\n|buffer]
+  defp expr('\n' ++ t, line, buffer) do
+    expr t, line + 1, [?\n|buffer]
  end
 
-  defp tokenize_expr([h|t], line, buffer) do
-    tokenize_expr t, line, [h|buffer]
+  defp expr([h|t], line, buffer) do
+    expr t, line, [h|buffer]
   end
 
-  defp tokenize_expr([], _line, _buffer) do
-    raise EEx.SyntaxError, message: "missing token: %>"
+  defp expr([], line, _buffer) do
+    {:error, line, "missing token '%>'"}
   end
 
   # Receive an expression content and check
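
The tokenizer's new contract in a nutshell, using inputs taken from the updated tests below:

    EEx.Tokenizer.tokenize('foo <% bar %>', 1)
    #=> {:ok, [{:text, 'foo '}, {:expr, 1, "", ' bar '}]}

    EEx.Tokenizer.tokenize('foo <% :bar', 1)
    #=> {:error, 1, "missing token '%>'"}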

lib/eex/test/eex/tokenizer_test.exs

Lines changed: 25 additions & 24 deletions

@@ -5,23 +5,26 @@ defmodule EEx.TokenizerTest do
   require EEx.Tokenizer, as: T
 
   test "simple chars lists" do
-    assert T.tokenize('foo', 1) == [ {:text, 'foo'} ]
+    assert T.tokenize('foo', 1) == {:ok, [{:text, 'foo'}]}
   end
 
   test "simple strings" do
-    assert T.tokenize("foo", 1) == [ {:text, 'foo'} ]
+    assert T.tokenize("foo", 1) == {:ok, [{:text, 'foo'}]}
   end
 
   test "strings with embedded code" do
-    assert T.tokenize('foo <% bar %>', 1) == [ {:text, 'foo '}, {:expr, 1, "", ' bar '} ]
+    assert T.tokenize('foo <% bar %>', 1) ==
+           {:ok, [{:text, 'foo '}, {:expr, 1, "", ' bar '}]}
   end
 
   test "strings with embedded equals code" do
-    assert T.tokenize('foo <%= bar %>', 1) == [ {:text, 'foo '}, {:expr, 1, "=", ' bar '} ]
+    assert T.tokenize('foo <%= bar %>', 1) ==
+           {:ok, [{:text, 'foo '}, {:expr, 1, "=", ' bar '}]}
   end
 
   test "strings with more than one line" do
-    assert T.tokenize('foo\n<%= bar %>', 1) == [ {:text, 'foo\n'}, {:expr, 2, "=", ' bar '} ]
+    assert T.tokenize('foo\n<%= bar %>', 1) ==
+           {:ok, [{:text, 'foo\n'}, {:expr, 2, "=", ' bar '}]}
   end
 
   test "strings with more than one line and expression with more than one line" do
@@ -32,74 +35,72 @@ baz %>
 <% foo %>
 '''
 
-    assert T.tokenize(string, 1) == [
+    assert T.tokenize(string, 1) == {:ok, [
       {:text, 'foo '},
       {:expr, 1, "=", ' bar\n\nbaz '},
       {:text, '\n'},
       {:expr, 4, "", ' foo '},
       {:text, '\n'}
-    ]
+    ]}
   end
 
   test "quotation" do
-    assert T.tokenize('foo <%% true %>', 1) == [
+    assert T.tokenize('foo <%% true %>', 1) == {:ok, [
       {:text, 'foo <% true %>'}
-    ]
+    ]}
   end
 
   test "quotation with do/end" do
-    assert T.tokenize('foo <%% true do %>bar<%% end %>', 1) == [
+    assert T.tokenize('foo <%% true do %>bar<%% end %>', 1) == {:ok, [
       {:text, 'foo <% true do %>bar<% end %>'}
-    ]
+    ]}
   end
 
   test "comments" do
-    assert T.tokenize('foo <%# true %>', 1) == [
+    assert T.tokenize('foo <%# true %>', 1) == {:ok, [
       {:text, 'foo '}
-    ]
+    ]}
   end
 
   test "comments with do/end" do
-    assert T.tokenize('foo <%# true do %>bar<%# end %>', 1) == [
+    assert T.tokenize('foo <%# true do %>bar<%# end %>', 1) == {:ok, [
       {:text, 'foo bar'}
-    ]
+    ]}
   end
 
   test "strings with embedded do end" do
-    assert T.tokenize('foo <% if true do %>bar<% end %>', 1) == [
+    assert T.tokenize('foo <% if true do %>bar<% end %>', 1) == {:ok, [
       {:text, 'foo '},
       {:start_expr, 1, "", ' if true do '},
       {:text, 'bar'},
       {:end_expr, 1, "", ' end '}
-    ]
+    ]}
   end
 
   test "strings with embedded -> end" do
-    assert T.tokenize('foo <% cond do %><% false -> %>bar<% true -> %>baz<% end %>', 1) == [
+    assert T.tokenize('foo <% cond do %><% false -> %>bar<% true -> %>baz<% end %>', 1) == {:ok, [
       {:text, 'foo '},
       {:start_expr, 1, "", ' cond do '},
      {:middle_expr, 1, "", ' false -> '},
       {:text, 'bar'},
       {:middle_expr, 1, "", ' true -> '},
       {:text, 'baz'},
       {:end_expr, 1, "", ' end '}
-    ]
+    ]}
   end
 
   test "strings with embedded keywords blocks" do
-    assert T.tokenize('foo <% if true do %>bar<% else %>baz<% end %>', 1) == [
+    assert T.tokenize('foo <% if true do %>bar<% else %>baz<% end %>', 1) == {:ok, [
       {:text, 'foo '},
       {:start_expr, 1, "", ' if true do '},
       {:text, 'bar'},
       {:middle_expr, 1, "", ' else '},
       {:text, 'baz'},
       {:end_expr, 1, "", ' end '}
-    ]
+    ]}
   end
 
   test "raise syntax error when there is start mark and no end mark" do
-    assert_raise EEx.SyntaxError, "missing token: %>", fn ->
-      T.tokenize('foo <% :bar', 1)
-    end
+    assert T.tokenize('foo <% :bar', 1) == {:error, 1, "missing token '%>'"}
   end
 end

lib/eex/test/eex_test.exs

Lines changed: 6 additions & 6 deletions

@@ -111,25 +111,25 @@ defmodule EExTest do
   end
 
   test "raises a syntax error when the token is invalid" do
-    assert_raise EEx.SyntaxError, "missing token: %>", fn ->
+    assert_raise EEx.SyntaxError, "nofile:1: missing token '%>'", fn ->
       EEx.compile_string "foo <%= bar"
     end
   end
 
   test "raises a syntax error when end expression is found without a start expression" do
-    assert_raise EEx.SyntaxError, "unexpected token: ' end ' at line 1", fn ->
+    assert_raise EEx.SyntaxError, "nofile:1: unexpected token ' end '", fn ->
       EEx.compile_string "foo <% end %>"
     end
   end
 
   test "raises a syntax error when start expression is found without an end expression" do
-    assert_raise EEx.SyntaxError, "unexpected end of string. expecting a closing <% end %>.", fn ->
-      EEx.compile_string "foo <% if true do %>"
+    assert_raise EEx.SyntaxError, "nofile:2: unexpected end of string, expected a closing '<% end %>'", fn ->
+      EEx.compile_string "foo\n<% if true do %>"
     end
   end
 
-  test "raises a syntax error when nested end expression is found without an start expression" do
-    assert_raise EEx.SyntaxError, "unexpected token: ' end ' at line 1", fn ->
+  test "raises a syntax error when nested end expression is found without a start expression" do
+    assert_raise EEx.SyntaxError, "nofile:1: unexpected token ' end '", fn ->
       EEx.compile_string "foo <% if true do %><% end %><% end %>"
     end
   end
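
Because :file and :line now live on the exception struct (see lib/eex/lib/eex.ex above), callers can also inspect them programmatically instead of parsing the message string. A minimal sketch, assuming the invalid template below:

    try do
      EEx.compile_string "foo <%= bar"
    rescue
      e in EEx.SyntaxError ->
        # e.file and e.line are the new struct fields; e.message holds the
        # base text, and Exception.message(e) returns the "file:line: message"
        # form built by message/1
        IO.puts "#{e.file}:#{e.line}: #{e.message}"
    end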
