Skip to content

Commit 325c98c

Browse files
Author: José Valim (committed)
Commit message: Remove line and convert text tokens to char lists
1 parent 2daf136 commit 325c98c

File tree

3 files changed

+39
-39
lines changed

3 files changed

+39
-39
lines changed

lib/eex/lib/eex/compiler.ex

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@ defmodule EEx.Compiler do
1717

1818
# Generates the buffers by handling each expression from the tokenizer
1919

20-
defp generate_buffer([{ :text, _line, chars }|t], buffer, scope, state) do
21-
buffer = state.engine.handle_text(buffer, chars)
20+
defp generate_buffer([{ :text, chars }|t], buffer, scope, state) do
21+
buffer = state.engine.handle_text(buffer, String.from_char_list!(chars))
2222
generate_buffer(t, buffer, scope, state)
2323
end
2424

lib/eex/lib/eex/tokenizer.ex

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -16,39 +16,39 @@ defmodule EEx.Tokenizer do
1616
end
1717

1818
def tokenize(list, line) do
19-
Enum.reverse(tokenize(list, line, line, [], []))
19+
Enum.reverse(tokenize(list, line, [], []))
2020
end
2121

22-
defp tokenize('<%%' ++ t, current_line, line, buffer, acc) do
22+
defp tokenize('<%%' ++ t, line, buffer, acc) do
2323
{ buffer, new_line, rest } = tokenize_expr t, line, [?%, ?<|buffer]
24-
tokenize rest, current_line, new_line, [?>, ?%|buffer], acc
24+
tokenize rest, new_line, [?>, ?%|buffer], acc
2525
end
2626

27-
defp tokenize('<%#' ++ t, current_line, line, buffer, acc) do
27+
defp tokenize('<%#' ++ t, line, buffer, acc) do
2828
{ _, new_line, rest } = tokenize_expr t, line, []
29-
tokenize rest, current_line, new_line, buffer, acc
29+
tokenize rest, new_line, buffer, acc
3030
end
3131

32-
defp tokenize('<%' ++ t, current_line, line, buffer, acc) do
32+
defp tokenize('<%' ++ t, line, buffer, acc) do
3333
{ marker, t } = retrieve_marker(t)
3434
{ expr, new_line, rest } = tokenize_expr t, line, []
3535

3636
token = token_name(expr)
37-
acc = tokenize_text(current_line, buffer, acc)
37+
acc = tokenize_text(buffer, acc)
3838
final = { token, line, marker, Enum.reverse(expr) }
39-
tokenize rest, new_line, new_line, [], [final | acc]
39+
tokenize rest, new_line, [], [final | acc]
4040
end
4141

42-
defp tokenize('\n' ++ t, current_line, line, buffer, acc) do
43-
tokenize t, current_line, line + 1, [?\n|buffer], acc
42+
defp tokenize('\n' ++ t, line, buffer, acc) do
43+
tokenize t, line + 1, [?\n|buffer], acc
4444
end
4545

46-
defp tokenize([h|t], current_line, line, buffer, acc) do
47-
tokenize t, current_line, line, [h|buffer], acc
46+
defp tokenize([h|t], line, buffer, acc) do
47+
tokenize t, line, [h|buffer], acc
4848
end
4949

50-
defp tokenize([], current_line, _line, buffer, acc) do
51-
tokenize_text(current_line, buffer, acc)
50+
defp tokenize([], _line, buffer, acc) do
51+
tokenize_text(buffer, acc)
5252
end
5353

5454
# Retrieve marker for <%
@@ -149,11 +149,11 @@ defmodule EEx.Tokenizer do
149149
# Tokenize the buffered text by appending
150150
# it to the given accumulator.
151151

152-
defp tokenize_text(_line, [], acc) do
152+
defp tokenize_text([], acc) do
153153
acc
154154
end
155155

156-
defp tokenize_text(line, buffer, acc) do
157-
[{ :text, line, String.from_char_list!(Enum.reverse(buffer)) } | acc]
156+
defp tokenize_text(buffer, acc) do
157+
[{ :text, Enum.reverse(buffer) } | acc]
158158
end
159159
end

lib/eex/test/eex/tokenizer_test.exs

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -5,23 +5,23 @@ defmodule EEx.TokenizerTest do
55
require EEx.Tokenizer, as: T
66

77
test "simple chars lists" do
8-
assert T.tokenize('foo', 1) == [ { :text, 1, "foo" } ]
8+
assert T.tokenize('foo', 1) == [ { :text, 'foo' } ]
99
end
1010

1111
test "simple strings" do
12-
assert T.tokenize("foo", 1) == [ { :text, 1, "foo" } ]
12+
assert T.tokenize("foo", 1) == [ { :text, 'foo' } ]
1313
end
1414

1515
test "strings with embedded code" do
16-
assert T.tokenize('foo <% bar %>', 1) == [ { :text, 1, "foo " }, { :expr, 1, "", ' bar ' } ]
16+
assert T.tokenize('foo <% bar %>', 1) == [ { :text, 'foo ' }, { :expr, 1, "", ' bar ' } ]
1717
end
1818

1919
test "strings with embedded equals code" do
20-
assert T.tokenize('foo <%= bar %>', 1) == [ { :text, 1, "foo " }, { :expr, 1, "=", ' bar ' } ]
20+
assert T.tokenize('foo <%= bar %>', 1) == [ { :text, 'foo ' }, { :expr, 1, "=", ' bar ' } ]
2121
end
2222

2323
test "strings with more than one line" do
24-
assert T.tokenize('foo\n<%= bar %>', 1) == [ { :text, 1, "foo\n" }, { :expr, 2, "=", ' bar ' } ]
24+
assert T.tokenize('foo\n<%= bar %>', 1) == [ { :text, 'foo\n' }, { :expr, 2, "=", ' bar ' } ]
2525
end
2626

2727
test "strings with more than one line and expression with more than one line" do
@@ -33,66 +33,66 @@ baz %>
3333
'''
3434

3535
assert T.tokenize(string, 1) == [
36-
{:text, 1, "foo "},
36+
{:text, 'foo '},
3737
{:expr, 1, "=", ' bar\n\nbaz '},
38-
{:text, 3, "\n"},
38+
{:text, '\n'},
3939
{:expr, 4, "", ' foo '},
40-
{:text, 4, "\n"}
40+
{:text, '\n'}
4141
]
4242
end
4343

4444
test "quotation" do
4545
assert T.tokenize('foo <%% true %>', 1) == [
46-
{ :text, 1, "foo <% true %>" }
46+
{ :text, 'foo <% true %>' }
4747
]
4848
end
4949

5050
test "quotation with do/end" do
5151
assert T.tokenize('foo <%% true do %>bar<%% end %>', 1) == [
52-
{ :text, 1, "foo <% true do %>bar<% end %>" }
52+
{ :text, 'foo <% true do %>bar<% end %>' }
5353
]
5454
end
5555

5656
test "comments" do
5757
assert T.tokenize('foo <%# true %>', 1) == [
58-
{ :text, 1, "foo " }
58+
{ :text, 'foo ' }
5959
]
6060
end
6161

6262
test "comments with do/end" do
6363
assert T.tokenize('foo <%# true do %>bar<%# end %>', 1) == [
64-
{ :text, 1, "foo bar" }
64+
{ :text, 'foo bar' }
6565
]
6666
end
6767

6868
test "strings with embedded do end" do
6969
assert T.tokenize('foo <% if true do %>bar<% end %>', 1) == [
70-
{ :text, 1, "foo " },
70+
{ :text, 'foo ' },
7171
{ :start_expr, 1, "", ' if true do ' },
72-
{ :text, 1, "bar" },
72+
{ :text, 'bar' },
7373
{ :end_expr, 1, "", ' end ' }
7474
]
7575
end
7676

7777
test "strings with embedded -> end" do
7878
assert T.tokenize('foo <% cond do %><% false -> %>bar<% true -> %>baz<% end %>', 1) == [
79-
{ :text, 1, "foo " },
79+
{ :text, 'foo ' },
8080
{ :start_expr, 1, "", ' cond do ' },
8181
{ :middle_expr, 1, "", ' false -> ' },
82-
{ :text, 1, "bar" },
82+
{ :text, 'bar' },
8383
{ :middle_expr, 1, "", ' true -> ' },
84-
{ :text, 1, "baz" },
84+
{ :text, 'baz' },
8585
{ :end_expr, 1, "", ' end ' }
8686
]
8787
end
8888

8989
test "strings with embedded keywords blocks" do
9090
assert T.tokenize('foo <% if true do %>bar<% else %>baz<% end %>', 1) == [
91-
{ :text, 1, "foo " },
91+
{ :text, 'foo ' },
9292
{ :start_expr, 1, "", ' if true do ' },
93-
{ :text, 1, "bar" },
93+
{ :text, 'bar' },
9494
{ :middle_expr, 1, "", ' else ' },
95-
{ :text, 1, "baz" },
95+
{ :text, 'baz' },
9696
{ :end_expr, 1, "", ' end ' }
9797
]
9898
end

0 commit comments

Comments
 (0)