Commit 64c5acb

Author: José Valim (committed)

Fix syntax error on .%, closes #2245

1 parent 45df7c4, commit 64c5acb


3 files changed: 34 additions, 26 deletions

lib/elixir/lib/inspect.ex

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@ defimpl Inspect, for: Atom do
       end
     valid_atom_identifier?(binary) ->
       ":" <> binary
-    atom in [:%{}, :{}, :<<>>, :..., :[]] ->
+    atom in [:%{}, :{}, :<<>>, :..., :%] ->
       ":" <> binary
     atom in Macro.binary_ops or atom in Macro.unary_ops ->
       ":" <> binary

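For context (not part of the commit): with :% added to the specially handled atoms above, inspect is expected to return the plain ":"-prefixed form, as the test changes at the bottom of this commit assert. A minimal iex sketch of that expected output:

iex> inspect(:%)
":%"
iex> inspect(:%{})
":%{}"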
lib/elixir/src/elixir_tokenizer.erl

Lines changed: 30 additions & 24 deletions
@@ -3,11 +3,6 @@
 -export([tokenize/3]).
 -import(elixir_interpolation, [unescape_tokens/1]).
 
--define(container(T1, T2),
-  T1 == ${, T2 == $};
-  T1 == $[, T2 == $]
-).
-
 -define(at_op(T),
   T == $@).
 
@@ -160,47 +155,58 @@ tokenize([$~,S,H|T] = Original, Line, Scope, Tokens) when ?is_sigil(H), ?is_upca
 
 % Char tokens
 
-tokenize([$?,$\\,P,${,A,B,C,D,E,F,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) ->
+tokenize([$?,$\\,P,${,A,B,C,D,E,F,$}|T], Line, Scope, Tokens)
+    when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) ->
   Char = escape_char([$\\,P,${,A,B,C,D,E,F,$}]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,P,${,A,B,C,D,E,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) ->
+tokenize([$?,$\\,P,${,A,B,C,D,E,$}|T], Line, Scope, Tokens)
+    when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) ->
   Char = escape_char([$\\,P,${,A,B,C,D,E,$}]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,P,${,A,B,C,D,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
+tokenize([$?,$\\,P,${,A,B,C,D,$}|T], Line, Scope, Tokens)
+    when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
   Char = escape_char([$\\,P,${,A,B,C,D,$}]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,P,${,A,B,C,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C) ->
+tokenize([$?,$\\,P,${,A,B,C,$}|T], Line, Scope, Tokens)
+    when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C) ->
   Char = escape_char([$\\,P,${,A,B,C,$}]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,P,${,A,B,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) ->
+tokenize([$?,$\\,P,${,A,B,$}|T], Line, Scope, Tokens)
+    when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) ->
   Char = escape_char([$\\,P,${,A,B,$}]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,P,${,A,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A) ->
+tokenize([$?,$\\,P,${,A,$}|T], Line, Scope, Tokens)
+    when (P == $x orelse P == $X), ?is_hex(A) ->
   Char = escape_char([$\\,P,${,A,$}]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,P,A,B|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) ->
+tokenize([$?,$\\,P,A,B|T], Line, Scope, Tokens)
+    when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) ->
   Char = escape_char([$\\,P,A,B]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,P,A|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A) ->
+tokenize([$?,$\\,P,A|T], Line, Scope, Tokens)
+    when (P == $x orelse P == $X), ?is_hex(A) ->
   Char = escape_char([$\\,P,A]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,A,B,C|T], Line, Scope, Tokens) when ?is_octal(A), A =< $3,?is_octal(B), ?is_octal(C) ->
+tokenize([$?,$\\,A,B,C|T], Line, Scope, Tokens)
+    when ?is_octal(A), A =< $3,?is_octal(B), ?is_octal(C) ->
   Char = escape_char([$\\,A,B,C]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,A,B|T], Line, Scope, Tokens) when ?is_octal(A), ?is_octal(B) ->
+tokenize([$?,$\\,A,B|T], Line, Scope, Tokens)
+    when ?is_octal(A), ?is_octal(B) ->
   Char = escape_char([$\\,A,B]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
-tokenize([$?,$\\,A|T], Line, Scope, Tokens) when ?is_octal(A) ->
+tokenize([$?,$\\,A|T], Line, Scope, Tokens)
+    when ?is_octal(A) ->
   Char = escape_char([$\\,A]),
   tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
 
@@ -257,8 +263,8 @@ tokenize(":%{}" ++ Rest, Line, Scope, Tokens) ->
   tokenize(Rest, Line, Scope, [{atom, Line, '%{}'}|Tokens]);
 tokenize(":%" ++ Rest, Line, Scope, Tokens) ->
   tokenize(Rest, Line, Scope, [{atom, Line, '%'}|Tokens]);
-tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when ?container(T1, T2) ->
-  tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2])}|Tokens]);
+tokenize(":{}" ++ Rest, Line, Scope, Tokens) ->
+  tokenize(Rest, Line, Scope, [{atom, Line, '{}'}|Tokens]);
 
 tokenize("...:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) ->
   tokenize(Rest, Line, Scope, [{kw_identifier, Line, '...'}|Tokens]);
@@ -268,8 +274,8 @@ tokenize("%{}:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) ->
   tokenize(Rest, Line, Scope, [{kw_identifier, Line, '%{}'}|Tokens]);
 tokenize("%:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) ->
   tokenize(Rest, Line, Scope, [{kw_identifier, Line, '%'}|Tokens]);
-tokenize([T1,T2,$:|Rest], Line, Scope, Tokens) when ?container(T1, T2), ?is_space(hd(Rest)) ->
-  tokenize(Rest, Line, Scope, [{kw_identifier, Line, list_to_atom([T1,T2])}|Tokens]);
+tokenize("{}:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) ->
+  tokenize(Rest, Line, Scope, [{kw_identifier, Line, '{}'}|Tokens]);
 
 % ## Three Token Operators
 tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when
@@ -315,6 +321,9 @@ tokenize("..." ++ Rest, Line, Scope, Tokens) ->
   Token = check_call_identifier(identifier, Line, '...', Rest),
   tokenize(Rest, Line, Scope, [Token|Tokens]);
 
+tokenize("=>" ++ Rest, Line, Scope, Tokens) ->
+  tokenize(Rest, Line, Scope, add_token_with_nl({assoc_op, Line, '=>'}, Tokens));
+
 % ## Three token operators
 tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?unary_op3(T1, T2, T3) ->
   handle_unary_op(Rest, Line, unary_op, list_to_atom([T1,T2,T3]), Scope, Tokens);
@@ -344,9 +353,6 @@ tokenize([T|Rest], Line, Scope, Tokens) when T == $(;
   Token = {list_to_atom([T]), Line},
   handle_terminator(Rest, Line, Scope, Token, Tokens);
 
-tokenize("=>" ++ Rest, Line, Scope, Tokens) ->
-  tokenize(Rest, Line, Scope, add_token_with_nl({assoc_op, Line, '=>'}, Tokens));
-
 % ## Two Token Operators
 tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?two_op(T1, T2) ->
   handle_op(Rest, Line, two_op, list_to_atom([T1, T2]), Scope, Tokens);
@@ -537,7 +543,7 @@ handle_dot([$.,T1,T2|Rest], Line, Scope, Tokens) when
 % ## Single Token Operators
 handle_dot([$.,T|Rest], Line, Scope, Tokens) when
   ?at_op(T); ?unary_op(T); ?dual_op(T); ?mult_op(T); ?comp_op(T);
-  ?match_op(T); ?pipe_op(T) ->
+  ?match_op(T); ?pipe_op(T); T == $% ->
   handle_call_identifier(Rest, Line, list_to_atom([T]), Scope, Tokens);
 
 % ## Exception for .( as it needs to be treated specially in the parser
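
For context (not part of the commit): the removed ?container macro matched the {} and [] character pairs; the atom and keyword-key forms of {} are now covered by the explicit ":{}" and "{}:" clauses above, next to the existing ":%{}" and ":%" ones. The relocated "=>" clause still emits the assoc_op token used for map associations, and the extra T == $% guard in handle_dot lets % be tokenized as a call identifier after a dot, which is presumably the ".%" case the commit title refers to. A small iex sketch of inputs the atom and assoc_op clauses accept:

iex> :{}
:{}
iex> %{:a => 1}
%{a: 1}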

lib/elixir/test/elixir/inspect_test.exs

Lines changed: 3 additions & 1 deletion
@@ -56,9 +56,11 @@ defmodule Inspect.AtomTest do
     assert inspect(:foo@bar@baz) == ":foo@bar@baz"
   end
 
-  test :container do
+  test :others do
     assert inspect(:<<>>) == ":<<>>"
     assert inspect(:{}) == ":{}"
+    assert inspect(:%{}) == ":%{}"
+    assert inspect(:%) == ":%"
   end
 end
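
A standalone sketch for exercising the new assertions outside the full suite, assuming only an installed Elixir with ExUnit; the file name is arbitrary and the real test lives in Inspect.AtomTest:

# Save as inspect_atoms_check.exs (any name works) and run: elixir inspect_atoms_check.exs
ExUnit.start()

defmodule InspectAtomsCheck do
  use ExUnit.Case, async: true

  test "special atoms inspect with a ':' prefix" do
    assert inspect(:%{}) == ":%{}"
    assert inspect(:%) == ":%"
  end
end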