
Commit 6f73d7d
Author: José Valim
Parent: c2ac967

    Move comp_op to the new table

File tree: 2 files changed (+54, -65 lines)

lib/elixir/src/elixir_parser.yrl (4 additions, 10 deletions)
@@ -2,7 +2,7 @@ Nonterminals
   grammar expr_list
   expr paren_expr block_expr fn_expr bracket_expr call_expr bracket_at_expr max_expr
   base_expr matched_expr matched_op_expr unmatched_expr op_expr
-  mult_op two_op regex_op right_op bin_concat_op
+  mult_op two_op right_op bin_concat_op
   match_op send_op default_op when_op pipe_op in_op inc_op range_op
   andand_op oror_op and_op or_op colon_colon_op three_op
   comp_op_eol at_op_eol unary_op_eol dual_op_eol
@@ -33,7 +33,7 @@ Terminals
   'true' 'false' 'nil'
   '=' '*' '/' '++' '--' '**' '//'
   '(' ')' '[' ']' '{' '}' '<<' '>>' '::'
-  eol ',' '&' '|' '.' '<-' '<>' '->' '|>' '=~'
+  eol ',' '&' '|' '.' '<-' '<>' '->' '|>'
   '&&' '||' '...' '..'
   '<<<' '>>>' '&&&' '|||' '^^^'
   .
@@ -54,9 +54,8 @@ Left 120 oror_op.
 Left 130 andand_op.
 Left 140 or_op.
 Left 150 and_op.
-Left 160 comp_op_eol.
+Left 160 comp_op_eol. %% < (op), > (op), <=, >=, ==, !=, =~, ===, !===
 Left 170 in_op.
-Right 180 regex_op.
 Right 190 right_op.
 Left 200 range_op.
 Left 210 three_op.
@@ -67,7 +66,7 @@ Right 250 two_op.
 Nonassoc 300 unary_op_eol. %% +, -, !, ^, not, ~~~
 Left 310 dot_call_op.
 Left 310 dot_op.
-Nonassoc 320 at_op_eol. %% @<op>
+Nonassoc 320 at_op_eol. %% @ (op)
 Nonassoc 330 var.

 %%% MAIN FLOW OF EXPRESSIONS
@@ -106,7 +105,6 @@ op_expr -> match_op expr : { '$1', '$2' }.
 op_expr -> dual_op_eol expr : { '$1', '$2' }.
 op_expr -> mult_op expr : { '$1', '$2' }.
 op_expr -> two_op expr : { '$1', '$2' }.
-op_expr -> regex_op expr : { '$1', '$2' }.
 op_expr -> right_op expr : { '$1', '$2' }.
 op_expr -> andand_op expr : { '$1', '$2' }.
 op_expr -> three_op expr : { '$1', '$2' }.
@@ -128,7 +126,6 @@ matched_op_expr -> match_op matched_expr : { '$1', '$2' }.
 matched_op_expr -> dual_op_eol matched_expr : { '$1', '$2' }.
 matched_op_expr -> mult_op matched_expr : { '$1', '$2' }.
 matched_op_expr -> two_op matched_expr : { '$1', '$2' }.
-matched_op_expr -> regex_op matched_expr : { '$1', '$2' }.
 matched_op_expr -> right_op matched_expr : { '$1', '$2' }.
 matched_op_expr -> andand_op matched_expr : { '$1', '$2' }.
 matched_op_expr -> three_op matched_expr : { '$1', '$2' }.
@@ -282,9 +279,6 @@ two_op -> '--' eol : '$1'.
 two_op -> '**' : '$1'.
 two_op -> '**' eol : '$1'.

-regex_op -> '=~' : '$1'.
-regex_op -> '=~' eol : '$1'.
-
 right_op -> '|>' : '$1'.
 right_op -> '|>' eol : '$1'.

lib/elixir/src/elixir_tokenizer.erl (50 additions, 55 deletions)
@@ -15,11 +15,6 @@
   T1 == $[, T2 == $]
 ).

--define(comp3(T1, T2, T3),
-  T1 == $=, T2 == $=, T3 == $=;
-  T1 == $!, T2 == $=, T3 == $=
-).
-
 -define(op3(T1, T2, T3),
   T1 == $<, T2 == $<, T3 == $<;
   T1 == $>, T2 == $>, T3 == $>;
@@ -29,13 +24,6 @@
   T1 == $|, T2 == $|, T3 == $|
 ).

--define(comp2(T1, T2),
-  T1 == $=, T2 == $=;
-  T1 == $!, T2 == $=;
-  T1 == $<, T2 == $=;
-  T1 == $>, T2 == $=
-).
-
 -define(op2(T1, T2),
   T1 == $&, T2 == $&;
   T1 == $|, T2 == $|;
@@ -48,14 +36,7 @@
   T1 == $<, T2 == $-;
   T1 == $-, T2 == $>;
   T1 == $., T2 == $.;
-  T1 == $|, T2 == $>;
-  T1 == $/, T2 == $>;
-  T1 == $=, T2 == $~
-).
-
--define(comp1(T),
-  T == $<;
-  T == $>
+  T1 == $|, T2 == $>
 ).

 -define(op1(T),
@@ -73,15 +54,29 @@
 -define(unary_op(T),
   % T == $&;
   T == $!;
-  T == $^
-).
+  T == $^).

 -define(unary_op3(T1, T2, T3),
   T1 == $~, T2 == $~, T3 == $~).

 -define(dual_op(T),
   T == $+ orelse T == $-).

+-define(comp_op(T),
+  T == $<;
+  T == $>).
+
+-define(comp_op2(T1, T2),
+  T1 == $=, T2 == $=;
+  T1 == $=, T2 == $~;
+  T1 == $!, T2 == $=;
+  T1 == $<, T2 == $=;
+  T1 == $>, T2 == $=).
+
+-define(comp_op3(T1, T2, T3),
+  T1 == $=, T2 == $=, T3 == $=;
+  T1 == $!, T2 == $=, T3 == $=).
+
 tokenize(String, Line, Opts) ->
   File = case lists:keyfind(file, 1, Opts) of
     { file, V1 } -> V1;
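The new classification macros are ordinary guard expressions, so they can be lifted into a small standalone module to check which class a given operator spelling falls into. The following is a sketch for illustration only: comp_op_demo and classify/1 are hypothetical names, not part of the Elixir sources; the guard bodies are copied from the macros added above.

-module(comp_op_demo).
-export([classify/1]).

%% Same guard bodies as the ?comp_op* macros introduced in this commit.
-define(comp_op(T),
  T == $<;
  T == $>).

-define(comp_op2(T1, T2),
  T1 == $=, T2 == $=;
  T1 == $=, T2 == $~;
  T1 == $!, T2 == $=;
  T1 == $<, T2 == $=;
  T1 == $>, T2 == $=).

-define(comp_op3(T1, T2, T3),
  T1 == $=, T2 == $=, T3 == $=;
  T1 == $!, T2 == $=, T3 == $=).

%% Longest spelling first, mirroring the clause order of tokenize/4.
classify([T1,T2,T3]) when ?comp_op3(T1, T2, T3) -> comp_op3;
classify([T1,T2])    when ?comp_op2(T1, T2)     -> comp_op2;
classify([T])        when ?comp_op(T)           -> comp_op;
classify(_)          -> no_comp_op.

With this sketch, "===" and "!==" hit the three-character clause, "==", "!=", "=~", "<=" and ">=" the two-character clause, and bare "<" or ">" the single-character clause; everything else falls through to no_comp_op.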
@@ -209,42 +204,46 @@ tokenize([$?,Char|T], Line, Scope, Tokens) ->
   tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);

 % Dot identifier/operators
-
 tokenize("..." ++ Rest, Line, Scope, Tokens) ->
   Token = check_call_identifier(identifier, Line, '...', Rest),
   tokenize(Rest, Line, Scope, [Token|Tokens]);

-% ## Containers
-
 tokenize([$.,T|Tail], Line, Scope, Tokens) when ?is_space(T) ->
   case [T|Tail] of
     [$\r,$\n|Rest] -> tokenize([$.|Rest], Line + 1, Scope, Tokens);
     [$\n|Rest] -> tokenize([$.|Rest], Line + 1, Scope, Tokens);
     [_|Rest] -> tokenize([$.|Rest], Line, Scope, Tokens)
   end;

+% ## Containers
 tokenize(".<<>>" ++ Rest, Line, Scope, Tokens) ->
   handle_call_identifier(Rest, Line, '<<>>', Scope, Tokens);

 tokenize([$.,T1,T2|Rest], Line, Scope, Tokens) when ?container2(T1, T2) ->
   handle_call_identifier(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);

 % ## Three Token Operators
-tokenize([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when ?unary_op3(T1, T2, T3) ->
+tokenize([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when
+    ?unary_op3(T1, T2, T3); ?comp_op3(T1, T2, T3) ->
   handle_call_identifier(Rest, Line, list_to_atom([T1, T2, T3]), Scope, Tokens);

-tokenize([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp3(T1, T2, T3); ?op3(T1, T2, T3) ->
+tokenize([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when ?op3(T1, T2, T3) ->
   handle_call_identifier(Rest, Line, list_to_atom([T1, T2, T3]), Scope, Tokens);

 % ## Two Token Operators
-tokenize([$.,T1,T2|Rest], Line, Scope, Tokens) when ?comp2(T1, T2); ?op2(T1, T2) ->
+tokenize([$.,T1,T2|Rest], Line, Scope, Tokens) when
+    ?comp_op2(T1, T2) ->
+  handle_call_identifier(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);
+
+tokenize([$.,T1,T2|Rest], Line, Scope, Tokens) when ?op2(T1, T2) ->
   handle_call_identifier(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);

 % ## Single Token Operators
-tokenize([$.,T|Rest], Line, Scope, Tokens) when ?at_op(T); ?unary_op(T); ?dual_op(T) ->
+tokenize([$.,T|Rest], Line, Scope, Tokens) when
+    ?at_op(T); ?unary_op(T); ?dual_op(T); ?comp_op(T) ->
   handle_call_identifier(Rest, Line, list_to_atom([T]), Scope, Tokens);

-tokenize([$.,T|Rest], Line, Scope, Tokens) when ?comp1(T); ?op1(T); T == $& ->
+tokenize([$.,T|Rest], Line, Scope, Tokens) when ?op1(T); T == $& ->
   handle_call_identifier(Rest, Line, list_to_atom([T]), Scope, Tokens);

 % Dot call
@@ -309,22 +308,27 @@ tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when ?container2(T1, T2) ->
   tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T1,T2]) }|Tokens]);

 % ## Three Token Operators
-tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when ?unary_op3(T1, T2, T3) ->
+tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when
+    ?unary_op3(T1, T2, T3); ?comp_op3(T1, T2, T3) ->
   tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T1,T2,T3]) }|Tokens]);

-tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp3(T1, T2, T3); ?op3(T1, T2, T3) ->
+tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when ?op3(T1, T2, T3) ->
   tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T1,T2,T3]) }|Tokens]);

 % ## Two Token Operators
-tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when ?comp2(T1, T2); ?op2(T1, T2) ->
+tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when
+    ?comp_op2(T1, T2) ->
   tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T1,T2]) }|Tokens]);

-% ## Single Token Operators
+tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when ?op2(T1, T2) ->
+  tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T1,T2]) }|Tokens]);

-tokenize([$:,T|Rest], Line, Scope, Tokens) when ?at_op(T); ?unary_op(T); ?dual_op(T) ->
+% ## Single Token Operators
+tokenize([$:,T|Rest], Line, Scope, Tokens) when
+    ?at_op(T); ?unary_op(T); ?dual_op(T); ?comp_op(T) ->
   tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T]) }|Tokens]);

-tokenize([$:,T|Rest], Line, Scope, Tokens) when ?comp1(T); ?op1(T); T == $&; T == $. ->
+tokenize([$:,T|Rest], Line, Scope, Tokens) when ?op1(T); T == $&; T == $. ->
   tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T]) }|Tokens]);

 % End of line
@@ -353,14 +357,13 @@ tokenize("\r\n" ++ Rest, Line, Scope, Tokens) ->
 tokenize([$&,H|Rest], Line, Scope, Tokens) when ?is_digit(H) ->
   tokenize(Rest, Line, Scope, [{ '&', Line, [list_to_integer([H])] }|Tokens]);

-% ## Comparison three token operators
-tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp3(T1, T2, T3) ->
-  handle_comp_op(Rest, Line, list_to_atom([T1,T2,T3]), Scope, Tokens);
-
 % ## Three token operators
 tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?unary_op3(T1, T2, T3) ->
   handle_unary_op(Rest, Line, unary_op, list_to_atom([T1,T2,T3]), Scope, Tokens);

+tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp_op3(T1, T2, T3) ->
+  handle_op(Rest, Line, comp_op, list_to_atom([T1,T2,T3]), Scope, Tokens);
+
 tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?op3(T1, T2, T3) ->
   handle_op(Rest, Line, list_to_atom([T1,T2,T3]), Scope, Tokens);

@@ -374,25 +377,23 @@ tokenize([T|Rest], Line, Scope, Tokens) when T == $(;
   Token = { list_to_atom([T]), Line },
   handle_terminator(Rest, Line, Scope, Token, Tokens);

-% ## Comparison two token operators
-tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?comp2(T1, T2) ->
-  handle_comp_op(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);
-
 % ## Two Token Operators
+tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?comp_op2(T1, T2) ->
+  handle_op(Rest, Line, comp_op, list_to_atom([T1, T2]), Scope, Tokens);
+
 tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?op2(T1, T2) ->
   handle_op(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);

-% ## Comparison single token operators
-tokenize([T|Rest], Line, Scope, Tokens) when ?comp1(T) ->
-  handle_comp_op(Rest, Line, list_to_atom([T]), Scope, Tokens);
-
 % ## Single Token Operators
 tokenize([T|Rest], Line, Scope, Tokens) when ?at_op(T) ->
   handle_unary_op(Rest, Line, at_op, list_to_atom([T]), Scope, Tokens);

 tokenize([T|Rest], Line, Scope, Tokens) when ?unary_op(T) ->
   handle_unary_op(Rest, Line, unary_op, list_to_atom([T]), Scope, Tokens);

+tokenize([T|Rest], Line, Scope, Tokens) when ?comp_op(T) ->
+  handle_op(Rest, Line, comp_op, list_to_atom([T]), Scope, Tokens);
+
 tokenize([T|Rest], Line, Scope, Tokens) when ?dual_op(T) ->
   handle_unary_op(Rest, Line, dual_op, list_to_atom([T]), Scope, Tokens);

@@ -491,12 +492,6 @@ handle_strings(T, Line, H, #scope{file=File} = Scope, Tokens) ->
     interpolation_error(Error, " (for string starting at line ~B)", [Line])
   end.

-handle_comp_op([$:|Rest], Line, Op, Scope, Tokens) when ?is_space(hd(Rest)) ->
-  tokenize(Rest, Line, Scope, [{ kw_identifier, Line, Op }|Tokens]);
-
-handle_comp_op(Rest, Line, Op, Scope, Tokens) ->
-  tokenize(Rest, Line, Scope, add_token_with_nl({ comp_op, Line, Op }, Tokens)).
-
 handle_op([$:|Rest], Line, Op, Scope, Tokens) when ?is_space(hd(Rest)) ->
   tokenize(Rest, Line, Scope, [{ kw_identifier, Line, Op }|Tokens]);

@@ -513,7 +508,7 @@ handle_op([$:|Rest], Line, _Kind, Op, Scope, Tokens) when ?is_space(hd(Rest)) ->
   tokenize(Rest, Line, Scope, [{ kw_identifier, Line, Op }|Tokens]);

 handle_op(Rest, Line, Kind, Op, Scope, Tokens) ->
-  tokenize(Rest, Line, Scope, add_token_with_nl({ Kind, Op, Line }, Tokens)).
+  tokenize(Rest, Line, Scope, add_token_with_nl({ Kind, Line, Op }, Tokens)).

 handle_call_identifier(Rest, Line, Op, Scope, Tokens) ->
   Token = check_call_identifier(identifier, Line, Op, Rest),
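Two things follow from the hunks above: comparison operators now go through the generic handle_op/6 with an explicit comp_op kind (the dedicated handle_comp_op/5 is gone), and the final handle_op/6 clause corrects the token layout so the kind is followed by the line and then the operator. A minimal sketch of the resulting token; the { ok, Line, Tokens } success shape of tokenize/3 is an assumption, since it is not shown in this diff:

%% Hypothetical erl shell session; only the token layout is the point here.
1> { ok, _EndLine, Tokens } = elixir_tokenizer:tokenize("a =~ b", 1, []).
2> lists:keyfind(comp_op, 1, Tokens).
{ comp_op, 1, '=~' }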
