  T1 == $[, T2 == $]
).

- -define(comp3(T1, T2, T3),
-   T1 == $=, T2 == $=, T3 == $=;
-   T1 == $!, T2 == $=, T3 == $=
- ).
-
-define(op3(T1, T2, T3),
  T1 == $<, T2 == $<, T3 == $<;
  T1 == $>, T2 == $>, T3 == $>;
  T1 == $|, T2 == $|, T3 == $|
).

- -define(comp2(T1, T2),
-   T1 == $=, T2 == $=;
-   T1 == $!, T2 == $=;
-   T1 == $<, T2 == $=;
-   T1 == $>, T2 == $=
- ).
-
-define(op2(T1, T2),
  T1 == $&, T2 == $&;
  T1 == $|, T2 == $|;
  T1 == $<, T2 == $-;
  T1 == $-, T2 == $>;
  T1 == $., T2 == $.;
-   T1 == $|, T2 == $>;
-   T1 == $/, T2 == $>;
-   T1 == $=, T2 == $~
- ).
-
- -define(comp1(T),
-   T == $<;
-   T == $>
+   T1 == $|, T2 == $>
).

-define(op1(T),
-define(unary_op(T),
  % T == $&;
  T == $!;
-   T == $^
- ).
+   T == $^).

-define(unary_op3(T1, T2, T3),
  T1 == $~, T2 == $~, T3 == $~).

-define(dual_op(T),
  T == $+ orelse T == $-).

+ -define(comp_op(T),
+   T == $<;
+   T == $>).
+
+ -define(comp_op2(T1, T2),
+   T1 == $=, T2 == $=;
+   T1 == $=, T2 == $~;
+   T1 == $!, T2 == $=;
+   T1 == $<, T2 == $=;
+   T1 == $>, T2 == $=).
+
+ -define(comp_op3(T1, T2, T3),
+   T1 == $=, T2 == $=, T3 == $=;
+   T1 == $!, T2 == $=, T3 == $=).
+
tokenize(String, Line, Opts) ->
  File = case lists:keyfind(file, 1, Opts) of
    {file, V1} -> V1;
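For reference, here is a minimal, self-contained sketch of the consolidated comparison macros introduced above, plus a small classifier showing how they combine in guards. The macro bodies are copied from the diff; the module name and the classify_comp/1 helper are purely illustrative and not part of the commit.

    %% comp_macros_sketch.erl (illustrative only)
    -module(comp_macros_sketch).
    -export([classify_comp/1]).

    -define(comp_op(T),
      T == $<;
      T == $>).

    -define(comp_op2(T1, T2),
      T1 == $=, T2 == $=;
      T1 == $=, T2 == $~;
      T1 == $!, T2 == $=;
      T1 == $<, T2 == $=;
      T1 == $>, T2 == $=).

    -define(comp_op3(T1, T2, T3),
      T1 == $=, T2 == $=, T3 == $=;
      T1 == $!, T2 == $=, T3 == $=).

    %% classify_comp("===") -> {comp_op, '==='}; classify_comp("+") -> {not_comp, "+"}
    classify_comp([T1, T2, T3]) when ?comp_op3(T1, T2, T3) -> {comp_op, list_to_atom([T1, T2, T3])};
    classify_comp([T1, T2]) when ?comp_op2(T1, T2) -> {comp_op, list_to_atom([T1, T2])};
    classify_comp([T]) when ?comp_op(T) -> {comp_op, list_to_atom([T])};
    classify_comp(Other) -> {not_comp, Other}.

Note that =~ moves from op2 into comp_op2 here, so every comparison-like operator can share one token kind downstream.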
@@ -209,42 +204,46 @@ tokenize([$?,Char|T], Line, Scope, Tokens) ->
  tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);

% Dot identifier/operators
-
tokenize("..." ++ Rest, Line, Scope, Tokens) ->
  Token = check_call_identifier(identifier, Line, '...', Rest),
  tokenize(Rest, Line, Scope, [Token|Tokens]);

- % ## Containers
-
tokenize([$.,T|Tail], Line, Scope, Tokens) when ?is_space(T) ->
  case [T|Tail] of
    [$\r,$\n|Rest] -> tokenize([$.|Rest], Line + 1, Scope, Tokens);
    [$\n|Rest] -> tokenize([$.|Rest], Line + 1, Scope, Tokens);
    [_|Rest] -> tokenize([$.|Rest], Line, Scope, Tokens)
  end;

+ % ## Containers
tokenize(".<<>>" ++ Rest, Line, Scope, Tokens) ->
  handle_call_identifier(Rest, Line, '<<>>', Scope, Tokens);

tokenize([$.,T1,T2|Rest], Line, Scope, Tokens) when ?container2(T1, T2) ->
  handle_call_identifier(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);

% ## Three Token Operators
- tokenize([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when ?unary_op3(T1, T2, T3) ->
+ tokenize([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when
+     ?unary_op3(T1, T2, T3); ?comp_op3(T1, T2, T3) ->
  handle_call_identifier(Rest, Line, list_to_atom([T1, T2, T3]), Scope, Tokens);

- tokenize([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp3(T1, T2, T3); ?op3(T1, T2, T3) ->
+ tokenize([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when ?op3(T1, T2, T3) ->
  handle_call_identifier(Rest, Line, list_to_atom([T1, T2, T3]), Scope, Tokens);

% ## Two Token Operators
- tokenize([$.,T1,T2|Rest], Line, Scope, Tokens) when ?comp2(T1, T2); ?op2(T1, T2) ->
+ tokenize([$.,T1,T2|Rest], Line, Scope, Tokens) when
+     ?comp_op2(T1, T2) ->
+   handle_call_identifier(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);
+
+ tokenize([$.,T1,T2|Rest], Line, Scope, Tokens) when ?op2(T1, T2) ->
  handle_call_identifier(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);

% ## Single Token Operators
- tokenize([$.,T|Rest], Line, Scope, Tokens) when ?at_op(T); ?unary_op(T); ?dual_op(T) ->
+ tokenize([$.,T|Rest], Line, Scope, Tokens) when
+     ?at_op(T); ?unary_op(T); ?dual_op(T); ?comp_op(T) ->
  handle_call_identifier(Rest, Line, list_to_atom([T]), Scope, Tokens);

- tokenize([$.,T|Rest], Line, Scope, Tokens) when ?comp1(T); ?op1(T); T == $& ->
+ tokenize([$.,T|Rest], Line, Scope, Tokens) when ?op1(T); T == $& ->
  handle_call_identifier(Rest, Line, list_to_atom([T]), Scope, Tokens);

% Dot call
@@ -309,22 +308,27 @@ tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when ?container2(T1, T2) ->
  tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2])}|Tokens]);

% ## Three Token Operators
- tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when ?unary_op3(T1, T2, T3) ->
+ tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when
+     ?unary_op3(T1, T2, T3); ?comp_op3(T1, T2, T3) ->
  tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2,T3])}|Tokens]);

- tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp3(T1, T2, T3); ?op3(T1, T2, T3) ->
+ tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when ?op3(T1, T2, T3) ->
  tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2,T3])}|Tokens]);

% ## Two Token Operators
- tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when ?comp2(T1, T2); ?op2(T1, T2) ->
+ tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when
+     ?comp_op2(T1, T2) ->
  tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2])}|Tokens]);

- % ## Single Token Operators
+ tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when ?op2(T1, T2) ->
+   tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2])}|Tokens]);

- tokenize([$:,T|Rest], Line, Scope, Tokens) when ?at_op(T); ?unary_op(T); ?dual_op(T) ->
+ % ## Single Token Operators
+ tokenize([$:,T|Rest], Line, Scope, Tokens) when
+     ?at_op(T); ?unary_op(T); ?dual_op(T); ?comp_op(T) ->
  tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T])}|Tokens]);

- tokenize([$:,T|Rest], Line, Scope, Tokens) when ?comp1(T); ?op1(T); T == $&; T == $. ->
+ tokenize([$:,T|Rest], Line, Scope, Tokens) when ?op1(T); T == $&; T == $. ->
  tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T])}|Tokens]);

% End of line
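With the `$:` clauses above, a comparison operator quoted as an atom flows through the same tables as every other operator. The clauses below are an illustrative summary of the atom tokens that hunk now produces; the helper is hypothetical, sketched as bare function clauses (it would need a module wrapper to compile), and the token shapes are taken directly from the clause bodies in the diff.

    %% Illustrative only: which guard admits each ':'-prefixed comparison
    %% operator, and the {atom, Line, Value} token the clause then emits.
    expected_atom_token(":<", Line) -> {atom, Line, '<'};      % single token, ?comp_op/1
    expected_atom_token(":>", Line) -> {atom, Line, '>'};      % single token, ?comp_op/1
    expected_atom_token(":<=", Line) -> {atom, Line, '<='};    % two tokens, ?comp_op2/2
    expected_atom_token(":=~", Line) -> {atom, Line, '=~'};    % two tokens, ?comp_op2/2
    expected_atom_token(":===", Line) -> {atom, Line, '==='};  % three tokens, ?comp_op3/3
    expected_atom_token(":!==", Line) -> {atom, Line, '!=='}.  % three tokens, ?comp_op3/3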
@@ -353,14 +357,13 @@ tokenize("\r\n" ++ Rest, Line, Scope, Tokens) ->
tokenize([$&,H|Rest], Line, Scope, Tokens) when ?is_digit(H) ->
  tokenize(Rest, Line, Scope, [{'&', Line, [list_to_integer([H])]}|Tokens]);

- % ## Comparison three token operators
- tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp3(T1, T2, T3) ->
-   handle_comp_op(Rest, Line, list_to_atom([T1,T2,T3]), Scope, Tokens);
-
% ## Three token operators
tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?unary_op3(T1, T2, T3) ->
  handle_unary_op(Rest, Line, unary_op, list_to_atom([T1,T2,T3]), Scope, Tokens);

+ tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp_op3(T1, T2, T3) ->
+   handle_op(Rest, Line, comp_op, list_to_atom([T1,T2,T3]), Scope, Tokens);
+
tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?op3(T1, T2, T3) ->
  handle_op(Rest, Line, list_to_atom([T1,T2,T3]), Scope, Tokens);

@@ -374,25 +377,23 @@ tokenize([T|Rest], Line, Scope, Tokens) when T == $(;
  Token = {list_to_atom([T]), Line},
  handle_terminator(Rest, Line, Scope, Token, Tokens);

- % ## Comparison two token operators
- tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?comp2(T1, T2) ->
-   handle_comp_op(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);
-
% ## Two Token Operators
+ tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?comp_op2(T1, T2) ->
+   handle_op(Rest, Line, comp_op, list_to_atom([T1, T2]), Scope, Tokens);
+
tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?op2(T1, T2) ->
  handle_op(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens);

- % ## Comparison single token operators
- tokenize([T|Rest], Line, Scope, Tokens) when ?comp1(T) ->
-   handle_comp_op(Rest, Line, list_to_atom([T]), Scope, Tokens);
-
% ## Single Token Operators
tokenize([T|Rest], Line, Scope, Tokens) when ?at_op(T) ->
  handle_unary_op(Rest, Line, at_op, list_to_atom([T]), Scope, Tokens);

tokenize([T|Rest], Line, Scope, Tokens) when ?unary_op(T) ->
  handle_unary_op(Rest, Line, unary_op, list_to_atom([T]), Scope, Tokens);

+ tokenize([T|Rest], Line, Scope, Tokens) when ?comp_op(T) ->
+   handle_op(Rest, Line, comp_op, list_to_atom([T]), Scope, Tokens);
+
tokenize([T|Rest], Line, Scope, Tokens) when ?dual_op(T) ->
  handle_unary_op(Rest, Line, dual_op, list_to_atom([T]), Scope, Tokens);

@@ -491,12 +492,6 @@ handle_strings(T, Line, H, #scope{file=File} = Scope, Tokens) ->
      interpolation_error(Error, " (for string starting at line ~B)", [Line])
  end.

- handle_comp_op([$:|Rest], Line, Op, Scope, Tokens) when ?is_space(hd(Rest)) ->
-   tokenize(Rest, Line, Scope, [{kw_identifier, Line, Op}|Tokens]);
-
- handle_comp_op(Rest, Line, Op, Scope, Tokens) ->
-   tokenize(Rest, Line, Scope, add_token_with_nl({comp_op, Line, Op}, Tokens)).
-
handle_op([$:|Rest], Line, Op, Scope, Tokens) when ?is_space(hd(Rest)) ->
  tokenize(Rest, Line, Scope, [{kw_identifier, Line, Op}|Tokens]);

@@ -513,7 +508,7 @@ handle_op([$:|Rest], Line, _Kind, Op, Scope, Tokens) when ?is_space(hd(Rest)) ->
  tokenize(Rest, Line, Scope, [{kw_identifier, Line, Op}|Tokens]);

handle_op(Rest, Line, Kind, Op, Scope, Tokens) ->
-   tokenize(Rest, Line, Scope, add_token_with_nl({Kind, Op, Line}, Tokens)).
+   tokenize(Rest, Line, Scope, add_token_with_nl({Kind, Line, Op}, Tokens)).

handle_call_identifier(Rest, Line, Op, Scope, Tokens) ->
  Token = check_call_identifier(identifier, Line, Op, Rest),
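The final hunk also fixes the tuple order pushed by handle_op/6: operator tokens now come out as {Kind, Line, Op}, the same {Category, Line, Value} shape already used by the removed handle_comp_op clause and by the other tokens in this file ({atom, Line, ...}, {number, Line, ...}, {kw_identifier, Line, ...}), with bare terminators such as {'(', Line} as the only two-element case. A tiny sketch of the invariant this restores; token_line/1 is a hypothetical helper, not part of the commit.

    %% Every token carries its line in the second slot, so downstream code can
    %% read it without caring about the token's category.
    token_line({_Category, Line, _Value}) -> Line;
    token_line({_Category, Line}) -> Line.   % bare terminators such as {'(', Line}

    %% e.g. token_line({comp_op, 5, '=='}) and token_line({atom, 5, ok}) both return 5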