@@ -8,87 +8,65 @@ function _precompile_()
 precompile(Tokenize.Tokens.Token, (Tokenize.Tokens.Kind,Tuple{Int,Int},Tuple{Int,Int},Int,Int,String))
 precompile(Tokenize.Tokens.Token, ())
 precompile(Tokenize.Tokens.kind, (Tokenize.Tokens.Token,))
-precompile(Tokenize.Tokens.startpos, (Tokenize.Tokens.Token,))
-precompile(Tokenize.Tokens.endpos, (Tokenize.Tokens.Token,))
-precompile(Tokenize.Tokens.untokenize, (Tokenize.Tokens.Token,))
-precompile(Tokenize.Tokens.untokenize, (Tokenize.Tokens.RawToken,String))
-precompile(Tokenize.Tokens.untokenize, (Array{Tokenize.Tokens.Token,1},))
-precompile(Tokenize.Tokens.untokenize, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))

 precompile(Tokenize.Lexers.is_cat_id_start, (Char, Int32,))
 precompile(Tokenize.Lexers.is_identifier_char, (Char,))
 precompile(Tokenize.Lexers.is_identifier_start_char, (Char,))
-precompile(Tokenize.Lexers.peekchar, (GenericIOBuffer{Array{UInt8,1}},))
-precompile(Tokenize.Lexers.dpeekchar, (GenericIOBuffer{Array{UInt8,1}},))
-precompile(Tokenize.Lexers.readchar, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.readchar, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken},))
-precompile(Tokenize.Lexers.next_token, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
+precompile(Tokenize.Lexers.readchar, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.next_token, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))

 precompile(Tokenize.Lexers.ishex, (Char,))
 precompile(Tokenize.Lexers.isbinary, (Char,))
 precompile(Tokenize.Lexers.isoctal, (Char,))
 precompile(Tokenize.Lexers.iswhitespace, (Char,))
 precompile(Tokenize.Lexers.Lexer, (String,))
-precompile(Tokenize.Lexers.Lexer, (String,Type{Tokenize.Tokens.Token}))
-precompile(Tokenize.Lexers.Lexer, (String,Type{Tokenize.Tokens.RawToken}))
-precompile(Tokenize.Lexers.Lexer, (GenericIOBuffer{Array{UInt8,1}},Type{Tokenize.Tokens.Token}))
-precompile(Tokenize.Lexers.Lexer, (GenericIOBuffer{Array{UInt8,1}},Type{Tokenize.Tokens.RawToken}))
+precompile(Tokenize.Lexers.Lexer, (GenericIOBuffer{Array{UInt8,1}},))
 precompile(Tokenize.Lexers.tokenize, (String,))

-precompile(Tokenize.Lexers.iterate, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.iterate, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken},))
-precompile(Tokenize.Lexers.iterate, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token}, Bool,))
-precompile(Tokenize.Lexers.iterate, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken}, Bool,))
-precompile(Tokenize.Lexers.iterate, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token}, Bool,))
-precompile(Tokenize.Lexers.iterate, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken}, Bool,))
-precompile(Tokenize.Lexers.startpos, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.startpos, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken},))
-precompile(Tokenize.Lexers.startpos!, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},Int))
-precompile(Tokenize.Lexers.startpos!, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken},Int))
-precompile(Tokenize.Lexers.start_token!, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.start_token!, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken},))
+precompile(Tokenize.Lexers.iterate, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, Bool,))
+precompile(Tokenize.Lexers.iterate, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, Bool,))
+precompile(Tokenize.Lexers.startpos, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.startpos!, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},Int))
+precompile(Tokenize.Lexers.start_token!, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))

+precompile(Tokenize.Lexers.lex_greater, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_prime, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_digit, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},Tokenize.Tokens.Kind))
+precompile(Tokenize.Lexers.lex_identifier, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, Char,))
+precompile(Tokenize.Lexers.lex_less, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_forwardslash, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_minus, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_xor, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_equal, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_bar, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_quote, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_plus, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_dot, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_exclaim, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_colon, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_percent, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_comment, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_comment, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},Bool))
+precompile(Tokenize.Lexers.lex_division, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_circumflex, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_backslash, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_star, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))
+precompile(Tokenize.Lexers.lex_amper, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},))

+precompile(Tokenize.Lexers.lex_whitespace, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},Char))

-precompile(Tokenize.Lexers.lex_greater, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_prime, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_digit, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},Tokenize.Tokens.Kind))
-precompile(Tokenize.Lexers.lex_identifier, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token}, Char,))
-precompile(Tokenize.Lexers.lex_less, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_forwardslash, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_minus, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_xor, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_equal, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_bar, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_quote, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_plus, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_dot, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_exclaim, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_colon, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_percent, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_comment, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_comment, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},Bool))
-precompile(Tokenize.Lexers.lex_division, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_circumflex, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_backslash, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_star, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-precompile(Tokenize.Lexers.lex_amper, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-
-precompile(Tokenize.Lexers.lex_whitespace, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},))
-
-precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token}, Char,))
-precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token}, String,))
-precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},typeof(Base.isdigit),))
-precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},typeof(Tokenize.Lexers.iswhitespace),))
-precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},typeof(Tokenize.Lexers.is_identifier_char),))
-precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token},typeof(Tokenize.Lexers.ishex),))
-precompile(Tokenize.Lexers.accept_batch, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token}, typeof(Tokenize.Lexers.iswhitespace),))
-precompile(Tokenize.Lexers.accept_batch, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.Token}, typeof(Tokenize.Lexers.isdigit),))
-
-precompile(Tokenize.Lexers.accept_batch, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken}, typeof(Tokenize.Lexers.iswhitespace),))
-precompile(Tokenize.Lexers.accept_batch, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken}, typeof(Tokenize.Lexers.isdigit),))
-precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}},Tokenize.Tokens.RawToken}, Char,))
+precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, Char,))
+precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, String,))
+precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},typeof(Base.isdigit),))
+precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},typeof(Tokenize.Lexers.iswhitespace),))
+precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},typeof(Tokenize.Lexers.is_identifier_char),))
+precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}},typeof(Tokenize.Lexers.ishex),))
+precompile(Tokenize.Lexers.accept_batch, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, typeof(Tokenize.Lexers.iswhitespace),))
+precompile(Tokenize.Lexers.accept_batch, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, typeof(Tokenize.Lexers.isdigit),))

+precompile(Tokenize.Lexers.accept_batch, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, typeof(Tokenize.Lexers.iswhitespace),))
+precompile(Tokenize.Lexers.accept_batch, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, typeof(Tokenize.Lexers.isdigit),))
+precompile(Tokenize.Lexers.accept, (Tokenize.Lexers.Lexer{GenericIOBuffer{Array{UInt8,1}}}, Char,))

 precompile(Tokenize.Lexers.readchar, (GenericIOBuffer{Array{UInt8,1}},))
 end
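
For reference, a minimal sketch of exercising the code paths these statements precompile, assuming the Tokenize package is installed. It calls only functions that appear in the list above (`Tokenize.Lexers.tokenize`, iteration over the returned `Lexer`, and `Tokenize.Tokens.kind`):

```julia
using Tokenize

# Lexing a short string drives the same entry points the precompile list
# targets: the Lexer constructor, iterate/next_token, and the lex_* helpers.
for t in Tokenize.Lexers.tokenize("x += 1  # comment")
    println(Tokenize.Tokens.kind(t))
end
```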