Skip to content

Commit 6c6dccf

Browse files
author
pnlmon_oth
authored
Tokenizer optimization
From my testing, the optimization improved the performance by 37% under minify (0.4992s to 0.3104s)
1 parent 211606e commit 6c6dccf

File tree

1 file changed

+7
-16
lines changed

1 file changed

+7
-16
lines changed

src/prometheus/tokenizer.lua

Lines changed: 7 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -45,19 +45,7 @@ Tokenizer.EOF_TOKEN = {
4545
}
4646

4747
-- Map a flat byte offset `i` into (line, column), both 1-based.
-- Replaces the original per-character loop with two single-pass string
-- operations (the optimization from this commit), but fixes the column
-- formula: the committed version subtracted the count of '\r' characters,
-- which yields a wrong column for every line after the first in
-- '\n'-terminated sources (and for '\r\n' sources as well).
--
-- Semantics match the original loop exactly:
--   line    = 1 + number of '\n' in source[1..i]
--   linePos = i - (offset of the last '\n' in source[1..i], or 0) + 1
-- so a position sitting on a '\n' reports column 1, and '\r' counts as an
-- ordinary character, as in the original accumulator.
local function getPosition(source, i)
	local prefix = source:sub(1, i);
	-- gsub's second return value is the number of replacements, i.e. the
	-- number of newlines in the prefix.
	local _, newlineCount = prefix:gsub("\n", "");
	-- '()' position capture gives the byte offset of the last '\n';
	-- 0 when the prefix contains no newline at all.
	local lastNewline = prefix:match("()\n[^\n]*$") or 0;
	return newlineCount + 1, i - lastNewline + 1;
end
6250

6351
local function token(self, startPos, kind, value)
@@ -327,10 +315,13 @@ end
327315
-- Lex the Next Token as Identifier or Keyword
328316
function Tokenizer:ident()
329317
local startPos = self.index;
330-
local source = expect(self, self.IdentCharsLookup);
318+
local source = expect(self, self.IdentCharsLookup)
319+
local sourceAddContent = {source}
331320
while(is(self, self.IdentCharsLookup)) do
332-
source = source .. get(self);
321+
-- source = source .. get(self);
322+
table.insert(sourceAddContent, get(self))
333323
end
324+
source = table.concat(sourceAddContent)
334325
if(self.KeywordsLookup[source]) then
335326
return token(self, startPos, Tokenizer.TokenKind.Keyword, source);
336327
end
@@ -521,4 +512,4 @@ function Tokenizer:scanAll()
521512
return tb
522513
end
523514

524-
return Tokenizer
515+
return Tokenizer

0 commit comments

Comments (0)