 - getFileContents, fileExists
 - printf
 - run
-- tokenize, newToken, concatTokens, removeUselessTokens, eachToken, isToken
+- tokenize, newToken, concatTokens, removeUselessTokens, eachToken, isToken, getNextUsefulToken
 - toLua, serialize
 Only in metaprogram:
 - outputValue, outputLua
 -- eachToken()
 --   Loop through tokens.
 --   for index, token in eachToken( tokens [, ignoreUselessTokens=false ] ) do
-local function getNextUsefulToken(tokens, i)
+local function nextUsefulToken(tokens, i)
 	while true do
 		i = i + 1
 		local tok = tokens[i]
@@ -990,12 +990,39 @@ local function getNextUsefulToken(tokens, i)
 end
 function metaFuncs.eachToken(tokens, ignoreUselessTokens)
 	if ignoreUselessTokens then
-		return getNextUsefulToken, tokens, 0
+		return nextUsefulToken, tokens, 0
 	else
 		return ipairs(tokens)
 	end
 end
 
+-- getNextUsefulToken()
+--   Get the next token that isn't a whitespace or comment. Returns nil if no more tokens are found.
+--   token, index = getNextUsefulToken( tokens, startIndex [, steps=1 ] )
+--   Specify a negative steps value to get an earlier token.
+function metaFuncs.getNextUsefulToken(tokens, i1, steps)
+	steps = (steps or 1)
+
+	local i2, dir
+	if steps == 0 then
+		return tokens[i1], i1
+	elseif steps < 0 then
+		i2, dir = 1, -1
+	else
+		i2, dir = #tokens, 1
+	end
+
+	for i = i1, i2, dir do
+		local tok = tokens[i]
+		if not USELESS_TOKENS[tok.type] then
+			steps = steps - dir
+			if steps == 0 then return tok, i end
+		end
+	end
+
+	return nil
+end
+
 -- newToken()
 --   Create a new token. Different token types take different arguments.
 --   token = newToken( tokenType, ... )
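For context, here is a minimal usage sketch of the iteration helpers shown in the diff above. It is not part of the commit: it assumes a metaprogram environment where tokenize(), eachToken(), getNextUsefulToken() and printf() are available as globals (as in the function list at the top), that tokenize() accepts a string of Lua code, and that printf() behaves like print(string.format(...)). Only the token .type field, which the implementation itself uses, is relied on.

	-- Hypothetical sketch; the string passed to tokenize() is arbitrary.
	local tokens = tokenize("local x = 1 -- a comment")

	-- Passing true makes eachToken() skip whitespace and comment tokens.
	for index, token in eachToken(tokens, true) do
		printf("%d: %s", index, token.type)
	end

	-- getNextUsefulToken() finds a non-whitespace, non-comment token
	-- starting from the given index; a negative steps value looks for
	-- an earlier token instead.
	local tok, i = getNextUsefulToken(tokens, 1)
	if tok then
		printf("useful token at index %d: %s", i, tok.type)
	end

Since the new helper returns both the token and its index, a caller can feed the returned index back in to keep stepping through the useful tokens.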