@@ -1763,17 +1763,22 @@ local function newTokenAt(tok, locationTok)
17631763 return tok
17641764end
17651765
1766- local function doExpansions ( tokensRaw , fileBuffers , params , stats )
1766+ local function doEarlyExpansions ( tokensToExpand , fileBuffers , params , stats )
17671767 if not stats .hasPreprocessorCode then
1768- return tokensRaw
1768+ return tokensToExpand
17691769 end
17701770
1771+ --
1772+ -- Here we expand simple things that make it easier for
1773+ -- doLateExpansions() to do more elaborate expansions.
1774+ --
1775+
17711776 local tokenStack = {} -- We process the last token first, and we may push new tokens onto the stack.
17721777 local insertCount = 0
17731778 local tokens = {} -- To return.
17741779
1775- for i = # tokensRaw , 1 , - 1 do
1776- tableInsert (tokenStack , tokensRaw [i ])
1780+ for i = # tokensToExpand , 1 , - 1 do
1781+ tableInsert (tokenStack , tokensToExpand [i ])
17771782 end
17781783
17791784 while tokenStack [1 ] do
@@ -1792,11 +1797,55 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
17921797 table.remove (tokenStack ) -- "@line"
17931798 tableInsert (tokens , newTokenAt ({type = " number" , value = ppKeywordTok .line , representation = F (" %d" ,ppKeywordTok .line )}, ppKeywordTok ))
17941799
1800+ else
1801+ -- Expand later.
1802+ tableInsert (tokens , ppKeywordTok )
1803+ table.remove (tokenStack )
1804+ end
1805+
1806+ -- Backtick string.
1807+ elseif isToken (tok , " string" ) and tok .representation :find " ^`" then
1808+ local stringTok = tok
1809+ stringTok .representation = F (" %q" , stringTok .value )
1810+
1811+ tableInsert (tokens , stringTok )
1812+ table.remove (tokenStack )
1813+
1814+ -- Anything else.
1815+ else
1816+ tableInsert (tokens , tok )
1817+ table.remove (tokenStack )
1818+ end
1819+ end -- while tokenStack
1820+
1821+ return tokens
1822+ end
1823+
1824+ local function doLateExpansions (tokensToExpand , fileBuffers , params , stats )
1825+ if not stats .hasPreprocessorCode then
1826+ return tokensToExpand
1827+ end
1828+
1829+ local tokenStack = {} -- We process the last token first, and we may push new tokens onto the stack.
1830+ local insertCount = 0
1831+ local tokens = {} -- To return.
1832+
1833+ for i = # tokensToExpand , 1 , - 1 do
1834+ tableInsert (tokenStack , tokensToExpand [i ])
1835+ end
1836+
1837+ while tokenStack [1 ] do
1838+ local tok = tokenStack [# tokenStack ]
1839+
1840+ -- Keyword.
1841+ if isToken (tok , " pp_keyword" ) then
1842+ local ppKeywordTok = tok
1843+
17951844 -- @insert "name"
17961845 -- @insert identifier ( argument1, ... )
17971846 -- @insert identifier " ... "
17981847 -- @insert identifier { ... }
1799- elseif ppKeywordTok .value == " insert" then
1848+ if ppKeywordTok .value == " insert" then
18001849 local tokNext , iNext = getNextUsableToken (tokenStack , # tokenStack - 1 , nil , - 1 )
18011850 if not (isTokenAndNotNil (tokNext , " string" ) or isTokenAndNotNil (tokNext , " identifier" )) then
18021851 errorAtToken (
@@ -1849,6 +1898,8 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
18491898 end
18501899
18511900 local toInsertTokens = _tokenize (toInsertLua , toInsertName , true , params .backtickStrings , params .jitSyntax )
1901+ toInsertTokens = doEarlyExpansions (toInsertTokens , fileBuffers , params , stats )
1902+
18521903 for i = # toInsertTokens , 1 , - 1 do
18531904 tableInsert (tokenStack , toInsertTokens [i ])
18541905 end
@@ -1886,6 +1937,10 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
18861937 elseif isTokenAndNotNil (tokNext , " punctuation" , " {" ) then
18871938 popTokens (tokenStack , iNext ) -- "{"
18881939
1940+ --
1941+ -- (Similar to the code for `@insert identifier()` below.)
1942+ --
1943+
18891944 -- Add "!!(ident".
18901945 tableInsert (tokens , newTokenAt ({type = " pp_entry" , value = " !!" , representation = " !!" , double = true }, ppKeywordTok ))
18911946 tableInsert (tokens , newTokenAt ({type = " punctuation" , value = " (" , representation = " (" }, ppKeywordTok ))
@@ -1902,14 +1957,6 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
19021957 if not tok then
19031958 errorAtToken (fileBuffers , tableStartTok , nil , " Macro" , " Syntax error: Could not find end of table constructor before EOF." )
19041959
1905- -- @Copypaste from above. @Cleanup
1906- elseif isToken (tok , " pp_keyword" , " file" ) then
1907- local ppKwTokInner = table.remove (tokenStack ) -- "@file"
1908- tableInsert (argTokens , newTokenAt ({type = " string" , value = ppKwTokInner .file , representation = F (" %q" ,ppKwTokInner .file )}, ppKwTokInner ))
1909- elseif isToken (tok , " pp_keyword" , " line" ) then
1910- local ppKwTokInner = table.remove (tokenStack ) -- "@line"
1911- tableInsert (argTokens , newTokenAt ({type = " number" , value = ppKwTokInner .line , representation = F (" %d" ,ppKwTokInner .line )}, ppKwTokInner ))
1912-
19131960 elseif tok .type :find " ^pp_" then
19141961 errorAtToken (fileBuffers , tok , nil , " Macro" , " Preprocessor code not supported in macros. (Macro starting %s)" , getRelativeLocationText (ppKeywordTok , tok ))
19151962
@@ -1988,14 +2035,6 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
19882035 if not tok then
19892036 errorAtToken (fileBuffers , parensStartTok , nil , " Macro" , " Syntax error: Could not find end of argument list before EOF." )
19902037
1991- -- @Copypaste from above. @Cleanup
1992- elseif isToken (tok , " pp_keyword" , " file" ) then
1993- local ppKwTokInner = table.remove (tokenStack ) -- "@file"
1994- tableInsert (argTokens , newTokenAt ({type = " string" , value = ppKwTokInner .file , representation = F (" %q" ,ppKwTokInner .file )}, ppKwTokInner ))
1995- elseif isToken (tok , " pp_keyword" , " line" ) then
1996- local ppKwTokInner = table.remove (tokenStack ) -- "@line"
1997- tableInsert (argTokens , newTokenAt ({type = " number" , value = ppKwTokInner .line , representation = F (" %d" ,ppKwTokInner .line )}, ppKwTokInner ))
1998-
19992038 elseif tok .type :find " ^pp_" then
20002039 errorAtToken (fileBuffers , tok , nil , " Macro" , " Preprocessor code not supported in macros. (Macro starting %s)" , getRelativeLocationText (ppKeywordTok , tok ))
20012040
@@ -2084,20 +2123,13 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
20842123 end
20852124
20862125 else
2087- assert ( false )
2126+ errorAtToken ( fileBuffers , tokNext , nil , " Macro " , " Internal error. (%s) " , tokNext . type )
20882127 end
20892128
20902129 else
20912130 errorAtToken (fileBuffers , ppKeywordTok , ppKeywordTok .position + 1 , " Parser" , " Unknown preprocessor keyword '%s'." , ppKeywordTok .value )
20922131 end
20932132
2094- -- Backtick string.
2095- elseif isToken (tok , " string" ) and tok .representation :find " ^`" then
2096- tok .representation = F (" %q" , tok .value )
2097-
2098- tableInsert (tokens , tok )
2099- table.remove (tokenStack )
2100-
21012133 -- Anything else.
21022134 else
21032135 tableInsert (tokens , tok )
@@ -2146,23 +2178,23 @@ local function _processFileOrString(params, isFile)
21462178 luaUnprocessed = rest
21472179 end
21482180
2149- local tokensRaw = _tokenize (luaUnprocessed , pathIn , true , params .backtickStrings , params .jitSyntax )
2150- -- printTokens(tokensRaw )
2181+ local tokens = _tokenize (luaUnprocessed , pathIn , true , params .backtickStrings , params .jitSyntax )
2182+ -- printTokens(tokens )
21512183
21522184 -- Info variables.
2153- local lastTok = tokensRaw [ # tokensRaw ]
2185+ local lastTok = tokens [ # tokens ]
21542186
21552187 local stats = {
21562188 processedByteCount = # luaUnprocessed ,
21572189 lineCount = (specialFirstLine and 1 or 0 ) + (lastTok and lastTok .line + countString (lastTok .representation , " \n " , true ) or 0 ),
2158- lineCountCode = getLineCountWithCode (tokensRaw ),
2190+ lineCountCode = getLineCountWithCode (tokens ),
21592191 tokenCount = 0 , -- Set later.
21602192 hasPreprocessorCode = false ,
21612193 hasMetaprogram = false ,
21622194 insertedNames = {},
21632195 }
21642196
2165- for _ , tok in ipairs (tokensRaw ) do
2197+ for _ , tok in ipairs (tokens ) do
21662198 if isToken (tok , " pp_entry" ) or isToken (tok , " pp_keyword" , " insert" ) then
21672199 stats .hasPreprocessorCode = true
21682200 stats .hasMetaprogram = true
@@ -2173,7 +2205,8 @@ local function _processFileOrString(params, isFile)
21732205 end
21742206 end
21752207
2176- local tokens = doExpansions (tokensRaw , fileBuffers , params , stats ) -- Tokens for constructing the metaprogram.
2208+ tokens = doEarlyExpansions (tokens , fileBuffers , params , stats )
2209+ tokens = doLateExpansions (tokens , fileBuffers , params , stats )
21772210 stats .tokenCount = # tokens
21782211
21792212 -- Generate metaprogram.
0 commit comments