
Commit 6c47475

Fixed backtick strings not working in macros.
Simplified expansion process.
1 parent c8845e7 commit 6c47475
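
For context, here is a minimal example of the behaviour this commit fixes, adapted from the new test case in tests/runTests.lua (run from the repository root so loadfile finds preprocess.lua):

	local pp = assert(loadfile"preprocess.lua")()

	local luaOut = assert(pp.processString{ backtickStrings=true, code=[[
		!function echo(v) return v end
		f = @insert echo(function() return a,`b` end)
	]]})

	-- Before this commit, the backtick string inside the macro argument broke
	-- the expansion; the processed output is now equivalent to:
	--   f = function() return a,"b" end
	print(luaOut)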

Showing 4 changed files with 90 additions and 40 deletions.


preprocess.lua

Lines changed: 68 additions & 35 deletions
@@ -1763,17 +1763,22 @@ local function newTokenAt(tok, locationTok)
 	return tok
 end

-local function doExpansions(tokensRaw, fileBuffers, params, stats)
+local function doEarlyExpansions(tokensToExpand, fileBuffers, params, stats)
 	if not stats.hasPreprocessorCode then
-		return tokensRaw
+		return tokensToExpand
 	end

+	--
+	-- Here we expand simple things that makes it easier for
+	-- doLateExpansions() to do more elaborate expansions.
+	--
+
 	local tokenStack = {} -- We process the last token first, and we may push new tokens onto the stack.
 	local insertCount = 0
 	local tokens = {} -- To return.

-	for i = #tokensRaw, 1, -1 do
-		tableInsert(tokenStack, tokensRaw[i])
+	for i = #tokensToExpand, 1, -1 do
+		tableInsert(tokenStack, tokensToExpand[i])
 	end

 	while tokenStack[1] do

@@ -1792,11 +1797,55 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
 				table.remove(tokenStack) -- "@line"
 				tableInsert(tokens, newTokenAt({type="number", value=ppKeywordTok.line, representation=F("%d",ppKeywordTok.line)}, ppKeywordTok))

+			else
+				-- Expand later.
+				tableInsert(tokens, ppKeywordTok)
+				table.remove(tokenStack)
+			end
+
+		-- Backtick string.
+		elseif isToken(tok, "string") and tok.representation:find"^`" then
+			local stringTok = tok
+			stringTok.representation = F("%q", stringTok.value)
+
+			tableInsert(tokens, stringTok)
+			table.remove(tokenStack)
+
+		-- Anything else.
+		else
+			tableInsert(tokens, tok)
+			table.remove(tokenStack)
+		end
+	end--while tokenStack
+
+	return tokens
+end
+
+local function doLateExpansions(tokensToExpand, fileBuffers, params, stats)
+	if not stats.hasPreprocessorCode then
+		return tokensToExpand
+	end
+
+	local tokenStack = {} -- We process the last token first, and we may push new tokens onto the stack.
+	local insertCount = 0
+	local tokens = {} -- To return.
+
+	for i = #tokensToExpand, 1, -1 do
+		tableInsert(tokenStack, tokensToExpand[i])
+	end
+
+	while tokenStack[1] do
+		local tok = tokenStack[#tokenStack]
+
+		-- Keyword.
+		if isToken(tok, "pp_keyword") then
+			local ppKeywordTok = tok
+
 			-- @insert "name"
 			-- @insert identifier ( argument1, ... )
 			-- @insert identifier " ... "
 			-- @insert identifier { ... }
-			elseif ppKeywordTok.value == "insert" then
+			if ppKeywordTok.value == "insert" then
 				local tokNext, iNext = getNextUsableToken(tokenStack, #tokenStack-1, nil, -1)
 				if not (isTokenAndNotNil(tokNext, "string") or isTokenAndNotNil(tokNext, "identifier")) then
 					errorAtToken(

@@ -1849,6 +1898,8 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
 				end

 				local toInsertTokens = _tokenize(toInsertLua, toInsertName, true, params.backtickStrings, params.jitSyntax)
+				toInsertTokens = doEarlyExpansions(toInsertTokens, fileBuffers, params, stats)
+
 				for i = #toInsertTokens, 1, -1 do
 					tableInsert(tokenStack, toInsertTokens[i])
 				end

@@ -1886,6 +1937,10 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
 				elseif isTokenAndNotNil(tokNext, "punctuation", "{") then
 					popTokens(tokenStack, iNext) -- "{"

+					--
+					-- (Similar code as `@insert identifier()` below.)
+					--
+
 					-- Add "!!(ident".
 					tableInsert(tokens, newTokenAt({type="pp_entry", value="!!", representation="!!", double=true}, ppKeywordTok))
 					tableInsert(tokens, newTokenAt({type="punctuation", value="(", representation="(" }, ppKeywordTok))

@@ -1902,14 +1957,6 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
 						if not tok then
 							errorAtToken(fileBuffers, tableStartTok, nil, "Macro", "Syntax error: Could not find end of table constructor before EOF.")

-						-- @Copypaste from above. @Cleanup
-						elseif isToken(tok, "pp_keyword", "file") then
-							local ppKwTokInner = table.remove(tokenStack) -- "@file"
-							tableInsert(argTokens, newTokenAt({type="string", value=ppKwTokInner.file, representation=F("%q",ppKwTokInner.file)}, ppKwTokInner))
-						elseif isToken(tok, "pp_keyword", "line") then
-							local ppKwTokInner = table.remove(tokenStack) -- "@line"
-							tableInsert(argTokens, newTokenAt({type="number", value=ppKwTokInner.line, representation=F("%d",ppKwTokInner.line)}, ppKwTokInner))
-
 						elseif tok.type:find"^pp_" then
 							errorAtToken(fileBuffers, tok, nil, "Macro", "Preprocessor code not supported in macros. (Macro starting %s)", getRelativeLocationText(ppKeywordTok, tok))

@@ -1988,14 +2035,6 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
 						if not tok then
 							errorAtToken(fileBuffers, parensStartTok, nil, "Macro", "Syntax error: Could not find end of argument list before EOF.")

-						-- @Copypaste from above. @Cleanup
-						elseif isToken(tok, "pp_keyword", "file") then
-							local ppKwTokInner = table.remove(tokenStack) -- "@file"
-							tableInsert(argTokens, newTokenAt({type="string", value=ppKwTokInner.file, representation=F("%q",ppKwTokInner.file)}, ppKwTokInner))
-						elseif isToken(tok, "pp_keyword", "line") then
-							local ppKwTokInner = table.remove(tokenStack) -- "@line"
-							tableInsert(argTokens, newTokenAt({type="number", value=ppKwTokInner.line, representation=F("%d",ppKwTokInner.line)}, ppKwTokInner))
-
 						elseif tok.type:find"^pp_" then
 							errorAtToken(fileBuffers, tok, nil, "Macro", "Preprocessor code not supported in macros. (Macro starting %s)", getRelativeLocationText(ppKeywordTok, tok))

@@ -2084,20 +2123,13 @@ local function doExpansions(tokensRaw, fileBuffers, params, stats)
 					end

 				else
-					assert(false)
+					errorAtToken(fileBuffers, tokNext, nil, "Macro", "Internal error. (%s)", tokNext.type)
 				end

 			else
 				errorAtToken(fileBuffers, ppKeywordTok, ppKeywordTok.position+1, "Parser", "Unknown preprocessor keyword '%s'.", ppKeywordTok.value)
 			end

-		-- Backtick string.
-		elseif isToken(tok, "string") and tok.representation:find"^`" then
-			tok.representation = F("%q", tok.value)
-
-			tableInsert(tokens, tok)
-			table.remove(tokenStack)
-
 		-- Anything else.
 		else
 			tableInsert(tokens, tok)

@@ -2146,23 +2178,23 @@ local function _processFileOrString(params, isFile)
 		luaUnprocessed = rest
 	end

-	local tokensRaw = _tokenize(luaUnprocessed, pathIn, true, params.backtickStrings, params.jitSyntax)
-	-- printTokens(tokensRaw)
+	local tokens = _tokenize(luaUnprocessed, pathIn, true, params.backtickStrings, params.jitSyntax)
+	-- printTokens(tokens)

 	-- Info variables.
-	local lastTok = tokensRaw[#tokensRaw]
+	local lastTok = tokens[#tokens]

 	local stats = {
 		processedByteCount = #luaUnprocessed,
 		lineCount = (specialFirstLine and 1 or 0) + (lastTok and lastTok.line + countString(lastTok.representation, "\n", true) or 0),
-		lineCountCode = getLineCountWithCode(tokensRaw),
+		lineCountCode = getLineCountWithCode(tokens),
 		tokenCount = 0, -- Set later.
 		hasPreprocessorCode = false,
 		hasMetaprogram = false,
 		insertedNames = {},
 	}

-	for _, tok in ipairs(tokensRaw) do
+	for _, tok in ipairs(tokens) do
 		if isToken(tok, "pp_entry") or isToken(tok, "pp_keyword", "insert") then
 			stats.hasPreprocessorCode = true
 			stats.hasMetaprogram = true

@@ -2173,7 +2205,8 @@ local function _processFileOrString(params, isFile)
 		end
 	end

-	local tokens = doExpansions(tokensRaw, fileBuffers, params, stats) -- Tokens for constructing the metaprogram.
+	tokens = doEarlyExpansions(tokens, fileBuffers, params, stats)
+	tokens = doLateExpansions (tokens, fileBuffers, params, stats)
 	stats.tokenCount = #tokens

 	-- Generate metaprogram.
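
The key change in preprocess.lua is that backtick strings are now normalized in doEarlyExpansions(), before doLateExpansions() assembles macro invocations such as @insert func(...). A rough standalone sketch of that normalization step, assuming F is string.format and using a simplified token table with only type, value and representation fields:

	local F = string.format

	-- Simplified stand-in for the preprocessor's backtick-string handling:
	-- rewrite the token's representation from `foo` to "foo" so that the
	-- later macro expansion only ever sees ordinary quoted strings.
	local function normalizeBacktickString(tok)
		if tok.type == "string" and tok.representation:find"^`" then
			tok.representation = F("%q", tok.value)
		end
		return tok
	end

	local tok = normalizeBacktickString{type="string", value="mars", representation="`mars`"}
	print(tok.representation) --> "mars"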

tests/quickTest.lua2p

Lines changed: 3 additions & 0 deletions
@@ -118,9 +118,12 @@ local ok = 1==1
 local s = @insert PASS_THROUGH "foo"
 local t = @@ PASS_THROUGH { 496, b=true } -- @@func() means the same as @insert func().

+-- local s = @insert PASS_THROUGH `foo`
+
 local f = @insert PASS_THROUGH(function(a, b)
 	while true do
 		repeat until arePlanetsAligned("mars", "jupiter")
+		-- repeat until arePlanetsAligned(`mars`, `jupiter`)
 		break
 	end
 	return "", nil

tests/runQuickTest.cmd

Lines changed: 1 addition & 0 deletions
@@ -4,6 +4,7 @@ CD /D "%~dp0.."
 IF NOT EXIST local MD local

 lua ./preprocess-cl.lua --debug --saveinfo=local/info.lua --data="Hello, world!" tests/quickTest.lua2p
+REM lua ./preprocess-cl.lua --debug --saveinfo=local/info.lua --data="Hello, world!" tests/quickTest.lua2p --backtickstrings
 REM lua ./preprocess-cl.lua --debug --saveinfo=local/info.lua --data="Hello, world!" tests/quickTest.lua2p --linenumbers

 REM lua ./preprocess-cl.lua --debug --saveinfo=local/info.lua --data="Hello, world!" --outputpaths tests/quickTest.lua2p local/quickTest.output.lua

tests/runTests.lua

Lines changed: 18 additions & 5 deletions
@@ -226,6 +226,12 @@ doTest("Preprocessor keywords", function()
 	]]})
 	assertCodeOutput(luaOut, [[s = ""]])

+	local luaOut = assert(pp.processString{ backtickStrings=true, code=[[
+		!function echo(v) return v end
+		s = @insert echo``
+	]]})
+	assertCodeOutput(luaOut, [[s = ""]])
+
 	local luaOut = assert(pp.processString{ code=[[
 		!function echo(v) return v end
 		t = @insert echo{}

@@ -238,6 +244,12 @@ doTest("Preprocessor keywords", function()
 	]]})
 	assertCodeOutput(luaOut, [[f = function() return a,b end]])

+	local luaOut = assert(pp.processString{ backtickStrings=true, code=[[
+		!function echo(v) return v end
+		f = @insert echo(function() return a,`b` end)
+	]]})
+	assertCodeOutput(luaOut, [[f = function() return a,"b" end]])
+
 	-- Invalid: Ambiguous syntax.
 	local luaOut = pp.processString{ code=[[
 		!function void() return "" end

@@ -272,12 +284,13 @@ doTest("Serialize", function()
 	local pp = assert(loadfile"preprocess.lua")()

 	local t = {
-		z = 99,
-		a = 2,
+		z = 99,
+		a = 2,
+		["f"] = 176,
 	}

 	local luaOut = assert(pp.toLua(t))
-	assertCodeOutput(luaOut, [[{a=2,z=99}]]) -- Note: Table keys should be sorted.
+	assertCodeOutput(luaOut, [[{a=2,f=176,z=99}]]) -- Note: Table keys should be sorted.
 end)


@@ -354,9 +367,9 @@ end

 print()
 if countFails == 0 then
-	print("All "..countResults.." tests passed!")
+	print("All "..countResults.." tests passed! :)")
 else
-	print(countFails.."/"..countResults.." tests FAILED!!!")
+	print(countFails.."/"..countResults.." tests FAILED!!! :O")
 end

 os.exit(countFails == 0 and 0 or 1)
