
Commit 366b15a

Added comments and function documentation to SQUISH.LUA
1 parent 182bd13 commit 366b15a

File tree

1 file changed: +19 -6 lines changed

util/SQUISH.LUA

Lines changed: 19 additions & 6 deletions
@@ -1,5 +1,8 @@
 #!/usr/bin/env lua
 
+---Remove whitespace, comments and newlines from a Lua script.
+---The file is squished in place, so make a backup before running.
+---@param fileName string The path to the Lua file to squish.
 local function Squish(fileName)
     local f, e = io.open(fileName, "r")
 
@@ -8,6 +11,11 @@ local function Squish(fileName)
         return
     end
 
+    ---Read a Lua script and create a tokenized list, splitting the codebase into one of four categories:
+    ---code, comment, shebang and string.
+    ---Doing this first makes it easier to parse the codebase correctly later.
+    ---@param file file The open file with its pointer set at the start
+    ---@return table A list of the tokenized sections ready for further parsing
     local function tokenizeFile(file)
         local data, tokens, pos = file:read("all"), {}, 1
 
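As an aside, only the fields token.type and token.value appear later in this diff, so the exact shape of the list tokenizeFile returns is an assumption; for a short script it presumably looks something like the sketch below (the split points and contents are purely illustrative):

    -- Hypothetical token list; field names are taken from the diff, everything else is a guess
    local tokens = {
        { type = "shebang", value = "#!/usr/bin/env lua\n" },
        { type = "code",    value = "local greeting = "    },
        { type = "string",  value = '"hello"'              },
        { type = "comment", value = "-- say hello"         },
        { type = "code",    value = "\nprint(greeting)\n"  },
    }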
@@ -94,13 +102,15 @@ local function Squish(fileName)
         return tokens
     end
 
+    ---Remove whitespace from code tokens when it is not required by an interpreter
+    ---@param tokens table The tokenized list to process
     local function removeWhiteSpace(tokens)
         for _, token in ipairs(tokens) do
             if token.type == "code" then
-                token.value = token.value:gsub("%s+", " ") -- Replace multiple spaces with a single space
-                token.value = token.value:gsub("%s*([%(%){}<>.,:;=%+%-%*/%^])%s*", "%1") -- Remove spaces around operators and punctuation
-                token.value = token.value:gsub("^%s+", "") -- Remove leading whitespace
-                token.value = token.value:gsub("%s+$", "") -- Remove trailing whitespace
+                token.value = token.value:gsub("%s+", " ")                               -- Replace multiple spaces with a single space
+                token.value = token.value:gsub("%s*([%(%){}<>.,:;=%+%-%*/%^])%s*", "%1") -- Remove spaces around operators and punctuation
+                token.value = token.value:gsub("^%s+", "")                               -- Remove leading whitespace
+                token.value = token.value:gsub("%s+$", "")                               -- Remove trailing whitespace
             end
         end
     end
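To make the four substitutions above concrete, here is roughly how one code token is transformed; the sample input is invented, and only tokens of type "code" ever reach this point, so strings and comments are left untouched:

    local value = "  local total =  a + b   "
    value = value:gsub("%s+", " ")                               -- " local total = a + b "
    value = value:gsub("%s*([%(%){}<>.,:;=%+%-%*/%^])%s*", "%1") -- " local total=a+b "
    value = value:gsub("^%s+", "")                               -- "local total=a+b "
    value = value:gsub("%s+$", "")                               -- "local total=a+b"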
@@ -117,6 +127,8 @@ local function Squish(fileName)
 
     for _, token in ipairs(tokens) do -- Drop comments and concatenate code and strings back together
         if token.type ~= "comment" then
+            ---Determine whether a separating space is required between this token and the previous one.
+            ---@return string The string that should be concatenated before the new token is written
             local function pad()
                 local last, start = #data > 1 and data:sub(-1, -1), #token.value > 1 and token.value:sub(1, 1)
                 local function needsPadding(x) return x and (x:find("%a") or x:find("%d")) end
@@ -127,15 +139,16 @@ local function Squish(fileName)
             end
             local sep = pad()
             data = data .. sep .. token.value
-        else
+        else -- token.type == "comment"
             local pragma = token.value:match("^--%s*%#%s*squish%s+([%w%-]+)")
 
-            if pragma == "keep-eol" then
+            if pragma == "keep-eol" then -- Add a new line in place of this comment
                 data = data .. '\n'
             end
         end
     end
 
+    -- Overwrite the file initially read from with the squished version
     f, e = io.open(fileName, "w")
     if not f then
         print(e)
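Putting the comment-handling branch together: ordinary comments disappear from the output entirely, but a pragma comment whose value is keep-eol leaves a newline behind where it stood. A small hypothetical input/output pair, assuming the rest of the squishing behaves as shown in the earlier hunks (without the pragma, the pad logic would presumably join the two statements on one line with a single space):

    -- Input file:
    local a = 1
    --# squish keep-eol
    local b = 2

    -- Squished result: the pragma comment itself is dropped, but the newline
    -- it requested keeps the two statements on separate lines:
    local a=1
    local b=2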
