
Commit 522d698 (parent: 3421142)

Fixed parsing of consecutive unary operations.
Some cleanup.

6 files changed: 68 additions, 51 deletions

.gitignore

Lines changed: 3 additions & 2 deletions
@@ -1,3 +1,4 @@
-*.sublime-*
-.run
 local/
+
+/*.sublime-*
+/.run

Changelog.txt

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+Changelog
+DumbLuaParser
+
+v1.2.0 (2021-05-13)
+- Added parser.updateReferences().
+- Changed arguments for parser.traverseTree() and the callback.
+- Fixed parsing of consecutive unary operations.
+
+v1.1.0 (2020-07-06)
+- Added parser.traverseTree().
+
+v1.0.0 (2020-07-05)
+- Initial release!
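
For orientation, here is roughly how the v1.2.0 pieces fit into the library's tokenize/parse/output pipeline. This is a minimal sketch, not code from the repository; the require path is an assumption, and updateReferences() is only summarized from the changelog entry and its use in runTest.lua.

local parser = require("dumbParser") -- assumed module name, matching dumbParser.lua

-- Parse Lua code into an AST; the second argument is only used in error messages.
local ast = assert(parser.parse("local x = not not true", "example.lua"))

-- Added in v1.2.0 (see changelog above); runTest.lua calls this before output.
parser.updateReferences(ast)

-- Turn the AST back into Lua source (runTest.lua passes a 'pretty' flag here).
local lua = assert(parser.toLua(ast, true))
print(lua)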

LICENSE.txt

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-Copyright © 2020 Marcus 'ReFreezed' Thunström
+Copyright © 2020-2021 Marcus 'ReFreezed' Thunström
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

dumbParser.lua

Lines changed: 49 additions & 47 deletions
@@ -1,6 +1,6 @@
 --[[============================================================
 --=
---= Lua parsing library v1.1 (2020-07-06)
+--= Lua parsing library v1.2 (2021-05-13)
 --= by Marcus 'ReFreezed' Thunström
 --=
 --= License: MIT (see the bottom of this file)
@@ -28,7 +28,7 @@
 --------------------------------
 
 tokenizeString()
-tokens, error = parser.tokenizeString( luaString [, pathForErrors="?" ] )
+tokens, error = parser.tokenizeString( luaString [, pathForErrorMessages="?" ] )
 Convert a Lua string into tokens.
 Returns nil and an error message on error.
 
@@ -51,7 +51,7 @@
 
 parse()
 astNode, error = parser.parse( tokens )
-astNode, error = parser.parse( luaString, pathForErrors )
+astNode, error = parser.parse( luaString, pathForErrorMessages )
 astNode, error = parser.parse( path )
 Convert tokens or Lua code into an abstract syntax tree.
 Returns nil and an error message on error.
@@ -88,6 +88,10 @@
 parser.printTree( astNode )
 Print the structure of a whole AST to stdout.
 
+VERSION
+parser.VERSION
+The parser's version number (e.g. "1.0.2").
+
 
 Tokens
 --------------------------------
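
The VERSION field documented above is simply the PARSER_VERSION string exported further down in this diff. A trivial usage sketch (the require path is an assumption):

local parser = require("dumbParser") -- assumed module name
print(parser.VERSION)                -- "1.2.0" as of this commit
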
@@ -147,6 +151,8 @@
 
 --============================================================]]
 
+local PARSER_VERSION = "1.2.0"
+
 local F = string.format
 local find = string.find
 local getByte = string.byte
@@ -159,34 +165,18 @@ local loadLuaString = loadstring or load
 local unpack = table.unpack or unpack
 
 local countString
-local insertToken
-local isToken
-local isTokenAnyValue
-local isTokenType
+local insertToken, removeToken
+local isToken, isTokenType, isTokenAnyValue
 local itemWith1
 local minify
 local newId
 local newNode
 local newTokenStream
 local parse
-local parseBlock
-local parseExpression
-local parseExpressionList
-local parseFunctionParametersAndBody
-local parseIdentifier
-local parseNameList
-local parseOneOrPossiblyMoreStatements
-local parseStringlikeToken
-local parseTable
-local printerr
-local printNode
+local printError, printfError, reportErrorInFile, reportErrorAtToken
+local printNode, printTree
 local printTokens
-local printTree
-local removeToken
-local reportErrorAtToken
-local reportErrorInFile
-local tokenizeFile
-local tokenizeString
+local tokenizeString, tokenizeFile
 local toLua
 local traverseTree
 local updateReferences
@@ -424,15 +414,18 @@ function countString(haystack, needle, plain)
 end
 end
 
-function printerr(s)
+function printError(s)
 io.stderr:write(s, "\n")
 end
+function printfError(s, ...)
+io.stderr:write(s:format(...), "\n")
+end
 
 function reportErrorInFile(contents, path, ptr, agent, s, ...)
 s = F(s, ...)
 
 if contents == "" then
-printerr(F("Error @ %s: [%s] %s\n", path, agent, s))
+printfError("Error @ %s: [%s] %s\n", path, agent, s)
 return s
 end
 
@@ -447,10 +440,10 @@ function reportErrorInFile(contents, path, ptr, agent, s, ...)
 
 -- print(debug.traceback("", 2)) -- DEBUG
 
-printerr(F(
+printfError(
 "Error @ %s:%d: [%s] %s\n>\n> %s\n>%s^\n>\n",
 path, ln, agent, s, lastLine, rep("-", col)
-))
+)
 
 return s
 end
@@ -462,7 +455,7 @@ end
 
 
 -- success, equalSignCountIfLong|errorCode, ptr = parseStringlikeToken( s, ptr )
-function parseStringlikeToken(s, ptr)
+local function parseStringlikeToken(s, ptr)
 local longEqualSigns = match(s, "^%[(=*)%[", ptr)
 local equalSignCountIfLong = longEqualSigns and #longEqualSigns
 
@@ -486,7 +479,7 @@ function parseStringlikeToken(s, ptr)
 return true, equalSignCountIfLong, ptr
 end
 
--- tokens, error = tokenizeString( luaString [, pathForErrors="?" ] )
+-- tokens, error = tokenizeString( luaString [, pathForErrorMessages="?" ] )
 function tokenizeString(s, path)
 if find(s, "\r", 1, true) then
 s = gsub(s, "\r\n?", "\n")
@@ -873,14 +866,20 @@ end
 function isToken(tokens, tok, tokType, tokValue)
 return tokens.type[tok] == tokType and tokens.value[tok] == tokValue
 end
+
 function isTokenType(tokens, tok, tokType)
 return tokens.type[tok] == tokType
 end
+
 function isTokenAnyValue(tokens, tok, tokValueSet)
 return tokValueSet[tokens.value[tok]] == true
 end
 
-function parseIdentifier(tokens, tok) --> ident, token
+
+local parseExpression, parseExpressionList, parseFunctionParametersAndBody, parseBlock
+
+
+local function parseIdentifier(tokens, tok) --> ident, token
 if not isTokenType(tokens, tok, "identifier") then
 reportErrorAtToken(tokens, tok, "Parser", "Expected an identifier.")
 return nil, tok
@@ -893,7 +892,7 @@ function parseIdentifier(tokens, tok) --> ident, token
 return ident, tok
 end
 
-function parseNameList(tokens, tok, names, allowVararg) --> success, token, vararg|nil
+local function parseNameList(tokens, tok, names, allowVararg) --> success, token, vararg|nil
 while true do
 if allowVararg and isToken(tokens, tok, "punctuation", "...") then
 local vararg = AstVararg(tok)
@@ -916,7 +915,7 @@ function parseNameList(tokens, tok, names, allowVararg) --> success, token, vara
 return true, tok
 end
 
-function parseTable(tokens, tok) --> tableNode, token
+local function parseTable(tokens, tok) --> tableNode, token
 local tableNode = AstTable(tok)
 tok = tok + 1 -- '{'
 
@@ -1050,7 +1049,7 @@ function parseExpression(tokens, tok, lastPrecedence) --> expression, token
 unary.operator = tokens.value[tok]
 tok = tok + 1 -- operator
 
-local subExpr, tokNext = parseExpression(tokens, tok, OPERATOR_PRECEDENCE.unary)
+local subExpr, tokNext = parseExpression(tokens, tok, OPERATOR_PRECEDENCE.unary-1)
 if not subExpr then return false, tok end
 unary.expression = subExpr
 tok = tokNext
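
This one-token change is the actual fix for consecutive unary operations. parseExpression() appears to be a precedence climber: the lastPrecedence argument acts as a lower bound on which operators the recursive call may consume. Passing the full unary precedence evidently meant a second prefix operator of the same precedence was rejected, so expressions like `not not x` or `- -x` broke; passing `OPERATOR_PRECEDENCE.unary-1` lets an equal-precedence prefix operator be consumed by the recursive call, which is exactly right-associative nesting. The following standalone sketch illustrates the idea; it is not the library's code, and the precedence values are made up:

-- Minimal precedence climber for 'not' and 'or' only (illustration, not dumbParser.lua).
local UNARY_PRECEDENCE  = 12
local BINARY_PRECEDENCE = { ["or"] = 1 }

local function parseExpr(tokens, pos, lastPrecedence)
	local expr

	if tokens[pos] == "not" and UNARY_PRECEDENCE > lastPrecedence then
		-- The recursive call must use UNARY_PRECEDENCE-1. With UNARY_PRECEDENCE the
		-- guard above would fail for a second consecutive 'not' (12 > 12 is false),
		-- which is the kind of bug this commit fixes.
		local subExpr
		subExpr, pos = parseExpr(tokens, pos+1, UNARY_PRECEDENCE-1)
		expr = { type="unary", operator="not", expression=subExpr }
	else
		expr = { type="identifier", name=tokens[pos] }
		pos  = pos + 1
	end

	-- Binary operators: only consume them while they bind tighter than the caller's operator.
	while tokens[pos] and BINARY_PRECEDENCE[tokens[pos]] and BINARY_PRECEDENCE[tokens[pos]] > lastPrecedence do
		local op = tokens[pos]
		local rhs
		rhs, pos = parseExpr(tokens, pos+1, BINARY_PRECEDENCE[op])
		expr = { type="binary", operator=op, left=expr, right=rhs }
	end

	return expr, pos
end

local ast = parseExpr({"not", "not", "x"}, 1, 0)
print(ast.type, ast.expression.type) --> unary   unary
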
@@ -1341,9 +1340,9 @@ function parseFunctionParametersAndBody(tokens, tok)
 return func, tok
 end
 
-local blockEndTokenTypes = {["end"]=true, ["else"]=true, ["elseif"]=true, ["until"]=true}
+local BLOCK_END_TOKEN_TYPES = {["end"]=true, ["else"]=true, ["elseif"]=true, ["until"]=true}
 
-function parseOneOrPossiblyMoreStatements(tokens, tok, statements) --> success, token
+local function parseOneOrPossiblyMoreStatements(tokens, tok, statements) --> success, token
 --[[
 stat ::= varlist '=' explist |
 functioncall |
@@ -1700,7 +1699,7 @@ function parseOneOrPossiblyMoreStatements(tokens, tok, statements) --> success,
 local returnNode = AstReturn(tok)
 tok = tok + 1 -- 'return'
 
-if tok <= tokens.n and not ((isTokenType(tokens, tok, "keyword") and isTokenAnyValue(tokens, tok, blockEndTokenTypes)) or isToken(tokens, tok, "punctuation", ";")) then
+if tok <= tokens.n and not ((isTokenType(tokens, tok, "keyword") and isTokenAnyValue(tokens, tok, BLOCK_END_TOKEN_TYPES)) or isToken(tokens, tok, "punctuation", ";")) then
 local ok, tokNext = parseExpressionList(tokens, tok, returnNode.values)
 if not ok then return false, tok end
 tok = tokNext
@@ -1776,7 +1775,7 @@ function parseBlock(tokens, tok, stopAtEndKeyword) --> block, token
 tok = tok + 1 -- ';'
 end
 
-if stopAtEndKeyword and isTokenType(tokens, tok, "keyword") and isTokenAnyValue(tokens, tok, blockEndTokenTypes) then
+if stopAtEndKeyword and isTokenType(tokens, tok, "keyword") and isTokenAnyValue(tokens, tok, BLOCK_END_TOKEN_TYPES) then
 break
 end
 
@@ -1809,7 +1808,7 @@ function parseBlock(tokens, tok, stopAtEndKeyword) --> block, token
 end
 
 -- ast, error = parse( tokens )
--- ast, error = parse( luaString, pathForErrors )
+-- ast, error = parse( luaString, pathForErrorMessages )
 -- ast, error = parse( path )
 function parse(tokens, path)
 if type(tokens) == "string" then
@@ -2341,6 +2340,7 @@ end
 
 
 function minify(node)
+printError("Error: Minifying not supported yet!")
 end
 
 
@@ -2503,7 +2503,7 @@ do
 lastOutput = writeAlphanum(buffer, pretty, lookup.member.value, lastOutput)
 
 elseif forMethodCall then
-printerr(F("AST: Callee for method call is not a lookup."))
+printfError("Error: AST: Callee for method call is not a lookup.")
 return false, lastOutput
 
 else
@@ -2627,7 +2627,7 @@ do
 lastOutput = writeLua(buffer, F('"%s"', s), "")
 
 else
-printerr(F("Failed outputting value '%s'.", node.value))
+printfError("Error: Failed outputting value '%s'.", node.value)
 return false, lastOutput
 end
 
@@ -2715,7 +2715,7 @@ do
 local lookup = node.callee
 
 if lookup.type ~= "lookup" then
-printerr(F("AST: Callee for method call is not a lookup."))
+printfError("Error: AST: Callee for method call is not a lookup.")
 return false, lastOutput
 end
 
@@ -2762,7 +2762,7 @@ do
 elseif nodeType == "label" then
 local name = node.name.name
 if not (name:find"^[%a_][%w_]*$" and not KEYWORDS[name]) then
-printerr(F("AST: Invalid label '%s'.", name))
+printfError("Error: AST: Invalid label '%s'.", name)
 return false, lastOutput
 end
 lastOutput = writeLua(buffer, "::", "")
@@ -2773,7 +2773,7 @@ do
 elseif nodeType == "goto" then
 local name = node.name.name
 if not (name:find"^[%a_][%w_]*$" and not KEYWORDS[name]) then
-printerr(F("AST: Invalid label '%s'.", name))
+printfError("Error: AST: Invalid label '%s'.", name)
 return false, lastOutput
 end
 lastOutput = writeAlphanum(buffer, pretty, "goto", lastOutput)
@@ -2944,7 +2944,7 @@ do
 lastOutput = writeAlphanum(buffer, pretty, "in", lastOutput)
 
 else
-printerr(F("Unknown 'for' loop kind '%s'.", node.kind))
+printfError("Error: Unknown 'for' loop kind '%s'.", node.kind)
 return false, lastOutput
 end
 
@@ -2964,7 +2964,7 @@ do
 lastOutput = writeAlphanum(buffer, pretty, "end", lastOutput)
 
 else
-printerr(F("Unknown node type '%s'.", nodeType))
+printfError("Error: Unknown node type '%s'.", nodeType)
 return false, lastOutput
 end
 return true, lastOutput
@@ -3010,6 +3010,8 @@ end
 
 
 return {
+VERSION = PARSER_VERSION,
+
 tokenizeString = tokenizeString,
 tokenizeFile = tokenizeFile,
 
@@ -3033,7 +3035,7 @@ return {
 
 --[[============================================================
 
-Copyright © 2020 Marcus 'ReFreezed' Thunström
+Copyright © 2020-2021 Marcus 'ReFreezed' Thunström
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

runTest.lua

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ do
 end)
 
 parser.updateReferences(ast)
-parser.minify(ast)
+-- parser.minify(ast)
 
 local lua = assert(parser.toLua(ast, pretty))

test.lua

Lines changed: 1 addition & 0 deletions
@@ -48,6 +48,7 @@ do
 local m = -8
 local n = not false or false
 local l = #t + 1
+local b = not not not true
 
 ;;;;
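
The added test line feeds three stacked `not` operators through the parser, which is exactly the case the precedence fix above addresses. A quick manual check of the round trip might look like this (sketch; the require path and the boolean pretty flag are assumptions):

local parser = require("dumbParser") -- assumed module name
local ast    = assert(parser.parse("local b = not not not true", "test.lua"))
local lua    = assert(parser.toLua(ast, true))
print(lua)   -- expected to contain "not not not true" again (modulo formatting)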