@@ -109,11 +109,23 @@ function Tokenizer.get_next_line()
109109 return false
110110end
111111
--- Reports whether another line is available from the source.
--- Base stub: always false. NOTE(review): the concrete string tokenizer in
--- this commit overrides `has_line` and `LuASM:parse` calls `has_line`, so
--- the overridable hook must be published under that name; the stub as
--- committed was only reachable as `has_next_line` and `parse` would index
--- a nil `has_line` on any tokenizer lacking a concrete implementation.
--- @return boolean
function Tokenizer:has_line()
    return false
end

--- Backward-compatible alias for `has_line` (the name this stub was
--- originally published under).
--- @return boolean
function Tokenizer:has_next_line()
    return self:has_line()
end
116+
--- Extracts a label from the current line, when the dialect defines one.
--- Base stub: the generic tokenizer knows no label syntax, so it never
--- yields a label.
--- @return string | nil The label, or `nil` when none is present.
function Tokenizer:get_label()
    return nil
end
121+
112122--- Creates a new tokenizer without a specific implementation.
113123--- @return table A tokenizer instance (needs a concrete `get_next_line` implementation).
114- function Tokenizer :new ()
124+ function Tokenizer :new (luasm )
115125 local obj = {}
116126
127+ obj .luasm = luasm
128+
117129 setmetatable (obj , self )
118130 self .__index = self
119131
@@ -123,13 +135,13 @@ end
123135--- Reads in a file and returns a tokenizer for that file.
124136--- @param path string Path to the file to read.
125137--- @return table | nil Tokenizer instance or ` nil` if the file cannot be opened.
126- function LuASM . file_tokenizer (path )
138+ function LuASM : file_tokenizer (path )
127139 local file = io.open (path , " r" )
128140 if file == nil then
129141 return nil
130142 end
131143
132- local tokenizer = LuASM . string_tokenizer (file :read (" *a" ))
144+ local tokenizer = self : string_tokenizer (file :read (" *a" ))
133145
134146 file :close ()
135147
@@ -139,13 +151,15 @@ end
139151--- Reads in a string of ASM source and returns a tokenizer for that string.
140152--- @param input string The complete ASM source as a string.
141153--- @return table Tokenizer instance.
142- function LuASM . string_tokenizer (input )
154+ function LuASM : string_tokenizer (input )
143155 local tokenizer = Tokenizer :new ()
144156
145157 tokenizer .input = input
146158 tokenizer .cursor = 1 -- byte index inside `input`
147159 tokenizer .current_line = 1 -- line counter (1‑based)
148160
161+ tokenizer .line = nil
162+
149163 -- Concrete implementation of `get_next_line` for a string source.
150164 tokenizer .get_next_line = function ()
151165 if # tokenizer .input <= tokenizer .cursor then
@@ -156,12 +170,38 @@ function LuASM.string_tokenizer(input)
156170
157171 local line = trim (string.sub (tokenizer .input , tokenizer .cursor , endIndex ))
158172
173+ -- Remove comment from the line
174+ if self .settings .comment ~= nil then
175+ line = line :gsub (self .settings .comment , " " )
176+ end
177+
159178 tokenizer .cursor = endIndex + 1
160179 tokenizer .current_line = tokenizer .current_line + 1
161180
162181 return line
163182 end
164183
184+ tokenizer .has_line = function ()
185+ tokenizer .line = tokenizer .get_next_line ()
186+
187+ return tokenizer .line ~= nil
188+ end
189+
190+ tokenizer .get_label = function ()
191+ if self .settings .label == nil then
192+ return nil
193+ end
194+
195+ local label , rest = tokenizer .line :match (self .settings .label )
196+
197+ if label ~= nil then
198+ tokenizer .line = rest
199+ tokenizer .cursor = tokenizer .cursor + # label
200+ end
201+
202+ return label
203+ end
204+
165205 return tokenizer
166206end
167207
@@ -222,9 +262,7 @@ function LuASM:parse(tokenizer)
222262 parsed_lines = 0
223263 }
224264
225- while tokenizer :has_next_line () do
226- tokenizer :goto_next_line () -- Maybe there should be an error if not everything was parsed
227-
265+ while tokenizer :has_line () do
228266 parse_data .parsed_lines = parse_data .parsed_lines + 1
229267
230268 local label = tokenizer :get_label ()
0 commit comments