@@ -186,6 +186,62 @@ export default class Tokenizer {
    return isFunctionDeclaration;
  }

+  private isGlobalFunctionDeclaration(
+    lineIndex: number,
+    tokenIndex: number,
+    token: IToken,
+    tokensArrays: (IToken[] | undefined)[],
+  ) {
+    return (
+      !(tokenIndex === 0 && lineIndex === 0) && // Not sure why we need this
+      !token.scopes.includes(LanguageScopes.block) &&
+      token.scopes.includes(LanguageScopes.functionIdentifier) &&
+      this.isFunctionDeclaration(lineIndex, tokensArrays)
+    );
+  }
+
+  private isLocalFunctionDeclaration(
+    lineIndex: number,
+    tokenIndex: number,
+    token: IToken,
+    tokensArrays: (IToken[] | undefined)[],
+  ) {
+    return (
+      token.scopes.includes(LanguageScopes.functionIdentifier) &&
+      !token.scopes.includes(LanguageScopes.block) &&
+      !(tokenIndex === 0 && lineIndex === 0) && // Not sure why we need this
+      !this.isFunctionDeclaration(lineIndex, tokensArrays)
+    );
+  }
+
+  private isGlobalConstant(token: IToken) {
+    return (
+      token.scopes.includes(LanguageScopes.constantIdentifer) &&
+      !token.scopes.includes(LanguageScopes.functionDeclaration) &&
+      !token.scopes.includes(LanguageScopes.block)
+    );
+  }
+
+  private isStructDeclaration(token: IToken, lastToken: IToken, lineIndex: number, tokensArrays: (IToken[] | undefined)[]) {
+    return (
+      token.scopes.includes(LanguageScopes.structIdentifier) &&
+      ((lastToken.scopes.includes(LanguageScopes.structIdentifier) &&
+        lastToken.scopes.includes(LanguageScopes.blockDeclaraction)) ||
+        tokensArrays[lineIndex + 1]?.at(0)?.scopes.includes(LanguageScopes.blockDeclaraction))
+    );
+  }
+
+  private isLocalVariable(tokenIndex: number, token: IToken, tokensArray: IToken[]) {
+    return (
+      token.scopes.includes(LanguageScopes.variableIdentifer) &&
+      tokenIndex > 1 &&
+      (tokensArray[tokenIndex - 2].scopes.includes(LanguageScopes.type) ||
+        tokensArray[tokenIndex - 2].scopes.includes(LanguageScopes.structIdentifier))
+    );
+  }
+
+  // Naive implementation
+  // Ideally we would use an AST tree
  private tokenizeLinesForGlobalScope(lines: string[], startIndex: number = 0, stopIndex: number = -1) {
    const firstLineIndex = startIndex > lines.length || startIndex < 0 ? 0 : startIndex;
    const lastLineIndex = stopIndex + 10 > lines.length || stopIndex < 0 ? lines.length : stopIndex;
@@ -234,18 +290,12 @@ export default class Tokenizer {
          break;
        }

-        // CHILD
        if (token.scopes.includes(LanguageScopes.includeDeclaration)) {
          scope.children.push(this.getRawTokenContent(line, tokensArray.at(-2)!));
          break;
        }

-        // CONSTANT
-        if (
-          token.scopes.includes(LanguageScopes.constantIdentifer) &&
-          !token.scopes.includes(LanguageScopes.functionDeclaration) &&
-          !token.scopes.includes(LanguageScopes.block)
-        ) {
+        if (this.isGlobalConstant(token)) {
          scope.complexTokens.push({
            position: { line: lineIndex, character: token.startIndex },
            identifier: this.getRawTokenContent(line, token),
@@ -256,13 +306,7 @@ export default class Tokenizer {
          break;
        }

-        // FUNCTION
-        if (
-          token.scopes.includes(LanguageScopes.functionIdentifier) &&
-          !token.scopes.includes(LanguageScopes.block) &&
-          this.isFunctionDeclaration(lineIndex, tokensArrays) &&
-          !(tokenIndex === 0 && lineIndex === 0)
-        ) {
+        if (this.isGlobalFunctionDeclaration(lineIndex, tokenIndex, token, tokensArrays)) {
          scope.complexTokens.push({
            position: { line: lineIndex, character: token.startIndex },
            identifier: this.getRawTokenContent(line, token),
@@ -278,13 +322,7 @@ export default class Tokenizer {
          break;
        }

-        // STRUCT
-        if (
-          token.scopes.includes(LanguageScopes.structIdentifier) &&
-          ((lastToken.scopes.includes(LanguageScopes.structIdentifier) &&
-            tokensArrays[lineIndex + 1]?.at(0)?.scopes.includes(LanguageScopes.blockDeclaraction)) ||
-            lastToken.scopes.includes(LanguageScopes.blockDeclaraction))
-        ) {
+        if (this.isStructDeclaration(token, lastToken, lineIndex, tokensArrays)) {
          currentStruct = {
            position: { line: lineIndex, character: token.startIndex },
            identifier: this.getRawTokenContent(line, token),
@@ -300,6 +338,8 @@ export default class Tokenizer {
    return scope;
  }

+  // Naive implementation
+  // Ideally we would use an AST tree
  private tokenizeLinesForLocalScope(lines: string[], startIndex: number = 0, stopIndex: number = -1) {
    const firstLineIndex = startIndex > lines.length || startIndex < 0 ? 0 : startIndex;
    const lastLineIndex = stopIndex > lines.length || stopIndex < 0 ? lines.length : stopIndex;
@@ -351,13 +391,7 @@ export default class Tokenizer {
        const token = tokensArray[tokenIndex];

        // VARIABLE
-        if (
-          computeFunctionLocals &&
-          token.scopes.includes(LanguageScopes.variableIdentifer) &&
-          tokenIndex > 1 &&
-          (tokensArray[tokenIndex - 2].scopes.includes(LanguageScopes.type) ||
-            tokensArray[tokenIndex - 2].scopes.includes(LanguageScopes.structIdentifier))
-        ) {
+        if (computeFunctionLocals && this.isLocalVariable(tokenIndex, token, tokensArray)) {
          const complexToken = {
            position: { line: lineIndex, character: token.startIndex },
            identifier: this.getRawTokenContent(line, token),
@@ -401,13 +435,7 @@ export default class Tokenizer {
          });
        }

-        // FUNCTION
-        if (
-          token.scopes.includes(LanguageScopes.functionIdentifier) &&
-          !token.scopes.includes(LanguageScopes.block) &&
-          !this.isFunctionDeclaration(lineIndex, tokensArrays) &&
-          !(tokenIndex === 0 && lineIndex === 0)
-        ) {
+        if (this.isLocalFunctionDeclaration(lineIndex, tokenIndex, token, tokensArrays)) {
          scope.functionsComplexTokens.push({
            position: { line: lineIndex, character: token.startIndex },
            identifier: this.getRawTokenContent(line, token),