@@ -6,6 +6,7 @@
 from typing import (
     TYPE_CHECKING,
     Any,
+    AsyncGenerator,
     Awaitable,
     Callable,
     Iterator,
@@ -25,12 +26,21 @@
 from ...common.lsp_types import Location, Position, ReferenceContext
 from ...common.text_document import TextDocument
 from ..configuration import WorkspaceConfig
-from ..diagnostics.library_doc import KeywordDoc, KeywordMatcher, LibraryDoc
+from ..diagnostics.library_doc import (
+    ALL_RUN_KEYWORDS_MATCHERS,
+    RESOURCE_FILE_EXTENSION,
+    ROBOT_FILE_EXTENSION,
+    KeywordDoc,
+    KeywordMatcher,
+    LibraryDoc,
+)
+from ..diagnostics.namespace import Namespace
 from ..utils.ast import (
     HasTokens,
     Token,
     get_nodes_at_position,
     get_tokens_at_position,
+    is_not_variable_token,
     range_from_token,
     range_from_token_or_node,
     tokenize_variables,
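`KeywordMatcher`, kept in the reworked import block above, is used further down to compare keyword names the way Robot Framework does: case-, space- and underscore-insensitively. A minimal standalone sketch of that normalization idea (illustrative only; the real class in `..diagnostics.library_doc` may differ in detail), which is also why a freshly built matcher can be tested for membership in collections such as `(await namespace.get_libraries_matchers()).keys()` or `ALL_RUN_KEYWORDS_MATCHERS`:

```python
# Illustrative only: a simplified matcher assuming Robot Framework's rule
# that keyword names compare case-, space- and underscore-insensitively.
# The real KeywordMatcher in ..diagnostics.library_doc may differ in detail.
class SimpleKeywordMatcher:
    def __init__(self, name: str) -> None:
        self.name = name
        self._normalized = name.lower().replace(" ", "").replace("_", "")

    def __eq__(self, other: object) -> bool:
        if isinstance(other, SimpleKeywordMatcher):
            return self._normalized == other._normalized
        if isinstance(other, str):
            return self._normalized == other.lower().replace(" ", "").replace("_", "")
        return NotImplemented

    def __hash__(self) -> int:
        return hash(self._normalized)


assert SimpleKeywordMatcher("Run Keyword If") == SimpleKeywordMatcher("run_keyword_if")
```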
@@ -289,13 +299,6 @@ async def _find_keyword_references_in_file(
         file: Path,
         cancel_token: CancelationToken,
     ) -> List[Location]:
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import (
-            Fixture,
-            KeywordCall,
-            Template,
-            TestTemplate,
-        )

         doc = self.parent.robot_workspace.get_or_open_document(file, "robotframework")
         namespace = await self.parent.documents_cache.get_namespace(doc, cancelation_token=cancel_token)
@@ -310,47 +313,129 @@ async def _find_keyword_references_in_file(
         ):
             return []

-        libraries_matchers = (await namespace.get_libraries_matchers()).keys()
-        resources_matchers = (await namespace.get_resources_matchers()).keys()
+        return await asyncio.get_running_loop().run_in_executor(
+            None, asyncio.run, self._find_keyword_references_in_namespace(namespace, kw_doc, cancel_token)
+        )
+
+    async def _find_keyword_references_in_namespace(
+        self, namespace: Namespace, kw_doc: KeywordDoc, cancel_token: CancelationToken
+    ) -> List[Location]:
+        from robot.parsing.lexer.tokens import Token as RobotToken
+        from robot.parsing.model.statements import (
+            Fixture,
+            KeywordCall,
+            Template,
+            TestTemplate,
+        )
+
+        result: List[Location] = []
+
+        async for node in iter_nodes(namespace.model):
+            cancel_token.throw_if_canceled()
+
+            kw_token: Optional[Token] = None
+            arguments: Optional[List[Token]] = None

-        async def _run() -> List[Location]:
+            if isinstance(node, KeywordCall):
+                kw_token = node.get_token(RobotToken.KEYWORD)
+                arguments = list(node.get_tokens(RobotToken.ARGUMENT) or [])
+            elif isinstance(node, Fixture):
+                kw_token = node.get_token(RobotToken.NAME)
+                arguments = list(node.get_tokens(RobotToken.ARGUMENT) or [])
+            elif isinstance(node, (Template, TestTemplate)):
+                kw_token = node.get_token(RobotToken.NAME)
+                arguments = list(node.get_tokens(RobotToken.ARGUMENT) or [])
+
+            async for location in self.get_keyword_references_from_tokens(namespace, kw_doc, node, kw_token, arguments):
+                result.append(location)
+
+        return result
+
+    async def get_keyword_references_from_tokens(
+        self,
+        namespace: Namespace,
+        kw_doc: KeywordDoc,
+        node: ast.AST,
+        kw_token: Optional[Token],
+        arguments: Optional[List[Token]],
+    ) -> AsyncGenerator[Location, None]:
+        if kw_token is not None and is_not_variable_token(kw_token):
+            kw: Optional[KeywordDoc] = None
             kw_matcher = KeywordMatcher(kw_doc.name)

-            result: List[Location] = []
-
-            async for node in iter_nodes(namespace.model):
-                cancel_token.throw_if_canceled()
-
-                kw: Optional[KeywordDoc] = None
-                kw_token: Optional[Token] = None
-
-                if isinstance(node, KeywordCall):
-                    kw_token = node.get_token(RobotToken.KEYWORD)
-                elif isinstance(node, Fixture):
-                    kw_token = node.get_token(RobotToken.NAME)
-                elif isinstance(node, (Template, TestTemplate)):
-                    kw_token = node.get_token(RobotToken.NAME)
-
-                if kw_token is not None:
-                    for lib, name in self._yield_owner_and_kw_names(kw_token.value):
-                        if lib is not None:
-                            lib_matcher = KeywordMatcher(lib)
-                            if lib_matcher not in libraries_matchers and lib_matcher not in resources_matchers:
-                                continue
-
-                        if kw_matcher == name:
-                            kw = await namespace.find_keyword(str(kw_token.value))
-
-                            if kw is not None and kw == kw_doc:
-                                result.append(
-                                    Location(
-                                        str(Uri.from_path(namespace.source).normalized()),
-                                        range=range_from_token_or_node(node, kw_token),
-                                    )
-                                )
-            return result
+            for lib, name in self._yield_owner_and_kw_names(kw_token.value):
+                if lib is not None:
+                    lib_matcher = KeywordMatcher(lib)
+                    if (
+                        lib_matcher not in (await namespace.get_libraries_matchers()).keys()
+                        and lib_matcher not in (await namespace.get_resources_matchers()).keys()
+                    ):
+                        continue
+
+                if name is not None:
+                    name_matcher = KeywordMatcher(name)
+                    if kw_matcher == name_matcher:
+                        kw = await namespace.find_keyword(str(kw_token.value))
+
+                        if kw is not None and kw == kw_doc:
+                            yield Location(
+                                str(Uri.from_path(namespace.source).normalized()),
+                                range=range_from_token_or_node(node, kw_token),
+                            )
+
+                    if name_matcher in ALL_RUN_KEYWORDS_MATCHERS and arguments:
+                        async for location in self.get_keyword_references_from_any_run_keyword(
+                            namespace, kw_doc, node, kw_token, arguments
+                        ):
+                            yield location
+
+    async def get_keyword_references_from_any_run_keyword(
+        self,
+        namespace: Namespace,
+        kw_doc: KeywordDoc,
+        node: ast.AST,
+        kw_token: Token,
+        arguments: List[Token],
+    ) -> AsyncGenerator[Location, None]:
+
+        if kw_token is None or not is_not_variable_token(kw_token):
+            return
+
+        kw = await namespace.find_keyword(str(kw_token.value))
+
+        if kw is None or not kw.is_any_run_keyword():
+            return
+
+        if kw.is_run_keyword() and len(arguments) > 0 and is_not_variable_token(arguments[0]):
+            async for e in self.get_keyword_references_from_tokens(
+                namespace, kw_doc, node, arguments[0], arguments[1:]
+            ):
+                yield e
+        elif kw.is_run_keyword_with_condition() and len(arguments) > 1 and is_not_variable_token(arguments[1]):
+            async for e in self.get_keyword_references_from_tokens(
+                namespace, kw_doc, node, arguments[1], arguments[2:]
+            ):
+                yield e
+        elif kw.is_run_keywords():
+
+            while arguments:
+
+                t = arguments[0]
+                arguments = arguments[1:]
+                if t.value == "AND":
+                    continue
+
+                if not is_not_variable_token(t):
+                    continue
+
+                and_token = next((e for e in arguments if e.value == "AND"), None)
+                args = []
+                if and_token is not None:
+                    args = arguments[: arguments.index(and_token)]
+                    arguments = arguments[arguments.index(and_token) + 1 :]

-        return await asyncio.get_running_loop().run_in_executor(None, asyncio.run, _run())
+                async for e in self.get_keyword_references_from_tokens(namespace, kw_doc, node, t, args):
+                    yield e

     async def _find_keyword_references(self, document: TextDocument, kw_doc: KeywordDoc) -> List[Location]:
         folder = self.parent.workspace.get_workspace_folder(document.uri)
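Two implementation details in the hunk above may be worth spelling out. First, the rewritten `_find_keyword_references_in_file` hands the actual search to a worker thread: `run_in_executor(None, asyncio.run, coro)` submits the coroutine to the default thread-pool executor, where `asyncio.run` creates a private event loop and drives it to completion, so a long scan cannot block the server's main loop. A minimal, self-contained sketch of that pattern (the names here are illustrative, not the project's API):

```python
import asyncio
from typing import List


async def scan_file(path: str) -> List[str]:
    """Stand-in for the expensive per-file reference search."""
    await asyncio.sleep(0.1)
    return [f"{path}:12:4"]


async def main() -> None:
    loop = asyncio.get_running_loop()
    # The coroutine object is created here but executed by asyncio.run()
    # on a fresh event loop inside the default thread-pool executor;
    # the outer loop only awaits the executor's future.
    locations = await loop.run_in_executor(None, asyncio.run, scan_file("suite.robot"))
    print(locations)


asyncio.run(main())
```

Second, the `is_run_keywords()` branch walks the argument list of a `Run Keywords` call and treats `AND` as the separator between keyword/argument groups, recursing into `get_keyword_references_from_tokens` for each group. A rough standalone illustration of that AND-based grouping, following Robot Framework's documented semantics but operating on plain strings instead of tokens:

```python
from typing import List, Tuple


def split_run_keywords(arguments: List[str]) -> List[Tuple[str, List[str]]]:
    """Split a Run Keywords argument list into (keyword, args) groups."""
    if "AND" not in arguments:
        # Without AND, every argument is a keyword call of its own.
        return [(name, []) for name in arguments]
    calls: List[Tuple[str, List[str]]] = []
    while arguments:
        name, rest = arguments[0], arguments[1:]
        end = rest.index("AND") if "AND" in rest else len(rest)
        calls.append((name, rest[:end]))
        arguments = rest[end + 1:]
    return calls


print(split_run_keywords(["Log", "hello", "AND", "My Keyword", "arg1", "arg2"]))
# -> [('Log', ['hello']), ('My Keyword', ['arg1', 'arg2'])]
```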
@@ -391,7 +476,7 @@ async def _find_keyword_references(self, document: TextDocument, kw_doc: Keyword

         async for f in iter_files(
             folder.uri.to_path(),
-            ("**/*.{robot,resource}"),
+            (f"**/*.{{{ROBOT_FILE_EXTENSION[1:]},{RESOURCE_FILE_EXTENSION[1:]}}}"),
             ignore_patterns=config.exclude_patterns or [],  # type: ignore
             absolute=True,
         ):
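In the last hunk the hard-coded glob is rebuilt from the extension constants imported at the top. Assuming `ROBOT_FILE_EXTENSION` is `".robot"` and `RESOURCE_FILE_EXTENSION` is `".resource"` (the `[1:]` strips the leading dot), the f-string reproduces the previous literal; the doubled braces emit literal `{` and `}`:

```python
# Assumed values; the real constants come from ..diagnostics.library_doc.
ROBOT_FILE_EXTENSION = ".robot"
RESOURCE_FILE_EXTENSION = ".resource"

pattern = f"**/*.{{{ROBOT_FILE_EXTENSION[1:]},{RESOURCE_FILE_EXTENSION[1:]}}}"
assert pattern == "**/*.{robot,resource}"
```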