@@ -82,6 +82,7 @@ class RobotSemTokenTypes(Enum):
     KEYWORD_NAME = "keywordName"
     CONTROL_FLOW = "controlFlow"
     ARGUMENT = "argument"
+    EMBEDDED_ARGUMENT = "embeddedArgument"
     VARIABLE = "variable"
     KEYWORD = "keywordCall"
     KEYWORD_INNER = "keywordCallInner"
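Note on the new enum member: in LSP, a custom semantic token type is only usable if it appears in the `SemanticTokensLegend` the server advertises, since tokens reference types by index into that list. The sketch below is hypothetical and only illustrates that registration step; apart from the `RobotSemTokenTypes` members, the names are not robotcode's actual API.

```python
from enum import Enum

# Hypothetical sketch: how a custom token type typically ends up in the legend
# sent to clients. Only RobotSemTokenTypes mirrors the diff; the rest is illustrative.
class RobotSemTokenTypes(Enum):
    KEYWORD_NAME = "keywordName"
    EMBEDDED_ARGUMENT = "embeddedArgument"

STANDARD_TYPES = ["namespace", "variable", "operator", "keyword"]  # subset of LSP built-ins

def build_token_type_legend():
    # The legend is an ordered list; tokens later refer to a type by its index here,
    # so a new enum member must also be appended to this list to be colourable.
    return STANDARD_TYPES + [t.value for t in RobotSemTokenTypes]

print(build_token_type_legend())
# ['namespace', 'variable', 'operator', 'keyword', 'keywordName', 'embeddedArgument']
```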
@@ -400,18 +401,48 @@ async def generate_sem_sub_tokens(
                         1,
                         SemanticTokenTypes.OPERATOR,
                     )
-                # if builtin_library_doc is not None and KeywordMatcher(kw) in builtin_library_doc.keywords:
-                #     doc = await namespace.find_keyword(token.value)
-                #     if (
-                #         doc is not None
-                #         and doc.libname == cls.BUILTIN_MATCHER
-                #         and KeywordMatcher(doc.name) == KeywordMatcher(kw)
-                #     ):
-                #         if not sem_mod:
-                #             sem_mod = set()
-                #         sem_mod.add(RobotSemTokenModifiers.BUILTIN)
-
-                yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + kw_index, len(kw))
+
+                if builtin_library_doc is not None and kw in builtin_library_doc.keywords:
+                    doc = await namespace.find_keyword(token.value)
+                    if (
+                        doc is not None
+                        and doc.libname == cls.BUILTIN_MATCHER
+                        and KeywordMatcher(doc.name) == KeywordMatcher(kw)
+                    ):
+                        if not sem_mod:
+                            sem_mod = set()
+                        sem_mod.add(RobotSemTokenModifiers.BUILTIN)
+
+                kw_doc = await namespace.find_keyword(token.value, raise_keyword_error=False)
+                if kw_doc is not None and kw_doc.is_embedded:
+                    if get_robot_version() >= (6, 0):
+                        m = kw_doc.matcher.embedded_arguments.match(kw)
+                    else:
+                        m = kw_doc.matcher.embedded_arguments.name.match(kw)
+
+                    if m and m.lastindex is not None:
+                        start, end = m.span(0)
+                        for i in range(1, m.lastindex + 1):
+                            arg_start, arg_end = m.span(i)
+                            yield SemTokenInfo.from_token(
+                                token, sem_type, sem_mod, col_offset + kw_index + start, arg_start - start
+                            )
+                            yield SemTokenInfo.from_token(
+                                token,
+                                RobotSemTokenTypes.EMBEDDED_ARGUMENT,
+                                sem_mod,
+                                col_offset + kw_index + arg_start,
+                                arg_end - arg_start,
+                            )
+                            start = arg_end + 1
+
+                        if start < end:
+                            yield SemTokenInfo.from_token(
+                                token, sem_type, sem_mod, col_offset + kw_index + start, end - start
+                            )
+
+                else:
+                    yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + kw_index, len(kw))
         elif token.type == RobotToken.NAME and isinstance(node, (LibraryImport, ResourceImport, VariablesImport)):
             yield SemTokenInfo.from_token(token, RobotSemTokenTypes.NAMESPACE, sem_mod, col_offset, length)
         elif get_robot_version() >= (5, 0) and token.type == RobotToken.OPTION:
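For readers following the new embedded-argument branch above: the span arithmetic can be illustrated with plain `re`. This is a minimal sketch, not robotcode code; `pattern`, `split_keyword_call`, and the literal kind strings are hypothetical stand-ins for `kw_doc.matcher.embedded_arguments`, the surrounding generator, and `RobotSemTokenTypes`. Only the slicing logic mirrors the diff.

```python
import re

# Sketch only: a keyword like "Add ${quantity} Copies Of ${item} To Cart" compiles
# to a regex with one capture group per embedded argument; matching a concrete call
# yields alternating keyword-name and embedded-argument spans.
pattern = re.compile(r"Add (.*?) Copies Of (.*?) To Cart", re.IGNORECASE)

def split_keyword_call(kw):
    """Yield (kind, start, length) triples for one keyword call."""
    m = pattern.match(kw)
    if not m or m.lastindex is None:
        yield ("keywordCall", 0, len(kw))  # no embedded arguments: one plain token
        return
    start, end = m.span(0)
    for i in range(1, m.lastindex + 1):
        arg_start, arg_end = m.span(i)
        yield ("keywordCall", start, arg_start - start)             # text before the argument
        yield ("embeddedArgument", arg_start, arg_end - arg_start)  # the argument value
        start = arg_end + 1                                         # skip the character after it
    if start < end:
        yield ("keywordCall", start, end - start)                   # trailing keyword text

print(list(split_keyword_call("Add 7 Copies Of Coffee To Cart")))
# [('keywordCall', 0, 4), ('embeddedArgument', 4, 1), ('keywordCall', 6, 10),
#  ('embeddedArgument', 16, 6), ('keywordCall', 23, 7)]
```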
@@ -462,13 +493,12 @@ async def generate_sem_tokens(
     ) -> AsyncGenerator[SemTokenInfo, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken
 
-        if token.type in {*RobotToken.ALLOW_VARIABLES, RobotToken.KEYWORD, ROBOT_KEYWORD_INNER}:
+        if token.type in {*RobotToken.ALLOW_VARIABLES}:
 
             for sub_token in self._tokenize_variables(
                 token,
                 ignore_errors=True,
                 identifiers="$" if token.type == RobotToken.KEYWORD_NAME else "$@&%",
-                extra_types={ROBOT_KEYWORD_INNER},
             ):
                 async for e in self.generate_sem_sub_tokens(
                     namespace, builtin_library_doc, libraries_matchers, resources_matchers, sub_token, node
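On this last hunk: keyword tokens (`KEYWORD`, `ROBOT_KEYWORD_INNER`) no longer go through the variable pre-tokenizer, so they reach `generate_sem_sub_tokens` unsplit and the embedded-argument branch above apparently takes over splitting keyword names. What the pre-pass still does for the remaining `ALLOW_VARIABLES` token types can be sketched with Robot Framework's public `Token` API; robotcode's `self._tokenize_variables` wrapper adds the `identifiers` filtering and `ignore_errors` handling seen in the diff on top of this.

```python
from robot.api import Token

# Sketch, assuming Robot Framework >= 3.2: an ARGUMENT token is split into plain-text
# and variable sub-tokens before semantic classification.
arg = Token(Token.ARGUMENT, "prefix ${name} suffix", 1, 10)
for sub in arg.tokenize_variables():
    print(sub.type, repr(sub.value), sub.col_offset)
# ARGUMENT 'prefix ' 10
# VARIABLE '${name}' 17
# ARGUMENT ' suffix' 24
```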