Skip to content

Commit 0df2df8

Browse files
committed
highlight named arguments in run keywords correctly
1 parent 78e6c64 commit 0df2df8

File tree

1 file changed

+64
-37
lines changed

1 file changed

+64
-37
lines changed

robotcode/language_server/robotframework/parts/semantic_tokens.py

Lines changed: 64 additions & 37 deletions
Original file line number | Diff line number | Diff line change
@@ -61,6 +61,8 @@
6161
from .protocol_part import RobotLanguageServerProtocolPart
6262

6363
ROBOT_KEYWORD_INNER = "KEYWORD_INNER"
64+
ROBOT_NAMED_ARGUMENT = "NAMED_ARGUMENT"
65+
ROBOT_OPERATOR = "OPERATOR"
6466

6567

6668
class RobotSemTokenTypes(Enum):
@@ -175,6 +177,8 @@ def generate_mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
175177
frozenset({RobotToken.VARIABLE, RobotToken.ASSIGN}): (RobotSemTokenTypes.VARIABLE, None),
176178
frozenset({RobotToken.KEYWORD}): (RobotSemTokenTypes.KEYWORD, None),
177179
frozenset({ROBOT_KEYWORD_INNER}): (RobotSemTokenTypes.KEYWORD_INNER, None),
180+
frozenset({ROBOT_NAMED_ARGUMENT}): (RobotSemTokenTypes.VARIABLE, None),
181+
frozenset({ROBOT_OPERATOR}): (SemanticTokenTypes.OPERATOR, None),
178182
frozenset({RobotToken.NAME}): (RobotSemTokenTypes.NAME, None),
179183
frozenset({RobotToken.CONTINUATION}): (RobotSemTokenTypes.CONTINUATION, None),
180184
frozenset({RobotToken.SEPARATOR}): (RobotSemTokenTypes.SEPARATOR, None),
@@ -370,26 +374,6 @@ async def generate_sem_tokens(
370374
resources_matchers: Container[KeywordMatcher],
371375
) -> AsyncGenerator[SemTokenInfo, None]:
372376
from robot.parsing.lexer.tokens import Token as RobotToken
373-
from robot.parsing.model.statements import Fixture, KeywordCall
374-
from robot.utils.escaping import split_from_equals
375-
376-
if token.type in {RobotToken.ARGUMENT} and isinstance(node, (KeywordCall, Fixture)):
377-
name, value = split_from_equals(token.value)
378-
if value is not None:
379-
if isinstance(node, KeywordCall):
380-
doc = await namespace.find_keyword(node.keyword)
381-
elif isinstance(node, Fixture):
382-
doc = await namespace.find_keyword(node.name)
383-
else:
384-
doc = None
385-
386-
if doc and any(v for v in doc.args if v.name == name):
387-
length = len(name)
388-
yield SemTokenInfo.from_token(token, RobotSemTokenTypes.VARIABLE, length=length)
389-
yield SemTokenInfo.from_token(
390-
token, SemanticTokenTypes.OPERATOR, col_offset=token.col_offset + length, length=1
391-
)
392-
token = RobotToken(token.type, value, token.lineno, token.col_offset + length + 1, token.error)
393377

394378
if token.type in {*RobotToken.ALLOW_VARIABLES, RobotToken.KEYWORD, ROBOT_KEYWORD_INNER}:
395379

@@ -431,9 +415,9 @@ async def skip_non_data_tokens() -> AsyncGenerator[Tuple[Token, ast.AST], None]:
431415
yield arguments[0], node,
432416
arguments = arguments[1:]
433417

434-
yield kw_token, node
435-
436418
if kw_doc is not None and kw_doc.is_any_run_keyword():
419+
yield kw_token, node
420+
437421
async for b in skip_non_data_tokens():
438422
yield b
439423

@@ -592,9 +576,41 @@ async def generate_run_kw_if() -> AsyncGenerator[Tuple[Token, ast.AST], None]:
592576
async for e in generate_run_kw_if():
593577
yield e
594578
else:
579+
async for a in self.generate_keyword_tokens(namespace, kw_token, arguments, node):
580+
yield a
581+
582+
async def generate_keyword_tokens(
583+
self,
584+
namespace: Namespace,
585+
kw_token: Token,
586+
arguments: List[Token],
587+
node: ast.AST,
588+
) -> AsyncGenerator[Tuple[Token, ast.AST], None]:
589+
from robot.parsing.lexer import Token as RobotToken
590+
from robot.utils.escaping import split_from_equals
591+
592+
yield kw_token, node
593+
594+
doc: Optional[KeywordDoc] = None
595+
for token in arguments:
596+
if token.type in [RobotToken.ARGUMENT]:
597+
name, value = split_from_equals(token.value)
598+
if value is not None:
599+
if doc is None:
600+
doc = await namespace.find_keyword(kw_token.value)
601+
602+
if doc and any(v for v in doc.args if v.name == name):
603+
length = len(name)
604+
yield RobotToken(ROBOT_NAMED_ARGUMENT, name, token.lineno, token.col_offset), node
605+
606+
yield RobotToken(ROBOT_OPERATOR, "=", token.lineno, token.col_offset + length), node
607+
yield RobotToken(
608+
token.type, value, token.lineno, token.col_offset + length + 1, token.error
609+
), node
610+
611+
continue
595612

596-
for a in arguments:
597-
yield a, node
613+
yield token, node
598614

599615
@_logger.call
600616
async def collect(
@@ -634,19 +650,30 @@ async def get_tokens() -> AsyncGenerator[Tuple[Token, ast.AST], None]:
634650
kw = name
635651
if kw:
636652
kw_doc = await namespace.find_keyword(kw_token.value)
637-
if kw_doc is not None and kw_doc.is_any_run_keyword():
638-
async for t in self.generate_run_kw_tokens(
639-
namespace,
640-
builtin_library_doc,
641-
libraries_matchers,
642-
resources_matchers,
643-
kw_doc,
644-
kw_token,
645-
node.tokens[node.tokens.index(kw_token) + 1 :],
646-
node,
647-
):
648-
yield t
649-
continue
653+
if kw_doc is not None:
654+
if kw_doc.is_any_run_keyword():
655+
async for t in self.generate_run_kw_tokens(
656+
namespace,
657+
builtin_library_doc,
658+
libraries_matchers,
659+
resources_matchers,
660+
kw_doc,
661+
kw_token,
662+
node.tokens[node.tokens.index(kw_token) + 1 :],
663+
node,
664+
):
665+
yield t
666+
continue
667+
else:
668+
async for t in self.generate_keyword_tokens(
669+
namespace,
670+
kw_token,
671+
node.tokens[node.tokens.index(kw_token) + 1 :],
672+
node,
673+
):
674+
yield t
675+
676+
continue
650677

651678
for token in node.tokens:
652679
yield token, node

0 commit comments

Comments (0)