Skip to content

Commit 0995a2e

Browse files
committed
feat(robotlangserver): Optimization of the analysis of keywords with embedded arguments
1 parent ec2c444 commit 0995a2e

File tree

2 files changed

+44
-28
lines changed

2 files changed

+44
-28
lines changed

robotcode/language_server/robotframework/diagnostics/analyzer.py

Lines changed: 43 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -330,6 +330,7 @@ async def _analyze_keyword_call(
330330
argument_tokens: List[Token],
331331
analyse_run_keywords: bool = True,
332332
allow_variables: bool = False,
333+
ignore_errors_if_contains_variables: bool = False,
333334
) -> Optional[KeywordDoc]:
334335
from robot.parsing.lexer.tokens import Token as RobotToken
335336
from robot.parsing.model.statements import Template, TestTemplate
@@ -371,13 +372,14 @@ async def _analyze_keyword_call(
371372

372373
result = await self.finder.find_keyword(keyword)
373374

374-
for e in self.finder.diagnostics:
375-
await self.append_diagnostics(
376-
range=kw_range,
377-
message=e.message,
378-
severity=e.severity,
379-
code=e.code,
380-
)
375+
if not ignore_errors_if_contains_variables or is_not_variable_token(keyword_token):
376+
for e in self.finder.diagnostics:
377+
await self.append_diagnostics(
378+
range=kw_range,
379+
message=e.message,
380+
severity=e.severity,
381+
code=e.code,
382+
)
381383

382384
if result is not None:
383385
if self.namespace.document is not None:
@@ -544,22 +546,27 @@ async def _analyse_run_keyword(
544546
if keyword_doc is None or not keyword_doc.is_any_run_keyword():
545547
return argument_tokens
546548

547-
if keyword_doc.is_run_keyword() and len(argument_tokens) > 0 and is_not_variable_token(argument_tokens[0]):
549+
if keyword_doc.is_run_keyword() and len(argument_tokens) > 0:
548550
await self._analyze_keyword_call(
549-
unescape(argument_tokens[0].value), node, argument_tokens[0], argument_tokens[1:]
551+
unescape(argument_tokens[0].value),
552+
node,
553+
argument_tokens[0],
554+
argument_tokens[1:],
555+
allow_variables=True,
556+
ignore_errors_if_contains_variables=True,
550557
)
551558

552559
return argument_tokens[1:]
553-
elif (
554-
keyword_doc.is_run_keyword_with_condition()
555-
and len(argument_tokens) > (cond_count := keyword_doc.run_keyword_condition_count())
556-
and is_not_variable_token(argument_tokens[cond_count])
560+
elif keyword_doc.is_run_keyword_with_condition() and len(argument_tokens) > (
561+
cond_count := keyword_doc.run_keyword_condition_count()
557562
):
558563
await self._analyze_keyword_call(
559564
unescape(argument_tokens[cond_count].value),
560565
node,
561566
argument_tokens[cond_count],
562567
argument_tokens[cond_count + 1 :],
568+
allow_variables=True,
569+
ignore_errors_if_contains_variables=True,
563570
)
564571
return argument_tokens[cond_count + 1 :]
565572
elif keyword_doc.is_run_keywords():
@@ -577,9 +584,6 @@ async def _analyse_run_keyword(
577584
)
578585
continue
579586

580-
if not is_not_variable_token(t):
581-
continue
582-
583587
and_token = next((e for e in argument_tokens if e.value == "AND"), None)
584588
args = []
585589
if and_token is not None:
@@ -590,7 +594,14 @@ async def _analyse_run_keyword(
590594
args = argument_tokens
591595
argument_tokens = []
592596

593-
await self._analyze_keyword_call(unescape(t.value), node, t, args)
597+
await self._analyze_keyword_call(
598+
unescape(t.value),
599+
node,
600+
t,
601+
args,
602+
allow_variables=True,
603+
ignore_errors_if_contains_variables=True,
604+
)
594605

595606
return []
596607

@@ -620,14 +631,15 @@ def skip_args() -> List[Token]:
620631

621632
args = skip_args()
622633

623-
if is_not_variable_token(kwt):
624-
await self._analyze_keyword_call(
625-
unescape(kwt.value),
626-
node,
627-
kwt,
628-
args,
629-
analyse_run_keywords=False,
630-
)
634+
await self._analyze_keyword_call(
635+
unescape(kwt.value),
636+
node,
637+
kwt,
638+
args,
639+
analyse_run_keywords=False,
640+
allow_variables=True,
641+
ignore_errors_if_contains_variables=True,
642+
)
631643

632644
while argument_tokens:
633645
if argument_tokens[0].value == "ELSE" and len(argument_tokens) > 1:
@@ -682,12 +694,16 @@ async def visit_Fixture(self, node: ast.AST) -> None: # noqa: N802
682694

683695
if (
684696
keyword_token is not None
685-
and is_not_variable_token(keyword_token)
686697
and keyword_token.value is not None
687698
and keyword_token.value.upper() not in ("", "NONE")
688699
):
689700
await self._analyze_keyword_call(
690-
value.name, value, keyword_token, [cast(Token, e) for e in value.get_tokens(RobotToken.ARGUMENT)]
701+
value.name,
702+
value,
703+
keyword_token,
704+
[cast(Token, e) for e in value.get_tokens(RobotToken.ARGUMENT)],
705+
allow_variables=True,
706+
ignore_errors_if_contains_variables=True,
691707
)
692708

693709
await self.generic_visit(node)

robotcode/language_server/robotframework/parts/semantic_tokens.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -492,7 +492,7 @@ async def generate_sem_tokens(
492492
) -> AsyncGenerator[SemTokenInfo, None]:
493493
from robot.parsing.lexer.tokens import Token as RobotToken
494494

495-
if token.type in {*RobotToken.ALLOW_VARIABLES}:
495+
if token.type in {RobotToken.ARGUMENT, RobotToken.TESTCASE_NAME, RobotToken.KEYWORD_NAME}:
496496

497497
for sub_token in self._tokenize_variables(
498498
token,

0 commit comments

Comments (0)