@@ -39,6 +39,7 @@
     Token,
     is_non_variable_token,
     range_from_node,
+    range_from_token,
     range_from_token_or_node,
     tokenize_variables,
 )

@@ -610,9 +611,32 @@ async def _analyse_run_keyword(
             await self._analyze_keyword_call(argument_tokens[1].value, node, argument_tokens[1], argument_tokens[2:])
             return argument_tokens[2:]
         elif keyword_doc.is_run_keywords():
-            for t in argument_tokens:
-                if is_non_variable_token(t):
-                    await self._analyze_keyword_call(t.value, node, t, [])
+
+            while argument_tokens:
+                # TODO: Parse "run keywords" with arguments using upper case AND
+                t = argument_tokens[0]
+                argument_tokens = argument_tokens[1:]
+                if t.value == "AND":
+                    self._results.append(
+                        Diagnostic(
+                            range=range_from_token(t),
+                            message=f"Incorrect use of {t.value}",
+                            severity=DiagnosticSeverity.ERROR,
+                            source=DIAGNOSTICS_SOURCE_NAME,
+                        )
+                    )
+                    continue
+
+                if not is_non_variable_token(t):
+                    continue
+
+                and_token = next((e for e in argument_tokens if e.value == "AND"), None)
+                args = []
+                if and_token is not None:
+                    args = argument_tokens[: argument_tokens.index(and_token)]
+                    argument_tokens = argument_tokens[argument_tokens.index(and_token) + 1 :]
+
+                await self._analyze_keyword_call(t.value, node, t, args)
 
             return []
 
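For context, a minimal standalone sketch of the splitting logic the added while loop implements: a leading AND is reported as an error, and everything up to the next AND is collected as the arguments of the current keyword. Tok, split_run_keywords, and the plain error-string list are hypothetical stand-ins for the real Token, keyword-call analysis, and Diagnostic machinery; the is_non_variable_token check is omitted here.

from typing import List, NamedTuple, Tuple


class Tok(NamedTuple):
    # Hypothetical stand-in for the Robot Framework Token used in the patch.
    value: str


def split_run_keywords(tokens: List[Tok]) -> Tuple[List[Tuple[Tok, List[Tok]]], List[str]]:
    # Walk the flat argument list of `Run Keywords`: a leading AND is an error,
    # otherwise everything up to the next AND belongs to the current keyword.
    calls: List[Tuple[Tok, List[Tok]]] = []
    errors: List[str] = []
    while tokens:
        t, tokens = tokens[0], tokens[1:]
        if t.value == "AND":
            errors.append(f"Incorrect use of {t.value}")
            continue
        and_token = next((e for e in tokens if e.value == "AND"), None)
        args: List[Tok] = []
        if and_token is not None:
            i = tokens.index(and_token)
            args, tokens = tokens[:i], tokens[i + 1 :]
        calls.append((t, args))
    return calls, errors


calls, errors = split_run_keywords([Tok("Log"), Tok("hello"), Tok("AND"), Tok("No Operation")])
print(calls)   # [(Tok(value='Log'), [Tok(value='hello')]), (Tok(value='No Operation'), [])]
print(errors)  # []
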
@@ -875,7 +899,9 @@ def __init__(
         self.invalidated_callback = invalidated_callback
         self._document = weakref.ref(document) if document is not None else None
         self._libraries: OrderedDict[str, LibraryEntry] = OrderedDict()
+        self._libraries_matchers: Optional[List[KeywordMatcher]] = None
         self._resources: OrderedDict[str, ResourceEntry] = OrderedDict()
+        self._resources_matchers: Optional[List[KeywordMatcher]] = None
         self._variables: OrderedDict[str, VariablesEntry] = OrderedDict()
         self._initialized = False
         self._initialize_lock = asyncio.Lock()

@@ -922,12 +948,26 @@ async def get_libraries(self) -> OrderedDict[str, LibraryEntry]:
 
         return self._libraries
 
+    async def get_libraries_matchers(self) -> List[KeywordMatcher]:
+        if self._libraries_matchers is None:
+            self._libraries_matchers = [
+                KeywordMatcher(v.alias or v.name or v.import_name) for v in (await self.get_libraries()).values()
+            ]
+        return self._libraries_matchers
+
     @_logger.call
     async def get_resources(self) -> OrderedDict[str, ResourceEntry]:
         await self.ensure_initialized()
 
         return self._resources
 
+    async def get_resources_matchers(self) -> List[KeywordMatcher]:
+        if self._resources_matchers is None:
+            self._resources_matchers = [
+                KeywordMatcher(v.alias or v.name or v.import_name) for v in (await self.get_resources()).values()
+            ]
+        return self._resources_matchers
+
     async def get_library_doc(self) -> LibraryDoc:
         from ..parts.documents_cache import DocumentType
 
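A reduced sketch of the caching pattern the two new accessors follow: the matcher list is built once from the current imports and reused until the cached field is reset to None. KeywordMatcher and Namespace below are hypothetical stand-ins that only hint at the real robotcode classes.

import asyncio
from typing import Dict, List, Optional


class KeywordMatcher:
    # Hypothetical stand-in: the real matcher normalizes names Robot Framework style.
    def __init__(self, name: str) -> None:
        self.normalized = name.lower().replace(" ", "").replace("_", "")


class Namespace:
    def __init__(self) -> None:
        # Import name -> alias/name the import is actually known under.
        self._libraries: Dict[str, str] = {"Collections": "Collections"}
        self._libraries_matchers: Optional[List[KeywordMatcher]] = None

    async def get_libraries(self) -> Dict[str, str]:
        # The real accessor awaits ensure_initialized() before returning the imports.
        return self._libraries

    async def get_libraries_matchers(self) -> List[KeywordMatcher]:
        # Build the matcher list on first access and reuse it afterwards; the cache
        # field would be reset to None whenever the imports are invalidated.
        if self._libraries_matchers is None:
            self._libraries_matchers = [KeywordMatcher(v) for v in (await self.get_libraries()).values()]
        return self._libraries_matchers


async def main() -> None:
    ns = Namespace()
    first = await ns.get_libraries_matchers()
    second = await ns.get_libraries_matchers()
    print(first is second)  # True: built once, then served from the cache


asyncio.run(main())
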
@@ -1203,7 +1243,7 @@ async def _import(value: Import) -> Optional[LibraryEntry]:
                 ]
 
                 if not allready_imported_resources and entry.library_doc.source != self.source:
-                    self._resources[entry.import_name] = entry
+                    self._resources[entry.alias or entry.name or entry.import_name] = entry
                     try:
                         await self._import_imports(
                             entry.imports,

@@ -1235,7 +1275,7 @@ async def _import(value: Import) -> Optional[LibraryEntry]:
                         )
                     )
                 elif allready_imported_resources and allready_imported_resources[0].library_doc.source:
-                    self._resources[entry.import_name] = entry
+                    self._resources[entry.alias or entry.name or entry.import_name] = entry
 
                     self._diagnostics.append(
                         Diagnostic(