
Commit 5511a45

Semantic tokens now highlight builtin keywords
1 parent 5f8dab9 commit 5511a45


2 files changed: +110 −53 lines changed

package.json

Lines changed: 6 additions & 0 deletions
@@ -66,6 +66,12 @@
         "editor.semanticHighlighting.enabled": true
       }
     },
+    "semanticTokenModifiers": [
+      {
+        "id": "builtin",
+        "description": "built in library, keyword or variable"
+      }
+    ],
     "semanticTokenScopes": [
       {
         "language": "robotframework",

robotcode/language_server/robotframework/parts/semantic_tokens.py

Lines changed: 104 additions & 53 deletions
@@ -35,7 +35,16 @@
     SemanticTokenTypes,
 )
 from ...common.text_document import TextDocument
-from ..utils.ast import HasTokens, Token, iter_nodes, token_in_range, tokenize_variables
+from ..diagnostics.library_doc import BUILTIN_LIBRARY_NAME, KeywordMatcher, LibraryDoc
+from ..diagnostics.namespace import KeywordFinder, Namespace
+from ..utils.ast import (
+    HasTokens,
+    Token,
+    iter_nodes,
+    token_in_range,
+    tokenize_variables,
+    yield_owner_and_kw_names,
+)
 
 if TYPE_CHECKING:
     from ..protocol import RobotLanguageServerProtocol
@@ -70,6 +79,10 @@ class RobotSemTokenTypes(Enum):
     NAMESPACE = "namespace"
 
 
+class RobotSemTokenModifiers(Enum):
+    BUILTIN = "builtin"
+
+
 @dataclass
 class SemTokenInfo:
     lineno: int
@@ -102,6 +115,7 @@ class RobotSemanticTokenProtocolPart(RobotLanguageServerProtocolPart):
     def __init__(self, parent: RobotLanguageServerProtocol) -> None:
         super().__init__(parent)
         parent.semantic_tokens.token_types += [e for e in RobotSemTokenTypes]
+        parent.semantic_tokens.token_modifiers += [e for e in RobotSemTokenModifiers]
 
         parent.semantic_tokens.collect_full.add(self.collect_full)
         parent.semantic_tokens.collect_range.add(self.collect_range)
@@ -172,9 +186,18 @@ def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
         r"(?P<t>[^\\]+)|(?P<x>\\([^xuU]|x[0-f]{2}|u[0-f]{4}|U[0-f]{8}){0,1})", re.MULTILINE | re.DOTALL
     )
 
+    BUILTIN_MATCHER = KeywordMatcher("BuiltIn")
+
     @classmethod
     async def generate_sem_sub_tokens(
-        cls, token: Token, node: ast.AST, col_offset: Optional[int] = None, length: Optional[int] = None
+        cls,
+        namespace: Namespace,
+        finder: KeywordFinder,
+        builtin_library_doc: Optional[LibraryDoc],
+        token: Token,
+        node: ast.AST,
+        col_offset: Optional[int] = None,
+        length: Optional[int] = None,
     ) -> AsyncGenerator[SemTokenInfo, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken
         from robot.parsing.model.statements import (
@@ -229,73 +252,84 @@ async def generate_sem_sub_tokens(
                 if length is None:
                     length = token.end_col_offset - token.col_offset
 
-                index = token.value.find(".")
-                old_index = 0
-                while index >= 0:
-                    if index > 0:
-                        yield SemTokenInfo(
-                            token.lineno,
-                            col_offset + old_index,
-                            index - old_index,
-                            RobotSemTokenTypes.NAMESPACE,
-                            {SemanticTokenModifiers.DEFAULT_LIBRARY}
-                            if token.value[:index].casefold() == "BuiltIn".casefold()
-                            else None,
-                        )
-                    yield SemTokenInfo(token.lineno, col_offset + index, 1, RobotSemTokenTypes.SEPARATOR, sem_mod)
-
-                    new_index = token.value.find(".", index + 1)
-                    if new_index >= 0:
-                        old_index = index
-                        index = new_index
-                    else:
-                        break
-
-                yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + index + 1, length - index - 1)
+                kw_namespace: Optional[str] = None
+                kw: str = token.value
+
+                for lib, name in yield_owner_and_kw_names(token.value):
+                    if lib is not None:
+                        lib_matcher = KeywordMatcher(lib)
+                        if (
+                            lib_matcher in (await namespace.get_libraries_matchers()).keys()
+                            or lib_matcher in (await namespace.get_resources_matchers()).keys()
+                        ):
+                            kw_namespace = lib
+                            if name:
+                                kw = name
+                            break
+
+                kw_index = token.value.index(kw)
+
+                if kw_namespace:
+                    yield SemTokenInfo(
+                        token.lineno,
+                        col_offset,
+                        len(kw_namespace),
+                        RobotSemTokenTypes.NAMESPACE,
+                        {RobotSemTokenModifiers.BUILTIN} if kw_namespace == cls.BUILTIN_MATCHER else None,
+                    )
+                    yield SemTokenInfo(
+                        token.lineno,
+                        col_offset + len(kw_namespace),
+                        1,
+                        SemanticTokenTypes.OPERATOR,
+                    )
+                if builtin_library_doc is not None and kw in builtin_library_doc.keywords:
+                    doc = await finder.find_keyword(token.value)
+                    if (
+                        doc is not None
+                        and doc.libname == cls.BUILTIN_MATCHER
+                        and KeywordMatcher(doc.name) == KeywordMatcher(kw)
+                    ):
+                        if not sem_mod:
+                            sem_mod = set()
+                        sem_mod.add(RobotSemTokenModifiers.BUILTIN)
+
+                yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + kw_index, len(kw))
             elif token.type == RobotToken.NAME and isinstance(node, (LibraryImport, ResourceImport, VariablesImport)):
                 yield SemTokenInfo.from_token(token, RobotSemTokenTypes.NAMESPACE, sem_mod, col_offset, length)
             else:
                 yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset, length)
 
-    async def generate_sem_tokens(self, token: Token, node: ast.AST) -> AsyncGenerator[SemTokenInfo, None]:
+    async def generate_sem_tokens(
+        self,
+        token: Token,
+        node: ast.AST,
+        namespace: Namespace,
+        finder: KeywordFinder,
+        builtin_library_doc: Optional[LibraryDoc],
+    ) -> AsyncGenerator[SemTokenInfo, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken
 
         if token.type in {*RobotToken.ALLOW_VARIABLES, RobotToken.KEYWORD}:
 
             for sub_token in tokenize_variables(
                 token, ignore_errors=True, identifiers="$" if token.type == RobotToken.KEYWORD_NAME else "$@&%"
             ):
-                async for e in self.generate_sem_sub_tokens(sub_token, node):
+                async for e in self.generate_sem_sub_tokens(namespace, finder, builtin_library_doc, sub_token, node):
                     yield e
 
-        elif token.type == RobotToken.KEYWORD:
-            is_builtin = False
-            # TODO tag builtin keywords
-            # if namespace.initialized:
-            #     try:
-            #         libdoc = await namespace.find_keyword(token.value)
-            #         if (
-            #             libdoc is not None
-            #             and libdoc.libname is not None
-            #             and libdoc.libname.casefold() == "builtin".casefold()
-            #         ):
-
-            #             is_builtin = True
-            #     except BaseException:
-            #         pass
-
-            async for e in self.generate_sem_sub_tokens(token, node):
-                if is_builtin:
-                    if e.sem_modifiers is None:
-                        e.sem_modifiers = set()
-                    e.sem_modifiers.add(SemanticTokenModifiers.DEFAULT_LIBRARY)
-                yield e
         else:
-            async for e in self.generate_sem_sub_tokens(token, node):
+            async for e in self.generate_sem_sub_tokens(namespace, finder, builtin_library_doc, token, node):
                 yield e
 
     async def collect(
-        self, model: ast.AST, range: Optional[Range], cancel_token: CancelationToken
+        self,
+        model: ast.AST,
+        range: Optional[Range],
+        namespace: Namespace,
+        finder: KeywordFinder,
+        builtin_library_doc: Optional[LibraryDoc],
+        cancel_token: CancelationToken,
    ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
 
         data = []
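
For reference, the core of the new highlighting logic above is the owner/keyword split: a keyword call such as "BuiltIn.Log" is only treated as namespace plus keyword if the part before the dot matches an imported library or resource; otherwise the whole value stays a single keyword name. A simplified standalone sketch of that idea (the names and the plain string comparison are illustrative; the real code goes through yield_owner_and_kw_names and KeywordMatcher, which match case- and space-insensitively):

from typing import Optional, Tuple

def split_keyword(value: str, known_owners: set) -> Tuple[Optional[str], str]:
    # Try every dot as a possible "Owner.Keyword" boundary and accept the first
    # split whose owner is a known library or resource.
    for i, ch in enumerate(value):
        if ch == "." and value[:i] in known_owners and value[i + 1:]:
            return value[:i], value[i + 1:]
    return None, value

print(split_keyword("BuiltIn.Log", {"BuiltIn"}))  # ('BuiltIn', 'Log')
print(split_keyword("Log Many", {"BuiltIn"}))     # (None, 'Log Many')

Once a split is found, generate_sem_sub_tokens emits the owner as a NAMESPACE token (with the BUILTIN modifier when it matches the BuiltIn matcher), the dot as an OPERATOR token, and the remainder as the keyword token itself.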
@@ -319,7 +353,9 @@ def get_tokens() -> Generator[Tuple[Token, ast.AST], None, None]:
         ):
             cancel_token.throw_if_canceled()
 
-            async for token in self.generate_sem_tokens(robot_token, robot_node):
+            async for token in self.generate_sem_tokens(
+                robot_token, robot_node, namespace, finder, builtin_library_doc
+            ):
                 current_line = token.lineno - 1
 
                 data.append(current_line - last_line)
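
The delta arithmetic visible in this loop (current_line - last_line, data.append(...)) follows the LSP semantic-tokens wire format: every token becomes five integers, with line and start column given relative to the previous token. A small standalone illustration of that encoding (not robotcode code):

from typing import List, Tuple

def encode(tokens: List[Tuple[int, int, int, int, int]]) -> List[int]:
    # tokens: absolute (line, col, length, type_index, modifier_bits)
    data: List[int] = []
    last_line = 0
    last_col = 0
    for line, col, length, type_index, modifier_bits in tokens:
        delta_line = line - last_line
        delta_col = col - last_col if delta_line == 0 else col
        data.extend([delta_line, delta_col, length, type_index, modifier_bits])
        last_line, last_col = line, col
    return data

print(encode([(0, 4, 3, 1, 0), (0, 10, 5, 2, 2), (1, 0, 7, 1, 0)]))
# -> [0, 4, 3, 1, 0, 0, 6, 5, 2, 2, 1, 0, 7, 1, 0]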
@@ -352,16 +388,31 @@ def get_tokens() -> Generator[Tuple[Token, ast.AST], None, None]:
     async def collect_threading(
         self, document: TextDocument, range: Optional[Range]
     ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
+        cancel_token = CancelationToken()
         try:
             model = await self.parent.documents_cache.get_model(document)
+            namespace = await self.parent.documents_cache.get_namespace(document)
+
+            builtin_library_doc = next(
+                (
+                    library.library_doc
+                    for library in (await namespace.get_libraries()).values()
+                    if library.name == BUILTIN_LIBRARY_NAME
+                    and library.import_name == BUILTIN_LIBRARY_NAME
+                    and library.import_range == Range.zero()
+                ),
+                None,
+            )
 
-            cancel_token = CancelationToken()
             return await asyncio.get_running_loop().run_in_executor(
                 None,
                 asyncio.run,
                 self.collect(
                     model,
                     range,
+                    namespace,
+                    await namespace.create_finder(),
+                    builtin_library_doc,
                     cancel_token,
                 ),
             )
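
collect_threading pushes the whole async collect() pass onto a worker thread by handing the un-awaited coroutine to asyncio.run inside run_in_executor, so the executor thread spins up its own event loop while the server's main loop stays responsive. A minimal sketch of that pattern (slow_collect is a stand-in for collect()):

import asyncio

async def slow_collect(n: int) -> int:
    await asyncio.sleep(0.1)  # stands in for walking the AST and yielding tokens
    return n * 2

async def main() -> None:
    loop = asyncio.get_running_loop()
    # asyncio.run executes the coroutine to completion inside the worker
    # thread's own, freshly created event loop.
    result = await loop.run_in_executor(None, asyncio.run, slow_collect(21))
    print(result)  # 42

asyncio.run(main())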
