Skip to content

Commit d7e89ed

Browse files
committed
implement highlight escape sequences, implement semantic highlight collect_range
1 parent cd4f72d commit d7e89ed

File tree

5 files changed

+155
-52
lines changed

5 files changed

+155
-52
lines changed

package.json

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,15 @@
110110
],
111111
"terminator": [
112112
"punctuation.terminator.robotframework"
113+
],
114+
"variableBegin": [
115+
"punctuation.definition.variable.begin.robotframework"
116+
],
117+
"variableEnd": [
118+
"punctuation.definition.variable.end.robotframework"
119+
],
120+
"escape": [
121+
"constant.character.escape.robotframework"
113122
]
114123
}
115124
}
@@ -128,7 +137,7 @@
128137
"configuration": "./language-configuration.json"
129138
}
130139
],
131-
"grammars": [
140+
"#grammars": [
132141
{
133142
"language": "robotframework",
134143
"scopeName": "source.robotframework",

robotcode/language_server/common/parts/semantic_tokens.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
SemanticTokensLegend,
2121
SemanticTokensOptions,
2222
SemanticTokensOptionsFull,
23-
SemanticTokensOptionsRange,
2423
SemanticTokensParams,
2524
SemanticTokensPartialResult,
2625
SemanticTokensRangeParams,
@@ -70,10 +69,12 @@ def extend_capabilities(self, capabilities: ServerCapabilities) -> None:
7069
token_types=[e.value for e in self.token_types],
7170
token_modifiers=[e.value for e in self.token_modifiers],
7271
),
73-
full=SemanticTokensOptionsFull(delta=True if len(self.collect_full_delta) > 0 else False)
72+
full=SemanticTokensOptionsFull(delta=True if len(self.collect_full_delta) else None)
73+
if len(self.collect_full) and len(self.collect_full_delta)
74+
else True
7475
if len(self.collect_full)
75-
else False,
76-
range=SemanticTokensOptionsRange() if len(self.collect_range) else False,
76+
else None,
77+
range=True if len(self.collect_range) else None,
7778
)
7879

7980
@rpc_method(name="textDocument/semanticTokens/full", param_type=SemanticTokensParams)

robotcode/language_server/robotframework/parts/semantic_tokens.py

Lines changed: 108 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from __future__ import annotations
22

33
import operator
4+
import re
45
from enum import Enum
56
from functools import reduce
67
from typing import (
@@ -20,12 +21,15 @@
2021
from ...common.language import language_id
2122
from ...common.text_document import TextDocument
2223
from ...common.types import (
24+
Range,
2325
SemanticTokenModifiers,
2426
SemanticTokens,
27+
SemanticTokensDelta,
28+
SemanticTokensDeltaPartialResult,
2529
SemanticTokensPartialResult,
2630
SemanticTokenTypes,
2731
)
28-
from ..utils.ast import Token
32+
from ..utils.ast import Token, token_in_range
2933

3034
if TYPE_CHECKING:
3135
from ..protocol import RobotLanguageServerProtocol
@@ -54,6 +58,9 @@ class RobotSemTokenTypes(Enum):
5458
SEPARATOR = "separator"
5559
TERMINATOR = "terminator"
5660
FOR_SEPARATOR = "forSeparator"
61+
VARIABLE_BEGIN = "variableBegin"
62+
VARIABLE_END = "variableEnd"
63+
ESCAPE = "escape"
5764

5865

5966
class SemTokenInfo(NamedTuple):
@@ -64,9 +71,20 @@ class SemTokenInfo(NamedTuple):
6471
sem_modifiers: Optional[Set[Enum]] = None
6572

6673
@classmethod
67-
def from_token(cls, token: Token, sem_token_type: Enum, sem_modifiers: Optional[Set[Enum]] = None) -> SemTokenInfo:
74+
def from_token(
75+
cls,
76+
token: Token,
77+
sem_token_type: Enum,
78+
sem_modifiers: Optional[Set[Enum]] = None,
79+
col_offset: Optional[int] = None,
80+
length: Optional[int] = None,
81+
) -> SemTokenInfo:
6882
return cls(
69-
token.lineno, token.col_offset, token.end_col_offset - token.col_offset, sem_token_type, sem_modifiers
83+
token.lineno,
84+
col_offset if col_offset is not None else token.col_offset,
85+
length if length is not None else token.end_col_offset - token.col_offset,
86+
sem_token_type,
87+
sem_modifiers,
7088
)
7189

7290

@@ -77,6 +95,8 @@ def __init__(self, parent: RobotLanguageServerProtocol) -> None:
7795
super().__init__(parent)
7896
parent.semantic_tokens.token_types += [e for e in RobotSemTokenTypes]
7997
parent.semantic_tokens.collect_full.add(self.collect_full)
98+
parent.semantic_tokens.collect_range.add(self.collect_range)
99+
parent.semantic_tokens.collect_full_delta.add(self.collect_full_delta)
80100

81101
@classmethod
82102
def generate_mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
@@ -139,6 +159,45 @@ def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
139159
cls.__mapping = cls.generate_mapping()
140160
return cls.__mapping
141161

162+
ESCAPE_REGEX = re.compile(
163+
r"(?P<t>[^\\]+)|(?P<x>\\([^xuU]|x[0-f]{2}|u[0-f]{4}|U[0-f]{8}){0,1})", re.MULTILINE | re.DOTALL
164+
)
165+
166+
@classmethod
167+
async def generate_sem_sub_tokens(
168+
cls, token: Token, col_offset: Optional[int] = None, length: Optional[int] = None
169+
) -> AsyncGenerator[SemTokenInfo, None]:
170+
from robot.parsing.lexer.tokens import Token as RobotToken
171+
172+
sem_info = cls.mapping().get(token.type, None) if token.type is not None else None
173+
174+
if sem_info is not None:
175+
if token.type == RobotToken.VARIABLE:
176+
if col_offset is None:
177+
col_offset = token.col_offset
178+
if length is None:
179+
length = token.end_col_offset - token.col_offset
180+
181+
yield SemTokenInfo(token.lineno, col_offset, 2, RobotSemTokenTypes.VARIABLE_BEGIN)
182+
yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset + 2, length - 3)
183+
yield SemTokenInfo(token.lineno, col_offset + length - 1, 1, RobotSemTokenTypes.VARIABLE_END)
184+
if token.type == RobotToken.ARGUMENT and "\\" in token.value:
185+
if col_offset is None:
186+
col_offset = token.col_offset
187+
if length is None:
188+
length = token.end_col_offset - token.col_offset
189+
190+
for g in cls.ESCAPE_REGEX.finditer(token.value):
191+
yield SemTokenInfo.from_token(
192+
token,
193+
sem_info[0] if g.group("x") is None or g.end() - g.start() == 1 else RobotSemTokenTypes.ESCAPE,
194+
sem_info[1],
195+
col_offset + g.start(),
196+
g.end() - g.start(),
197+
)
198+
else:
199+
yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset, length)
200+
142201
@classmethod
143202
async def generate_sem_tokens(cls, token: Token) -> AsyncGenerator[SemTokenInfo, None]:
144203
from robot.parsing.lexer.tokens import Token as RobotToken
@@ -148,44 +207,46 @@ async def generate_sem_tokens(cls, token: Token) -> AsyncGenerator[SemTokenInfo,
148207
try:
149208
for sub_token in token.tokenize_variables():
150209
last_sub_token = sub_token
151-
if sub_token.type is not None:
152-
sem_info = cls.mapping().get(sub_token.type, None)
153-
if sem_info is not None:
154-
yield SemTokenInfo.from_token(sub_token, sem_info[0], sem_info[1])
210+
async for e in cls.generate_sem_sub_tokens(sub_token):
211+
yield e
155212
except BaseException:
156213
pass
157-
if last_sub_token == token and token.type is not None:
158-
sem_info = cls.mapping().get(token.type, None)
159-
if sem_info is not None:
160-
yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1])
214+
if last_sub_token == token:
215+
async for e in cls.generate_sem_sub_tokens(last_sub_token):
216+
yield e
161217
elif last_sub_token is not None and last_sub_token.end_col_offset < token.end_col_offset:
162-
if token.type is not None:
163-
sem_info = cls.mapping().get(token.type, None)
164-
if sem_info is not None:
165-
yield SemTokenInfo(
166-
token.lineno,
167-
last_sub_token.end_col_offset,
168-
token.end_col_offset - last_sub_token.end_col_offset - last_sub_token.col_offset,
169-
sem_info[0],
170-
sem_info[1],
171-
)
172-
173-
elif token.type is not None:
174-
sem_info = cls.mapping().get(token.type, None)
175-
if sem_info is not None:
176-
yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1])
218+
async for e in cls.generate_sem_sub_tokens(
219+
token,
220+
last_sub_token.end_col_offset,
221+
token.end_col_offset - last_sub_token.end_col_offset - last_sub_token.col_offset,
222+
):
223+
yield e
177224

178-
@language_id("robotframework")
179-
async def collect_full(
180-
self, sender: Any, document: TextDocument, **kwargs: Any
225+
else:
226+
async for e in cls.generate_sem_sub_tokens(token):
227+
yield e
228+
229+
async def collect(
230+
self, document: TextDocument, range: Optional[Range]
181231
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
182232

183233
data = []
184234
last_line = 0
185235
last_col = 0
186236

187237
tokens = await self.parent.documents_cache.get_tokens(document)
238+
239+
start = True
188240
for robot_token in tokens:
241+
if range is not None:
242+
if start and not token_in_range(robot_token, range):
243+
continue
244+
else:
245+
start = False
246+
247+
if not start and not token_in_range(robot_token, range):
248+
break
249+
189250
async for token in self.generate_sem_tokens(robot_token):
190251
current_line = token.lineno - 1
191252

@@ -215,3 +276,21 @@ async def collect_full(
215276
)
216277

217278
return SemanticTokens(data=data)
279+
280+
@language_id("robotframework")
281+
async def collect_full(
282+
self, sender: Any, document: TextDocument, **kwargs: Any
283+
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
284+
return await self.collect(document, None)
285+
286+
@language_id("robotframework")
287+
async def collect_range(
288+
self, sender: Any, document: TextDocument, range: Range, **kwargs: Any
289+
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
290+
return await self.collect(document, range)
291+
292+
@language_id("robotframework")
293+
async def collect_full_delta(
294+
self, sender: Any, document: TextDocument, previous_result_id: str, **kwargs: Any
295+
) -> Union[SemanticTokens, SemanticTokensDelta, SemanticTokensDeltaPartialResult, None]:
296+
return None

robotcode/language_server/robotframework/utils/ast.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,16 @@ def range_from_token(token: Token) -> Range:
8585
)
8686

8787

88+
def token_in_range(token: Token, range: Range) -> bool:
89+
token_range = range_from_token(token)
90+
return token_range.start.is_in_range(range) or token_range.end.is_in_range(range)
91+
92+
93+
def node_in_range(node: ast.AST, range: Range) -> bool:
94+
node_range = range_from_node(node)
95+
return node_range.start.is_in_range(range) or node_range.end.is_in_range(range)
96+
97+
8898
def range_from_token_or_node(node: ast.AST, token: Optional[Token]) -> Range:
8999
if token is not None:
90100
return range_from_token(token)

robotcode/utils/async_event.py

Lines changed: 22 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -222,27 +222,13 @@ def _get_name_prefix(descriptor: AsyncEventDescriptorBase[Any, Any, Any]) -> str
222222
return f"{descriptor._owner.__qualname__}.{descriptor._owner_name}"
223223

224224

225-
class async_tasking_event_iterator( # noqa: N801
226-
AsyncEventDescriptorBase[_TCallable, Any, AsyncTaskingEventIterator[_TCallable, Any]]
227-
):
228-
def __init__(self, _func: _TCallable) -> None:
229-
super().__init__(
230-
_func, AsyncTaskingEventIterator[_TCallable, Any], task_name_prefix=lambda: _get_name_prefix(self)
231-
)
232-
233-
234225
class AsyncTaskingEvent(AsyncTaskingEventResultIteratorBase[_TCallable, _TResult]):
235226
async def __call__(self, *args: Any, **kwargs: Any) -> List[Union[_TResult, BaseException]]:
236227
return [a async for a in self._notify(*args, **kwargs)]
237228

238229

239-
class async_tasking_event(AsyncEventDescriptorBase[_TCallable, Any, AsyncTaskingEvent[_TCallable, Any]]): # noqa: N801
240-
def __init__(self, _func: _TCallable) -> None:
241-
super().__init__(_func, AsyncTaskingEvent[_TCallable, Any], task_name_prefix=lambda: _get_name_prefix(self))
242-
243-
244230
class AsyncThreadingEventResultIteratorBase(AsyncEventResultIteratorBase[_TCallable, _TResult]):
245-
__executor: Optional[ThreadPoolExecutor]
231+
__executor: Optional[ThreadPoolExecutor] = None
246232

247233
def __init__(self, *, thread_name_prefix: Optional[str] = None) -> None:
248234
super().__init__()
@@ -308,13 +294,13 @@ def _done(f: asyncio.Future[_TResult]) -> None:
308294
result_callback(None, e)
309295

310296
if executor is None:
311-
if self.__executor is None:
312-
self.__executor = ThreadPoolExecutor(
297+
if AsyncThreadingEventResultIteratorBase.__executor is None:
298+
AsyncThreadingEventResultIteratorBase.__executor = ThreadPoolExecutor(
313299
thread_name_prefix=self.__thread_name_prefix()
314300
if callable(self.__thread_name_prefix)
315301
else self.__thread_name_prefix
316302
)
317-
executor = self.__executor
303+
executor = AsyncThreadingEventResultIteratorBase.__executor
318304

319305
awaitables: List[asyncio.Future[_TResult]] = []
320306
for method in filter(
@@ -370,3 +356,21 @@ class async_threading_event( # noqa: N801
370356
):
371357
def __init__(self, _func: _TCallable) -> None:
372358
super().__init__(_func, AsyncThreadingEvent[_TCallable, Any], thread_name_prefix=lambda: _get_name_prefix(self))
359+
360+
361+
# async_tasking_event_iterator = async_threading_event_iterator
362+
# async_tasking_event = async_threading_event
363+
364+
365+
class async_tasking_event_iterator( # noqa: N801
366+
AsyncEventDescriptorBase[_TCallable, Any, AsyncTaskingEventIterator[_TCallable, Any]]
367+
):
368+
def __init__(self, _func: _TCallable) -> None:
369+
super().__init__(
370+
_func, AsyncTaskingEventIterator[_TCallable, Any], task_name_prefix=lambda: _get_name_prefix(self)
371+
)
372+
373+
374+
class async_tasking_event(AsyncEventDescriptorBase[_TCallable, Any, AsyncTaskingEvent[_TCallable, Any]]): # noqa: N801
375+
def __init__(self, _func: _TCallable) -> None:
376+
super().__init__(_func, AsyncTaskingEvent[_TCallable, Any], task_name_prefix=lambda: _get_name_prefix(self))

0 commit comments

Comments
 (0)