1
1
from __future__ import annotations
2
2
3
3
import operator
4
+ import re
4
5
from enum import Enum
5
6
from functools import reduce
6
7
from typing import (
20
21
from ...common .language import language_id
21
22
from ...common .text_document import TextDocument
22
23
from ...common .types import (
24
+ Range ,
23
25
SemanticTokenModifiers ,
24
26
SemanticTokens ,
27
+ SemanticTokensDelta ,
28
+ SemanticTokensDeltaPartialResult ,
25
29
SemanticTokensPartialResult ,
26
30
SemanticTokenTypes ,
27
31
)
28
- from ..utils .ast import Token
32
+ from ..utils .ast import Token , token_in_range
29
33
30
34
if TYPE_CHECKING :
31
35
from ..protocol import RobotLanguageServerProtocol
@@ -54,6 +58,9 @@ class RobotSemTokenTypes(Enum):
54
58
SEPARATOR = "separator"
55
59
TERMINATOR = "terminator"
56
60
FOR_SEPARATOR = "forSeparator"
61
+ VARIABLE_BEGIN = "variableBegin"
62
+ VARIABLE_END = "variableEnd"
63
+ ESCAPE = "escape"
57
64
58
65
59
66
class SemTokenInfo (NamedTuple ):
@@ -64,9 +71,20 @@ class SemTokenInfo(NamedTuple):
64
71
sem_modifiers : Optional [Set [Enum ]] = None
65
72
66
73
@classmethod
def from_token(
    cls,
    token: Token,
    sem_token_type: Enum,
    sem_modifiers: Optional[Set[Enum]] = None,
    col_offset: Optional[int] = None,
    length: Optional[int] = None,
) -> SemTokenInfo:
    """Build a SemTokenInfo for *token*.

    By default the token's own column offset and length are used;
    *col_offset* and *length* override them when only a slice of the
    token should be reported (e.g. a sub-range of a variable token).
    """
    if col_offset is None:
        col_offset = token.col_offset
    if length is None:
        length = token.end_col_offset - token.col_offset
    return cls(token.lineno, col_offset, length, sem_token_type, sem_modifiers)
71
89
72
90
@@ -77,6 +95,8 @@ def __init__(self, parent: RobotLanguageServerProtocol) -> None:
77
95
super ().__init__ (parent )
78
96
parent .semantic_tokens .token_types += [e for e in RobotSemTokenTypes ]
79
97
parent .semantic_tokens .collect_full .add (self .collect_full )
98
+ parent .semantic_tokens .collect_range .add (self .collect_range )
99
+ parent .semantic_tokens .collect_full_delta .add (self .collect_full_delta )
80
100
81
101
@classmethod
82
102
def generate_mapping (cls ) -> Dict [str , Tuple [Enum , Optional [Set [Enum ]]]]:
@@ -139,6 +159,45 @@ def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
139
159
cls .__mapping = cls .generate_mapping ()
140
160
return cls .__mapping
141
161
162
# Splits an argument value into plain-text runs (group "t") and backslash
# escape sequences (group "x"): \<char>, \xhh, \uhhhh, \UHHHHHHHH.
# FIX: the hex digits must be [0-9a-fA-F]; the former raw range "[0-f]"
# also covered punctuation such as ":;<=>?@" and therefore misclassified
# plain text like "\x::" as a hex escape sequence.
ESCAPE_REGEX = re.compile(
    r"(?P<t>[^\\]+)|(?P<x>\\([^xuU]|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})?)",
    re.MULTILINE | re.DOTALL,
)
165
+
166
@classmethod
async def generate_sem_sub_tokens(
    cls, token: Token, col_offset: Optional[int] = None, length: Optional[int] = None
) -> AsyncGenerator[SemTokenInfo, None]:
    """Yield the semantic-token pieces for a single Robot Framework token.

    VARIABLE tokens are split into begin marker / name / end marker;
    ARGUMENT values containing a backslash are split into text runs and
    escape sequences via ESCAPE_REGEX; every other mapped token type is
    yielded as one SemTokenInfo.

    *col_offset*/*length* override the token's own position (used by the
    caller when only a trailing slice of the token should be reported).
    Tokens whose type has no entry in mapping() yield nothing.
    """
    from robot.parsing.lexer.tokens import Token as RobotToken

    sem_info = cls.mapping().get(token.type, None) if token.type is not None else None

    if sem_info is not None:
        if token.type == RobotToken.VARIABLE:
            if col_offset is None:
                col_offset = token.col_offset
            if length is None:
                length = token.end_col_offset - token.col_offset

            # 2-char opener (e.g. "${"), the name itself, 1-char closer "}".
            yield SemTokenInfo(token.lineno, col_offset, 2, RobotSemTokenTypes.VARIABLE_BEGIN)
            yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset + 2, length - 3)
            yield SemTokenInfo(token.lineno, col_offset + length - 1, 1, RobotSemTokenTypes.VARIABLE_END)
        # BUG FIX: this branch was a separate `if`, so a VARIABLE token also
        # fell through to the final `else` below and produced an extra,
        # overlapping token spanning the whole variable. `elif` makes the
        # three cases mutually exclusive.
        elif token.type == RobotToken.ARGUMENT and "\\" in token.value:
            if col_offset is None:
                col_offset = token.col_offset
            if length is None:
                length = token.end_col_offset - token.col_offset

            for g in cls.ESCAPE_REGEX.finditer(token.value):
                yield SemTokenInfo.from_token(
                    token,
                    # A lone "\" (match length 1) keeps the plain argument
                    # type; only complete escape sequences get ESCAPE.
                    sem_info[0] if g.group("x") is None or g.end() - g.start() == 1 else RobotSemTokenTypes.ESCAPE,
                    sem_info[1],
                    col_offset + g.start(),
                    g.end() - g.start(),
                )
        else:
            yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset, length)
200
+
142
201
@classmethod
143
202
async def generate_sem_tokens (cls , token : Token ) -> AsyncGenerator [SemTokenInfo , None ]:
144
203
from robot .parsing .lexer .tokens import Token as RobotToken
@@ -148,44 +207,46 @@ async def generate_sem_tokens(cls, token: Token) -> AsyncGenerator[SemTokenInfo,
148
207
try :
149
208
for sub_token in token .tokenize_variables ():
150
209
last_sub_token = sub_token
151
- if sub_token .type is not None :
152
- sem_info = cls .mapping ().get (sub_token .type , None )
153
- if sem_info is not None :
154
- yield SemTokenInfo .from_token (sub_token , sem_info [0 ], sem_info [1 ])
210
+ async for e in cls .generate_sem_sub_tokens (sub_token ):
211
+ yield e
155
212
except BaseException :
156
213
pass
157
- if last_sub_token == token and token .type is not None :
158
- sem_info = cls .mapping ().get (token .type , None )
159
- if sem_info is not None :
160
- yield SemTokenInfo .from_token (token , sem_info [0 ], sem_info [1 ])
214
+ if last_sub_token == token :
215
+ async for e in cls .generate_sem_sub_tokens (last_sub_token ):
216
+ yield e
161
217
elif last_sub_token is not None and last_sub_token .end_col_offset < token .end_col_offset :
162
- if token .type is not None :
163
- sem_info = cls .mapping ().get (token .type , None )
164
- if sem_info is not None :
165
- yield SemTokenInfo (
166
- token .lineno ,
167
- last_sub_token .end_col_offset ,
168
- token .end_col_offset - last_sub_token .end_col_offset - last_sub_token .col_offset ,
169
- sem_info [0 ],
170
- sem_info [1 ],
171
- )
172
-
173
- elif token .type is not None :
174
- sem_info = cls .mapping ().get (token .type , None )
175
- if sem_info is not None :
176
- yield SemTokenInfo .from_token (token , sem_info [0 ], sem_info [1 ])
218
+ async for e in cls .generate_sem_sub_tokens (
219
+ token ,
220
+ last_sub_token .end_col_offset ,
221
+ token .end_col_offset - last_sub_token .end_col_offset - last_sub_token .col_offset ,
222
+ ):
223
+ yield e
177
224
178
- @language_id ("robotframework" )
179
- async def collect_full (
180
- self , sender : Any , document : TextDocument , ** kwargs : Any
225
+ else :
226
+ async for e in cls .generate_sem_sub_tokens (token ):
227
+ yield e
228
+
229
+ async def collect (
230
+ self , document : TextDocument , range : Optional [Range ]
181
231
) -> Union [SemanticTokens , SemanticTokensPartialResult , None ]:
182
232
183
233
data = []
184
234
last_line = 0
185
235
last_col = 0
186
236
187
237
tokens = await self .parent .documents_cache .get_tokens (document )
238
+
239
+ start = True
188
240
for robot_token in tokens :
241
+ if range is not None :
242
+ if start and not token_in_range (robot_token , range ):
243
+ continue
244
+ else :
245
+ start = False
246
+
247
+ if not start and not token_in_range (robot_token , range ):
248
+ break
249
+
189
250
async for token in self .generate_sem_tokens (robot_token ):
190
251
current_line = token .lineno - 1
191
252
@@ -215,3 +276,21 @@ async def collect_full(
215
276
)
216
277
217
278
return SemanticTokens (data = data )
279
+
280
@language_id("robotframework")
async def collect_full(
    self, sender: Any, document: TextDocument, **kwargs: Any
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
    """Handle textDocument/semanticTokens/full.

    Delegates to collect() with range=None so tokens for the whole
    document are produced.
    """
    return await self.collect(document, None)
285
+
286
@language_id("robotframework")
async def collect_range(
    self, sender: Any, document: TextDocument, range: Range, **kwargs: Any
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
    """Handle textDocument/semanticTokens/range.

    Delegates to collect() with the requested range so only tokens
    inside that range are produced.
    """
    return await self.collect(document, range)
291
+
292
@language_id("robotframework")
async def collect_full_delta(
    self, sender: Any, document: TextDocument, previous_result_id: str, **kwargs: Any
) -> Union[SemanticTokens, SemanticTokensDelta, SemanticTokensDeltaPartialResult, None]:
    """Handle textDocument/semanticTokens/full/delta.

    Delta computation is not implemented; returning None signals the
    client/dispatcher that no delta result is available.
    """
    return None
0 commit comments