
Commit cfac7f9

discovering and semantic parsing now run in their own threads
1 parent d7e89ed commit cfac7f9

File tree

4 files changed: +58 -38


package.json

Lines changed: 1 addition & 1 deletion
@@ -137,7 +137,7 @@
         "configuration": "./language-configuration.json"
       }
     ],
-    "#grammars": [
+    "grammars": [
       {
         "language": "robotframework",
         "scopeName": "source.robotframework",

robotcode/language_server/robotframework/parts/discovering.py

Lines changed: 23 additions & 13 deletions
@@ -1,15 +1,16 @@
 from __future__ import annotations
 
+import ast
+import asyncio
+from itertools import chain
 from pathlib import Path
 from typing import TYPE_CHECKING, Iterator, List, Optional
 
 from ....jsonrpc2.protocol import rpc_method
-from ....utils.async_itertools import async_chain
 from ....utils.logging import LoggingDescriptor
 from ....utils.uri import Uri
 from ...common.text_document import TextDocument
 from ...common.types import DocumentUri, Model, Position, Range, TextDocumentIdentifier
-from ...robotframework.utils.async_ast import walk
 from .protocol_part import RobotLanguageServerProtocolPart
 
 if TYPE_CHECKING:
@@ -62,8 +63,7 @@ def get_document(self, uri: DocumentUri) -> TextDocument:
 
         return TextDocument(document_uri=uri, language_id="robot", version=None, text=text)
 
-    @rpc_method(name="robot/discovering/getTestsFromWorkspace", param_type=GetAllTestsParams)
-    async def get_tests_from_workspace(self, paths: Optional[List[str]]) -> List[TestItem]:
+    def get_tests_from_workspace_threading(self, paths: Optional[List[str]]) -> List[TestItem]:
        from robot.output.logger import LOGGER
        from robot.running import TestCase, TestSuite
 
@@ -151,13 +151,16 @@ def nonexisting_paths(paths: List[str]) -> Iterator[str]:
         except BaseException as e:
             return [TestItem(type="error", id=Path.cwd().name, label=Path.cwd().name, error=str(e))]
 
-    @rpc_method(name="robot/discovering/getTestsFromDocument", param_type=GetTestsParams)
-    async def get_tests_from_document(self, text_document: TextDocumentIdentifier, id: Optional[str]) -> List[TestItem]:
+    @rpc_method(name="robot/discovering/getTestsFromWorkspace", param_type=GetAllTestsParams)
+    async def get_tests_from_workspace(self, paths: Optional[List[str]]) -> List[TestItem]:
+        return await asyncio.get_event_loop().run_in_executor(None, self.get_tests_from_workspace_threading, paths)
+
+    def get_tests_from_document_threading(
+        self, text_document: TextDocumentIdentifier, id: Optional[str], model: ast.AST
+    ) -> List[TestItem]:
         from robot.parsing.model.blocks import TestCase
         from robot.parsing.model.statements import Tags
 
-        model = await self.parent.documents_cache.get_model(self.get_document(text_document.uri))
-
         return [
             TestItem(
                 type="test",
@@ -172,12 +175,19 @@ async def get_tests_from_document(self, text_document: TextDocumentIdentifier, i
                     ),
                 ),
                 tags=[
-                    str(tag)
-                    async for tag in async_chain(
-                        *[tags.values async for tags in walk(test_case) if isinstance(tags, Tags)]
-                    )
+                    str(tag) for tag in chain(*[tags.values for tags in ast.walk(test_case) if isinstance(tags, Tags)])
                 ],
             )
-            async for test_case in walk(model)
+            for test_case in ast.walk(model)
             if isinstance(test_case, TestCase)
         ]
+
+    @rpc_method(name="robot/discovering/getTestsFromDocument", param_type=GetTestsParams)
+    async def get_tests_from_document(self, text_document: TextDocumentIdentifier, id: Optional[str]) -> List[TestItem]:
+        return await asyncio.get_event_loop().run_in_executor(
+            None,
+            self.get_tests_from_document_threading,
+            text_document,
+            id,
+            await self.parent.documents_cache.get_model(self.get_document(text_document.uri)),
+        )
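Note the pattern this diff introduces: the async RPC entry points stay on the event loop but delegate the CPU-bound Robot Framework work to a worker thread via run_in_executor, and anything that must be awaited (here, the parsed model) is resolved on the loop before crossing the thread boundary, since the worker cannot await. A minimal self-contained sketch of that pattern, with placeholder names and a stand-in discovery body rather than the project's real code:

import asyncio
from typing import List


def get_tests_threading(paths: List[str]) -> List[str]:
    # Stand-in for the CPU-bound part (parsing files, walking suites).
    # Running it in a worker thread keeps the event loop responsive.
    return sorted(paths)


async def get_tests(paths: List[str]) -> List[str]:
    # The async entry point only awaits the executor; passing None
    # selects the event loop's default ThreadPoolExecutor.
    return await asyncio.get_event_loop().run_in_executor(None, get_tests_threading, paths)


print(asyncio.run(get_tests(["suite_b.robot", "suite_a.robot"])))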

robotcode/language_server/robotframework/parts/semantic_tokens.py

Lines changed: 29 additions & 22 deletions
@@ -1,15 +1,17 @@
 from __future__ import annotations
 
+import asyncio
 import operator
 import re
 from enum import Enum
 from functools import reduce
 from typing import (
     TYPE_CHECKING,
     Any,
-    AsyncGenerator,
     Dict,
     FrozenSet,
+    Generator,
+    Iterable,
     NamedTuple,
     Optional,
     Set,
@@ -96,7 +98,7 @@ def __init__(self, parent: RobotLanguageServerProtocol) -> None:
         parent.semantic_tokens.token_types += [e for e in RobotSemTokenTypes]
         parent.semantic_tokens.collect_full.add(self.collect_full)
         parent.semantic_tokens.collect_range.add(self.collect_range)
-        parent.semantic_tokens.collect_full_delta.add(self.collect_full_delta)
+        # parent.semantic_tokens.collect_full_delta.add(self.collect_full_delta)
 
     @classmethod
     def generate_mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
@@ -164,9 +166,9 @@ def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
         )
 
     @classmethod
-    async def generate_sem_sub_tokens(
+    def generate_sem_sub_tokens(
         cls, token: Token, col_offset: Optional[int] = None, length: Optional[int] = None
-    ) -> AsyncGenerator[SemTokenInfo, None]:
+    ) -> Generator[SemTokenInfo, None, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken
 
         sem_info = cls.mapping().get(token.type, None) if token.type is not None else None
@@ -199,55 +201,55 @@ async def generate_sem_sub_tokens(
             yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset, length)
 
     @classmethod
-    async def generate_sem_tokens(cls, token: Token) -> AsyncGenerator[SemTokenInfo, None]:
+    def generate_sem_tokens(cls, token: Token) -> Generator[SemTokenInfo, None, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken
 
         if token.type in RobotToken.ALLOW_VARIABLES:
             last_sub_token = token
             try:
                 for sub_token in token.tokenize_variables():
                     last_sub_token = sub_token
-                    async for e in cls.generate_sem_sub_tokens(sub_token):
+                    for e in cls.generate_sem_sub_tokens(sub_token):
                         yield e
             except BaseException:
                 pass
             if last_sub_token == token:
-                async for e in cls.generate_sem_sub_tokens(last_sub_token):
+                for e in cls.generate_sem_sub_tokens(last_sub_token):
                     yield e
             elif last_sub_token is not None and last_sub_token.end_col_offset < token.end_col_offset:
-                async for e in cls.generate_sem_sub_tokens(
+                for e in cls.generate_sem_sub_tokens(
                     token,
                     last_sub_token.end_col_offset,
                     token.end_col_offset - last_sub_token.end_col_offset - last_sub_token.col_offset,
                 ):
                     yield e
 
         else:
-            async for e in cls.generate_sem_sub_tokens(token):
+            for e in cls.generate_sem_sub_tokens(token):
                 yield e
 
-    async def collect(
-        self, document: TextDocument, range: Optional[Range]
+    def collect_threading(
+        self, tokens: Iterable[Token], range: Optional[Range]
     ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
 
         data = []
        last_line = 0
        last_col = 0
 
-        tokens = await self.parent.documents_cache.get_tokens(document)
-
         start = True
+
         for robot_token in tokens:
             if range is not None:
-                if start and not token_in_range(robot_token, range):
-                    continue
+                if start:
+                    if not token_in_range(robot_token, range):
+                        continue
+                    else:
+                        start = False
                 else:
-                    start = False
-
-                if not start and not token_in_range(robot_token, range):
-                    break
+                    if not token_in_range(robot_token, range):
+                        break
 
-            async for token in self.generate_sem_tokens(robot_token):
+            for token in self.generate_sem_tokens(robot_token):
                 current_line = token.lineno - 1
 
                 data.append(current_line - last_line)
@@ -281,13 +283,18 @@ async def collect(
     async def collect_full(
         self, sender: Any, document: TextDocument, **kwargs: Any
     ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
-        return await self.collect(document, None)
+
+        return await asyncio.get_event_loop().run_in_executor(
+            None, self.collect_threading, await self.parent.documents_cache.get_tokens(document), None
+        )
 
     @language_id("robotframework")
     async def collect_range(
         self, sender: Any, document: TextDocument, range: Range, **kwargs: Any
     ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
-        return await self.collect(document, range)
+        return await asyncio.get_event_loop().run_in_executor(
+            None, self.collect_threading, await self.parent.documents_cache.get_tokens(document), range
+        )
 
     @language_id("robotframework")
     async def collect_full_delta(
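Two things happen in this file: the token generators lose their async flavor (a worker thread has no event loop to drive an async generator, so Generator replaces AsyncGenerator), and collect becomes collect_threading, receiving pre-fetched tokens instead of awaiting the document cache itself. A rough sketch of the resulting shape, using simplified stand-in types rather than the real Token and SemTokenInfo classes:

import asyncio
from typing import Generator, Iterable, List


def generate_sem_tokens(token: str) -> Generator[str, None, None]:
    # A plain generator can be consumed inside a worker thread; an
    # async generator would need a running event loop to iterate it.
    yield token.lower()


def collect_threading(tokens: Iterable[str]) -> List[str]:
    # Purely synchronous: the tokens were fetched on the loop and
    # passed in, so nothing in here needs to await.
    return [sem for tok in tokens for sem in generate_sem_tokens(tok)]


async def collect_full(tokens: Iterable[str]) -> List[str]:
    return await asyncio.get_event_loop().run_in_executor(None, collect_threading, tokens)


print(asyncio.run(collect_full(["KEYWORD", "ARGUMENT"])))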

robotcode/language_server/robotframework/utils/ast.py

Lines changed: 5 additions & 2 deletions
@@ -43,9 +43,12 @@ def tokenize_variables(self) -> Iterator[Token]:
 
 
 @runtime_checkable
-class Statement(Protocol):
+class HasTokens(Protocol):
     tokens: Tuple[Token, ...]
 
+
+@runtime_checkable
+class Statement(Protocol):
     def get_token(self, type: str) -> Token:
         ...
 
@@ -142,7 +145,7 @@ def whitespace_from_begin_of_token(token: Token) -> str:
     return result
 
 
-def get_tokens_at_position(node: Statement, position: Position) -> List[Token]:
+def get_tokens_at_position(node: HasTokens, position: Position) -> List[Token]:
     return [t for t in node.tokens if position.is_in_range(range := range_from_token(t)) or range.end == position]
 
 
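This refactoring splits the old Statement protocol in two: plain token access moves into a new HasTokens protocol, so get_tokens_at_position accepts any node that carries tokens, while Statement keeps the get_token method contract. A minimal sketch of the idea with runtime_checkable structural typing (Token simplified to str; the node class is invented for the example):

from typing import List, Protocol, Tuple, runtime_checkable


@runtime_checkable
class HasTokens(Protocol):
    # Satisfied by any object exposing a tokens attribute.
    tokens: Tuple[str, ...]


class KeywordCall:
    # No inheritance needed: structural typing makes this a HasTokens.
    def __init__(self) -> None:
        self.tokens: Tuple[str, ...] = ("Log", "message")


def get_tokens_at_position(node: HasTokens) -> List[str]:
    return list(node.tokens)


node = KeywordCall()
print(isinstance(node, HasTokens))  # True: attribute presence is checked at runtime
print(get_tokens_at_position(node))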
