Skip to content

Commit 0615fe4

Browse files
committed
Robocop diagnostics and semantic tokens now running in Executor
1 parent cfac7f9 commit 0615fe4

File tree

2 files changed

+83
-57
lines changed

2 files changed

+83
-57
lines changed
Lines changed: 64 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,14 @@
11
from __future__ import annotations
22

3+
import ast
4+
import asyncio
35
import io
46
from typing import TYPE_CHECKING, Any, List, Optional
57

68
from ....utils.logging import LoggingDescriptor
79
from ...common.language import language_id
810
from ...common.parts.diagnostics import DiagnosticsResult
11+
from ...common.parts.workspace import WorkspaceFolder
912
from ...common.text_document import TextDocument
1013
from ...common.types import Diagnostic, DiagnosticSeverity, Position, Range
1114

@@ -46,59 +49,74 @@ async def get_config(self, document: TextDocument) -> Optional[RoboCopConfig]:
4649
@_logger.call
4750
async def collect_diagnostics(self, sender: Any, document: TextDocument) -> DiagnosticsResult:
4851

49-
from robocop.config import Config
50-
from robocop.rules import RuleSeverity
51-
from robocop.run import Robocop
52-
53-
result: List[Diagnostic] = []
5452
try:
5553
workspace_folder = self.parent.workspace.get_workspace_folder(document.uri)
5654
if workspace_folder is not None:
5755
extension_config = await self.get_config(document)
5856

5957
if extension_config is not None and extension_config.enabled:
6058

61-
with io.StringIO("") as output:
62-
config = Config(str(workspace_folder.uri.to_path()))
63-
64-
config.exec_dir = str(workspace_folder.uri.to_path())
65-
66-
config.output = output
67-
68-
if extension_config.include:
69-
config.include = set(extension_config.include)
70-
if extension_config.exclude:
71-
config.exclude = set(extension_config.exclude)
72-
if extension_config.configurations:
73-
config.configure = set(extension_config.configurations)
74-
75-
analyser = Robocop(from_cli=False, config=config)
76-
analyser.reload_config()
77-
78-
model = await self.parent.documents_cache.get_model(document)
79-
80-
issues = analyser.run_check(model, str(document.uri.to_path()), document.text)
81-
82-
for issue in issues:
83-
d = Diagnostic(
84-
range=Range(
85-
start=Position(line=max(0, issue.line - 1), character=issue.col),
86-
end=Position(line=max(0, issue.end_line - 1), character=issue.end_col),
87-
),
88-
message=issue.desc,
89-
severity=DiagnosticSeverity.INFORMATION
90-
if issue.severity == RuleSeverity.INFO
91-
else DiagnosticSeverity.WARNING
92-
if issue.severity == RuleSeverity.WARNING
93-
else DiagnosticSeverity.ERROR
94-
if issue.severity == RuleSeverity.ERROR
95-
else DiagnosticSeverity.HINT,
96-
source=self.source_name,
97-
code=f"{issue.severity.value}{issue.rule_id}",
98-
)
99-
100-
result.append(d)
59+
model = await self.parent.documents_cache.get_model(document)
60+
result = await self.collect_threading(document, workspace_folder, extension_config, model)
61+
return DiagnosticsResult(self.collect_diagnostics, result)
10162
except BaseException:
10263
pass
10364

104-
return DiagnosticsResult(self.collect_diagnostics, result)
65+
return DiagnosticsResult(self.collect_diagnostics, [])
66+
67+
async def collect_threading(
68+
self, document: TextDocument, workspace_folder: WorkspaceFolder, extension_config: RoboCopConfig, model: ast.AST
69+
) -> List[Diagnostic]:
70+
return await asyncio.get_event_loop().run_in_executor(
71+
None, self.collect, document, workspace_folder, extension_config, model
72+
)
73+
74+
def collect(
75+
self, document: TextDocument, workspace_folder: WorkspaceFolder, extension_config: RoboCopConfig, model: ast.AST
76+
) -> List[Diagnostic]:
77+
from robocop.config import Config
78+
from robocop.rules import RuleSeverity
79+
from robocop.run import Robocop
80+
81+
result: List[Diagnostic] = []
82+
83+
with io.StringIO("") as output:
84+
config = Config(str(workspace_folder.uri.to_path()))
85+
86+
config.exec_dir = str(workspace_folder.uri.to_path())
87+
88+
config.output = output
89+
90+
if extension_config.include:
91+
config.include = set(extension_config.include)
92+
if extension_config.exclude:
93+
config.exclude = set(extension_config.exclude)
94+
if extension_config.configurations:
95+
config.configure = set(extension_config.configurations)
96+
97+
analyser = Robocop(from_cli=False, config=config)
98+
analyser.reload_config()
99+
100+
issues = analyser.run_check(model, str(document.uri.to_path()), document.text)
101+
102+
for issue in issues:
103+
d = Diagnostic(
104+
range=Range(
105+
start=Position(line=max(0, issue.line - 1), character=issue.col),
106+
end=Position(line=max(0, issue.end_line - 1), character=issue.end_col),
107+
),
108+
message=issue.desc,
109+
severity=DiagnosticSeverity.INFORMATION
110+
if issue.severity == RuleSeverity.INFO
111+
else DiagnosticSeverity.WARNING
112+
if issue.severity == RuleSeverity.WARNING
113+
else DiagnosticSeverity.ERROR
114+
if issue.severity == RuleSeverity.ERROR
115+
else DiagnosticSeverity.HINT,
116+
source=self.source_name,
117+
code=f"{issue.severity.value}{issue.rule_id}",
118+
)
119+
120+
result.append(d)
121+
122+
return result

robotcode/language_server/robotframework/parts/semantic_tokens.py

Lines changed: 19 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -170,6 +170,7 @@ def generate_sem_sub_tokens(
170170
cls, token: Token, col_offset: Optional[int] = None, length: Optional[int] = None
171171
) -> Generator[SemTokenInfo, None, None]:
172172
from robot.parsing.lexer.tokens import Token as RobotToken
173+
from robot.variables.search import is_variable
173174

174175
sem_info = cls.mapping().get(token.type, None) if token.type is not None else None
175176

@@ -180,9 +181,13 @@ def generate_sem_sub_tokens(
180181
if length is None:
181182
length = token.end_col_offset - token.col_offset
182183

183-
yield SemTokenInfo(token.lineno, col_offset, 2, RobotSemTokenTypes.VARIABLE_BEGIN)
184-
yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset + 2, length - 3)
185-
yield SemTokenInfo(token.lineno, col_offset + length - 1, 1, RobotSemTokenTypes.VARIABLE_END)
184+
if is_variable(token.value):
185+
yield SemTokenInfo(token.lineno, col_offset, 2, RobotSemTokenTypes.VARIABLE_BEGIN)
186+
yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset + 2, length - 3)
187+
yield SemTokenInfo(token.lineno, col_offset + length - 1, 1, RobotSemTokenTypes.VARIABLE_END)
188+
else:
189+
yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1])
190+
186191
if token.type == RobotToken.ARGUMENT and "\\" in token.value:
187192
if col_offset is None:
188193
col_offset = token.col_offset
@@ -228,7 +233,7 @@ def generate_sem_tokens(cls, token: Token) -> Generator[SemTokenInfo, None, None
228233
for e in cls.generate_sem_sub_tokens(token):
229234
yield e
230235

231-
def collect_threading(
236+
def collect(
232237
self, tokens: Iterable[Token], range: Optional[Range]
233238
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
234239

@@ -279,22 +284,25 @@ def collect_threading(
279284

280285
return SemanticTokens(data=data)
281286

287+
async def collect_threading(
288+
self, document: TextDocument, range: Optional[Range]
289+
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
290+
return await asyncio.get_event_loop().run_in_executor(
291+
None, self.collect, await self.parent.documents_cache.get_tokens(document), range
292+
)
293+
282294
@language_id("robotframework")
283295
async def collect_full(
284296
self, sender: Any, document: TextDocument, **kwargs: Any
285297
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
286-
287-
return await asyncio.get_event_loop().run_in_executor(
288-
None, self.collect_threading, await self.parent.documents_cache.get_tokens(document), None
289-
)
298+
return await document.get_cache(self.collect_threading, None)
299+
# return await self.collect_threading(document, None)
290300

291301
@language_id("robotframework")
292302
async def collect_range(
293303
self, sender: Any, document: TextDocument, range: Range, **kwargs: Any
294304
) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
295-
return await asyncio.get_event_loop().run_in_executor(
296-
None, self.collect_threading, await self.parent.documents_cache.get_tokens(document), range
297-
)
305+
return await self.collect_threading(document, range)
298306

299307
@language_id("robotframework")
300308
async def collect_full_delta(

0 commit comments

Comments (0)