Commit f1d5072

Merge #5526 (authored by bors[bot] and kjeremy)
2 parents: 2cb079b + 195111d

5526: Handle semantic token deltas r=kjeremy a=kjeremy

This basically takes the naive approach: we always compute the tokens, but save space sending them over the wire, which apparently solves some GC problems with vscode.

This is waiting for gluon-lang/lsp-types#174 to be merged. I am also unsure of the best way to stash the tokens into `DocumentData` in a safe manner.

Co-authored-by: kjeremy <[email protected]>
Co-authored-by: Jeremy Kolb <[email protected]>
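
For context, the delta flow this diff implements: the client keeps the token set it last received together with its `result_id`, and sends that id back as `previous_result_id` on the next `textDocument/semanticTokens/edits` request; the server answers with either a fresh full token set or a list of edits against the cached one. Below is a minimal sketch of what applying such an edit looks like on the client side, using hypothetical stand-in types for the flattened token data (five `u32`s per token, matching the convention `diff_tokens` uses further down):

/// Hypothetical stand-in for lsp_types::SemanticTokensEdit: `start` and
/// `delete_count` index into the flattened `u32` token data, not into tokens.
struct TokenEdit {
    start: u32,
    delete_count: u32,
    data: Vec<u32>,
}

/// Splice a server-sent edit list into the client's cached flat token data.
fn apply_edits(tokens: &mut Vec<u32>, mut edits: Vec<TokenEdit>) {
    // All edits reference offsets in the *previous* state, so apply them
    // back-to-front to keep earlier offsets valid.
    edits.sort_by_key(|e| e.start);
    for edit in edits.into_iter().rev() {
        let start = edit.start as usize;
        let end = start + edit.delete_count as usize;
        tokens.splice(start..end, edit.data);
    }
}

fn main() {
    // Two tokens, flattened: (1,2,3,4,5) and (6,7,8,9,10).
    let mut cached = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
    // The kind of edit `diff_tokens` produces for an append at the end.
    let edits = vec![TokenEdit { start: 10, delete_count: 0, data: vec![11, 12, 13, 14, 15] }];
    apply_edits(&mut cached, edits);
    assert_eq!(cached.len(), 15);
}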

File tree

7 files changed: +208 -14 lines


crates/rust-analyzer/src/caps.rs

Lines changed: 3 additions & 1 deletion

@@ -76,7 +76,9 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabilities {
                 token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(),
             },

-            document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
+            document_provider: Some(SemanticTokensDocumentProvider::Edits {
+                edits: Some(true),
+            }),
             range_provider: Some(true),
             work_done_progress_options: Default::default(),
         }
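
(This swaps the boolean `documentProvider` capability for the `Edits` variant, advertising that the server can answer delta requests in addition to full-document ones; the variant comes from the proposed-spec API added in gluon-lang/lsp-types#174 mentioned above.)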

crates/rust-analyzer/src/document.rs

Lines changed: 3 additions & 3 deletions

@@ -1,9 +1,9 @@
 //! In-memory document information.

 /// Information about a document that the Language Client
-// knows about.
-// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
-// client notifications.
+/// knows about.
+/// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
+/// client notifications.
 #[derive(Debug, Clone)]
 pub(crate) struct DocumentData {
     pub version: Option<i64>,

crates/rust-analyzer/src/global_state.rs

Lines changed: 6 additions & 2 deletions

@@ -7,8 +7,8 @@ use std::{sync::Arc, time::Instant};

 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
-use lsp_types::Url;
-use parking_lot::RwLock;
+use lsp_types::{SemanticTokens, Url};
+use parking_lot::{Mutex, RwLock};
 use ra_db::{CrateId, VfsPath};
 use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId};
 use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target};
@@ -71,6 +71,7 @@ pub(crate) struct GlobalState {
     pub(crate) analysis_host: AnalysisHost,
     pub(crate) diagnostics: DiagnosticCollection,
     pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
+    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) status: Status,
     pub(crate) source_root_config: SourceRootConfig,
@@ -86,6 +87,7 @@ pub(crate) struct GlobalStateSnapshot {
     pub(crate) check_fixes: CheckFixes,
     pub(crate) latest_requests: Arc<RwLock<LatestRequests>>,
     mem_docs: FxHashMap<VfsPath, DocumentData>,
+    pub semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
 }
@@ -120,6 +122,7 @@ impl GlobalState {
             analysis_host,
             diagnostics: Default::default(),
             mem_docs: FxHashMap::default(),
+            semantic_tokens_cache: Arc::new(Default::default()),
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
             status: Status::default(),
             source_root_config: SourceRootConfig::default(),
@@ -186,6 +189,7 @@ impl GlobalState {
            latest_requests: Arc::clone(&self.latest_requests),
            check_fixes: Arc::clone(&self.diagnostics.check_fixes),
            mem_docs: self.mem_docs.clone(),
+           semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
        }
    }
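
The cache is wrapped in `Arc<Mutex<...>>` because the same map must be reachable from two places: handler threads, which get a `GlobalStateSnapshot` and insert freshly computed tokens, and the main loop, which evicts entries on `didClose`. A snapshot clones the `Arc`, not the map. A minimal self-contained sketch of that sharing pattern, with std types standing in for `parking_lot::Mutex`, `FxHashMap`, `Url`, and `SemanticTokens`:

use std::{collections::HashMap, sync::{Arc, Mutex}, thread};

fn main() {
    let cache: Arc<Mutex<HashMap<String, Vec<u32>>>> = Arc::new(Default::default());

    // A "snapshot" clones the Arc, not the map: handlers on worker threads
    // write through the same cache the main loop later evicts from.
    // (std's lock() returns a Result; parking_lot's does not.)
    let snapshot_cache = Arc::clone(&cache);
    let worker = thread::spawn(move || {
        snapshot_cache.lock().unwrap().insert("file:///a.rs".into(), vec![0; 5]);
    });
    worker.join().unwrap();

    // Main-loop side: eviction on didClose.
    cache.lock().unwrap().remove("file:///a.rs");
    assert!(cache.lock().unwrap().is_empty());
}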

crates/rust-analyzer/src/handlers.rs

Lines changed: 38 additions & 3 deletions

@@ -13,9 +13,10 @@ use lsp_types::{
     CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
     CodeActionKind, CodeLens, Command, CompletionItem, Diagnostic, DocumentFormattingParams,
     DocumentHighlight, DocumentSymbol, FoldingRange, FoldingRangeParams, HoverContents, Location,
-    Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensParams,
-    SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
-    SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+    Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensEditResult,
+    SemanticTokensEditsParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag,
+    TextDocumentIdentifier, Url, WorkspaceEdit,
 };
 use ra_ide::{
     FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,
@@ -1179,6 +1180,40 @@ pub(crate) fn handle_semantic_tokens(

     let highlights = snap.analysis.highlight(file_id)?;
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+    // Unconditionally cache the tokens
+    snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
+
+    Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_edits(
+    snap: GlobalStateSnapshot,
+    params: SemanticTokensEditsParams,
+) -> Result<Option<SemanticTokensEditResult>> {
+    let _p = profile("handle_semantic_tokens_edits");
+
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let text = snap.analysis.file_text(file_id)?;
+    let line_index = snap.analysis.file_line_index(file_id)?;
+
+    let highlights = snap.analysis.highlight(file_id)?;
+
+    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+    let mut cache = snap.semantic_tokens_cache.lock();
+    let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
+    if let Some(prev_id) = &cached_tokens.result_id {
+        if *prev_id == params.previous_result_id {
+            let edits = to_proto::semantic_token_edits(&cached_tokens, &semantic_tokens);
+            *cached_tokens = semantic_tokens;
+            return Ok(Some(edits.into()));
+        }
+    }
+
+    *cached_tokens = semantic_tokens.clone();
+
     Ok(Some(semantic_tokens.into()))
 }
 
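
(Note the fall-through behaviour: edits are returned only when the client's `previous_result_id` matches the `result_id` of the cached tokens. On a cold cache, `or_default()` yields an empty `SemanticTokens` whose `result_id` is `None`, so the comparison fails and the handler returns, and caches, a full token set. Correctness therefore never depends on the cache being warm; only the transfer size does.)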

crates/rust-analyzer/src/main_loop.rs

Lines changed: 5 additions & 0 deletions

@@ -386,6 +386,9 @@ impl GlobalState {
                 handlers::handle_call_hierarchy_outgoing,
             )?
             .on::<lsp_types::request::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+            .on::<lsp_types::request::SemanticTokensEditsRequest>(
+                handlers::handle_semantic_tokens_edits,
+            )?
             .on::<lsp_types::request::SemanticTokensRangeRequest>(
                 handlers::handle_semantic_tokens_range,
             )?
@@ -443,6 +446,8 @@ impl GlobalState {
                     None => log::error!("orphan DidCloseTextDocument: {}", path),
                 }

+                this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
+
                 if let Some(path) = path.as_path() {
                     this.loader.handle.invalidate(path.to_path_buf());
                 }
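
(The eviction above keeps closed documents from pinning their cached tokens in memory. Staleness is handled separately: result ids come from a process-wide counter, see to_proto.rs below, so an old id can never collide with a fresh one.)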

crates/rust-analyzer/src/semantic_tokens.rs

Lines changed: 136 additions & 3 deletions

@@ -2,7 +2,10 @@

 use std::ops;

-use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens};
+use lsp_types::{
+    Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
+    SemanticTokensEdit,
+};

 macro_rules! define_semantic_token_types {
     ($(($ident:ident, $string:literal)),*$(,)?) => {
@@ -89,14 +92,18 @@ impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
 /// Tokens are encoded relative to each other.
 ///
 /// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
-#[derive(Default)]
 pub(crate) struct SemanticTokensBuilder {
+    id: String,
     prev_line: u32,
     prev_char: u32,
     data: Vec<SemanticToken>,
 }

 impl SemanticTokensBuilder {
+    pub fn new(id: String) -> Self {
+        SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
+    }
+
     /// Push a new token onto the builder
     pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
         let mut push_line = range.start.line as u32;
@@ -127,10 +134,136 @@ impl SemanticTokensBuilder {
     }

     pub fn build(self) -> SemanticTokens {
-        SemanticTokens { result_id: None, data: self.data }
+        SemanticTokens { result_id: Some(self.id), data: self.data }
+    }
+}
+
+pub fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
+    let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
+
+    let (_, old) = old.split_at(offset);
+    let (_, new) = new.split_at(offset);
+
+    let offset_from_end =
+        new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
+
+    let (old, _) = old.split_at(old.len() - offset_from_end);
+    let (new, _) = new.split_at(new.len() - offset_from_end);
+
+    if old.is_empty() && new.is_empty() {
+        vec![]
+    } else {
+        // The lsp data field is actually a byte-diff but we
+        // travel in tokens so `start` and `delete_count` are in multiples of the
+        // serialized size of `SemanticToken`.
+        vec![SemanticTokensEdit {
+            start: 5 * offset as u32,
+            delete_count: 5 * old.len() as u32,
+            data: Some(new.into()),
+        }]
     }
 }

 pub fn type_index(type_: SemanticTokenType) -> u32 {
     SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
+        SemanticToken {
+            delta_line: t.0,
+            delta_start: t.1,
+            length: t.2,
+            token_type: t.3,
+            token_modifiers_bitset: t.4,
+        }
+    }
+
+    #[test]
+    fn test_diff_insert_at_end() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 10,
+                delete_count: 0,
+                data: Some(vec![from((11, 12, 13, 14, 15))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_insert_at_beginning() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 0,
+                delete_count: 0,
+                data: Some(vec![from((11, 12, 13, 14, 15))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_insert_in_middle() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [
+            from((1, 2, 3, 4, 5)),
+            from((10, 20, 30, 40, 50)),
+            from((60, 70, 80, 90, 100)),
+            from((6, 7, 8, 9, 10)),
+        ];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 5,
+                delete_count: 0,
+                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_remove_from_end() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
+    }
+
+    #[test]
+    fn test_diff_remove_from_beginning() {
+        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
+    }
+
+    #[test]
+    fn test_diff_remove_from_middle() {
+        let before = [
+            from((1, 2, 3, 4, 5)),
+            from((10, 20, 30, 40, 50)),
+            from((60, 70, 80, 90, 100)),
+            from((6, 7, 8, 9, 10)),
+        ];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
+    }
+}
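
The diff is deliberately simple: trim the common prefix, trim the common suffix, and emit whatever is left as a single contiguous edit (or none). Below is a standalone mirror of the algorithm on `[u32; 5]` stand-ins for `SemanticToken`, runnable without lsp-types, which also shows the trade-off: two changes far apart resend everything between them.

// A sketch mirroring `diff_tokens` on plain arrays; returns
// Some((start, delete_count, data)) scaled by 5, the flattened token size.
fn diff(old: &[[u32; 5]], new: &[[u32; 5]]) -> Option<(u32, u32, Vec<[u32; 5]>)> {
    let prefix = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
    let (old, new) = (&old[prefix..], &new[prefix..]);
    let suffix = new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
    let (old, new) = (&old[..old.len() - suffix], &new[..new.len() - suffix]);
    if old.is_empty() && new.is_empty() {
        None
    } else {
        Some((5 * prefix as u32, 5 * old.len() as u32, new.to_vec()))
    }
}

fn main() {
    let a = [1, 2, 3, 4, 5];
    let b = [6, 7, 8, 9, 10];
    let b2 = [6, 7, 8, 9, 11]; // one modifier bit changed
    // Replacing the middle token produces a single minimal edit...
    assert_eq!(diff(&[a, b, a], &[a, b2, a]), Some((5, 5, vec![b2])));
    // ...but two distant changes resend everything between them.
    assert_eq!(diff(&[b, a, b], &[b2, a, b2]), Some((0, 15, vec![b2, a, b2])));
}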

crates/rust-analyzer/src/to_proto.rs

Lines changed: 17 additions & 2 deletions

@@ -1,5 +1,8 @@
 //! Conversion of rust-analyzer specific types to lsp_types equivalents.
-use std::path::{self, Path};
+use std::{
+    path::{self, Path},
+    sync::atomic::{AtomicU32, Ordering},
+};

 use itertools::Itertools;
 use ra_db::{FileId, FileRange};
@@ -303,12 +306,15 @@ pub(crate) fn inlay_hint(line_index: &LineIndex, inlay_hint: InlayHint) -> lsp_ext::InlayHint {
     }
 }

+static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
+
 pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HighlightedRange>,
 ) -> lsp_types::SemanticTokens {
-    let mut builder = semantic_tokens::SemanticTokensBuilder::default();
+    let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
+    let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);

     for highlight_range in highlights {
         let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
@@ -328,6 +334,15 @@ pub(crate) fn semantic_tokens(
     builder.build()
 }

+pub(crate) fn semantic_token_edits(
+    previous: &lsp_types::SemanticTokens,
+    current: &lsp_types::SemanticTokens,
+) -> lsp_types::SemanticTokensEdits {
+    let result_id = current.result_id.clone();
+    let edits = semantic_tokens::diff_tokens(&previous.data, &current.data);
+    lsp_types::SemanticTokensEdits { result_id, edits }
+}
+
 fn semantic_token_type_and_modifiers(
     highlight: Highlight,
 ) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
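
(The process-wide `AtomicU32` counter stamps every computed token set with a result id that is unique for the lifetime of the server, so a `previous_result_id` sent by the client can only match the cache entry it was actually issued for; `fetch_add` keeps id generation lock-free across handler threads. `semantic_token_edits` then simply pairs the new result id with the single-edit diff from `diff_tokens`.)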
