|
2 | 2 |
|
3 | 3 | use std::ops; |
4 | 4 |
|
5 | | -use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens}; |
| 5 | +use lsp_types::{ |
| 6 | + Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens, |
| 7 | + SemanticTokensEdit, |
| 8 | +}; |
6 | 9 |
|
7 | 10 | macro_rules! define_semantic_token_types { |
8 | 11 | ($(($ident:ident, $string:literal)),*$(,)?) => { |
@@ -89,14 +92,18 @@ impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet { |
/// Tokens are encoded relative to each other.
///
/// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
pub(crate) struct SemanticTokensBuilder {
    // Result id attached to the built `SemanticTokens`; clients use it to
    // request deltas against this exact token set.
    id: String,
    // Line of the most recently pushed token — LSP encodes `delta_line`
    // relative to this.
    prev_line: u32,
    // Start character of the most recently pushed token; `delta_start` is
    // relative to this when the token is on the same line.
    prev_char: u32,
    // Flattened token stream (one `SemanticToken` = 5 u32s on the wire).
    data: Vec<SemanticToken>,
}
98 | 101 |
|
99 | 102 | impl SemanticTokensBuilder { |
| 103 | + pub fn new(id: String) -> Self { |
| 104 | + SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() } |
| 105 | + } |
| 106 | + |
100 | 107 | /// Push a new token onto the builder |
101 | 108 | pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) { |
102 | 109 | let mut push_line = range.start.line as u32; |
@@ -127,10 +134,136 @@ impl SemanticTokensBuilder { |
127 | 134 | } |
128 | 135 |
|
129 | 136 | pub fn build(self) -> SemanticTokens { |
130 | | - SemanticTokens { result_id: None, data: self.data } |
| 137 | + SemanticTokens { result_id: Some(self.id), data: self.data } |
| 138 | + } |
| 139 | +} |
| 140 | + |
| 141 | +pub fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> { |
| 142 | + let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count(); |
| 143 | + |
| 144 | + let (_, old) = old.split_at(offset); |
| 145 | + let (_, new) = new.split_at(offset); |
| 146 | + |
| 147 | + let offset_from_end = |
| 148 | + new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count(); |
| 149 | + |
| 150 | + let (old, _) = old.split_at(old.len() - offset_from_end); |
| 151 | + let (new, _) = new.split_at(new.len() - offset_from_end); |
| 152 | + |
| 153 | + if old.is_empty() && new.is_empty() { |
| 154 | + vec![] |
| 155 | + } else { |
| 156 | + // The lsp data field is actually a byte-diff but we |
| 157 | + // travel in tokens so `start` and `delete_count` are in multiples of the |
| 158 | + // serialized size of `SemanticToken`. |
| 159 | + vec![SemanticTokensEdit { |
| 160 | + start: 5 * offset as u32, |
| 161 | + delete_count: 5 * old.len() as u32, |
| 162 | + data: Some(new.into()), |
| 163 | + }] |
131 | 164 | } |
132 | 165 | } |
133 | 166 |
|
134 | 167 | pub fn type_index(type_: SemanticTokenType) -> u32 { |
135 | 168 | SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32 |
136 | 169 | } |
| 170 | + |
#[cfg(test)]
mod tests {
    use super::*;

    /// Convenience constructor: builds a `SemanticToken` from its five
    /// raw wire fields.
    fn token(
        delta_line: u32,
        delta_start: u32,
        length: u32,
        token_type: u32,
        modifiers: u32,
    ) -> SemanticToken {
        SemanticToken {
            delta_line,
            delta_start,
            length,
            token_type,
            token_modifiers_bitset: modifiers,
        }
    }

    #[test]
    fn test_diff_insert_at_end() {
        let before = [token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10)];
        let after = [token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10), token(11, 12, 13, 14, 15)];

        let edits = diff_tokens(&before, &after);
        // Two unchanged tokens = 10 words of prefix; nothing deleted.
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 10,
                delete_count: 0,
                data: Some(vec![token(11, 12, 13, 14, 15)])
            }
        );
    }

    #[test]
    fn test_diff_insert_at_beginning() {
        let before = [token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10)];
        let after = [token(11, 12, 13, 14, 15), token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10)];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 0,
                delete_count: 0,
                data: Some(vec![token(11, 12, 13, 14, 15)])
            }
        );
    }

    #[test]
    fn test_diff_insert_in_middle() {
        let before = [token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10)];
        let after = [
            token(1, 2, 3, 4, 5),
            token(10, 20, 30, 40, 50),
            token(60, 70, 80, 90, 100),
            token(6, 7, 8, 9, 10),
        ];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 5,
                delete_count: 0,
                data: Some(vec![token(10, 20, 30, 40, 50), token(60, 70, 80, 90, 100)])
            }
        );
    }

    #[test]
    fn test_diff_remove_from_end() {
        let before = [token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10), token(11, 12, 13, 14, 15)];
        let after = [token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10)];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_beginning() {
        let before = [token(11, 12, 13, 14, 15), token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10)];
        let after = [token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10)];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_middle() {
        let before = [
            token(1, 2, 3, 4, 5),
            token(10, 20, 30, 40, 50),
            token(60, 70, 80, 90, 100),
            token(6, 7, 8, 9, 10),
        ];
        let after = [token(1, 2, 3, 4, 5), token(6, 7, 8, 9, 10)];

        let edits = diff_tokens(&before, &after);
        // Both middle tokens removed: 2 tokens * 5 words = delete_count 10.
        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
    }
}
0 commit comments