@@ -65,6 +65,11 @@
@RequiredArgsConstructor
public class SemanticTokensProvider {

/**
* Number of integers per semantic token (deltaLine, deltaStart, length, type, modifiers).
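* Example (LSP relative encoding): a token on the line after the previous token, starting at
* character 4, 7 characters long, with token type index 1 and no modifiers, is encoded as
* {1, 4, 7, 1, 0}.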
*/
private static final int TOKEN_SIZE = 5;

@SuppressWarnings("NullAway.Init")
private ExecutorService executorService;

@@ -218,8 +223,6 @@ private void cacheTokenData(String resultId, URI uri, int[] data) {
* and the line offset when lines are inserted into or deleted from the document.
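* For example, inserting one line in the middle of the document yields a single edit that
* replaces the tokens of the changed region and adjusts the deltaLine of the first unchanged
* token after it.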
*/
private static List<SemanticTokensEdit> computeEdits(int[] prev, int[] curr) {
final int TOKEN_SIZE = 5;

int prevTokenCount = prev.length / TOKEN_SIZE;
int currTokenCount = curr.length / TOKEN_SIZE;

@@ -232,13 +235,13 @@ private static List<SemanticTokensEdit> computeEdits(int[] prev, int[] curr) {
int prefixAbsLine = 0;
int minTokens = Math.min(prevTokenCount, currTokenCount);

outer:
findFirstDifference:
for (int i = 0; i < minTokens; i++) {
int base = i * TOKEN_SIZE;
for (int j = 0; j < TOKEN_SIZE; j++) {
if (prev[base + j] != curr[base + j]) {
firstDiffToken = i;
break outer;
break findFirstDifference;
}
}
prefixAbsLine += prev[base]; // accumulate deltaLine
@@ -262,7 +265,7 @@ private static List<SemanticTokensEdit> computeEdits(int[] prev, int[] curr) {
int lineOffset = currSuffixAbsLine - prevSuffixAbsLine;

// Find the last differing token, taking the line offset into account
int suffixMatchTokens = findSuffixMatchWithOffset(prev, curr, firstDiffToken, lineOffset, TOKEN_SIZE);
int suffixMatchTokens = findSuffixMatchWithOffset(prev, curr, firstDiffToken, lineOffset);

// Compute the edit boundaries
int deleteEndToken = prevTokenCount - suffixMatchTokens;
@@ -272,10 +275,6 @@ private static List<SemanticTokensEdit> computeEdits(int[] prev, int[] curr) {
int deleteCount = (deleteEndToken - firstDiffToken) * TOKEN_SIZE;
int insertEnd = insertEndToken * TOKEN_SIZE;

if (deleteCount == 0 && deleteStart == insertEnd) {
return List.of();
}

// Build the insert list from a slice of the array
List<Integer> insertData = toList(Arrays.copyOfRange(curr, deleteStart, insertEnd));

@@ -295,9 +294,9 @@ private static List<SemanticTokensEdit> computeEdits(int[] prev, int[] curr) {
* With delta encoding, the tokens after the insertion point are identical,
* except for the first one, whose deltaLine is shifted by lineOffset.
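* For example, if one line was inserted before the matching suffix, the first suffix token's
* deltaLine in curr exceeds its counterpart in prev by exactly lineOffset = 1.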
*/
private static int findSuffixMatchWithOffset(int[] prev, int[] curr, int firstDiffToken, int lineOffset, int tokenSize) {
int prevTokenCount = prev.length / tokenSize;
int currTokenCount = curr.length / tokenSize;
private static int findSuffixMatchWithOffset(int[] prev, int[] curr, int firstDiffToken, int lineOffset) {
int prevTokenCount = prev.length / TOKEN_SIZE;
int currTokenCount = curr.length / TOKEN_SIZE;

int maxPrevSuffix = prevTokenCount - firstDiffToken;
int maxCurrSuffix = currTokenCount - firstDiffToken;
@@ -307,12 +306,12 @@ private static int findSuffixMatchWithOffset(int[] prev, int[] curr, int firstDi
boolean foundBoundary = false;

for (int i = 0; i < maxSuffix; i++) {
int prevIdx = (prevTokenCount - 1 - i) * tokenSize;
int currIdx = (currTokenCount - 1 - i) * tokenSize;
int prevIdx = (prevTokenCount - 1 - i) * TOKEN_SIZE;
int currIdx = (currTokenCount - 1 - i) * TOKEN_SIZE;

// First check all fields except deltaLine
boolean otherFieldsMatch = true;
for (int j = 1; j < tokenSize; j++) {
for (int j = 1; j < TOKEN_SIZE; j++) {
if (prev[prevIdx + j] != curr[currIdx + j]) {
otherFieldsMatch = false;
break;
@@ -29,6 +29,7 @@
import org.eclipse.lsp4j.SemanticTokenTypes;
import org.eclipse.lsp4j.SemanticTokens;
import org.eclipse.lsp4j.SemanticTokensDeltaParams;
import org.eclipse.lsp4j.SemanticTokensEdit;
import org.eclipse.lsp4j.SemanticTokensLegend;
import org.eclipse.lsp4j.SemanticTokensParams;
import org.eclipse.lsp4j.TextDocumentIdentifier;
@@ -1341,6 +1342,182 @@ void deltaWithLineInsertedInMiddle_shouldReturnOptimalDelta() {
assertThat(editSize).isLessThan(originalDataSize);
}

@Test
void deltaEdit_appliedToPreviousData_producesCurrentData() {
// given - simulate modifying document
String bsl1 = """
Перем А;
Перем Б;
Перем В;
""";

String bsl2 = """
Перем А;
Перем Новая;
Перем Б;
Перем В;
""";

DocumentContext context1 = TestUtils.getDocumentContext(bsl1);
referenceIndexFiller.fill(context1);
TextDocumentIdentifier textDocId1 = TestUtils.getTextDocumentIdentifier(context1.getUri());
SemanticTokens tokens1 = provider.getSemanticTokensFull(context1, new SemanticTokensParams(textDocId1));

DocumentContext context2 = TestUtils.getDocumentContext(context1.getUri(), bsl2);
referenceIndexFiller.fill(context2);
SemanticTokens tokens2 = provider.getSemanticTokensFull(context2, new SemanticTokensParams(textDocId1));

// when
var deltaParams = new SemanticTokensDeltaParams(textDocId1, tokens1.getResultId());
var result = provider.getSemanticTokensFullDelta(context2, deltaParams);

// then - applying edit to previous data should produce current data
assertThat(result.isRight()).isTrue();
var delta = result.getRight();
assertThat(delta.getEdits()).hasSize(1);

var edit = delta.getEdits().get(0);
List<Integer> appliedData = applyEdit(tokens1.getData(), edit);

assertThat(appliedData)
.as("Applying delta edit to previous data should produce current data")
.isEqualTo(tokens2.getData());
}

@Test
void deltaEdit_withLineInsertionAtStart_appliedCorrectly() {
// given
String bsl1 = """
Перем А;
Перем Б;
""";

String bsl2 = """
Перем Новая;
Перем А;
Перем Б;
""";

DocumentContext context1 = TestUtils.getDocumentContext(bsl1);
referenceIndexFiller.fill(context1);
TextDocumentIdentifier textDocId1 = TestUtils.getTextDocumentIdentifier(context1.getUri());
SemanticTokens tokens1 = provider.getSemanticTokensFull(context1, new SemanticTokensParams(textDocId1));

DocumentContext context2 = TestUtils.getDocumentContext(context1.getUri(), bsl2);
referenceIndexFiller.fill(context2);
SemanticTokens tokens2 = provider.getSemanticTokensFull(context2, new SemanticTokensParams(textDocId1));

// when
var deltaParams = new SemanticTokensDeltaParams(textDocId1, tokens1.getResultId());
var result = provider.getSemanticTokensFullDelta(context2, deltaParams);

// then
assertThat(result.isRight()).isTrue();
var delta = result.getRight();
var edit = delta.getEdits().get(0);
List<Integer> appliedData = applyEdit(tokens1.getData(), edit);
assertThat(appliedData)
.as("Applying delta edit should produce expected data")
.isEqualTo(tokens2.getData());
}

@Test
void deltaEdit_withLineDeletion_appliedCorrectly() {
// given
String bsl1 = """
Перем А;
Перем Удаляемая;
Перем Б;
Перем В;
""";

String bsl2 = """
Перем А;
Перем Б;
Перем В;
""";

DocumentContext context1 = TestUtils.getDocumentContext(bsl1);
referenceIndexFiller.fill(context1);
TextDocumentIdentifier textDocId1 = TestUtils.getTextDocumentIdentifier(context1.getUri());
SemanticTokens tokens1 = provider.getSemanticTokensFull(context1, new SemanticTokensParams(textDocId1));

DocumentContext context2 = TestUtils.getDocumentContext(context1.getUri(), bsl2);
referenceIndexFiller.fill(context2);
SemanticTokens tokens2 = provider.getSemanticTokensFull(context2, new SemanticTokensParams(textDocId1));

// when
var deltaParams = new SemanticTokensDeltaParams(textDocId1, tokens1.getResultId());
var result = provider.getSemanticTokensFullDelta(context2, deltaParams);

// then
assertThat(result.isRight()).isTrue();
var delta = result.getRight();
var edit = delta.getEdits().get(0);
List<Integer> appliedData = applyEdit(tokens1.getData(), edit);
assertThat(appliedData)
.as("Applying delta edit should produce expected data")
.isEqualTo(tokens2.getData());
}

@Test
void deltaEdit_withTextInsertionOnSameLine_appliedCorrectly() {
// given - adding text on the same line (no new line)
String bsl1 = """
Перем А;
""";

String bsl2 = """
Перем А, Б;
""";

DocumentContext context1 = TestUtils.getDocumentContext(bsl1);
referenceIndexFiller.fill(context1);
TextDocumentIdentifier textDocId1 = TestUtils.getTextDocumentIdentifier(context1.getUri());
SemanticTokens tokens1 = provider.getSemanticTokensFull(context1, new SemanticTokensParams(textDocId1));

DocumentContext context2 = TestUtils.getDocumentContext(context1.getUri(), bsl2);
referenceIndexFiller.fill(context2);
SemanticTokens tokens2 = provider.getSemanticTokensFull(context2, new SemanticTokensParams(textDocId1));

// when
var deltaParams = new SemanticTokensDeltaParams(textDocId1, tokens1.getResultId());
var result = provider.getSemanticTokensFullDelta(context2, deltaParams);

// then
assertThat(result.isRight()).isTrue();
var delta = result.getRight();
var edit = delta.getEdits().get(0);
List<Integer> appliedData = applyEdit(tokens1.getData(), edit);
assertThat(appliedData)
.as("Applying delta edit for same-line insertion should produce expected data")
.isEqualTo(tokens2.getData());
}

/**
* Helper method to apply a semantic tokens edit to previous data.
* Simulates what the LSP client does when receiving a delta.
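* Note: start and deleteCount are offsets into the flat integer data array (not token counts),
* matching the semantics of SemanticTokensEdit in the LSP specification.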
*/
private List<Integer> applyEdit(List<Integer> previousData, SemanticTokensEdit edit) {
List<Integer> result = new ArrayList<>(previousData);

int start = edit.getStart();
int deleteCount = edit.getDeleteCount();
List<Integer> insertData = edit.getData();

// Remove deleteCount elements starting at start
for (int i = 0; i < deleteCount; i++) {
result.remove(start);
}

// Insert new data at start position
if (insertData != null && !insertData.isEmpty()) {
result.addAll(start, insertData);
}

return result;
}

// endregion
}