build.gradle.kts: 4 changes (2 additions, 2 deletions)
@@ -82,13 +82,13 @@ dependencies {
api("org.eclipse.lsp4j", "org.eclipse.lsp4j.websocket.jakarta", "0.24.0")

// 1c-syntax
api("io.github.1c-syntax", "bsl-parser", "0.30.0-rc.2") {
api("io.github.1c-syntax", "bsl-parser", "0.30.0-rc.5") {
exclude("com.ibm.icu", "*")
exclude("org.antlr", "ST4")
exclude("org.antlr", "antlr-runtime")
}
api("io.github.1c-syntax", "utils", "0.6.8")
api("io.github.1c-syntax", "mdclasses", "0.17.3")
api("io.github.1c-syntax", "mdclasses", "0.17.4")
api("io.github.1c-syntax", "bsl-common-library", "0.9.2")
api("io.github.1c-syntax", "supportconf", "0.15.0")

@@ -212,7 +212,7 @@ private void addBslDocTokensPerLine(
int lineLength = lineText.length();
int charOffset = (lineIdx == 0) ? fileStartChar : 0;

var lineElements = elementsByLine.getOrDefault(lineIdx, List.of());
var lineElements = elementsByLine.getOrDefault(fileLine, List.of());

if (lineElements.isEmpty()) {
if (lineLength > 0) {
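The one-line fix above swaps the map lookup key from lineIdx to fileLine. Judging from the surrounding code, elementsByLine appears to be keyed by absolute file line numbers, while lineIdx is the 0-based offset of the current line within the (possibly multi-line) token, so the old lookup could only match elements of tokens that start on the first line of the file. A small, self-contained sketch of that assumed indexing; the class name, values and map contents below are invented for illustration:

import java.util.List;
import java.util.Map;

// Hypothetical illustration of the assumed indexing; not the project's actual code.
class LineKeyExample {
  public static void main(String[] args) {
    // Parsed BSL doc elements, keyed by ABSOLUTE file line number.
    Map<Integer, List<String>> elementsByLine = Map.of(
      12, List.of("Параметры:"),
      13, List.of("Парам", "Число"));

    int fileStartLine = 12; // first file line of a multi-line comment token
    for (int lineIdx = 0; lineIdx < 2; lineIdx++) {
      int fileLine = fileStartLine + lineIdx;
      // The fix: look up by fileLine (12, 13) rather than the relative lineIdx (0, 1),
      // which would miss every element of a token that does not start at file line 0.
      List<String> lineElements = elementsByLine.getOrDefault(fileLine, List.of());
      System.out.println(fileLine + " -> " + lineElements);
    }
  }
}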
@@ -98,7 +98,7 @@ private void addDirectives(List<SemanticTokenEntry> entries, BSLParser.FileConte
}
}

// Other preprocessor directives: Macro for each HASH and PREPROC_* token,
// Other preprocessor directives: a single Macro token for the entire directive keyword (#Если, #КонецЕсли, etc.),
// excluding region start/end, native, use (handled as Namespace)
private void addOtherPreprocs(List<SemanticTokenEntry> entries, BSLParser.FileContext ast) {
for (var preprocessor : Trees.<BSLParser.PreprocessorContext>findAllRuleNodes(ast, BSLParser.RULE_preprocessor)) {
@@ -107,13 +107,30 @@ private void addOtherPreprocs(List<SemanticTokenEntry> entries, BSLParser.FileCo
continue; // region handled as Namespace above
}

// Find the HASH token and keyword tokens to combine them into a single token
Token hashToken = null;
boolean firstKeywordCombined = false;

for (Token token : Trees.getTokens(preprocessor)) {
if (token.getChannel() != Token.DEFAULT_CHANNEL) {
continue;
}
String symbolicName = BSLLexer.VOCABULARY.getSymbolicName(token.getType());
if (token.getType() == BSLLexer.HASH || (symbolicName != null && symbolicName.startsWith("PREPROC_"))) {
helper.addRange(entries, Ranges.create(token), SemanticTokenTypes.Macro);
if (token.getType() == BSLLexer.HASH) {
hashToken = token;
} else {
String symbolicName = BSLLexer.VOCABULARY.getSymbolicName(token.getType());
if (symbolicName != null && symbolicName.startsWith("PREPROC_")) {
// Track keyword tokens for combining with HASH
if (hashToken != null && !firstKeywordCombined) {
// First keyword after HASH - combine the two into a single token
helper.addRange(entries, Ranges.create(hashToken, token), SemanticTokenTypes.Macro);
firstKeywordCombined = true;
} else {
// Subsequent keywords (e.g., "Сервер", "Тогда" in "#Если Сервер Тогда")
// or keyword without preceding HASH (shouldn't happen in valid syntax)
helper.addRange(entries, Ranges.create(token), SemanticTokenTypes.Macro);
}
}
}
}
}
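To make the new rule concrete: the # token is merged with the first PREPROC_* keyword that follows it, and any further keywords of the same directive are emitted as separate Macro tokens, so "#Если Сервер Тогда" yields "#Если", "Сервер" and "Тогда". Below is a minimal, self-contained model of that rule; SimpleToken and the concrete token-type names are invented stand-ins for the project's Token, BSLLexer and Ranges APIs:

import java.util.ArrayList;
import java.util.List;

// Simplified model of the HASH + first-keyword combining rule shown in the diff above.
class PreprocCombineExample {
  record SimpleToken(String type, String text) {}

  static List<String> combine(List<SimpleToken> directiveTokens) {
    List<String> macroTokens = new ArrayList<>();
    SimpleToken hash = null;
    boolean firstKeywordCombined = false;
    for (SimpleToken token : directiveTokens) {
      if (token.type().equals("HASH")) {
        hash = token;
      } else if (token.type().startsWith("PREPROC_")) {
        if (hash != null && !firstKeywordCombined) {
          macroTokens.add(hash.text() + token.text()); // "#" + "Если" -> "#Если"
          firstKeywordCombined = true;
        } else {
          macroTokens.add(token.text()); // "Сервер", "Тогда" stay separate tokens
        }
      }
    }
    return macroTokens;
  }

  public static void main(String[] args) {
    // Prints [#Если, Сервер, Тогда]
    System.out.println(combine(List.of(
      new SimpleToken("HASH", "#"),
      new SimpleToken("PREPROC_IF_KEYWORD", "Если"),
      new SimpleToken("PREPROC_SERVER_SYMBOL", "Сервер"),
      new SimpleToken("PREPROC_THEN_KEYWORD", "Тогда"))));
  }
}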
@@ -142,7 +142,7 @@ void testMethod12() {
assertThat(type.description()).isEmpty();
assertThat(type.fields()).isEmpty();
assertThat(type).isInstanceOf(HyperlinkTypeDescription.class);
assertThat(((HyperlinkTypeDescription) type).hyperlink()).isEqualTo(Hyperlink.create("ОбщийМодуль.Метод()"));
assertThat(((HyperlinkTypeDescription) type).hyperlink().link()).isEqualTo("ОбщийМодуль.Метод");
}

@Test
@@ -161,7 +161,7 @@ void testMethod11() {
assertThat(param.name()).isEqualTo("ОбщийМодуль.Метод");
assertThat(param.types()).hasSize(1);
assertThat(param.isHyperlink()).isTrue();
assertThat(param.link()).isEqualTo(Hyperlink.create("ОбщийМодуль.Метод()"));
assertThat(param.link().link()).isEqualTo("ОбщийМодуль.Метод");
}

@Test
@@ -175,7 +175,7 @@ void testMethod10() {
assertThat(method.getCallOptions()).isEmpty();
assertThat(method.getParameters()).isEmpty();
assertThat(method.getReturnedValue()).isEmpty();
assertThat(method.getLinks()).contains(Hyperlink.create("ОбщийМодуль.Метод()"));
assertThat(method.getLinks()).extracting(Hyperlink::link).contains("ОбщийМодуль.Метод");
}

@Test
@@ -346,18 +346,14 @@ void preprocessorDirectives() {

var decoded = getDecodedTokens(bsl);

// Verify preprocessor macro tokens on specific lines
// Verify preprocessor macro tokens - "#" and the directive keyword are combined into a single token
var expectedTokens = List.of(
new ExpectedToken(0, 0, 1, SemanticTokenTypes.Macro, "#"),
new ExpectedToken(0, 1, 4, SemanticTokenTypes.Macro, "Если"),
new ExpectedToken(0, 0, 5, SemanticTokenTypes.Macro, "#Если"),
new ExpectedToken(0, 6, 6, SemanticTokenTypes.Macro, "Сервер"),
new ExpectedToken(0, 13, 5, SemanticTokenTypes.Macro, "Тогда"),
new ExpectedToken(3, 0, 1, SemanticTokenTypes.Macro, "#"),
new ExpectedToken(3, 1, 9, SemanticTokenTypes.Macro, "ИначеЕсли"),
new ExpectedToken(4, 0, 1, SemanticTokenTypes.Macro, "#"),
new ExpectedToken(4, 1, 5, SemanticTokenTypes.Macro, "Иначе"),
new ExpectedToken(5, 0, 1, SemanticTokenTypes.Macro, "#"),
new ExpectedToken(5, 1, 9, SemanticTokenTypes.Macro, "КонецЕсли")
new ExpectedToken(3, 0, 10, SemanticTokenTypes.Macro, "#ИначеЕсли"),
new ExpectedToken(4, 0, 6, SemanticTokenTypes.Macro, "#Иначе"),
new ExpectedToken(5, 0, 10, SemanticTokenTypes.Macro, "#КонецЕсли")
);

assertContainsTokens(decoded, expectedTokens);
@@ -391,6 +387,8 @@ void literals() {
@Test
void methodDescriptionComments() {
String bsl = """
// просто коммент

// Описание процедуры
// Параметры:
// Парам - Число - описание
@@ -402,41 +400,43 @@
var decoded = getDecodedTokens(bsl);

// Documentation comments are now split around BSL doc keywords and operators.
// Line 0: "// Описание процедуры" - no BSL doc elements, full line as Comment+Documentation
// Line 1: "// Параметры:" - keyword in structural position
// Line 2: "// Парам - Число - описание" - parameter name, type, operator, description
// Line 3: "// Описание процедуры" - no BSL doc elements, full line as Comment+Documentation
// Line 4: "// Параметры:" - keyword in structural position
// Line 4: "// Парам - Число - описание" - parameter name, type, operator, description
// Body comment on line 4 should NOT have Documentation modifier
var expected = List.of(
// Line 0: full line as Comment+Documentation
new ExpectedToken(0, 0, 21, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// Описание процедуры"),
// Line 1: "// " before keyword
new ExpectedToken(1, 0, 3, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// "),
// Line 1: "Параметры:" keyword
new ExpectedToken(1, 3, 10, SemanticTokenTypes.Macro, SemanticTokenModifiers.Documentation, "Параметры:"),
// Line 2: "// " before param name
new ExpectedToken(2, 0, 4, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// "),
// Line 2: "Парам" parameter name
new ExpectedToken(2, 4, 5, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Documentation, "Парам"),
// Line 2: " " between param name and dash
new ExpectedToken(2, 9, 3, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, " - "),
// Line 2: "Число" type
new ExpectedToken(2, 12, 5, SemanticTokenTypes.Type, SemanticTokenModifiers.Documentation, "Число"),
// Line 2: " " between type and second dash
new ExpectedToken(2, 17, 11, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, " - описание"),
// Line 3: Процедура keyword
new ExpectedToken(3, 0, 9, SemanticTokenTypes.Keyword, "Процедура"),
// Line 3: ДокТест method name
new ExpectedToken(3, 10, 7, SemanticTokenTypes.Method, "ДокТест"),
// Line 3: ( operator
new ExpectedToken(3, 17, 1, SemanticTokenTypes.Operator, "("),
// Line 3: Парам parameter definition
new ExpectedToken(3, 18, 5, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Definition, "Парам"),
// Line 3: ) operator
new ExpectedToken(3, 23, 1, SemanticTokenTypes.Operator, ")"),
// Line 4: body comment (no Documentation modifier)
new ExpectedToken(4, 2, 22, SemanticTokenTypes.Comment, "// обычный комментарий"),
// Line 5: КонецПроцедуры keyword
new ExpectedToken(5, 0, 14, SemanticTokenTypes.Keyword, "КонецПроцедуры")
new ExpectedToken(0, 0, 17, SemanticTokenTypes.Comment, "// просто коммент"),

// Line 2: full line as Comment+Documentation
new ExpectedToken(2, 0, 21, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// Описание процедуры"),
// Line 3: "// " before keyword
new ExpectedToken(3, 0, 3, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// "),
// Line 3: "Параметры:" keyword
new ExpectedToken(3, 3, 10, SemanticTokenTypes.Macro, SemanticTokenModifiers.Documentation, "Параметры:"),
// Line 4: "// " before param name
new ExpectedToken(4, 0, 4, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// "),
// Line 4: "Парам" parameter name
new ExpectedToken(4, 4, 5, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Documentation, "Парам"),
// Line 4: " " between param name and dash
new ExpectedToken(4, 9, 3, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, " - "),
// Line 4: "Число" type
new ExpectedToken(4, 12, 5, SemanticTokenTypes.Type, SemanticTokenModifiers.Documentation, "Число"),
// Line 4: " " between type and second dash
new ExpectedToken(4, 17, 11, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, " - описание"),
// Line 5: Процедура keyword
new ExpectedToken(5, 0, 9, SemanticTokenTypes.Keyword, "Процедура"),
// Line 5: ДокТест method name
new ExpectedToken(5, 10, 7, SemanticTokenTypes.Method, "ДокТест"),
// Line 5: ( operator
new ExpectedToken(5, 17, 1, SemanticTokenTypes.Operator, "("),
// Line 5: Парам parameter definition
new ExpectedToken(5, 18, 5, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Definition, "Парам"),
// Line 5: ) operator
new ExpectedToken(5, 23, 1, SemanticTokenTypes.Operator, ")"),
// Line 6: body comment (no Documentation modifier)
new ExpectedToken(6, 2, 22, SemanticTokenTypes.Comment, "// обычный комментарий"),
// Line 7: КонецПроцедуры keyword
new ExpectedToken(7, 0, 14, SemanticTokenTypes.Keyword, "КонецПроцедуры")
);

assertTokensMatch(decoded, expected);
@@ -465,7 +465,6 @@ void variableDescriptionComments() {
new ExpectedToken(1, 14, 8, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// трейл")
);


assertTokensMatch(decoded, expected);
}

@@ -27,7 +27,6 @@
import com.github._1c_syntax.bsl.languageserver.util.TestUtils;
import org.eclipse.lsp4j.SemanticTokenModifiers;
import org.eclipse.lsp4j.SemanticTokenTypes;
import org.eclipse.lsp4j.SemanticTokensLegend;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
@@ -37,8 +36,6 @@
import java.util.List;
import java.util.Set;

import static org.assertj.core.api.Assertions.assertThat;

@SpringBootTest
@CleanupContextBeforeClassAndAfterEachTestMethod
@Import(SemanticTokensTestHelper.class)
@@ -50,9 +47,6 @@ class BslDocSemanticTokensSupplierTest {
@Autowired
private SemanticTokensTestHelper helper;

@Autowired
private SemanticTokensLegend legend;

@BeforeEach
void init() {
supplier.setMultilineTokenSupport(false);
@@ -198,15 +192,14 @@ void testMultipleTypesOnSeparateLines() {
var decoded = helper.getDecodedTokens(bsl, supplier);

// then - All three types should be Type with Documentation modifier
int typeTypeIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Type);
int docModifierMask = 1 << legend.getTokenModifiers().indexOf(SemanticTokenModifiers.Documentation);

var typeTokens = decoded.stream()
.filter(t -> t.type() == typeTypeIdx && (t.modifiers() & docModifierMask) != 0)
.toList();

// Should have 3 type tokens: СправочникСсылка, ДокументСсылка, ПеречислениеСсылка
assertThat(typeTokens).hasSize(3);
helper.assertContainsTokens(decoded, List.of(
new ExpectedToken(2, 15, 16, SemanticTokenTypes.Type,
Set.of(SemanticTokenModifiers.Documentation), "СправочникСсылка"),
new ExpectedToken(3, 22, 14, SemanticTokenTypes.Type,
Set.of(SemanticTokenModifiers.Documentation), "ДокументСсылка"),
new ExpectedToken(4, 22, 18, SemanticTokenTypes.Type,
Set.of(SemanticTokenModifiers.Documentation), "ПеречислениеСсылка")
));
}

@Test
@@ -226,15 +219,12 @@ void testMultipleReturnTypesOnSeparateLines() {
var decoded = helper.getDecodedTokens(bsl, supplier);

// then - Both types should be Type with Documentation modifier
int typeTypeIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Type);
int docModifierMask = 1 << legend.getTokenModifiers().indexOf(SemanticTokenModifiers.Documentation);

var typeTokens = decoded.stream()
.filter(t -> t.type() == typeTypeIdx && (t.modifiers() & docModifierMask) != 0)
.toList();

// Should have 2 type tokens: СправочникСсылка, ДокументСсылка
assertThat(typeTokens).hasSize(2);
helper.assertContainsTokens(decoded, List.of(
new ExpectedToken(2, 4, 16, SemanticTokenTypes.Type,
Set.of(SemanticTokenModifiers.Documentation), "СправочникСсылка"),
new ExpectedToken(3, 6, 14, SemanticTokenTypes.Type,
Set.of(SemanticTokenModifiers.Documentation), "ДокументСсылка")
));
}

@Test
@@ -250,27 +240,29 @@ void testMultilineSupport() {

var documentContext = TestUtils.getDocumentContext(bsl);

// Test without multiline support
// Test without multiline support - should have 3 separate comment tokens (one per line)
supplier.setMultilineTokenSupport(false);
var tokensWithoutMultiline = helper.decodeFromEntries(supplier.getSemanticTokens(documentContext));

// Test with multiline support
helper.assertContainsTokens(tokensWithoutMultiline, List.of(
new ExpectedToken(0, 0, 25, SemanticTokenTypes.Comment,
Set.of(SemanticTokenModifiers.Documentation), "// Первая строка описания"),
new ExpectedToken(1, 0, 25, SemanticTokenTypes.Comment,
Set.of(SemanticTokenModifiers.Documentation), "// Вторая строка описания"),
new ExpectedToken(2, 0, 25, SemanticTokenTypes.Comment,
Set.of(SemanticTokenModifiers.Documentation), "// Третья строка описания")
));

// Test with multiline support - should merge consecutive lines into one token
supplier.setMultilineTokenSupport(true);
var tokensWithMultiline = helper.decodeFromEntries(supplier.getSemanticTokens(documentContext));

// Without multiline: should have 3 separate comment tokens (one per line)
// With multiline: may merge consecutive lines into fewer tokens
int commentTypeIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Comment);

var commentTokensWithout = tokensWithoutMultiline.stream()
.filter(t -> t.type() == commentTypeIdx)
.toList();
var commentTokensWith = tokensWithMultiline.stream()
.filter(t -> t.type() == commentTypeIdx)
.toList();

// With multiline support, we expect fewer or equal number of tokens
assertThat(commentTokensWith.size()).isLessThanOrEqualTo(commentTokensWithout.size());
// With multiline support, all 3 lines are merged into a single token starting at line 0
// Length is 77 (25 + 1 + 25 + 1 + 25 = 77 including newlines)
helper.assertContainsTokens(tokensWithMultiline, List.of(
new ExpectedToken(0, 0, 77, SemanticTokenTypes.Comment,
Set.of(SemanticTokenModifiers.Documentation), "// Первая строка описания...")
));
}
}
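The expected lengths in the multiline assertions above follow the rule these tests imply: a merged token's length is the sum of the per-line comment lengths plus one character for each newline between them (77 = 25 + 1 + 25 + 1 + 25 here, and 65 = 21 + 1 + 21 + 1 + 21 in the indented test further down, so continuation-line indentation does not appear to be counted). A tiny sketch of that arithmetic, under that assumption:

import java.util.List;

// Hypothetical sketch of the merged-token length arithmetic implied by the tests.
class MergedLengthExample {
  static int mergedLength(List<Integer> lineLengths) {
    // sum of per-line lengths + one character per newline separator
    return lineLengths.stream().mapToInt(Integer::intValue).sum() + (lineLengths.size() - 1);
  }

  public static void main(String[] args) {
    System.out.println(mergedLength(List.of(25, 25, 25))); // 77, as in testMultilineSupport
    System.out.println(mergedLength(List.of(21, 21, 21))); // 65, as in testMultilineCommentTokens
  }
}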

@@ -34,8 +34,6 @@

import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;

@SpringBootTest
@CleanupContextBeforeClassAndAfterEachTestMethod
@Import(SemanticTokensTestHelper.class)
@@ -134,19 +132,20 @@ void testMultilineCommentTokens() {
supplier.setMultilineTokenSupport(false);
var tokensWithoutMultiline = helper.decodeFromEntries(supplier.getSemanticTokens(documentContext));

helper.assertTokensMatch(tokensWithoutMultiline, List.of(
new ExpectedToken(1, 2, 21, SemanticTokenTypes.Comment, "// Первый комментарий"),
new ExpectedToken(2, 2, 21, SemanticTokenTypes.Comment, "// Второй комментарий"),
new ExpectedToken(3, 2, 21, SemanticTokenTypes.Comment, "// Третий комментарий")
));

// Test with multiline support - should have 1 merged token
supplier.setMultilineTokenSupport(true);
var tokensWithMultiline = helper.decodeFromEntries(supplier.getSemanticTokens(documentContext));

// then
// Without multiline: 3 separate tokens
assertThat(tokensWithoutMultiline).hasSize(3);

// With multiline: 1 merged token for consecutive comments
assertThat(tokensWithMultiline).hasSize(1);

// The merged token should start on line 1 (0-indexed)
assertThat(tokensWithMultiline.get(0).line()).isEqualTo(1);
// With multiline: 1 merged token for consecutive comments (length = 21 + 1 + 21 + 1 + 21 = 65)
helper.assertTokensMatch(tokensWithMultiline, List.of(
new ExpectedToken(1, 2, 65, SemanticTokenTypes.Comment, "// Первый комментарий...")
));
}

@Test