diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 36ddc185588..c2d23593168 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -10,6 +10,8 @@ This is a Java-based language server that provides code analysis, diagnostics, c - Project site: https://1c-syntax.github.io/bsl-language-server - Documentation: [docs/index.md](../docs/index.md) (Russian), [docs/en/index.md](../docs/en/index.md) (English) - Contributor's Guide: [docs/en/contributing/index.md](../docs/en/contributing/index.md) +- BSL grammar used in BSLParser: https://github.com/1c-syntax/bsl-parser/blob/develop/src/main/antlr/BSLParser.g4 +- SDBL grammar used in BSLParser: https://github.com/1c-syntax/bsl-parser/blob/develop/src/main/antlr/SDBLParser.g4 ## Technology Stack diff --git a/.github/workflows/dependabot-automerge.yaml b/.github/workflows/dependabot-automerge.yaml new file mode 100644 index 00000000000..d99be1304d1 --- /dev/null +++ b/.github/workflows/dependabot-automerge.yaml @@ -0,0 +1,29 @@ +name: Dependabot auto-merge +on: + pull_request_target: + types: [opened, synchronize] + +permissions: + contents: write + pull-requests: write + +jobs: + dependabot: + runs-on: ubuntu-latest + if: github.event.pull_request.user.login == 'dependabot[bot]' && github.repository == '1c-syntax/bsl-language-server' + steps: + - name: Dependabot metadata + id: metadata + uses: dependabot/fetch-metadata@08eff52bf64351f401fb50d4972fa95b9f2c2d1b + with: + github-token: "${{ secrets.GITHUB_TOKEN }}" + - name: Approve a PR + run: gh pr review --approve "$PR_URL" + env: + PR_URL: ${{ github.event.pull_request.html_url }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Enable auto-merge for Dependabot PRs + run: gh pr merge --auto --merge "$PR_URL" + env: + PR_URL: ${{ github.event.pull_request.html_url }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/build.gradle.kts b/build.gradle.kts index 9faf3566a0e..260820db51c 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -15,13 +15,13 @@ plugins { id("io.freefair.aspectj.post-compile-weaving") version "9.1.0" // id("io.freefair.maven-central.validate-poms") version "9.0.0" // TODO: Re-enable when compatible with Gradle 9 id("com.github.ben-manes.versions") version "0.53.0" - id("org.springframework.boot") version "3.5.8" + id("org.springframework.boot") version "3.5.9" id("io.spring.dependency-management") version "1.1.7" id("io.sentry.jvm.gradle") version "5.12.2" id("io.github.1c-syntax.bslls-dev-tools") version "0.8.1" id("ru.vyarus.pom") version "3.0.0" id("org.jreleaser") version "1.21.0" - id("org.sonarqube") version "7.2.1.6560" + id("org.sonarqube") version "7.2.2.6593" id("me.champeau.jmh") version "0.7.3" id("com.gorylenko.gradle-git-properties") version "2.5.4" } diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/CognitiveComplexityCodeLensSupplier.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/CognitiveComplexityCodeLensSupplier.java index 68340299795..7415fa1cbdd 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/CognitiveComplexityCodeLensSupplier.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/CognitiveComplexityCodeLensSupplier.java @@ -25,6 +25,7 @@ import com.github._1c_syntax.bsl.languageserver.configuration.LanguageServerConfiguration; import com.github._1c_syntax.bsl.languageserver.context.DocumentContext; import com.github._1c_syntax.bsl.languageserver.context.symbol.MethodSymbol; 
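Aside: the hunks that follow add an explicit Spring `@Order(...)` to every code lens supplier, and a later hunk in `CodeLensesConfiguration` sorts the enabled suppliers by that annotation through `OrderUtils`. A minimal, self-contained sketch of that sorting behaviour follows; the supplier class names and order values in it are illustrative only, not taken from this PR:

```java
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.core.annotation.OrderUtils;

import java.util.Comparator;
import java.util.List;
import java.util.Objects;

public class OrderSortingSketch {

  @Order(1)
  static class RunAllTestsSupplier {}

  @Order(3)
  static class DebugTestSupplier {}

  // No @Order: falls back to Ordered.LOWEST_PRECEDENCE and therefore sorts last.
  static class UnorderedSupplier {}

  public static void main(String[] args) {
    List<Object> suppliers = List.of(new UnorderedSupplier(), new DebugTestSupplier(), new RunAllTestsSupplier());

    // Same comparator shape as in CodeLensesConfiguration#enabledCodeLensSuppliers:
    // OrderUtils.getOrder returns null for classes without @Order, so fall back to LOWEST_PRECEDENCE.
    var sorted = suppliers.stream()
      .sorted(Comparator.comparing(o ->
        Objects.requireNonNullElse(OrderUtils.getOrder(o.getClass()), Ordered.LOWEST_PRECEDENCE)))
      .toList();

    sorted.forEach(o -> System.out.println(o.getClass().getSimpleName()));
    // Prints: RunAllTestsSupplier, DebugTestSupplier, UnorderedSupplier
  }
}
```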
+import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; import java.util.Map; @@ -33,6 +34,7 @@ * Сапплаер линз, показывающий когнитивную сложность методов. */ @Component +@Order(4) public class CognitiveComplexityCodeLensSupplier extends AbstractMethodComplexityCodeLensSupplier { public CognitiveComplexityCodeLensSupplier( diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/CyclomaticComplexityCodeLensSupplier.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/CyclomaticComplexityCodeLensSupplier.java index 75753176f0a..21e3fa997fc 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/CyclomaticComplexityCodeLensSupplier.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/CyclomaticComplexityCodeLensSupplier.java @@ -25,14 +25,16 @@ import com.github._1c_syntax.bsl.languageserver.configuration.LanguageServerConfiguration; import com.github._1c_syntax.bsl.languageserver.context.DocumentContext; import com.github._1c_syntax.bsl.languageserver.context.symbol.MethodSymbol; +import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; import java.util.Map; /** - * Сапплаер линз, показывающий когнитивную сложность методов. + * Сапплаер линз, показывающий цикломатическую сложность методов. */ @Component +@Order(5) public class CyclomaticComplexityCodeLensSupplier extends AbstractMethodComplexityCodeLensSupplier { public CyclomaticComplexityCodeLensSupplier( diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier.java new file mode 100644 index 00000000000..e92ca1cd033 --- /dev/null +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier.java @@ -0,0 +1,171 @@ +/* + * This file is a part of BSL Language Server. + * + * Copyright (c) 2018-2025 + * Alexey Sosnoviy , Nikita Fedkin and contributors + * + * SPDX-License-Identifier: LGPL-3.0-or-later + * + * BSL Language Server is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 3.0 of the License, or (at your option) any later version. + * + * BSL Language Server is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with BSL Language Server. 
+ */
+package com.github._1c_syntax.bsl.languageserver.codelenses;
+
+import com.github._1c_syntax.bsl.languageserver.codelenses.testrunner.TestRunnerAdapter;
+import com.github._1c_syntax.bsl.languageserver.configuration.LanguageServerConfiguration;
+import com.github._1c_syntax.bsl.languageserver.context.DocumentContext;
+import com.github._1c_syntax.bsl.languageserver.context.FileType;
+import com.github._1c_syntax.bsl.languageserver.context.symbol.MethodSymbol;
+import com.github._1c_syntax.bsl.languageserver.utils.Resources;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.ToString;
+import lombok.Value;
+import lombok.extern.slf4j.Slf4j;
+import org.eclipse.lsp4j.CodeLens;
+import org.eclipse.lsp4j.Command;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Lazy;
+import org.springframework.core.annotation.Order;
+import org.springframework.stereotype.Component;
+
+import java.beans.ConstructorProperties;
+import java.net.URI;
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+@Component
+@Slf4j
+@Order(3)
+public class DebugTestCodeLensSupplier
+  extends AbstractRunTestsCodeLensSupplier<DebugTestCodeLensSupplier.DebugTestCodeLensData> {
+
+  private static final String COMMAND_ID = "language-1c-bsl.languageServer.debugTest";
+
+  private final TestRunnerAdapter testRunnerAdapter;
+  private final Resources resources;
+
+  // Self-injection для работы кэша в базовом классе.
+  @Autowired
+  @Lazy
+  @Getter
+  @SuppressWarnings("NullAway.Init")
+  private DebugTestCodeLensSupplier self;
+
+  public DebugTestCodeLensSupplier(
+    LanguageServerConfiguration configuration,
+    TestRunnerAdapter testRunnerAdapter,
+    Resources resources
+  ) {
+    super(configuration);
+    this.testRunnerAdapter = testRunnerAdapter;
+    this.resources = resources;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<CodeLens> getCodeLenses(DocumentContext documentContext) {
+
+    if (documentContext.getFileType() == FileType.BSL) {
+      return Collections.emptyList();
+    }
+
+    var options = configuration.getCodeLensOptions().getTestRunnerAdapterOptions();
+
+    if (options.getDebugTestArguments().isEmpty()) {
+      return Collections.emptyList();
+    }
+
+    var testIds = testRunnerAdapter.getTestIds(documentContext);
+    var symbolTree = documentContext.getSymbolTree();
+
+    return testIds.stream()
+      .map(symbolTree::getMethodSymbol)
+      .flatMap(Optional::stream)
+      .map(methodSymbol -> toCodeLens(methodSymbol, documentContext))
+      .toList();
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public Class<DebugTestCodeLensSupplier.DebugTestCodeLensData> getCodeLensDataClass() {
+    return DebugTestCodeLensSupplier.DebugTestCodeLensData.class;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public CodeLens resolve(DocumentContext documentContext, CodeLens unresolved, DebugTestCodeLensData data) {
+
+    var path = Paths.get(documentContext.getUri());
+    var testId = data.getTestId();
+
+    var options = configuration.getCodeLensOptions().getTestRunnerAdapterOptions();
+    var executable = options.getExecutableForCurrentOS();
+    String runText = executable + " " + options.getDebugTestArguments();
+    runText = String.format(runText, path, testId);
+
+    var command = new Command();
+    command.setTitle(resources.getResourceString(getClass(), "title"));
+    command.setCommand(COMMAND_ID);
+    command.setArguments(List.of(Map.of("text", runText)));
+
+    unresolved.setCommand(command);
+
+    return unresolved;
+
+  }
+
+  private CodeLens toCodeLens(MethodSymbol method, DocumentContext documentContext) {
+
var testId = method.getName(); + var codeLensData = new DebugTestCodeLensSupplier.DebugTestCodeLensData(documentContext.getUri(), getId(), testId); + + var codeLens = new CodeLens(method.getSubNameRange()); + codeLens.setData(codeLensData); + + return codeLens; + } + + /** + * DTO для хранения данных линз для отладки теста. + */ + @Value + @EqualsAndHashCode(callSuper = true) + @ToString(callSuper = true) + public static class DebugTestCodeLensData extends DefaultCodeLensData { + /** + * Имя метода. + */ + String testId; + + /** + * @param uri URI документа. + * @param id Идентификатор линзы. + * @param testId Идентификатор теста. + */ + @ConstructorProperties({"uri", "id", "testId"}) + public DebugTestCodeLensData(URI uri, String id, String testId) { + super(uri, id); + this.testId = testId; + } + } + +} diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/RunAllTestsCodeLensSupplier.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/RunAllTestsCodeLensSupplier.java index 48d4c5d1434..dcfa5abe56b 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/RunAllTestsCodeLensSupplier.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/RunAllTestsCodeLensSupplier.java @@ -32,6 +32,7 @@ import org.eclipse.lsp4j.Command; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; +import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; import java.nio.file.Paths; @@ -44,6 +45,7 @@ */ @Component @Slf4j +@Order(1) public class RunAllTestsCodeLensSupplier extends AbstractRunTestsCodeLensSupplier { diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/RunTestCodeLensSupplier.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/RunTestCodeLensSupplier.java index 905f3ace7b9..2595c5ea368 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/RunTestCodeLensSupplier.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/RunTestCodeLensSupplier.java @@ -36,6 +36,7 @@ import org.eclipse.lsp4j.Command; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; +import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; import java.beans.ConstructorProperties; @@ -51,6 +52,7 @@ */ @Component @Slf4j +@Order(2) public class RunTestCodeLensSupplier extends AbstractRunTestsCodeLensSupplier { @@ -139,7 +141,7 @@ private CodeLens toCodeLens(MethodSymbol method, DocumentContext documentContext } /** - * DTO для хранения данных линз о сложности методов в документе. + * DTO для хранения данных линз для запуска теста. 
*/ @Value @EqualsAndHashCode(callSuper = true) diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/infrastructure/CodeLensesConfiguration.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/infrastructure/CodeLensesConfiguration.java index abdbe59a0d5..742d7b468b5 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/infrastructure/CodeLensesConfiguration.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/infrastructure/CodeLensesConfiguration.java @@ -29,10 +29,14 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Scope; +import org.springframework.core.Ordered; +import org.springframework.core.annotation.OrderUtils; import java.util.Collection; +import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Function; import java.util.stream.Collectors; @@ -77,6 +81,8 @@ public List> enabledCodeLensSuppliers( var parameters = configuration.getCodeLensOptions().getParameters(); return codeLensSuppliersById.values().stream() .filter(supplier -> supplierIsEnabled(supplier.getId(), parameters)) + .sorted(Comparator.comparing(o -> + Objects.requireNonNullElse(OrderUtils.getOrder(o.getClass()), Ordered.LOWEST_PRECEDENCE))) .collect(Collectors.toList()); } diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/testrunner/TestRunnerAdapter.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/testrunner/TestRunnerAdapter.java index 1e5d2b6c592..b741f79d61d 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/testrunner/TestRunnerAdapter.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/codelenses/testrunner/TestRunnerAdapter.java @@ -144,6 +144,7 @@ private List computeTestIdsByTestRunner(DocumentContext documentContext) .map(getTestsRegex::matcher) .filter(Matcher::matches) .map(matcher -> matcher.group(1)) + .distinct() .toList(); } diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/configuration/codelens/TestRunnerAdapterOptions.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/configuration/codelens/TestRunnerAdapterOptions.java index 85bd30c0986..03486f19373 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/configuration/codelens/TestRunnerAdapterOptions.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/configuration/codelens/TestRunnerAdapterOptions.java @@ -82,6 +82,10 @@ public class TestRunnerAdapterOptions { * Аргументы для запуска одного теста. */ private String runTestArguments = "-run %s %s"; + /** + * Аргументы для отладки одного теста. + */ + private String debugTestArguments = ""; /** * Аргументы для запуска всех тестов. 
*/ diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/diagnostics/IdenticalExpressionsDiagnostic.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/diagnostics/IdenticalExpressionsDiagnostic.java index 7a58d665402..80c7b4eccbe 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/diagnostics/IdenticalExpressionsDiagnostic.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/diagnostics/IdenticalExpressionsDiagnostic.java @@ -235,6 +235,6 @@ private static boolean isComplementary(BinaryOperationNode binary) { } private static boolean sufficientSize(BSLParser.ExpressionContext ctx) { - return ctx.children.size() < MIN_EXPRESSION_SIZE; + return ctx.getChildCount() < MIN_EXPRESSION_SIZE; } } diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/providers/SemanticTokensProvider.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/providers/SemanticTokensProvider.java index e963301e5d8..d2c2e82ed1f 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/providers/SemanticTokensProvider.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/providers/SemanticTokensProvider.java @@ -45,6 +45,9 @@ import com.github._1c_syntax.bsl.parser.BSLParser.RegionEndContext; import com.github._1c_syntax.bsl.parser.BSLParser.RegionStartContext; import com.github._1c_syntax.bsl.parser.BSLParser.UseContext; +import com.github._1c_syntax.bsl.parser.SDBLLexer; +import com.github._1c_syntax.bsl.parser.SDBLParser; +import com.github._1c_syntax.bsl.parser.SDBLParserBaseVisitor; import lombok.AccessLevel; import lombok.RequiredArgsConstructor; import lombok.Setter; @@ -72,6 +75,7 @@ import java.util.ArrayList; import java.util.BitSet; import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Objects; @@ -146,8 +150,24 @@ public class SemanticTokensProvider { BSLLexer.NULL ); + // SDBL (Query Language) token types + private static final Set SDBL_KEYWORDS = createSdblKeywords(); + private static final Set SDBL_FUNCTIONS = createSdblFunctions(); + private static final Set SDBL_METADATA_TYPES = createSdblMetadataTypes(); + private static final Set SDBL_LITERALS = createSdblLiterals(); + private static final Set SDBL_OPERATORS = createSdblOperators(); + private static final Set SDBL_STRINGS = Set.of(SDBLLexer.STR); + private static final Set SDBL_COMMENTS = Set.of(SDBLLexer.LINE_COMMENT); + private static final Set SDBL_EDS = Set.of( + SDBLLexer.EDS_CUBE, + SDBLLexer.EDS_TABLE, + SDBLLexer.EDS_CUBE_DIMTABLE + ); + private static final Set SDBL_NUMBERS = Set.of(SDBLLexer.DECIMAL, SDBLLexer.FLOAT); + private static final String[] NO_MODIFIERS = new String[0]; private static final String[] DOC_ONLY = new String[]{SemanticTokenModifiers.Documentation}; + private static final String[] DEFAULT_LIBRARY = new String[]{SemanticTokenModifiers.DefaultLibrary}; private final SemanticTokensLegend legend; private final ReferenceResolver referenceResolver; @@ -211,10 +231,17 @@ public SemanticTokens getSemanticTokensFull(DocumentContext documentContext, @Su // 3.1) Method call occurrences as Method tokens addMethodCallTokens(entries, uri); - // 4) Lexical tokens on default channel: strings, numbers, macros, operators, keywords - addLexicalTokens(tokensFromDefaultChannel, entries); + // 4) SDBL (Query Language) tokens - process before lexical tokens to identify strings to skip + var stringsToSkip = collectStringsWithSdblTokens(documentContext); + + // 5) Lexical tokens on 
default channel: strings, numbers, macros, operators, keywords + // Skip strings that contain SDBL tokens (they'll be split and added by addSdblTokens) + addLexicalTokens(tokensFromDefaultChannel, entries, stringsToSkip); - // 5) Build delta-encoded data + // 6) Add SDBL tokens and split string parts + addSdblTokens(documentContext, entries, stringsToSkip); + + // 7) Build delta-encoded data List data = toDeltaEncoded(entries); return new SemanticTokens(data); } @@ -548,11 +575,15 @@ private void addMethodCallTokens(List entries, URI uri) { } } - private void addLexicalTokens(List tokens, List entries) { + private void addLexicalTokens(List tokens, List entries, Set stringsToSkip) { for (Token token : tokens) { var tokenType = token.getType(); var tokenText = Objects.toString(token.getText(), ""); if (!tokenText.isEmpty()) { + // Skip string tokens that contain SDBL tokens - they'll be handled by addSdblTokens + if (STRING_TYPES.contains(tokenType) && stringsToSkip.contains(token)) { + continue; + } selectAndAddSemanticToken(entries, token, tokenType); } } @@ -579,6 +610,662 @@ private void selectAndAddSemanticToken(List entries, Token token, in } } + private Set collectStringsWithSdblTokens(DocumentContext documentContext) { + var queries = documentContext.getQueries(); + if (queries.isEmpty()) { + return Set.of(); + } + + // Collect all SDBL tokens grouped by line + // Note: ANTLR tokens use 1-indexed line numbers, convert to 0-indexed for LSP Range + var sdblTokensByLine = new HashMap>(); + for (var query : queries) { + for (Token token : query.getTokens()) { + if (token.getChannel() != Token.DEFAULT_CHANNEL) { + continue; + } + int zeroIndexedLine = token.getLine() - 1; // ANTLR uses 1-indexed, convert to 0-indexed for Range + sdblTokensByLine.computeIfAbsent(zeroIndexedLine, k -> new ArrayList<>()).add(token); + } + } + + if (sdblTokensByLine.isEmpty()) { + return Set.of(); + } + + // Collect BSL string tokens that contain SDBL tokens + var bslStringTokens = documentContext.getTokensFromDefaultChannel().stream() + .filter(token -> STRING_TYPES.contains(token.getType())) + .toList(); + + var stringsToSkip = new HashSet(); + + for (Token bslString : bslStringTokens) { + var stringRange = Ranges.create(bslString); + int stringLine = stringRange.getStart().getLine(); + + var sdblTokensOnLine = sdblTokensByLine.get(stringLine); + if (sdblTokensOnLine == null || sdblTokensOnLine.isEmpty()) { + continue; + } + + // Check if any SDBL tokens overlap with this string token + var hasOverlappingTokens = sdblTokensOnLine.stream() + .anyMatch(sdblToken -> { + var sdblRange = Ranges.create(sdblToken); + return Ranges.containsRange(stringRange, sdblRange); + }); + + if (hasOverlappingTokens) { + stringsToSkip.add(bslString); + } + } + + return stringsToSkip; + } + + private void addSdblTokens(DocumentContext documentContext, List entries, Set stringsToSkip) { + var queries = documentContext.getQueries(); + if (queries.isEmpty()) { + return; + } + + // Collect all SDBL tokens grouped by line + // Note: ANTLR tokens use 1-indexed line numbers, convert to 0-indexed for LSP Range + var sdblTokensByLine = new HashMap>(); + for (var query : queries) { + for (Token token : query.getTokens()) { + if (token.getChannel() != Token.DEFAULT_CHANNEL) { + continue; + } + int zeroIndexedLine = token.getLine() - 1; // ANTLR uses 1-indexed, convert to 0-indexed for Range + sdblTokensByLine.computeIfAbsent(zeroIndexedLine, k -> new ArrayList<>()).add(token); + } + } + + if (sdblTokensByLine.isEmpty()) { + return; + } + 
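Aside: the `addSdblTokens` logic that continues below splits each BSL string literal containing SDBL tokens into its leftover "string" pieces (typically just the surrounding quotes), so the embedded query tokens can carry their own semantic types. A simplified, dependency-free sketch of that splitting step follows; the `Span` record, the method name and the character positions are illustrative, not from this PR:

```java
import java.util.ArrayList;
import java.util.List;

public class StringSplitSketch {

  /** A half-open character span [start, end) on a single line. Illustrative type, not from the PR. */
  record Span(int start, int end) {}

  /**
   * Returns the parts of the enclosing string span NOT covered by the sorted,
   * non-overlapping embedded spans — the parts that keep the String token type.
   */
  static List<Span> remainingStringParts(Span string, List<Span> embedded) {
    var parts = new ArrayList<Span>();
    int currentPos = string.start();
    for (Span token : embedded) {
      if (currentPos < token.start()) {
        parts.add(new Span(currentPos, token.start()));
      }
      currentPos = token.end();
    }
    if (currentPos < string.end()) {
      parts.add(new Span(currentPos, string.end()));
    }
    return parts;
  }

  public static void main(String[] args) {
    // A BSL string literal holding a query: the quotes at both ends stay String tokens,
    // while the query tokens inside get their own semantic types.
    var stringToken = new Span(8, 19);                           // whole literal, quotes included
    var sdblTokens = List.of(new Span(9, 16), new Span(17, 18)); // two embedded query tokens
    System.out.println(remainingStringParts(stringToken, sdblTokens));
    // [Span[start=8, end=9], Span[start=16, end=17], Span[start=18, end=19]]
  }
}
```

The real implementation additionally converts ANTLR's 1-based line numbers to the 0-based lines LSP expects and measures token lengths in code points, as the surrounding hunks show.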
+ // For each BSL string token that was skipped, split it around SDBL tokens + int stringTypeIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.String); + + for (Token stringToken : stringsToSkip) { + var stringRange = Ranges.create(stringToken); + int stringLine = stringRange.getStart().getLine(); + + var sdblTokensOnLine = sdblTokensByLine.get(stringLine); + if (sdblTokensOnLine == null || sdblTokensOnLine.isEmpty()) { + continue; + } + + // Check if any SDBL tokens overlap with this string token + int stringStart = stringRange.getStart().getCharacter(); + int stringEnd = stringRange.getEnd().getCharacter(); + + var overlappingTokens = sdblTokensOnLine.stream() + .filter(sdblToken -> { + int sdblStart = sdblToken.getCharPositionInLine(); + int sdblEnd = sdblStart + (int) sdblToken.getText().codePoints().count(); + // Token overlaps if it's within the string range + return sdblStart >= stringStart && sdblEnd <= stringEnd; + }) + .sorted(Comparator.comparingInt(Token::getCharPositionInLine)) + .toList(); + + if (overlappingTokens.isEmpty()) { + continue; + } + + // Split the STRING token around SDBL tokens + int currentPos = stringStart; + + for (Token sdblToken : overlappingTokens) { + int sdblStart = sdblToken.getCharPositionInLine(); + int sdblEnd = sdblStart + (int) sdblToken.getText().codePoints().count(); + + // Add string part before SDBL token + if (currentPos < sdblStart && stringTypeIdx >= 0) { + entries.add(new TokenEntry( + stringLine, + currentPos, + sdblStart - currentPos, + stringTypeIdx, + 0 + )); + } + + currentPos = sdblEnd; + } + + // Add final string part after last SDBL token + if (currentPos < stringEnd && stringTypeIdx >= 0) { + entries.add(new TokenEntry( + stringLine, + currentPos, + stringEnd - currentPos, + stringTypeIdx, + 0 + )); + } + } + + // Add all SDBL tokens (with adjusted line numbers) + for (var query : queries) { + for (Token token : query.getTokens()) { + if (token.getChannel() != Token.DEFAULT_CHANNEL) { + continue; + } + addSdblToken(entries, token); + } + } + + // Add AST-based semantic tokens (aliases, field names, metadata names, etc.) + for (var query : queries) { + var visitor = new SdblSemanticTokensVisitor(this, entries); + visitor.visit(query.getAst()); + } + } + + private void addSdblToken(List entries, Token token) { + var tokenType = token.getType(); + var semanticTypeAndModifiers = getSdblTokenTypeAndModifiers(tokenType); + if (semanticTypeAndModifiers != null) { + // ANTLR uses 1-indexed line numbers, convert to 0-indexed for LSP Range + int zeroIndexedLine = token.getLine() - 1; + int start = token.getCharPositionInLine(); + int length = (int) token.getText().codePoints().count(); + // Create range with corrected line number + var range = new Range( + new Position(zeroIndexedLine, start), + new Position(zeroIndexedLine, start + length) + ); + addRange(entries, range, semanticTypeAndModifiers.type, semanticTypeAndModifiers.modifiers); + } + } + + @Nullable + private SdblTokenTypeAndModifiers getSdblTokenTypeAndModifiers(int tokenType) { + if (SDBL_KEYWORDS.contains(tokenType)) { + return new SdblTokenTypeAndModifiers(SemanticTokenTypes.Keyword, NO_MODIFIERS); + } else if (SDBL_FUNCTIONS.contains(tokenType)) { + // Functions as Function type with defaultLibrary modifier (built-in SDBL functions) + return new SdblTokenTypeAndModifiers(SemanticTokenTypes.Function, DEFAULT_LIBRARY); + } else if (SDBL_METADATA_TYPES.contains(tokenType) || SDBL_EDS.contains(tokenType)) { + // Metadata types (Справочник, РегистрСведений, etc.) 
as Namespace with no modifiers (per JSON spec) + // Note: Virtual tables (SDBL_VIRTUAL_TABLES) are NOT included here because they should be + // handled by AST visitor as Method tokens in visitMdo + return new SdblTokenTypeAndModifiers(SemanticTokenTypes.Namespace, NO_MODIFIERS); + } else if (SDBL_LITERALS.contains(tokenType)) { + // Literals as Keyword (matching YAML: constant.language.sdbl, no Constant type in LSP) + return new SdblTokenTypeAndModifiers(SemanticTokenTypes.Keyword, NO_MODIFIERS); + } else if (SDBL_OPERATORS.contains(tokenType)) { + return new SdblTokenTypeAndModifiers(SemanticTokenTypes.Operator, NO_MODIFIERS); + } else if (SDBL_STRINGS.contains(tokenType)) { + return new SdblTokenTypeAndModifiers(SemanticTokenTypes.String, NO_MODIFIERS); + } else if (SDBL_COMMENTS.contains(tokenType)) { + return new SdblTokenTypeAndModifiers(SemanticTokenTypes.Comment, NO_MODIFIERS); + } else if (SDBL_NUMBERS.contains(tokenType)) { + // Numbers as Number (matching YAML: constant.numeric.sdbl) + return new SdblTokenTypeAndModifiers(SemanticTokenTypes.Number, NO_MODIFIERS); + } + return null; + } + + private record SdblTokenTypeAndModifiers(String type, String[] modifiers) { + } + + // SDBL token type factory methods + private static Set createSdblKeywords() { + return Set.of( + SDBLLexer.ALL, + SDBLLexer.ALLOWED, + SDBLLexer.AND, + SDBLLexer.AS, + SDBLLexer.ASC, + SDBLLexer.AUTOORDER, + SDBLLexer.BETWEEN, + SDBLLexer.BY_EN, + SDBLLexer.CASE, + SDBLLexer.CAST, + SDBLLexer.DESC, + SDBLLexer.DISTINCT, + SDBLLexer.DROP, + SDBLLexer.ELSE, + SDBLLexer.END, + SDBLLexer.ESCAPE, + SDBLLexer.FOR, + SDBLLexer.FROM, + SDBLLexer.FULL, + SDBLLexer.GROUP, + SDBLLexer.HAVING, + SDBLLexer.HIERARCHY, + SDBLLexer.HIERARCHY_FOR_IN, + SDBLLexer.IN, + SDBLLexer.INDEX, + SDBLLexer.INNER, + SDBLLexer.INTO, + SDBLLexer.IS, + SDBLLexer.JOIN, + SDBLLexer.LEFT, + SDBLLexer.LIKE, + SDBLLexer.NOT, + SDBLLexer.OF, + SDBLLexer.ONLY, + SDBLLexer.ON_EN, + SDBLLexer.OR, + SDBLLexer.ORDER, + SDBLLexer.OVERALL, + SDBLLexer.OUTER, + SDBLLexer.PERIODS, + SDBLLexer.PO_RU, + SDBLLexer.REFS, + SDBLLexer.RIGHT, + SDBLLexer.SELECT, + SDBLLexer.SET, + SDBLLexer.THEN, + SDBLLexer.TOP, + SDBLLexer.TOTALS, + SDBLLexer.UNION, + SDBLLexer.UPDATE, + SDBLLexer.WHEN, + SDBLLexer.WHERE, + SDBLLexer.EMPTYREF, + SDBLLexer.GROUPEDBY, + SDBLLexer.GROUPING + ); + } + + private static Set createSdblFunctions() { + return Set.of( + SDBLLexer.AVG, + SDBLLexer.BEGINOFPERIOD, + SDBLLexer.BOOLEAN, + SDBLLexer.COUNT, + SDBLLexer.DATE, + SDBLLexer.DATEADD, + SDBLLexer.DATEDIFF, + SDBLLexer.DATETIME, + SDBLLexer.DAY, + SDBLLexer.DAYOFYEAR, + SDBLLexer.EMPTYTABLE, + SDBLLexer.ENDOFPERIOD, + SDBLLexer.HALFYEAR, + SDBLLexer.HOUR, + SDBLLexer.ISNULL, + SDBLLexer.MAX, + SDBLLexer.MIN, + SDBLLexer.MINUTE, + SDBLLexer.MONTH, + SDBLLexer.NUMBER, + SDBLLexer.QUARTER, + SDBLLexer.PRESENTATION, + SDBLLexer.RECORDAUTONUMBER, + SDBLLexer.REFPRESENTATION, + SDBLLexer.SECOND, + SDBLLexer.STRING, + SDBLLexer.SUBSTRING, + SDBLLexer.SUM, + SDBLLexer.TENDAYS, + SDBLLexer.TYPE, + SDBLLexer.VALUE, + SDBLLexer.VALUETYPE, + SDBLLexer.WEEK, + SDBLLexer.WEEKDAY, + SDBLLexer.YEAR, + SDBLLexer.INT, + SDBLLexer.ACOS, + SDBLLexer.ASIN, + SDBLLexer.ATAN, + SDBLLexer.COS, + SDBLLexer.SIN, + SDBLLexer.TAN, + SDBLLexer.LOG, + SDBLLexer.LOG10, + SDBLLexer.EXP, + SDBLLexer.POW, + SDBLLexer.SQRT, + SDBLLexer.LOWER, + SDBLLexer.STRINGLENGTH, + SDBLLexer.TRIMALL, + SDBLLexer.TRIML, + SDBLLexer.TRIMR, + SDBLLexer.UPPER, + SDBLLexer.ROUND, + SDBLLexer.STOREDDATASIZE, + SDBLLexer.UUID, + 
SDBLLexer.STRFIND, + SDBLLexer.STRREPLACE + ); + } + + private static Set createSdblMetadataTypes() { + return Set.of( + SDBLLexer.ACCOUNTING_REGISTER_TYPE, + SDBLLexer.ACCUMULATION_REGISTER_TYPE, + SDBLLexer.BUSINESS_PROCESS_TYPE, + SDBLLexer.CALCULATION_REGISTER_TYPE, + SDBLLexer.CATALOG_TYPE, + SDBLLexer.CHART_OF_ACCOUNTS_TYPE, + SDBLLexer.CHART_OF_CALCULATION_TYPES_TYPE, + SDBLLexer.CHART_OF_CHARACTERISTIC_TYPES_TYPE, + SDBLLexer.CONSTANT_TYPE, + SDBLLexer.DOCUMENT_TYPE, + SDBLLexer.DOCUMENT_JOURNAL_TYPE, + SDBLLexer.ENUM_TYPE, + SDBLLexer.EXCHANGE_PLAN_TYPE, + SDBLLexer.EXTERNAL_DATA_SOURCE_TYPE, + SDBLLexer.FILTER_CRITERION_TYPE, + SDBLLexer.INFORMATION_REGISTER_TYPE, + SDBLLexer.SEQUENCE_TYPE, + SDBLLexer.TASK_TYPE + ); + } + + private static Set createSdblVirtualTables() { + return Set.of( + SDBLLexer.ACTUAL_ACTION_PERIOD_VT, + SDBLLexer.BALANCE_VT, + SDBLLexer.BALANCE_AND_TURNOVERS_VT, + SDBLLexer.BOUNDARIES_VT, + SDBLLexer.DR_CR_TURNOVERS_VT, + SDBLLexer.EXT_DIMENSIONS_VT, + SDBLLexer.RECORDS_WITH_EXT_DIMENSIONS_VT, + SDBLLexer.SCHEDULE_DATA_VT, + SDBLLexer.SLICEFIRST_VT, + SDBLLexer.SLICELAST_VT, + SDBLLexer.TASK_BY_PERFORMER_VT, + SDBLLexer.TURNOVERS_VT + ); + } + + private static Set createSdblLiterals() { + return Set.of( + SDBLLexer.TRUE, + SDBLLexer.FALSE, + SDBLLexer.UNDEFINED, + SDBLLexer.NULL + ); + } + + private static Set createSdblOperators() { + return Set.of( + SDBLLexer.SEMICOLON, + SDBLLexer.DOT, // Added for field access operator + SDBLLexer.PLUS, + SDBLLexer.MINUS, + SDBLLexer.MUL, + SDBLLexer.QUOTIENT, + SDBLLexer.ASSIGN, + SDBLLexer.LESS_OR_EQUAL, + SDBLLexer.LESS, + SDBLLexer.NOT_EQUAL, + SDBLLexer.GREATER_OR_EQUAL, + SDBLLexer.GREATER, + SDBLLexer.COMMA, + SDBLLexer.BRACE, + SDBLLexer.BRACE_START, + SDBLLexer.NUMBER_SIGH + ); + } + private record TokenEntry(int line, int start, int length, int type, int modifiers) { } + + /** + * Visitor for SDBL AST to add semantic tokens based on context. + * Handles: + * - Table aliases → Variable + * - Field names (after dots) → Property + * - Metadata type names → Namespace + * - Alias declarations (after AS/КАК) → Variable + Declaration + * - Temporary table declarations (INTO tableName) → Variable + Declaration + * - Temporary table references (FROM tableName) → Variable + * - Operators (dots, commas) → Operator + */ + private static class SdblSemanticTokensVisitor extends SDBLParserBaseVisitor { + private final SemanticTokensProvider provider; + private final List entries; + + public SdblSemanticTokensVisitor(SemanticTokensProvider provider, List entries) { + this.provider = provider; + this.entries = entries; + } + + @Override + public Void visitQuery(SDBLParser.QueryContext ctx) { + // Handle INTO temporaryTableName (ПОМЕСТИТЬ ВТ_Курсы) + // Grammar: (INTO temporaryTableName=temporaryTableIdentifier)? + // temporaryTableIdentifier: DOT? 
(NUMBER_SIGH+ | identifier | ((identifier | NUMBER_SIGH)+ DECIMAL*)+) + var temporaryTableName = ctx.temporaryTableName; + if (temporaryTableName != null) { + // Add the entire temporaryTableIdentifier as Variable + Declaration + provider.addSdblContextRange(entries, temporaryTableName, SemanticTokenTypes.Variable, SemanticTokenModifiers.Declaration); + } + + return super.visitQuery(ctx); + } + + @Override + public Void visitDataSource(SDBLParser.DataSourceContext ctx) { + // Handle table sources and their aliases + var alias = ctx.alias(); + if (alias != null && alias.identifier() != null) { + // Alias after AS/КАК → Variable + Declaration + var token = alias.identifier().getStart(); + provider.addSdblTokenRange(entries, token, SemanticTokenTypes.Variable, SemanticTokenModifiers.Declaration); + } + + return super.visitDataSource(ctx); + } + + @Override + public Void visitSelectedField(SDBLParser.SelectedFieldContext ctx) { + // Handle field selections and their aliases + var alias = ctx.alias(); + if (alias != null && alias.identifier() != null) { + // Alias after AS/КАК → Variable + Declaration + var token = alias.identifier().getStart(); + provider.addSdblTokenRange(entries, token, SemanticTokenTypes.Variable, SemanticTokenModifiers.Declaration); + } + + return super.visitSelectedField(ctx); + } + + @Override + public Void visitMdo(SDBLParser.MdoContext ctx) { + // Metadata object reference + // Grammar: mdo: type=(CATALOG_TYPE|...) DOT tableName=identifier + // type is already handled as Namespace by lexical processing + // tableName → Class (metadata object name, e.g., Пользователи in Справочник.Пользователи) + var tableName = ctx.tableName; + if (tableName != null) { + provider.addSdblTokenRange(entries, tableName.getStart(), SemanticTokenTypes.Class); + } + + return super.visitMdo(ctx); + } + + @Override + public Void visitVirtualTable(SDBLParser.VirtualTableContext ctx) { + // Virtual table methods like СрезПоследних, Обороты, etc. + // Grammar: mdo DOT virtualTableName=(SLICELAST_VT | SLICEFIRST_VT | ...) ( parameters )? + // virtualTableName is a token, not an identifier context + + // Get virtualTableName token from context + // It's defined in grammar as virtualTableName=(SLICELAST_VT | SLICEFIRST_VT | ...) 
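+      // Worked example: for РегистрСведений.КурсыВалют.СрезПоследних(...) the lexical pass
+      // colors РегистрСведений as Namespace, visitMdo colors КурсыВалют as Class, and the
+      // virtualTableName token (СрезПоследних) is emitted below as a Method token.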
+ var virtualTableNameToken = ctx.virtualTableName; + if (virtualTableNameToken != null) { + provider.addSdblTokenRange(entries, virtualTableNameToken, SemanticTokenTypes.Method); + } + + return super.visitVirtualTable(ctx); + } + + @Override + public Void visitTable(SDBLParser.TableContext ctx) { + // Handle table references + // Grammar: table: mdo | mdo DOT objectTableName=identifier | tableName=identifier + + // tableName (third variant) is a temporary table reference + var tableName = ctx.tableName; + if (tableName != null) { + // Temporary table reference (ИЗ ВТ_Курсы) → Variable + provider.addSdblTokenRange(entries, tableName.getStart(), SemanticTokenTypes.Variable); + } + + // objectTableName (second variant) is a table part/subordinate table + // e.g., Справочник.Пользователи.ГруппыДоступа → ГруппыДоступа is objectTableName + var objectTableName = ctx.objectTableName; + if (objectTableName != null) { + // Table part (табличная часть) → Class (it's a full table, subordinate to the main object) + provider.addSdblTokenRange(entries, objectTableName.getStart(), SemanticTokenTypes.Class); + } + + return super.visitTable(ctx); + } + + @Override + public Void visitColumn(SDBLParser.ColumnContext ctx) { + // Handle field references: TableAlias.FieldName + var identifiers = ctx.identifier(); + if (identifiers != null && !identifiers.isEmpty()) { + if (identifiers.size() == 1) { + // Single identifier: in SDBL it may represent either a table alias or a field name. + // We intentionally highlight such ambiguous identifiers as "variable" for now, + // because distinguishing alias vs. field here would require deeper symbol resolution + // that is not performed in this visitor. + provider.addSdblTokenRange(entries, identifiers.get(0).getStart(), SemanticTokenTypes.Variable); + } else if (identifiers.size() >= 2) { + // First identifier → Variable (table alias) + provider.addSdblTokenRange(entries, identifiers.get(0).getStart(), SemanticTokenTypes.Variable); + + // Dots are handled by lexical token processing + + // Last identifier → Property (field name) + provider.addSdblTokenRange(entries, identifiers.get(identifiers.size() - 1).getStart(), SemanticTokenTypes.Property); + } + } + + return super.visitColumn(ctx); + } + + @Override + public Void visitParameter(SDBLParser.ParameterContext ctx) { + // Handle query parameters: &ParameterName + // Grammar: parameter: AMPERSAND name=PARAMETER_IDENTIFIER; + // Combine both tokens into a single Parameter token with Readonly modifier + var ampersand = ctx.AMPERSAND(); + var parameterName = ctx.name; + if (ampersand != null && parameterName != null) { + // Create range from start of AMPERSAND to end of PARAMETER_IDENTIFIER + provider.addSdblContextRange(entries, ctx, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Readonly); + } + + return super.visitParameter(ctx); + } + + @Override + public Void visitValueFunction(SDBLParser.ValueFunctionContext ctx) { + // Handle VALUE function: Значение(...) + // Grammar variants: + // 1. type DOT mdoName DOT emptyRef=EMPTYREF (e.g., Справочник.Валюты.ПустаяСсылка) + // 2. type DOT mdoName DOT predefinedName (e.g., Справочник.Валюты.Рубль, Перечисление.Пол.Мужской) + // 3. systemName DOT predefinedName (e.g., for system enums) + // 4. 
mdo DOT (empty reference via mdo) + + var type = ctx.type; + var mdoName = ctx.mdoName; + var predefinedName = ctx.predefinedName; + var emptyRef = ctx.emptyFer; // Note: variable name matches grammar field 'emptyFer' (typo in grammar for 'emptyRef') + var systemName = ctx.systemName; + + if (type != null && mdoName != null) { + // Handle: type.mdoName.predefinedName or type.mdoName.EMPTYREF + // type is already handled as Namespace by lexical processing + + // mdoName → Class or Enum depending on type + if (type.getType() == SDBLLexer.ENUM_TYPE) { + // For Перечисление.Пол → Пол is Enum + provider.addSdblTokenRange(entries, mdoName.getStart(), SemanticTokenTypes.Enum); + } else { + // For Справочник.Валюты, ПланВидовХарактеристик.XXX, etc. → Class + provider.addSdblTokenRange(entries, mdoName.getStart(), SemanticTokenTypes.Class); + } + + // predefinedName or EMPTYREF → EnumMember + if (predefinedName != null) { + provider.addSdblTokenRange(entries, predefinedName.getStart(), SemanticTokenTypes.EnumMember); + } else if (emptyRef != null) { + provider.addSdblTokenRange(entries, emptyRef, SemanticTokenTypes.EnumMember); + } + } else if (systemName != null && predefinedName != null) { + // Handle system enum: systemName.predefinedName + // systemName → Enum + provider.addSdblTokenRange(entries, systemName.getStart(), SemanticTokenTypes.Enum); + // predefinedName → EnumMember + provider.addSdblTokenRange(entries, predefinedName.getStart(), SemanticTokenTypes.EnumMember); + } + + // Handle routePointName for business processes + var routePointName = ctx.routePointName; + if (routePointName != null) { + provider.addSdblTokenRange(entries, routePointName.getStart(), SemanticTokenTypes.EnumMember); + } + + return super.visitValueFunction(ctx); + } + + } + + /** + * Helper method to add semantic token from SDBL ANTLR token + * Handles conversion from ANTLR 1-indexed lines to LSP 0-indexed positions + */ + private void addSdblTokenRange(List entries, @Nullable Token token, String type, String... modifiers) { + if (token == null) { + return; + } + + // ANTLR uses 1-indexed line numbers, convert to 0-indexed for LSP Range + int zeroIndexedLine = token.getLine() - 1; + int start = token.getCharPositionInLine(); + int length = (int) token.getText().codePoints().count(); + + var range = new Range( + new Position(zeroIndexedLine, start), + new Position(zeroIndexedLine, start + length) + ); + + addRange(entries, range, type, modifiers); + } + + /** + * Helper method to add semantic token from SDBL ParserRuleContext + * Uses the entire range of the context (from start token to stop token) + */ + private void addSdblContextRange(List entries, ParserRuleContext ctx, String type, String... 
modifiers) { + if (ctx == null || ctx.getStart() == null || ctx.getStop() == null) { + return; + } + + var startToken = ctx.getStart(); + var stopToken = ctx.getStop(); + + // ANTLR uses 1-indexed line numbers, convert to 0-indexed for LSP Range + int zeroIndexedLine = startToken.getLine() - 1; + int start = startToken.getCharPositionInLine(); + + // Calculate length from start of first token to end of last token + // For single-line contexts, we can compute the total length + int stopEndPosition = stopToken.getCharPositionInLine() + (int) stopToken.getText().codePoints().count(); + int length = stopEndPosition - start; + + var range = new Range( + new Position(zeroIndexedLine, start), + new Position(zeroIndexedLine, start + length) + ); + + addRange(entries, range, type, modifiers); + } } diff --git a/src/main/java/com/github/_1c_syntax/bsl/languageserver/semantictokens/SemanticTokensLegendConfiguration.java b/src/main/java/com/github/_1c_syntax/bsl/languageserver/semantictokens/SemanticTokensLegendConfiguration.java index 9f5d5ff18e0..ebf37a82742 100644 --- a/src/main/java/com/github/_1c_syntax/bsl/languageserver/semantictokens/SemanticTokensLegendConfiguration.java +++ b/src/main/java/com/github/_1c_syntax/bsl/languageserver/semantictokens/SemanticTokensLegendConfiguration.java @@ -58,12 +58,19 @@ public SemanticTokensLegend semanticTokensLegend() { SemanticTokenTypes.Macro, SemanticTokenTypes.Decorator, SemanticTokenTypes.Operator, - SemanticTokenTypes.Namespace + SemanticTokenTypes.Namespace, + SemanticTokenTypes.Type, // Standard LSP token type for type names (identifiers of types) + SemanticTokenTypes.Property, // Added for SDBL field names + SemanticTokenTypes.Class, // Added for SDBL metadata object names (e.g. Справочник.Контрагенты, РегистрСведений.КурсыВалют) + SemanticTokenTypes.Enum, // Added for SDBL enum types (Перечисление.Пол) + SemanticTokenTypes.EnumMember // Added for predefined elements and enum values ); List tokenModifiers = List.of( SemanticTokenModifiers.Documentation, - SemanticTokenModifiers.Definition + SemanticTokenModifiers.Definition, + SemanticTokenModifiers.DefaultLibrary, // Added for SDBL built-in functions and types + SemanticTokenModifiers.Declaration // Added for SDBL alias declarations ); return new SemanticTokensLegend(tokenTypes, tokenModifiers); diff --git a/src/main/resources/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier_en.properties b/src/main/resources/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier_en.properties new file mode 100644 index 00000000000..6154a7a9753 --- /dev/null +++ b/src/main/resources/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier_en.properties @@ -0,0 +1 @@ +title=𓆣 Debug test \ No newline at end of file diff --git a/src/main/resources/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier_ru.properties b/src/main/resources/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier_ru.properties new file mode 100644 index 00000000000..04a8c201364 --- /dev/null +++ b/src/main/resources/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplier_ru.properties @@ -0,0 +1 @@ +title=𓆣 Отладить тест \ No newline at end of file diff --git a/src/main/resources/com/github/_1c_syntax/bsl/languageserver/configuration/schema.json b/src/main/resources/com/github/_1c_syntax/bsl/languageserver/configuration/schema.json index 10f52879f8d..37bc7f9ce94 100644 --- 
a/src/main/resources/com/github/_1c_syntax/bsl/languageserver/configuration/schema.json +++ b/src/main/resources/com/github/_1c_syntax/bsl/languageserver/configuration/schema.json @@ -749,9 +749,15 @@ "runTestArguments": { "$id": "#/properties/codeLens/testRunner/runTestArguments", "type": "string", - "title": "Arguments to pass to test runner executable to run test method. %s will be replaced with path to current file, %m will be replaced with test method name.", + "title": "Arguments to pass to test runner executable to run test method. First %s will be replaced with path to current file, second %s will be replaced with test method name.", "default": "-run %s %s" }, + "debugTestArguments": { + "$id": "#/properties/codeLens/testRunner/debugTestArguments", + "type": "string", + "title": "Arguments to pass to test runner executable to debug test method. First %s will be replaced with path to current file, second %s will be replaced with test method name.", + "default": "" + }, "runAllTestsArguments": { "$id": "#/properties/codeLens/testRunner/runAllTestsArguments", "type": "string", diff --git a/src/test/java/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplierTest.java b/src/test/java/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplierTest.java new file mode 100644 index 00000000000..d8b82a29e72 --- /dev/null +++ b/src/test/java/com/github/_1c_syntax/bsl/languageserver/codelenses/DebugTestCodeLensSupplierTest.java @@ -0,0 +1,141 @@ +/* + * This file is a part of BSL Language Server. + * + * Copyright (c) 2018-2025 + * Alexey Sosnoviy , Nikita Fedkin and contributors + * + * SPDX-License-Identifier: LGPL-3.0-or-later + * + * BSL Language Server is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 3.0 of the License, or (at your option) any later version. + * + * BSL Language Server is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with BSL Language Server. 
+ */ +package com.github._1c_syntax.bsl.languageserver.codelenses; + +import com.github._1c_syntax.bsl.languageserver.codelenses.testrunner.TestRunnerAdapter; +import com.github._1c_syntax.bsl.languageserver.configuration.LanguageServerConfiguration; +import com.github._1c_syntax.bsl.languageserver.configuration.codelens.CodeLensOptions; +import com.github._1c_syntax.bsl.languageserver.configuration.codelens.TestRunnerAdapterOptions; +import com.github._1c_syntax.bsl.languageserver.context.DocumentContext; +import com.github._1c_syntax.bsl.languageserver.events.LanguageServerInitializeRequestReceivedEvent; +import com.github._1c_syntax.bsl.languageserver.util.CleanupContextBeforeClassAndAfterEachTestMethod; +import com.github._1c_syntax.bsl.languageserver.util.TestUtils; +import org.eclipse.lsp4j.ClientInfo; +import org.eclipse.lsp4j.CodeLens; +import org.eclipse.lsp4j.InitializeParams; +import org.eclipse.lsp4j.services.LanguageServer; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.test.context.bean.override.mockito.MockitoSpyBean; + +import java.util.List; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +@SpringBootTest +@CleanupContextBeforeClassAndAfterEachTestMethod +class DebugTestCodeLensSupplierTest { + + @Autowired + private DebugTestCodeLensSupplier supplier; + + @Autowired + private ApplicationEventPublisher eventPublisher; + + @MockitoSpyBean + private TestRunnerAdapter testRunnerAdapter; + + @MockitoSpyBean + private LanguageServerConfiguration languageServerConfiguration; + + private DocumentContext documentContext; + + @BeforeEach + void init() { + var filePath = "./src/test/resources/codelenses/DebugTestCodeLensSupplier.os"; + documentContext = TestUtils.getDocumentContextFromFile(filePath); + } + + @Test + void testDryRun() { + // given + initializeServer("Visual Studio Code"); + + // when + var codeLenses = supplier.getCodeLenses(documentContext); + + // then + assertThat(codeLenses).isNotNull(); + } + + @Test + void testRunWithMockedTestIds() { + // given + initializeServer("Visual Studio Code"); + + when(testRunnerAdapter.getTestIds(documentContext)) + .thenReturn(List.of("testName")); + + var testRunnerAdapterOptions = mock(TestRunnerAdapterOptions.class); + + when(testRunnerAdapterOptions.getDebugTestArguments()) + .thenReturn("some"); + + var codeLensOptions = mock(CodeLensOptions.class); + when(codeLensOptions.getTestRunnerAdapterOptions()) + .thenReturn(testRunnerAdapterOptions); + + when(languageServerConfiguration.getCodeLensOptions()) + .thenReturn(codeLensOptions); + + // when + var codeLenses = supplier.getCodeLenses(documentContext); + + // then + assertThat(codeLenses).isNotNull(); + } + + @Test + void testResolve() { + // given + CodeLens codeLens = new CodeLens(); + DebugTestCodeLensSupplier.DebugTestCodeLensData codeLensData = new DebugTestCodeLensSupplier.DebugTestCodeLensData( + documentContext.getUri(), + supplier.getId(), + "testName" + ); + + // when + var resolvedCodeLens = supplier.resolve(documentContext, codeLens, codeLensData); + + // then + assertThat(resolvedCodeLens.getCommand()).isNotNull(); + } + + private void initializeServer(String clientName) { + var initializeParams = new InitializeParams(); + 
initializeParams.setClientInfo( + new ClientInfo(clientName, "1.0.0") + ); + + var event = new LanguageServerInitializeRequestReceivedEvent( + mock(LanguageServer.class), + initializeParams + ); + eventPublisher.publishEvent(event); + } +} \ No newline at end of file diff --git a/src/test/java/com/github/_1c_syntax/bsl/languageserver/providers/SemanticTokensProviderTest.java b/src/test/java/com/github/_1c_syntax/bsl/languageserver/providers/SemanticTokensProviderTest.java index fe8f145f971..28b6b38a27d 100644 --- a/src/test/java/com/github/_1c_syntax/bsl/languageserver/providers/SemanticTokensProviderTest.java +++ b/src/test/java/com/github/_1c_syntax/bsl/languageserver/providers/SemanticTokensProviderTest.java @@ -25,11 +25,7 @@ import com.github._1c_syntax.bsl.languageserver.references.ReferenceIndexFiller; import com.github._1c_syntax.bsl.languageserver.util.CleanupContextBeforeClassAndAfterEachTestMethod; import com.github._1c_syntax.bsl.languageserver.util.TestUtils; -import com.github._1c_syntax.bsl.parser.BSLLexer; -import com.github._1c_syntax.bsl.languageserver.context.symbol.MethodSymbol; -import org.antlr.v4.runtime.Token; -import org.eclipse.lsp4j.Position; -import org.eclipse.lsp4j.Range; +import org.eclipse.lsp4j.SemanticTokenModifiers; import org.eclipse.lsp4j.SemanticTokenTypes; import org.eclipse.lsp4j.SemanticTokens; import org.eclipse.lsp4j.SemanticTokensLegend; @@ -41,9 +37,7 @@ import org.springframework.boot.test.context.SpringBootTest; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; -import java.util.Objects; import java.util.Set; import static org.assertj.core.api.Assertions.assertThat; @@ -66,754 +60,832 @@ void init() { provider.setMultilineTokenSupport(false); } - @Test - void emitsExpectedTokenTypes() { - // given: sample BSL with annotation, macro, method, parameter, string, number, comment, operators - String bsl = String.join("\n", - "&НаКлиенте", - "#Если Истина Тогда", - "Процедура Тест(Парам) Экспорт", - " // комментарий", - " Сообщить(\"строка\" + 123);", - "КонецПроцедуры", - "#КонецЕсли" - ); - - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // when - var params = new SemanticTokensParams(textDocumentIdentifier); - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, params); + // region Helper types and methods + + /** + * Represents expected semantic token for assertion. 
+ * + * @param line 0-based line number + * @param startChar 0-based start character + * @param length token length + * @param tokenType LSP token type (e.g., SemanticTokenTypes.Keyword) + * @param tokenModifiers set of LSP modifiers (e.g., SemanticTokenModifiers.Declaration) + * @param lexeme optional lexeme for documentation (not used in comparison) + */ + private record ExpectedToken( + int line, + int startChar, + int length, + String tokenType, + Set tokenModifiers, + String lexeme + ) { + ExpectedToken(int line, int startChar, int length, String tokenType, String lexeme) { + this(line, startChar, length, tokenType, Set.of(), lexeme); + } - // then: collect type indexes present - List data = tokens.getData(); - assertThat(data).isNotEmpty(); + ExpectedToken(int line, int startChar, int length, String tokenType, String modifier, String lexeme) { + this(line, startChar, length, tokenType, Set.of(modifier), lexeme); + } + } - Set presentTypes = indexesOfTypes(data); + private record DecodedToken(int line, int start, int length, int type, int modifiers) {} - // map desired types to indices and assert they're present - assertPresent(presentTypes, SemanticTokenTypes.Decorator); - assertPresent(presentTypes, SemanticTokenTypes.Macro); - assertPresent(presentTypes, SemanticTokenTypes.Method); - assertPresent(presentTypes, SemanticTokenTypes.Parameter); - assertPresent(presentTypes, SemanticTokenTypes.Keyword); - assertPresent(presentTypes, SemanticTokenTypes.String); - assertPresent(presentTypes, SemanticTokenTypes.Number); - assertPresent(presentTypes, SemanticTokenTypes.Comment); - assertPresent(presentTypes, SemanticTokenTypes.Operator); + private List decode(List data) { + List out = new ArrayList<>(); + int line = 0; + int start = 0; + for (int i = 0; i + 4 < data.size(); i += 5) { + int dLine = data.get(i); + int dStart = data.get(i + 1); + int length = data.get(i + 2); + int type = data.get(i + 3); + int mods = data.get(i + 4); + line = line + dLine; + start = (dLine == 0) ? 
start + dStart : dStart; + out.add(new DecodedToken(line, start, length, type, mods)); + } + return out; } - @Test - void emitsMacroForAllPreprocTokens() { - // given: preprocessor variety to cover PREPROC_* tokens including regions - String bsl = String.join("\n", - "#Область Region1", - "#Если Сервер И НЕ Клиент Тогда", - "Процедура Пусто()", - "КонецПроцедуры", - "#ИначеЕсли Клиент Тогда", - "#Иначе", - "#КонецЕсли", - "#КонецОбласти" - ); - - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); + private void assertTokensMatch(List actual, List expected) { + assertThat(actual) + .as("Number of tokens") + .hasSameSizeAs(expected); + + for (int i = 0; i < expected.size(); i++) { + var exp = expected.get(i); + var act = actual.get(i); + + int expectedTypeIdx = legend.getTokenTypes().indexOf(exp.tokenType); + int expectedModifiersMask = computeModifiersMask(exp.tokenModifiers); + + assertThat(act.line) + .as("Token %d (%s): line", i, exp.lexeme) + .isEqualTo(exp.line); + assertThat(act.start) + .as("Token %d (%s): start", i, exp.lexeme) + .isEqualTo(exp.startChar); + assertThat(act.length) + .as("Token %d (%s): length", i, exp.lexeme) + .isEqualTo(exp.length); + assertThat(act.type) + .as("Token %d (%s): type (expected %s)", i, exp.lexeme, exp.tokenType) + .isEqualTo(expectedTypeIdx); + assertThat(act.modifiers) + .as("Token %d (%s): modifiers", i, exp.lexeme) + .isEqualTo(expectedModifiersMask); + } + } - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + private void assertContainsTokens(List actual, List expected) { + for (var exp : expected) { + int expectedTypeIdx = legend.getTokenTypes().indexOf(exp.tokenType); + int expectedModifiersMask = computeModifiersMask(exp.tokenModifiers); + + var found = actual.stream() + .filter(t -> t.line == exp.line + && t.start == exp.startChar + && t.length == exp.length + && t.type == expectedTypeIdx + && t.modifiers == expectedModifiersMask) + .findFirst(); + + assertThat(found) + .as("Expected token: %s at [%d:%d], length=%d, type=%s, modifiers=%s", + exp.lexeme, exp.line, exp.startChar, exp.length, exp.tokenType, exp.tokenModifiers) + .isPresent(); + } + } - // then: count how many lexer tokens are PREPROC_* (or HASH) on default channel - List defaultTokens = documentContext.getTokensFromDefaultChannel(); - - long totalPreproc = defaultTokens.stream() - .map(Token::getType) - .map(BSLLexer.VOCABULARY::getSymbolicName) - .filter(Objects::nonNull) - .filter(sym -> sym.equals("HASH") || sym.startsWith("PREPROC_")) - .count(); - - // count region directives and names - long regionDirectives = 0; - long regionNames = 0; - for (int i = 0; i + 1 < defaultTokens.size(); i++) { - Token t = defaultTokens.get(i); - Token n = defaultTokens.get(i + 1); - if (t.getType() == BSLLexer.HASH && n.getType() == BSLLexer.PREPROC_REGION) { - regionDirectives++; - // if name token follows, it is included into Namespace span and not counted as Macro - if (i + 2 < defaultTokens.size() && defaultTokens.get(i + 2).getType() == BSLLexer.PREPROC_IDENTIFIER) { - regionNames++; - } - } else if (t.getType() == BSLLexer.HASH && n.getType() == BSLLexer.PREPROC_END_REGION) { - regionDirectives++; + private int computeModifiersMask(Set modifiers) { + int mask = 0; + for (String mod : modifiers) { + int idx = 
legend.getTokenModifiers().indexOf(mod); + if (idx >= 0) { + mask |= (1 << idx); } } - - // expected macro tokens exclude region directives (HASH + PREPROC_*) and region names after PREPROC_REGION - long expectedMacro = totalPreproc - (regionDirectives * 2) - regionNames; - - int macroIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Macro); - int nsIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Namespace); - assertThat(macroIdx).isGreaterThanOrEqualTo(0); - assertThat(nsIdx).isGreaterThanOrEqualTo(0); - - long macroCount = countOfType(tokens.getData(), macroIdx); - long nsCount = countOfType(tokens.getData(), nsIdx); - - // macros match non-region preproc tokens; namespace tokens match number of region directives - assertThat(macroCount).isEqualTo(expectedMacro); - assertThat(nsCount).isEqualTo(regionDirectives); + return mask; } - @Test - void emitsOperatorsForPunctuators() { - // given: code with many punctuators and operators - String bsl = String.join("\n", - "Процедура Опер()", - " Массив = Новый Массив();", - " Массив.Добавить(1 + 2);", - " Значение = Массив[0]?;", - " Если 1 <> 2 Тогда КонецЕсли;", - "КонецПроцедуры" - ); - + private SemanticTokens getTokens(String bsl) { DocumentContext documentContext = TestUtils.getDocumentContext(bsl); referenceIndexFiller.fill(documentContext); TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); + return provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + } - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); - - int operatorIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Operator); - assertThat(operatorIdx).isGreaterThanOrEqualTo(0); - - // count lexer operator/punctuator tokens - Set opTypes = Set.of( - BSLLexer.LPAREN, - BSLLexer.RPAREN, - BSLLexer.LBRACK, - BSLLexer.RBRACK, - BSLLexer.COMMA, - BSLLexer.SEMICOLON, - BSLLexer.COLON, - BSLLexer.DOT, - BSLLexer.PLUS, - BSLLexer.MINUS, - BSLLexer.MUL, - BSLLexer.QUOTIENT, - BSLLexer.MODULO, - BSLLexer.ASSIGN, - BSLLexer.NOT_EQUAL, - BSLLexer.LESS, - BSLLexer.LESS_OR_EQUAL, - BSLLexer.GREATER, - BSLLexer.GREATER_OR_EQUAL, - BSLLexer.QUESTION, - BSLLexer.TILDA - ); - - long lexerOpCount = documentContext.getTokensFromDefaultChannel().stream() - .map(Token::getType) - .filter(opTypes::contains) - .count(); + private List<DecodedToken> getDecodedTokens(String bsl) { + return decode(getTokens(bsl).getData()); + } - long operatorCount = countOfType(tokens.getData(), operatorIdx); + // endregion - // 1:1 mapping of lexer operator tokens to semantic Operator tokens - assertThat(operatorCount).isEqualTo(lexerOpCount); - } + // region Encoder test @Test - void annotationWithoutParams_isDecoratorOnly() { - // given - String annotation = "&НаКлиенте"; - String bsl = String.join("\n", - annotation, - "Процедура Тест()", - "КонецПроцедуры" - ); + void tokenEncodingFormat_deltaLineAndDeltaStart() { + // Test that the encoder correctly computes delta-line and delta-start values + // according to LSP SemanticTokens specification + String bsl = """ + Перем А; + Перем Б; + """; + + SemanticTokens tokens = getTokens(bsl); + List<Integer> data = tokens.getData(); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); + // Each token is 5 integers:
[deltaLine, deltaStart, length, tokenType, tokenModifiers] + assertThat(data.size() % 5).isZero(); - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + // Decode and verify absolute positions + List<DecodedToken> decoded = decode(data); + assertThat(decoded).isNotEmpty(); - int decoratorIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Decorator); - int operatorIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Operator); - assertThat(decoratorIdx).isGreaterThanOrEqualTo(0); - assertThat(operatorIdx).isGreaterThanOrEqualTo(0); + // First token should be at line 0 + assertThat(decoded.get(0).line).isZero(); - List firstLineTokens = decode(tokens.getData()).stream().filter(t -> t.line == 0).toList(); + // Tokens should be ordered by position + for (int i = 1; i < decoded.size(); i++) { + var prev = decoded.get(i - 1); + var curr = decoded.get(i); + // Either on a later line, or same line with later start + assertThat(curr.line > prev.line || (curr.line == prev.line && curr.start >= prev.start + prev.length)) + .as("Token %d should be after token %d", i, i - 1) + .isTrue(); + } + } - // then: on line 0 we should have exactly one Decorator token: merged '&НаКлиенте' - long decoratorsOnFirstLine = firstLineTokens.stream().filter(t -> t.type == decoratorIdx).count(); - assertThat(decoratorsOnFirstLine).isEqualTo(1); + // endregion - // and no operators or strings on that line - long operatorsOnFirstLine = firstLineTokens.stream().filter(t -> t.type == operatorIdx).count(); - assertThat(operatorsOnFirstLine).isZero(); - } + // region BSL tokens tests @Test - void annotationWithStringParam_tokenizesNameParenAndString() { - // given - String bsl = String.join("\n", - "&Перед(\"Строка\")", - "Процедура Тест()", - "КонецПроцедуры" + void annotationWithoutParams() { + String bsl = """ + &НаКлиенте + Процедура Тест() + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + new ExpectedToken(0, 0, 10, SemanticTokenTypes.Decorator, "&НаКлиенте"), + new ExpectedToken(1, 0, 9, SemanticTokenTypes.Keyword, "Процедура"), + new ExpectedToken(1, 10, 4, SemanticTokenTypes.Method, "Тест"), + new ExpectedToken(1, 14, 1, SemanticTokenTypes.Operator, "("), + new ExpectedToken(1, 15, 1, SemanticTokenTypes.Operator, ")"), + new ExpectedToken(2, 0, 14, SemanticTokenTypes.Keyword, "КонецПроцедуры") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); - - int decoratorIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Decorator); - int operatorIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Operator); - int stringIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.String); - assertThat(decoratorIdx).isGreaterThanOrEqualTo(0); - assertThat(operatorIdx).isGreaterThanOrEqualTo(0); - assertThat(stringIdx).isGreaterThanOrEqualTo(0); - - List firstLineTokens = decode(tokens.getData()).stream().filter(t -> t.line == 0).toList(); - - // one decorator on line 0: merged '&Перед' - assertThat(firstLineTokens.stream().filter(t -> t.type == decoratorIdx).count()).isEqualTo(1); - - // operators present for parentheses - assertThat(firstLineTokens.stream().filter(t -> t.type ==
operatorIdx).count()).isGreaterThanOrEqualTo(2); - - // string present - assertThat(firstLineTokens.stream().filter(t -> t.type == stringIdx).count()).isGreaterThanOrEqualTo(1); + assertTokensMatch(decoded, expected); } @Test - void customAnnotationWithNamedStringParam_marksIdentifierAsParameter() { - // given - String bsl = String.join("\n", - "&КастомнаяАннотация(Значение = \"Параметр\")", - "Процедура Тест()", - "КонецПроцедуры" + void annotationWithStringParam() { + String bsl = """ + &Перед("Строка") + Процедура Тест() + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expectedLine0 = List.of( + new ExpectedToken(0, 0, 6, SemanticTokenTypes.Decorator, "&Перед"), + new ExpectedToken(0, 6, 1, SemanticTokenTypes.Operator, "("), + new ExpectedToken(0, 7, 8, SemanticTokenTypes.String, "\"Строка\""), + new ExpectedToken(0, 15, 1, SemanticTokenTypes.Operator, ")") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); - - int decoratorIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Decorator); - int operatorIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Operator); - int stringIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.String); - int paramIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Parameter); - - assertThat(decoratorIdx).isGreaterThanOrEqualTo(0); - assertThat(operatorIdx).isGreaterThanOrEqualTo(0); - assertThat(stringIdx).isGreaterThanOrEqualTo(0); - assertThat(paramIdx).isGreaterThanOrEqualTo(0); - - List firstLineTokens = decode(tokens.getData()).stream().filter(t -> t.line == 0).toList(); - - // one decorator: merged '&КастомнаяАннотация' - assertThat(firstLineTokens.stream().filter(t -> t.type == decoratorIdx).count()).isEqualTo(1); - - // operators for '(' ')' and '=' - assertThat(firstLineTokens.stream().filter(t -> t.type == operatorIdx).count()).isGreaterThanOrEqualTo(3); - - // parameter identifier 'Значение' - assertThat(firstLineTokens.stream().filter(t -> t.type == paramIdx).count()).isGreaterThanOrEqualTo(1); - - // string literal - assertThat(firstLineTokens.stream().filter(t -> t.type == stringIdx).count()).isGreaterThanOrEqualTo(1); + assertContainsTokens(decoded, expectedLine0); } @Test - void useDirective_isNamespace() { - // given: several #Использовать directives - String bsl = String.join("\n", - "#Использовать А", - "#Использовать Б", - "#Использовать В" + void annotationWithNamedParam() { + String bsl = """ + &КастомнаяАннотация(Значение = "Параметр") + Процедура Тест() + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expectedLine0 = List.of( + new ExpectedToken(0, 0, 19, SemanticTokenTypes.Decorator, "&КастомнаяАннотация"), + new ExpectedToken(0, 19, 1, SemanticTokenTypes.Operator, "("), + new ExpectedToken(0, 20, 8, SemanticTokenTypes.Parameter, "Значение"), + new ExpectedToken(0, 29, 1, SemanticTokenTypes.Operator, "="), + new ExpectedToken(0, 31, 10, SemanticTokenTypes.String, "\"Параметр\""), + new ExpectedToken(0, 41, 1, SemanticTokenTypes.Operator, ")") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = 
TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); - - int namespaceIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Namespace); - assertThat(namespaceIdx).isGreaterThanOrEqualTo(0); - - long nsCount = countOfType(tokens.getData(), namespaceIdx); - - // then: each use line produces one Namespace token - assertThat(nsCount).isEqualTo(3); + assertContainsTokens(decoded, expectedLine0); } @Test - void datetimeAndUndefinedTrueFalse_areHighlighted() { - // given: date literal and undefined/boolean literals - String bsl = String.join("\n", - "Процедура T()", - " Дата = '20010101';", - " X = Неопределено;", - " Если Истина Тогда", - " КонецЕсли;", - " Если Ложь Тогда", - " КонецЕсли;", - "КонецПроцедуры" + void useDirective() { + String bsl = """ + #Использовать А + #Использовать Б + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + new ExpectedToken(0, 0, 13, SemanticTokenTypes.Namespace, "#Использовать"), + new ExpectedToken(0, 14, 1, SemanticTokenTypes.Variable, "А"), + new ExpectedToken(1, 0, 13, SemanticTokenTypes.Namespace, "#Использовать"), + new ExpectedToken(1, 14, 1, SemanticTokenTypes.Variable, "Б") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); - - int stringIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.String); - int keywordIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Keyword); - assertThat(stringIdx).isGreaterThanOrEqualTo(0); - assertThat(keywordIdx).isGreaterThanOrEqualTo(0); - - long strings = countOfType(tokens.getData(), stringIdx); - long keywords = countOfType(tokens.getData(), keywordIdx); - - // then: at least one string (for DATETIME) and at least three keywords for undefined/true/false - assertThat(strings).isGreaterThanOrEqualTo(1); - - long expectedSpecialLiteralCount = documentContext.getTokensFromDefaultChannel().stream() - .map(Token::getType) - .filter(t -> t == BSLLexer.UNDEFINED || t == BSLLexer.TRUE || t == BSLLexer.FALSE) - .count(); - - assertThat(keywords).isGreaterThanOrEqualTo(expectedSpecialLiteralCount); + assertContainsTokens(decoded, expected); } @Test - void methodDescriptionComments_areMarkedWithDocumentationModifier() { - // given: leading description comments above a method and a non-doc comment in body - String bsl = String.join("\n", - "// Описание процедуры", - "// Параметры: Парам - Число", - "Процедура ДокТест(Парам)", - " // обычный комментарий", - "КонецПроцедуры" + void regionDirective() { + String bsl = """ + #Область МояСекция + Процедура Тест() + КонецПроцедуры + #КонецОбласти + """; + + var decoded = getDecodedTokens(bsl); + + // Verify region tokens + var expectedTokens = List.of( + new ExpectedToken(0, 0, 8, SemanticTokenTypes.Namespace, "#Область"), + new ExpectedToken(0, 9, 9, SemanticTokenTypes.Variable, "МояСекция"), + new ExpectedToken(3, 0, 13, SemanticTokenTypes.Namespace, "#КонецОбласти") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = 
TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); - - int commentIdx = legend.getTokenTypes().indexOf("comment"); - int docModIdx = legend.getTokenModifiers().indexOf("documentation"); - assertThat(commentIdx).isGreaterThanOrEqualTo(0); - assertThat(docModIdx).isGreaterThanOrEqualTo(0); - int docMask = 1 << docModIdx; - - List decoded = decode(tokens.getData()); - // comments on lines 0 and 1 must have documentation modifier; line 3 comment must not - var line0 = decoded.stream().filter(t -> t.line == 0 && t.type == commentIdx).toList(); - var line1 = decoded.stream().filter(t -> t.line == 1 && t.type == commentIdx).toList(); - var line3 = decoded.stream().filter(t -> t.line == 3 && t.type == commentIdx).toList(); - - assertThat(line0).isNotEmpty(); - assertThat(line1).isNotEmpty(); - assertThat(line3).isNotEmpty(); - - assertThat(line0.stream().allMatch(t -> (t.modifiers & docMask) != 0)).isTrue(); - assertThat(line1.stream().allMatch(t -> (t.modifiers & docMask) != 0)).isTrue(); - assertThat(line3.stream().allMatch(t -> (t.modifiers & docMask) == 0)).isTrue(); + assertContainsTokens(decoded, expectedTokens); } @Test - void variableDescriptionLeadingAndTrailing_areMarkedWithDocumentationModifier() { - // given: leading description and trailing description for a variable - String bsl = String.join("\n", - "// Описание переменной", - "Перем Перем1; // трейл" + void preprocessorDirectives() { + String bsl = """ + #Если Сервер Тогда + Процедура Пусто() + КонецПроцедуры + #ИначеЕсли Клиент Тогда + #Иначе + #КонецЕсли + """; + + var decoded = getDecodedTokens(bsl); + + // Verify preprocessor macro tokens on specific lines + var expectedTokens = List.of( + new ExpectedToken(0, 0, 1, SemanticTokenTypes.Macro, "#"), + new ExpectedToken(0, 1, 4, SemanticTokenTypes.Macro, "Если"), + new ExpectedToken(0, 6, 6, SemanticTokenTypes.Macro, "Сервер"), + new ExpectedToken(0, 13, 5, SemanticTokenTypes.Macro, "Тогда"), + new ExpectedToken(3, 0, 1, SemanticTokenTypes.Macro, "#"), + new ExpectedToken(3, 1, 9, SemanticTokenTypes.Macro, "ИначеЕсли"), + new ExpectedToken(4, 0, 1, SemanticTokenTypes.Macro, "#"), + new ExpectedToken(4, 1, 5, SemanticTokenTypes.Macro, "Иначе"), + new ExpectedToken(5, 0, 1, SemanticTokenTypes.Macro, "#"), + new ExpectedToken(5, 1, 9, SemanticTokenTypes.Macro, "КонецЕсли") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); + assertContainsTokens(decoded, expectedTokens); + } - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + @Test + void literals() { + String bsl = """ + Процедура Тест() + Дата = '20010101'; + X = Неопределено; + Y = Истина; + Z = Ложь; + N = 123; + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expectedTokens = List.of( + new ExpectedToken(1, 9, 10, SemanticTokenTypes.String, "'20010101'"), + new ExpectedToken(2, 6, 12, SemanticTokenTypes.Keyword, "Неопределено"), + new ExpectedToken(3, 6, 6, SemanticTokenTypes.Keyword, "Истина"), + new ExpectedToken(4, 6, 4, SemanticTokenTypes.Keyword, "Ложь"), + new ExpectedToken(5, 6, 3, SemanticTokenTypes.Number, "123") + ); - int commentIdx = legend.getTokenTypes().indexOf("comment"); - int 
docModIdx = legend.getTokenModifiers().indexOf("documentation"); - assertThat(commentIdx).isGreaterThanOrEqualTo(0); - assertThat(docModIdx).isGreaterThanOrEqualTo(0); - int docMask = 1 << docModIdx; + assertContainsTokens(decoded, expectedTokens); + } - List decoded = decode(tokens.getData()); + @Test + void methodDescriptionComments() { + String bsl = """ + // Описание процедуры + // Параметры: Парам - Число + Процедура ДокТест(Парам) + // обычный комментарий + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + // Documentation comments on lines 0-1 should have Documentation modifier + // Body comment on line 3 should NOT have Documentation modifier + var expected = List.of( + new ExpectedToken(0, 0, 21, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// Описание процедуры"), + new ExpectedToken(1, 0, 27, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// Параметры: Парам - Число"), + new ExpectedToken(3, 2, 22, SemanticTokenTypes.Comment, "// обычный комментарий") + ); - // We expect two comment tokens: line 0 (leading) and line 1 (trailing). Both should have documentation modifier. - var line0 = decoded.stream().filter(t -> t.line == 0 && t.type == commentIdx).toList(); - var line1 = decoded.stream().filter(t -> t.line == 1 && t.type == commentIdx).toList(); + assertContainsTokens(decoded, expected); + } - assertThat(line0).isNotEmpty(); - assertThat(line1).isNotEmpty(); + @Test + void variableDescriptionComments() { + String bsl = """ + // Описание переменной + Перем Перем1; // трейл + """; + + var decoded = getDecodedTokens(bsl); + + // Both leading (line 0) and trailing (line 1) comments should have documentation modifier + var expected = List.of( + new ExpectedToken(0, 0, 22, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// Описание переменной"), + new ExpectedToken(1, 14, 8, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// трейл") + ); - assertThat(line0.stream().allMatch(t -> (t.modifiers & docMask) != 0)).isTrue(); - assertThat(line1.stream().allMatch(t -> (t.modifiers & docMask) != 0)).isTrue(); + assertContainsTokens(decoded, expected); } @Test - void multilineDocumentation_isMergedIntoSingleToken_whenClientSupportsIt() { - // given: two-line documentation followed by a method and a body comment + void multilineDocumentation_mergedWhenSupported() { provider.setMultilineTokenSupport(true); - String bsl = String.join("\n", - "// Первая строка описания", - "// Вторая строка описания", - "Процедура ДокТест()", - " // не документация", - "КонецПроцедуры" + String bsl = """ + // Первая строка описания + // Вторая строка описания + Процедура ДокТест() + // не документация + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + // When multiline support is enabled, documentation comments should be merged into one token + // The merged token starts on line 0 and spans across lines + // Here "// Первая строка описания" and "// Вторая строка описания" form one merged token of total length 51.
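+ // (each of the two lines is 25 characters, so the remaining character of the 51 is presumably the line break between them)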
+ // Body comment on line 3 should NOT have Documentation modifier + var expected = List.of( + // Merged documentation comment (starts at line 0, spans both documentation lines, total length 51) + new ExpectedToken(0, 0, 51, SemanticTokenTypes.Comment, SemanticTokenModifiers.Documentation, "// Первая+Вторая строка описания"), + // Body comment without documentation modifier + new ExpectedToken(3, 2, 18, SemanticTokenTypes.Comment, "// не документация") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); - - int commentIdx = legend.getTokenTypes().indexOf("comment"); - int docModIdx = legend.getTokenModifiers().indexOf("documentation"); - assertThat(commentIdx).isGreaterThanOrEqualTo(0); - assertThat(docModIdx).isGreaterThanOrEqualTo(0); - int docMask = 1 << docModIdx; + assertContainsTokens(decoded, expected); + } - List decoded = decode(tokens.getData()); + @Test + void variableDefinition_hasDefinitionModifier() { + String bsl = """ + Перем Перем1; + """; - // then: exactly one documentation comment token exists (merged), starting on line 0 - var docTokens = decoded.stream().filter(t -> t.type == commentIdx && (t.modifiers & docMask) != 0).toList(); - assertThat(docTokens).hasSize(1); - assertThat(docTokens.get(0).line).isZero(); + var decoded = getDecodedTokens(bsl); - // and there is no comment token on line 1 (second doc line) - var commentsLine1 = decoded.stream().filter(t -> t.line == 1 && t.type == commentIdx).toList(); - assertThat(commentsLine1).isEmpty(); + var expected = List.of( + new ExpectedToken(0, 0, 5, SemanticTokenTypes.Keyword, "Перем"), + new ExpectedToken(0, 6, 6, SemanticTokenTypes.Variable, SemanticTokenModifiers.Definition, "Перем1"), + new ExpectedToken(0, 12, 1, SemanticTokenTypes.Operator, ";") + ); - // and a regular body comment exists on line 3 without the documentation modifier - var bodyComments = decoded.stream().filter(t -> t.line == 3 && t.type == commentIdx).toList(); - assertThat(bodyComments).isNotEmpty(); - assertThat(bodyComments.stream().allMatch(t -> (t.modifiers & docMask) == 0)).isTrue(); + assertContainsTokens(decoded, expected); } @Test - void regionName_isHighlightedAsVariable() { - // given: region with a name and its end - String bsl = String.join("\n", - "#Область МояСекция", - "Процедура Тест()\nКонецПроцедуры", - "#КонецОбласти" + void parameterAndVariableTokenTypes() { + String bsl = """ + Процедура Тест(Парам1, Парам2) + Перем ЛокальнаяПеременная; + НеявнаяПеременная = 1; + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expectedTokens = List.of( + // Parameters in signature + new ExpectedToken(0, 15, 6, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Definition, "Парам1"), + new ExpectedToken(0, 23, 6, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Definition, "Парам2"), + // Explicit variable declaration + new ExpectedToken(1, 8, 19, SemanticTokenTypes.Variable, SemanticTokenModifiers.Definition, "ЛокальнаяПеременная"), + // Implicit variable + new ExpectedToken(2, 2, 17, SemanticTokenTypes.Variable, SemanticTokenModifiers.Definition, "НеявнаяПеременная") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); -
TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + assertContainsTokens(decoded, expectedTokens); + } - int nsIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Namespace); - int varIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Variable); - assertThat(nsIdx).isGreaterThanOrEqualTo(0); - assertThat(varIdx).isGreaterThanOrEqualTo(0); + @Test + void sameFileMethodCall() { + String bsl = """ + Процедура CallMe() + КонецПроцедуры - List decoded = decode(tokens.getData()); + Процедура Бар() + CallMe(); + КонецПроцедуры + """; - // then: one Namespace token for region start and one for region end, and one Variable on line 0 for the name - long nsOnLine0 = decoded.stream().filter(t -> t.line == 0 && t.type == nsIdx).count(); - long nsOnLastLine = decoded.stream().filter(t -> t.line == 3 && t.type == nsIdx).count(); - long varsOnLine0 = decoded.stream().filter(t -> t.line == 0 && t.type == varIdx).count(); + var decoded = getDecodedTokens(bsl); - assertThat(nsOnLine0).isEqualTo(1); - assertThat(nsOnLastLine).isEqualTo(1); - assertThat(varsOnLine0).isEqualTo(1); + // Method call on line 4 + var methodCallToken = new ExpectedToken(4, 2, 6, SemanticTokenTypes.Method, "CallMe"); + assertContainsTokens(decoded, List.of(methodCallToken)); } @Test - void variableDefinition_hasDefinitionModifier() { - // given: module-level variable declaration - String bsl = String.join("\n", - "Перем Перем1;", - "Процедура T()", - " // тело", - "КонецПроцедуры" + void parameterAndVariableUsages() { + var documentContext = TestUtils.getDocumentContextFromFile( + "./src/test/resources/providers/SemanticTokensProviderParameterTest.bsl" ); - - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - // when + TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + var decoded = decode(tokens.getData()); + + var expected = List.of( + // Parameters in signature (line 0) + new ExpectedToken(0, 15, 6, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Definition, "Парам1"), + new ExpectedToken(0, 23, 6, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Definition, "Парам2"), + // Local variable declaration (line 1) + new ExpectedToken(1, 8, 19, SemanticTokenTypes.Variable, SemanticTokenModifiers.Definition, "ЛокальнаяПеременная"), + // Variable usage on line 4 (without definition modifier) + new ExpectedToken(4, 11, 19, SemanticTokenTypes.Variable, "ЛокальнаяПеременная"), + // Parameter usage on line 3 (without definition modifier) + new ExpectedToken(3, 24, 6, SemanticTokenTypes.Parameter, "Парам1") + ); - int varIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Variable); - int defModIdx = legend.getTokenModifiers().indexOf("definition"); - assertThat(varIdx).isGreaterThanOrEqualTo(0); - assertThat(defModIdx).isGreaterThanOrEqualTo(0); - int defMask = 1 << defModIdx; + assertContainsTokens(decoded, expected); + } - // then: at least one Variable token has the definition modifier (for Перем1) - List decoded = decode(tokens.getData()); - long defs = 
decoded.stream() - .filter(t -> t.type == varIdx) - .filter(t -> (t.modifiers & defMask) != 0) - .count(); + // endregion - assertThat(defs).isGreaterThanOrEqualTo(1); - } + // region SDBL tokens tests @Test - void sameFileMethodCall_isHighlightedAsMethodTokenAtCallSite() { - // given: a method and a call to another method in the same file - String bsl = String.join("\n", - "Процедура CallMe()", - "КонецПроцедуры", - "", - "Процедура Бар()", - " CallMe();", - "КонецПроцедуры" + void sdblQuery_simpleSelect() { + String bsl = """ + Функция Тест() + Запрос = "Выбрать * из Справочник.Контрагенты"; + КонецФункции + """; + + var decoded = getDecodedTokens(bsl); + + // Expected SDBL tokens on line 1 + var expectedTokens = List.of( + // "Выбрать" keyword at position 12 (after ` Запрос = "`) + new ExpectedToken(1, 12, 7, SemanticTokenTypes.Keyword, "Выбрать"), + // "*" operator + new ExpectedToken(1, 20, 1, SemanticTokenTypes.Operator, "*"), + // "из" keyword + new ExpectedToken(1, 22, 2, SemanticTokenTypes.Keyword, "из"), + // "Справочник" metadata namespace + new ExpectedToken(1, 25, 10, SemanticTokenTypes.Namespace, "Справочник"), + // "." operator + new ExpectedToken(1, 35, 1, SemanticTokenTypes.Operator, "."), + // "Контрагенты" metadata class + new ExpectedToken(1, 36, 11, SemanticTokenTypes.Class, "Контрагенты") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - - // compute selection range for 'CallMe' on line 4 - int callLine = 4; - - // when - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + assertContainsTokens(decoded, expectedTokens); + } - int methodIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Method); - assertThat(methodIdx).isGreaterThanOrEqualTo(0); + @Test + void sdblQuery_withAggregateFunction() { + String bsl = """ + Функция Тест() + Запрос = "Выбрать СУММА(Сумма) как Итого из Документ.Продажа"; + КонецФункции + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + new ExpectedToken(1, 12, 7, SemanticTokenTypes.Keyword, "Выбрать"), + new ExpectedToken(1, 20, 5, SemanticTokenTypes.Function, SemanticTokenModifiers.DefaultLibrary, "СУММА"), + new ExpectedToken(1, 33, 3, SemanticTokenTypes.Keyword, "как"), + new ExpectedToken(1, 43, 2, SemanticTokenTypes.Keyword, "из"), + new ExpectedToken(1, 46, 8, SemanticTokenTypes.Namespace, "Документ"), + new ExpectedToken(1, 55, 7, SemanticTokenTypes.Class, "Продажа") + ); - // then: there is a Method token on the call line (line 4) - List decoded = decode(tokens.getData()); - long methodsOnCallLine = decoded.stream().filter(t -> t.line == callLine && t.type == methodIdx).count(); - assertThat(methodsOnCallLine).isGreaterThanOrEqualTo(1); + assertContainsTokens(decoded, expected); } @Test - void parameterAndVariableTokenTypes() { - String bsl = String.join("\n", - "Процедура Тест(Парам1, Парам2)", - " Перем ЛокальнаяПеременная;", - " НеявнаяПеременная = 1;", - " ЛокальнаяПеременная2 = 2;", - " Результат = 3;", - " Для ПеременнаяЦикла = 1 По 10 Цикл", - " КонецЦикла;", - "КонецПроцедуры" + void sdblQuery_withParameter() { + String bsl = """ + Функция Тест() + Запрос = "Выбрать * из Справочник.Контрагенты где Код = &Параметр"; + КонецФункции + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + new ExpectedToken(1, 12, 7, 
SemanticTokenTypes.Keyword, "Выбрать"), + new ExpectedToken(1, 20, 1, SemanticTokenTypes.Operator, "*"), + new ExpectedToken(1, 22, 2, SemanticTokenTypes.Keyword, "из"), + new ExpectedToken(1, 25, 10, SemanticTokenTypes.Namespace, "Справочник"), + new ExpectedToken(1, 36, 11, SemanticTokenTypes.Class, "Контрагенты"), + new ExpectedToken(1, 48, 3, SemanticTokenTypes.Keyword, "где"), + // &Параметр as single Parameter token (& at 58, Параметр is 8 chars, total length 9) + new ExpectedToken(1, 58, 9, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Readonly, "&Параметр") ); - DocumentContext documentContext = TestUtils.getDocumentContext(bsl); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); + assertContainsTokens(decoded, expected); + } - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + @Test + void sdblQuery_multiline() { + String bsl = """ + Функция Тест() + Запрос = " + |Выбрать + | СУММА(Сумма) как Итого + |из + | Справочник.Контрагенты"; + КонецФункции + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + new ExpectedToken(2, 3, 7, SemanticTokenTypes.Keyword, "Выбрать"), + new ExpectedToken(3, 5, 5, SemanticTokenTypes.Function, SemanticTokenModifiers.DefaultLibrary, "СУММА"), + new ExpectedToken(3, 18, 3, SemanticTokenTypes.Keyword, "как"), + new ExpectedToken(4, 3, 2, SemanticTokenTypes.Keyword, "из"), + new ExpectedToken(5, 5, 10, SemanticTokenTypes.Namespace, "Справочник"), + new ExpectedToken(5, 16, 11, SemanticTokenTypes.Class, "Контрагенты") + ); - int paramIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Parameter); - int varIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Variable); - assertThat(paramIdx).isGreaterThanOrEqualTo(0); - assertThat(varIdx).isGreaterThanOrEqualTo(0); + assertContainsTokens(decoded, expected); + } - List decoded = decode(tokens.getData()); + @Test + void sdblQuery_virtualTableMethod() { + String bsl = """ + Процедура Тест() + Текст = "ВЫБРАТЬ * ИЗ РегистрСведений.КурсыВалют.СрезПоследних(&Период)"; + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expectedTokens = List.of( + // Metadata namespace + new ExpectedToken(1, 24, 15, SemanticTokenTypes.Namespace, "РегистрСведений"), + // Metadata class + new ExpectedToken(1, 40, 10, SemanticTokenTypes.Class, "КурсыВалют"), + // Virtual table method + new ExpectedToken(1, 51, 13, SemanticTokenTypes.Method, "СрезПоследних") + ); - long paramsInSignature = decoded.stream() - .filter(t -> t.line == 0 && t.type == paramIdx) - .count(); - assertThat(paramsInSignature).as("Parameters in signature").isEqualTo(2); + assertContainsTokens(decoded, expectedTokens); + } - long localVarDeclaration = decoded.stream() - .filter(t -> t.line == 1 && t.type == varIdx) - .count(); - assertThat(localVarDeclaration).as("Explicit variable declaration").isEqualTo(1); + @Test + void sdblQuery_temporaryTable() { + String bsl = """ + Процедура Тест() + Запрос = " + |ВЫБРАТЬ Поле ПОМЕСТИТЬ ВТ_Таблица; + |ВЫБРАТЬ Поле ИЗ ВТ_Таблица"; + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + // First query - line 2, positions based on actual parsing + new ExpectedToken(2, 3, 7, SemanticTokenTypes.Keyword, "ВЫБРАТЬ"), + new ExpectedToken(2, 16, 9, SemanticTokenTypes.Keyword, "ПОМЕСТИТЬ"), + new ExpectedToken(2, 26, 10, SemanticTokenTypes.Variable, 
SemanticTokenModifiers.Declaration, "ВТ_Таблица"), + // Second query - line 3 + new ExpectedToken(3, 3, 7, SemanticTokenTypes.Keyword, "ВЫБРАТЬ"), + new ExpectedToken(3, 16, 2, SemanticTokenTypes.Keyword, "ИЗ"), + new ExpectedToken(3, 19, 10, SemanticTokenTypes.Variable, "ВТ_Таблица") + ); - long implicitVarDeclaration1 = decoded.stream() - .filter(t -> t.line == 2 && t.type == varIdx) - .count(); - assertThat(implicitVarDeclaration1).as("First implicit variable declaration").isEqualTo(1); + assertContainsTokens(decoded, expected); + } - long implicitVarDeclaration2 = decoded.stream() - .filter(t -> t.line == 3 && t.type == varIdx) - .count(); - assertThat(implicitVarDeclaration2).as("Second implicit variable declaration").isEqualTo(1); + @Test + void sdblQuery_complexQueryWithJoin() { + // Complex query with temporary table, join, and field references + String bsl = """ + Процедура Тест() + Запрос = " + |ВЫБРАТЬ + | Курсы.Валюта КАК Валюта, + | Курсы.Курс КАК Курс, + | Курсы.Период КАК Период + |ПОМЕСТИТЬ ВТ_Курсы + |ИЗ РегистрСведений.КурсыВалют.СрезПоследних(&Период) КАК Курсы + |ИНДЕКСИРОВАТЬ ПО Валюта, Период; + | + |ВЫБРАТЬ + | ВТ.Валюта КАК Валюта, + | ВТ.Курс КАК Курс, + | СпрВалюта.Код КАК КодВалюты + |ИЗ ВТ_Курсы КАК ВТ + |ЛЕВОЕ СОЕДИНЕНИЕ Справочник.Валюты КАК СпрВалюта + |ПО ВТ.Валюта = СпрВалюта.Ссылка"; + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + // First query - line 2: ВЫБРАТЬ + new ExpectedToken(2, 3, 7, SemanticTokenTypes.Keyword, "ВЫБРАТЬ"), + // Line 3: Курсы.Валюта КАК Валюта + new ExpectedToken(3, 7, 5, SemanticTokenTypes.Variable, "Курсы"), + new ExpectedToken(3, 13, 6, SemanticTokenTypes.Property, "Валюта"), + new ExpectedToken(3, 20, 3, SemanticTokenTypes.Keyword, "КАК"), + new ExpectedToken(3, 24, 6, SemanticTokenTypes.Variable, SemanticTokenModifiers.Declaration, "Валюта"), + // Line 6: ПОМЕСТИТЬ ВТ_Курсы + new ExpectedToken(6, 3, 9, SemanticTokenTypes.Keyword, "ПОМЕСТИТЬ"), + new ExpectedToken(6, 13, 8, SemanticTokenTypes.Variable, SemanticTokenModifiers.Declaration, "ВТ_Курсы"), + // Line 7: ИЗ РегистрСведений.КурсыВалют.СрезПоследних(&Период) КАК Курсы + new ExpectedToken(7, 3, 2, SemanticTokenTypes.Keyword, "ИЗ"), + new ExpectedToken(7, 6, 15, SemanticTokenTypes.Namespace, "РегистрСведений"), + new ExpectedToken(7, 22, 10, SemanticTokenTypes.Class, "КурсыВалют"), + new ExpectedToken(7, 33, 13, SemanticTokenTypes.Method, "СрезПоследних"), + new ExpectedToken(7, 47, 7, SemanticTokenTypes.Parameter, SemanticTokenModifiers.Readonly, "&Период"), + new ExpectedToken(7, 56, 3, SemanticTokenTypes.Keyword, "КАК"), + new ExpectedToken(7, 60, 5, SemanticTokenTypes.Variable, SemanticTokenModifiers.Declaration, "Курсы"), + // Line 8: ИНДЕКСИРОВАТЬ ПО Валюта, Период + new ExpectedToken(8, 3, 13, SemanticTokenTypes.Keyword, "ИНДЕКСИРОВАТЬ"), + new ExpectedToken(8, 17, 2, SemanticTokenTypes.Keyword, "ПО"), + // Second query - line 10: ВЫБРАТЬ + new ExpectedToken(10, 3, 7, SemanticTokenTypes.Keyword, "ВЫБРАТЬ"), + // Line 14: ИЗ ВТ_Курсы КАК ВТ + new ExpectedToken(14, 3, 2, SemanticTokenTypes.Keyword, "ИЗ"), + new ExpectedToken(14, 6, 8, SemanticTokenTypes.Variable, "ВТ_Курсы"), + new ExpectedToken(14, 15, 3, SemanticTokenTypes.Keyword, "КАК"), + new ExpectedToken(14, 19, 2, SemanticTokenTypes.Variable, SemanticTokenModifiers.Declaration, "ВТ"), + // Line 15: ЛЕВОЕ СОЕДИНЕНИЕ Справочник.Валюты КАК СпрВалюта + new ExpectedToken(15, 3, 5, SemanticTokenTypes.Keyword, "ЛЕВОЕ"), + new ExpectedToken(15, 9, 10, 
SemanticTokenTypes.Keyword, "СОЕДИНЕНИЕ"), + new ExpectedToken(15, 20, 10, SemanticTokenTypes.Namespace, "Справочник"), + new ExpectedToken(15, 31, 6, SemanticTokenTypes.Class, "Валюты"), + new ExpectedToken(15, 38, 3, SemanticTokenTypes.Keyword, "КАК"), + new ExpectedToken(15, 42, 9, SemanticTokenTypes.Variable, SemanticTokenModifiers.Declaration, "СпрВалюта"), + // Line 16: ПО ВТ.Валюта = СпрВалюта.Ссылка + new ExpectedToken(16, 3, 2, SemanticTokenTypes.Keyword, "ПО"), + new ExpectedToken(16, 6, 2, SemanticTokenTypes.Variable, "ВТ"), + new ExpectedToken(16, 9, 6, SemanticTokenTypes.Property, "Валюта"), + new ExpectedToken(16, 18, 9, SemanticTokenTypes.Variable, "СпрВалюта"), + new ExpectedToken(16, 28, 6, SemanticTokenTypes.Property, "Ссылка") + ); - long implicitVarDeclaration3 = decoded.stream() - .filter(t -> t.line == 4 && t.type == varIdx) - .count(); - assertThat(implicitVarDeclaration3).as("Third implicit variable declaration").isEqualTo(1); + assertContainsTokens(decoded, expected); + } - long forLoopVar = decoded.stream() - .filter(t -> t.line == 5 && t.type == varIdx) - .count(); - assertThat(forLoopVar).as("For loop variable").isEqualTo(1); + @Test + void sdblQuery_noTokenOverlaps() { + String bsl = """ + Функция Тест() + Запрос = "Выбрать * из Справочник.Контрагенты"; + КонецФункции + """; + + var decoded = getDecodedTokens(bsl); + + // Sort tokens by position + var sortedTokens = decoded.stream() + .filter(t -> t.line == 1) + .sorted((a, b) -> Integer.compare(a.start, b.start)) + .toList(); + + // Verify no overlaps + for (int i = 0; i < sortedTokens.size() - 1; i++) { + var current = sortedTokens.get(i); + var next = sortedTokens.get(i + 1); + int currentEnd = current.start + current.length; + + assertThat(currentEnd) + .as("Token at [%d, %d) should not overlap with next token at [%d, %d)", + current.start, currentEnd, next.start, next.start + next.length) + .isLessThanOrEqualTo(next.start); + } + } - long allParams = decoded.stream() - .filter(t -> t.type == paramIdx) - .count(); - assertThat(allParams).as("Total parameters").isEqualTo(2); + @Test + void sdblQuery_valueFunctionWithPredefinedElement() { + // Test: Значение(Справочник.Валюты.Рубль) + // Справочник → Namespace, Валюты → Class, Рубль → EnumMember + String bsl = """ + Процедура Тест() + Запрос = "ВЫБРАТЬ * ГДЕ Валюта = Значение(Справочник.Валюты.Рубль)"; + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + // Справочник → Namespace (metadata type) at position 44 + new ExpectedToken(1, 44, 10, SemanticTokenTypes.Namespace, "Справочник"), + // Валюты → Class (metadata object) at position 55 + new ExpectedToken(1, 55, 6, SemanticTokenTypes.Class, "Валюты"), + // Рубль → EnumMember (predefined element) at position 62 + new ExpectedToken(1, 62, 5, SemanticTokenTypes.EnumMember, "Рубль") + ); - long allVars = decoded.stream() - .filter(t -> t.type == varIdx) - .count(); - assertThat(allVars).as("Total variables").isEqualTo(5); + assertContainsTokens(decoded, expected); } @Test - void parameterAndVariableUsages() { - var documentContext = TestUtils.getDocumentContextFromFile( - "./src/test/resources/providers/SemanticTokensProviderParameterTest.bsl" + void sdblQuery_valueFunctionWithEmptyRef() { + // Test: Значение(Справочник.Валюты.ПустаяСсылка) + // Справочник → Namespace, Валюты → Class, ПустаяСсылка → EnumMember + String bsl = """ + Процедура Тест() + Запрос = "ВЫБРАТЬ * ГДЕ Валюта = Значение(Справочник.Валюты.ПустаяСсылка)"; + КонецПроцедуры + """; + + var decoded = 
getDecodedTokens(bsl); + + var expected = List.of( + // Справочник → Namespace at position 44 + new ExpectedToken(1, 44, 10, SemanticTokenTypes.Namespace, "Справочник"), + // Валюты → Class at position 55 + new ExpectedToken(1, 55, 6, SemanticTokenTypes.Class, "Валюты"), + // ПустаяСсылка → EnumMember at position 62 + new ExpectedToken(1, 62, 12, SemanticTokenTypes.EnumMember, "ПустаяСсылка") ); - referenceIndexFiller.fill(documentContext); - TextDocumentIdentifier textDocumentIdentifier = TestUtils.getTextDocumentIdentifier(documentContext.getUri()); - SemanticTokens tokens = provider.getSemanticTokensFull(documentContext, new SemanticTokensParams(textDocumentIdentifier)); + assertContainsTokens(decoded, expected); + } - int paramIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Parameter); - int varIdx = legend.getTokenTypes().indexOf(SemanticTokenTypes.Variable); - assertThat(paramIdx).isGreaterThanOrEqualTo(0); - assertThat(varIdx).isGreaterThanOrEqualTo(0); - - List decoded = decode(tokens.getData()); - - long paramsLine0 = decoded.stream() - .filter(t -> t.line == 0 && t.type == paramIdx) - .count(); - assertThat(paramsLine0).as("Parameters in signature (line 0)").isEqualTo(2); - - long varsLine1 = decoded.stream() - .filter(t -> t.line == 1 && t.type == varIdx) - .count(); - assertThat(varsLine1).as("Local variable declaration (line 1)").isEqualTo(1); - - long varsLine3 = decoded.stream() - .filter(t -> t.line == 3 && t.type == varIdx) - .count(); - assertThat(varsLine3).as("Variable usage on left side (line 3)").isEqualTo(1); - - long paramsLine3 = decoded.stream() - .filter(t -> t.line == 3 && t.type == paramIdx) - .count(); - assertThat(paramsLine3).as("Parameter usage on right side (line 3)").isEqualTo(1); - - long varsLine4 = decoded.stream() - .filter(t -> t.line == 4 && t.type == varIdx) - .count(); - assertThat(varsLine4).as("Variable usage (line 4)").isEqualTo(1); - - long paramsLine4 = decoded.stream() - .filter(t -> t.line == 4 && t.type == paramIdx) - .count(); - assertThat(paramsLine4).as("Parameter usages (line 4)").isEqualTo(2); - - long paramsLine6 = decoded.stream() - .filter(t -> t.line == 6 && t.type == paramIdx) - .count(); - assertThat(paramsLine6).as("Parameter in condition (line 6)").isEqualTo(1); - - long paramsLine7 = decoded.stream() - .filter(t -> t.line == 7 && t.type == paramIdx) - .count(); - assertThat(paramsLine7).as("Parameter in Сообщить (line 7)").isEqualTo(1); - - long varsLine8 = decoded.stream() - .filter(t -> t.line == 8 && t.type == varIdx) - .count(); - assertThat(varsLine8).as("Variable assignment (line 8)").isEqualTo(1); - - long paramsLine8 = decoded.stream() - .filter(t -> t.line == 8 && t.type == paramIdx) - .count(); - assertThat(paramsLine8).as("Parameters in expression (line 8)").isEqualTo(2); - - long varsLine11 = decoded.stream() - .filter(t -> t.line == 11 && t.type == varIdx) - .count(); - assertThat(varsLine11).as("For loop variable (line 11)").isEqualTo(1); - - long paramsLine11 = decoded.stream() - .filter(t -> t.line == 11 && t.type == paramIdx) - .count(); - assertThat(paramsLine11).as("Parameter in loop bound (line 11)").isEqualTo(1); - - long varsLine12 = decoded.stream() - .filter(t -> t.line == 12 && t.type == varIdx) - .count(); - assertThat(varsLine12).as("Loop variable usage (line 12)").isEqualTo(1); - - long totalParams = decoded.stream() - .filter(t -> t.type == paramIdx) - .count(); - assertThat(totalParams).as("Total parameter tokens").isGreaterThanOrEqualTo(10); - - long totalVars = decoded.stream() - 
.filter(t -> t.type == varIdx) - .count(); - assertThat(totalVars).as("Total variable tokens").isGreaterThanOrEqualTo(6); - } - - // helpers - private record DecodedToken(int line, int start, int length, int type, int modifiers) {} + @Test + void sdblQuery_valueFunctionWithEnum() { + // Test: Значение(Перечисление.Пол.Мужской) + // Перечисление → Namespace, Пол → Enum, Мужской → EnumMember + String bsl = """ + Процедура Тест() + Запрос = "ВЫБРАТЬ * ГДЕ Пол = Значение(Перечисление.Пол.Мужской)"; + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + // Перечисление → Namespace (metadata type) at position 41 + new ExpectedToken(1, 41, 12, SemanticTokenTypes.Namespace, "Перечисление"), + // Пол → Enum (enum object) at position 54 + new ExpectedToken(1, 54, 3, SemanticTokenTypes.Enum, "Пол"), + // Мужской → EnumMember (enum value) at position 58 + new ExpectedToken(1, 58, 7, SemanticTokenTypes.EnumMember, "Мужской") + ); - private List decode(List data) { - List out = new ArrayList<>(); - int line = 0; - int start = 0; - for (int i = 0; i + 4 < data.size(); i += 5) { - int dLine = data.get(i); - int dStart = data.get(i + 1); - int length = data.get(i + 2); - int type = data.get(i + 3); - int mods = data.get(i + 4); - line = line + dLine; - start = (dLine == 0) ? start + dStart : dStart; - out.add(new DecodedToken(line, start, length, type, mods)); - } - return out; + assertContainsTokens(decoded, expected); } - private Set indexesOfTypes(List data) { - // data: [deltaLine, deltaStart, length, tokenType, tokenModifiers] per token - Set res = new HashSet<>(); - for (int i = 0; i + 3 < data.size(); i += 5) { - res.add(data.get(i + 3)); - } - return res; - } + @Test + void sdblQuery_tableWithObjectTableName() { + // Test: Справочник.Пользователи.ГруппыДоступа + // Справочник → Namespace, Пользователи → Class, ГруппыДоступа → Class (table part is a full table) + String bsl = """ + Процедура Тест() + Запрос = "ВЫБРАТЬ * ИЗ Справочник.Пользователи.ГруппыДоступа"; + КонецПроцедуры + """; + + var decoded = getDecodedTokens(bsl); + + var expected = List.of( + // Справочник → Namespace (metadata type) at position 25 + new ExpectedToken(1, 25, 10, SemanticTokenTypes.Namespace, "Справочник"), + // Пользователи → Class (metadata object) at position 36 + new ExpectedToken(1, 36, 12, SemanticTokenTypes.Class, "Пользователи"), + // ГруппыДоступа → Class (table part is a full table) at position 49 + new ExpectedToken(1, 49, 13, SemanticTokenTypes.Class, "ГруппыДоступа") + ); - private long countOfType(List data, int typeIdx) { - long cnt = 0; - for (int i = 0; i + 3 < data.size(); i += 5) { - if (data.get(i + 3) == typeIdx) cnt++; - } - return cnt; + assertContainsTokens(decoded, expected); } - private void assertPresent(Set presentTypes, String tokenType) { - int idx = legend.getTokenTypes().indexOf(tokenType); - assertThat(idx).isGreaterThanOrEqualTo(0); - assertThat(presentTypes).contains(idx); - } + // endregion } + diff --git a/src/test/resources/codelenses/DebugTestCodeLensSupplier.os b/src/test/resources/codelenses/DebugTestCodeLensSupplier.os new file mode 100644 index 00000000000..654be9d9a9f --- /dev/null +++ b/src/test/resources/codelenses/DebugTestCodeLensSupplier.os @@ -0,0 +1,9 @@ +&Тест +Процедура Тест1() Экспорт + А = 0; +КонецПроцедуры + +&Тест +Процедура Тест2() Экспорт + Б = 0; +КонецПроцедуры