diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command
index f25092bc6d42f..13f5ffa6a0d5a 100644
--- a/.buildkite/hooks/pre-command
+++ b/.buildkite/hooks/pre-command
@@ -44,6 +44,9 @@ export GRADLE_BUILD_CACHE_USERNAME
 GRADLE_BUILD_CACHE_PASSWORD=$(vault read -field=password secret/ci/elastic-elasticsearch/migrated/gradle-build-cache)
 export GRADLE_BUILD_CACHE_PASSWORD
 
+DEVELOCITY_ACCESS_KEY="gradle-enterprise.elastic.co=$(vault read -field=accesskey secret/ci/elastic-elasticsearch/migrated/gradle-build-cache)"
+export DEVELOCITY_ACCESS_KEY
+
 BUILDKITE_API_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch/buildkite-api-token)
 export BUILDKITE_API_TOKEN
 
diff --git a/.buildkite/pipelines/dra-workflow.yml b/.buildkite/pipelines/dra-workflow.yml
index 36828a6512db8..73af7e7e9966e 100644
--- a/.buildkite/pipelines/dra-workflow.yml
+++ b/.buildkite/pipelines/dra-workflow.yml
@@ -20,4 +20,5 @@ steps:
       env:
         DRA_WORKFLOW: staging
         USE_PROD_DOCKER_CREDENTIALS: "true"
+        VERSION_QUALIFIER: "${VERSION_QUALIFIER}"
     if: build.env('DRA_WORKFLOW') == 'staging'
diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml
index d1400bdb83da0..75c7a339b8cea 100644
--- a/.buildkite/pipelines/intake.template.yml
+++ b/.buildkite/pipelines/intake.template.yml
@@ -96,7 +96,7 @@ steps:
   - trigger: elasticsearch-dra-workflow
     label: Trigger DRA snapshot workflow
     async: true
-    branches: "main 8.* 7.17"
+    branches: "main 9.* 8.* 7.17"
     build:
       branch: "$BUILDKITE_BRANCH"
       commit: "$BUILDKITE_COMMIT"
diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml
index ea04a0340076d..8767cc3821257 100644
--- a/.buildkite/pipelines/intake.yml
+++ b/.buildkite/pipelines/intake.yml
@@ -56,7 +56,7 @@ steps:
     timeout_in_minutes: 300
     matrix:
       setup:
-        BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"]
+        BWC_VERSION: ["8.16.5", "8.17.3", "8.18.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
@@ -76,7 +76,7 @@ steps:
         ES_VERSION:
           - "9.0.0"
         ES_COMMIT:
-          - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump
+          - "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump / head of combat-lucene-10-0-0
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
@@ -97,7 +97,7 @@ steps:
   - trigger: elasticsearch-dra-workflow
     label: Trigger DRA snapshot workflow
    async: true
-    branches: "main 8.* 7.17"
+    branches: "main 9.* 8.* 7.17"
    build:
      branch: "$BUILDKITE_BRANCH"
      commit: "$BUILDKITE_COMMIT"
diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index aded97712d7a5..56603465ff50c 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -287,8 +287,8 @@ steps:
     env:
       BWC_VERSION: 8.15.5
 
-  - label: "{{matrix.image}} / 8.16.4 / packaging-tests-upgrade"
-    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.4
+  - label: "{{matrix.image}} / 8.16.5 / packaging-tests-upgrade"
+    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.5
     timeout_in_minutes: 300
     matrix:
       setup:
@@ -301,10 +301,10 @@ steps:
       machineType: custom-16-32768
      buildDirectory: /dev/shm/bk
     env:
-      BWC_VERSION: 8.16.4
+      BWC_VERSION: 8.16.5
 
-  - label: "{{matrix.image}} / 8.17.2 / packaging-tests-upgrade"
-    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.2
- label: "{{matrix.image}} / 8.17.3 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.3 timeout_in_minutes: 300 matrix: setup: @@ -317,7 +317,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.17.2 + BWC_VERSION: 8.17.3 - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 3472e7edce0da..7782cc311f580 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -306,8 +306,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.16.4 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.4#bwcTest + - label: 8.16.5 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.5#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -316,7 +316,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.16.4 + BWC_VERSION: 8.16.5 retry: automatic: - exit_status: "-1" @@ -325,8 +325,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.17.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.2#bwcTest + - label: 8.17.3 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.3#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -335,7 +335,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.17.2 + BWC_VERSION: 8.17.3 retry: automatic: - exit_status: "-1" @@ -448,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.5", "8.17.3", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -490,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.5", "8.17.3", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 9f4b86ffc7ada..701dcab74e7ea 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -15,7 +15,7 @@ BWC_VERSION: - "8.13.4" - "8.14.3" - "8.15.5" - - "8.16.4" - - "8.17.2" + - "8.16.5" + - "8.17.3" - "8.18.0" - "9.0.0" diff --git a/.ci/init.gradle b/.ci/init.gradle index 15d63f8ca7d20..04243b433eebd 100644 --- a/.ci/init.gradle +++ b/.ci/init.gradle @@ -1,29 +1,24 @@ +final String buildCacheUrl = System.getProperty('org.elasticsearch.build.cache.url') +final boolean buildCachePush = Boolean.valueOf(System.getProperty('org.elasticsearch.build.cache.push', 'false')) + gradle.settingsEvaluated { settings -> settings.pluginManager.withPlugin("com.gradle.develocity") { settings.develocity { - server = 'https://gradle-enterprise.elastic.co' + server = "https://gradle-enterprise.elastic.co" } - } -} - -final String buildCacheUrl = System.getProperty('org.elasticsearch.build.cache.url') -final boolean buildCachePush = Boolean.valueOf(System.getProperty('org.elasticsearch.build.cache.push', 'false')) - -if (buildCacheUrl) { - gradle.settingsEvaluated { settings -> - settings.buildCache { - local { - // Disable the local build cache in CI since we use ephemeral workers and it incurs an IO penalty - enabled = false - } - remote(HttpBuildCache) { - url = buildCacheUrl - push = buildCachePush - credentials { - username = 
System.getenv("GRADLE_BUILD_CACHE_USERNAME") - password = System.getenv("GRADLE_BUILD_CACHE_PASSWORD") + if (buildCacheUrl) { + settings.buildCache { + local { + // Disable the local build cache in CI since we use ephemeral workers and it incurs an IO penalty + enabled = false + } + remote(settings.develocity.buildCache) { + enabled = true + push = buildCachePush } } } } } + + diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 38104e03edb5f..b04a583f73581 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - - "8.16.4" - - "8.17.2" + - "8.16.5" + - "8.17.3" - "8.18.0" - "9.0.0" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 6a39b58b0957e..1a29b3831bd93 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -49,6 +49,7 @@ distribution/docker/src @elastic/es-delivery # Core/Infra distribution/tools @elastic/es-core-infra libs/core @elastic/es-core-infra +libs/entitlement @elastic/es-core-infra libs/logging @elastic/es-core-infra libs/native @elastic/es-core-infra libs/plugin-analysis-api @elastic/es-core-infra diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index 652defa7b39cd..77c70bc3a10f4 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -155,7 +155,7 @@ private static Operator operator(DriverContext driverContext, String grouping, S if (grouping.equals("none")) { return new AggregationOperator( - List.of(supplier(op, dataType, filter, 0).aggregatorFactory(AggregatorMode.SINGLE).apply(driverContext)), + List.of(supplier(op, dataType, filter).aggregatorFactory(AggregatorMode.SINGLE, List.of(0)).apply(driverContext)), driverContext ); } @@ -182,33 +182,33 @@ private static Operator operator(DriverContext driverContext, String grouping, S default -> throw new IllegalArgumentException("unsupported grouping [" + grouping + "]"); }; return new HashAggregationOperator( - List.of(supplier(op, dataType, filter, groups.size()).groupingAggregatorFactory(AggregatorMode.SINGLE)), + List.of(supplier(op, dataType, filter).groupingAggregatorFactory(AggregatorMode.SINGLE, List.of(groups.size()))), () -> BlockHash.build(groups, driverContext.blockFactory(), 16 * 1024, false), driverContext ); } - private static AggregatorFunctionSupplier supplier(String op, String dataType, String filter, int dataChannel) { + private static AggregatorFunctionSupplier supplier(String op, String dataType, String filter) { return filtered(switch (op) { - case COUNT -> CountAggregatorFunction.supplier(List.of(dataChannel)); + case COUNT -> CountAggregatorFunction.supplier(); case COUNT_DISTINCT -> switch (dataType) { - case LONGS -> new CountDistinctLongAggregatorFunctionSupplier(List.of(dataChannel), 3000); - case DOUBLES -> new CountDistinctDoubleAggregatorFunctionSupplier(List.of(dataChannel), 3000); + case LONGS -> new CountDistinctLongAggregatorFunctionSupplier(3000); + case DOUBLES -> new CountDistinctDoubleAggregatorFunctionSupplier(3000); default -> throw new IllegalArgumentException("unsupported data type [" + dataType + "]"); }; case MAX -> switch (dataType) { - case LONGS -> new MaxLongAggregatorFunctionSupplier(List.of(dataChannel)); - case DOUBLES -> new MaxDoubleAggregatorFunctionSupplier(List.of(dataChannel)); + case 
+                case LONGS -> new MaxLongAggregatorFunctionSupplier();
+                case DOUBLES -> new MaxDoubleAggregatorFunctionSupplier();
                 default -> throw new IllegalArgumentException("unsupported data type [" + dataType + "]");
             };
             case MIN -> switch (dataType) {
-                case LONGS -> new MinLongAggregatorFunctionSupplier(List.of(dataChannel));
-                case DOUBLES -> new MinDoubleAggregatorFunctionSupplier(List.of(dataChannel));
+                case LONGS -> new MinLongAggregatorFunctionSupplier();
+                case DOUBLES -> new MinDoubleAggregatorFunctionSupplier();
                 default -> throw new IllegalArgumentException("unsupported data type [" + dataType + "]");
             };
             case SUM -> switch (dataType) {
-                case LONGS -> new SumLongAggregatorFunctionSupplier(List.of(dataChannel));
-                case DOUBLES -> new SumDoubleAggregatorFunctionSupplier(List.of(dataChannel));
+                case LONGS -> new SumLongAggregatorFunctionSupplier();
+                case DOUBLES -> new SumDoubleAggregatorFunctionSupplier();
                 default -> throw new IllegalArgumentException("unsupported data type [" + dataType + "]");
             };
             default -> throw new IllegalArgumentException("unsupported op [" + op + "]");
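Note: the AggregatorBenchmark changes above track an API change in the compute engine: the input channel list moves out of the AggregatorFunctionSupplier constructors and into the aggregatorFactory/groupingAggregatorFactory calls. A minimal sketch of the new call shape, using only the signatures visible in this diff (driverContext and the operator wiring stand in for the benchmark's setup code):

    // Suppliers are now channel-agnostic...
    AggregatorFunctionSupplier max = new MaxLongAggregatorFunctionSupplier();
    // ...and the input channel (block 0 here) is bound only when the factory is created.
    Operator op = new AggregationOperator(
        List.of(max.aggregatorFactory(AggregatorMode.SINGLE, List.of(0)).apply(driverContext)),
        driverContext
    );

One consequence is that a single supplier instance can be reused to build factories over different input channels.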
WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:bd401704a162a7937cd1015f755ca9da9aba0fdf967fc6bf90bf8d3f6b2eb557", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:d74b1fda6b7fee2c90b410df258e005c049e0672fe16d79d00e58f14fb69f90b", "-wolfi", "apk" ), diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java index ee0eb3f6eb2bf..ce779343dfea9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java @@ -58,7 +58,6 @@ public class ElasticsearchJavaBasePlugin implements Plugin { @Override public void apply(Project project) { - project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); // make sure the global build info plugin is applied to the root project project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index b387f019ad386..b835bae815d07 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -163,6 +163,7 @@ private void addJar(Project project, SourceSet sourceSet, int javaVersion) { project.getConfigurations().register("java" + javaVersion); TaskProvider jarTask = project.getTasks().register("java" + javaVersion + "Jar", Jar.class, task -> { task.from(sourceSet.getOutput()); + task.getArchiveClassifier().set("java" + javaVersion); }); project.getArtifacts().add("java" + javaVersion, jarTask); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java index 7e3e8bd458c92..6d538f207702d 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java @@ -93,7 +93,6 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:qa:smoke-test-security-with-mustache"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:qa:xpack-prefix-rest-compat"); map.put(LegacyRestTestBasePlugin.class, ":modules:ingest-geoip:qa:file-based-update"); - map.put(LegacyRestTestBasePlugin.class, ":plugins:discovery-ec2:qa:amazon-ec2"); map.put(LegacyRestTestBasePlugin.class, ":plugins:discovery-gce:qa:gce"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:qa:multi-cluster-search-security:legacy-with-basic-license"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:qa:multi-cluster-search-security:legacy-with-full-license"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java index 59ba9bae0a57d..0c86a2d030741 100644 --- 
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java
@@ -60,6 +60,7 @@
 import static org.gradle.api.JavaVersion.VERSION_21;
 import static org.gradle.api.JavaVersion.VERSION_22;
 import static org.gradle.api.JavaVersion.VERSION_23;
+import static org.gradle.api.JavaVersion.VERSION_24;
 
 @CacheableTask
 public abstract class ThirdPartyAuditTask extends DefaultTask {
@@ -341,8 +342,12 @@ private String runForbiddenAPIsCli() throws IOException {
             spec.setExecutable(javaHome.get() + "/bin/java");
         }
         spec.classpath(getForbiddenAPIsClasspath(), getThirdPartyClasspath());
-        // Enable explicitly for each release as appropriate. Just JDK 20/21/22/23 for now, and just the vector module.
-        if (isJavaVersion(VERSION_20) || isJavaVersion(VERSION_21) || isJavaVersion(VERSION_22) || isJavaVersion(VERSION_23)) {
+        // Enable explicitly for each release as appropriate. Just JDK 20/21/22/23/24 for now, and just the vector module.
+        if (isJavaVersion(VERSION_20)
+            || isJavaVersion(VERSION_21)
+            || isJavaVersion(VERSION_22)
+            || isJavaVersion(VERSION_23)
+            || isJavaVersion(VERSION_24)) {
             spec.jvmArgs("--add-modules", "jdk.incubator.vector");
         }
         spec.jvmArgs("-Xmx1g");
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java
index a6ead34b11079..ebd316d7f042a 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java
@@ -15,6 +15,7 @@
 import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
 import com.github.javaparser.ast.body.FieldDeclaration;
 import com.github.javaparser.ast.body.VariableDeclarator;
+import com.github.javaparser.ast.expr.Expression;
 import com.github.javaparser.ast.expr.NameExpr;
 import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter;
 import com.google.common.annotations.VisibleForTesting;
@@ -33,6 +34,7 @@
 import java.util.Objects;
 import java.util.Optional;
 import java.util.TreeMap;
+import java.util.function.Function;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
@@ -51,6 +53,8 @@ public class UpdateVersionsTask extends AbstractVersionsTask {
     private boolean setCurrent;
     @Nullable
     private Version removeVersion;
+    @Nullable
+    private String addTransportVersion;
 
     @Inject
     public UpdateVersionsTask(BuildLayout layout) {
@@ -62,6 +66,11 @@ public void addVersion(String version) {
         this.addVersion = Version.fromString(version);
     }
 
+    @Option(option = "add-transport-version", description = "Specifies transport version to add")
+    public void addTransportVersion(String transportVersion) {
+        this.addTransportVersion = transportVersion;
+    }
+
     @Option(option = "set-current", description = "Set the 'current' constant to the new version")
     public void setCurrent(boolean setCurrent) {
         this.setCurrent = setCurrent;
@@ -87,15 +96,18 @@ static Optional parseVersionField(CharSequence field) {
 
     @TaskAction
     public void executeTask() throws IOException {
-        if (addVersion == null && removeVersion == null) {
+        if (addVersion == null && removeVersion == null && addTransportVersion == null) {
             throw new IllegalArgumentException("No versions to add or remove specified");
specified"); } if (setCurrent && addVersion == null) { throw new IllegalArgumentException("No new version added to set as the current version"); } - if (Objects.equals(addVersion, removeVersion)) { + if (addVersion != null && removeVersion != null && Objects.equals(addVersion, removeVersion)) { throw new IllegalArgumentException("Same version specified to add and remove"); } + if (addTransportVersion != null && addTransportVersion.split(":").length != 2) { + throw new IllegalArgumentException("Transport version specified must be in the format ':'"); + } Path versionJava = rootDir.resolve(VERSION_FILE_PATH); CompilationUnit file = LexicalPreservingPrinter.setup(StaticJavaParser.parse(versionJava)); @@ -115,6 +127,18 @@ public void executeTask() throws IOException { modifiedFile = removed; } } + if (addTransportVersion != null) { + var constant = addTransportVersion.split(":")[0]; + var versionId = Integer.parseInt(addTransportVersion.split(":")[1]); + LOGGER.lifecycle("Adding transport version constant [{}] with id [{}]", constant, versionId); + + var transportVersionsFile = rootDir.resolve(TRANSPORT_VERSIONS_FILE_PATH); + var transportVersions = LexicalPreservingPrinter.setup(StaticJavaParser.parse(transportVersionsFile)); + var modified = addTransportVersionConstant(transportVersions, constant, versionId); + if (modified.isPresent()) { + writeOutNewContents(transportVersionsFile, modified.get()); + } + } if (modifiedFile.isPresent()) { writeOutNewContents(versionJava, modifiedFile.get()); @@ -161,6 +185,51 @@ static Optional addVersionConstant(CompilationUnit versionJava, return Optional.of(versionJava); } + @VisibleForTesting + static Optional addTransportVersionConstant(CompilationUnit transportVersions, String constant, int versionId) { + ClassOrInterfaceDeclaration transportVersionsClass = transportVersions.getClassByName("TransportVersions").get(); + if (transportVersionsClass.getFieldByName(constant).isPresent()) { + LOGGER.lifecycle("New transport version constant [{}] already present, skipping", constant); + return Optional.empty(); + } + + TreeMap versions = transportVersionsClass.getFields() + .stream() + .filter(f -> f.getElementType().asString().equals("TransportVersion")) + .filter( + f -> f.getVariables().stream().limit(1).allMatch(v -> v.getInitializer().filter(Expression::isMethodCallExpr).isPresent()) + ) + .filter(f -> f.getVariable(0).getInitializer().get().asMethodCallExpr().getNameAsString().endsWith("def")) + .collect( + Collectors.toMap( + f -> f.getVariable(0) + .getInitializer() + .get() + .asMethodCallExpr() + .getArgument(0) + .asIntegerLiteralExpr() + .asNumber() + .intValue(), + Function.identity(), + (f1, f2) -> { + throw new IllegalStateException("Duplicate version constant " + f1); + }, + TreeMap::new + ) + ); + + // find the version this should be inserted after + Map.Entry previousVersion = versions.lowerEntry(versionId); + if (previousVersion == null) { + throw new IllegalStateException(String.format("Could not find previous version to [%s]", versionId)); + } + + FieldDeclaration newTransportVersion = createNewTransportVersionConstant(previousVersion.getValue(), constant, versionId); + transportVersionsClass.getMembers().addAfter(newTransportVersion, previousVersion.getValue()); + + return Optional.of(transportVersions); + } + private static FieldDeclaration createNewVersionConstant(FieldDeclaration lastVersion, String newName, String newExpr) { return new FieldDeclaration( new NodeList<>(lastVersion.getModifiers()), @@ -172,6 +241,29 @@ private static 
         );
     }
 
+    private static FieldDeclaration createNewTransportVersionConstant(FieldDeclaration lastVersion, String newName, int newId) {
+        return new FieldDeclaration(
+            new NodeList<>(lastVersion.getModifiers()),
+            new VariableDeclarator(
+                lastVersion.getCommonType(),
+                newName,
+                StaticJavaParser.parseExpression(String.format("def(%s)", formatTransportVersionId(newId)))
+            )
+        );
+    }
+
+    private static String formatTransportVersionId(int id) {
+        String idString = Integer.toString(id);
+
+        return new StringBuilder(idString.substring(idString.length() - 2, idString.length())).insert(0, "_")
+            .insert(0, idString.substring(idString.length() - 3, idString.length() - 2))
+            .insert(0, "_")
+            .insert(0, idString.substring(idString.length() - 6, idString.length() - 3))
+            .insert(0, "_")
+            .insert(0, idString.substring(0, idString.length() - 6))
+            .toString();
+    }
+
     @VisibleForTesting
     static Optional<CompilationUnit> removeVersionConstant(CompilationUnit versionJava, Version version) {
         String removeFieldName = toVersionField(version);
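Note: formatTransportVersionId only re-inserts the underscore grouping that the TransportVersions constants use for readability; the id itself stays a plain int. A standalone sketch of the same string manipulation (my transcription, assuming ids of at least seven digits as in the patterns above):

    public class FormatTransportVersionIdDemo {
        static String format(int id) {
            String s = Integer.toString(id);
            // Right to left: 2-digit group, 1-digit group, 3-digit group, then the leading digits.
            return s.substring(0, s.length() - 6) + "_"
                + s.substring(s.length() - 6, s.length() - 3) + "_"
                + s.substring(s.length() - 3, s.length() - 2) + "_"
                + s.substring(s.length() - 2);
        }

        public static void main(String[] args) {
            System.out.println(format(1005000)); // prints 1_005_0_00
            System.out.println(format(1003001)); // prints 1_003_0_01
        }
    }

So id 1005000 is emitted as def(1_005_0_00), which is what the tests below expect.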
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java
index 9e4f1cd3a913d..d5060a2e62365 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java
@@ -239,6 +239,96 @@ public void updateVersionFile_removesCorrectly() throws Exception {
         assertThat(field.isPresent(), is(false));
     }
 
+    @Test
+    public void addTransportVersion() throws Exception {
+        var transportVersions = """
+            public class TransportVersions {
+                public static final TransportVersion V_1_0_0 = def(1_000_0_00);
+                public static final TransportVersion V_1_1_0 = def(1_001_0_00);
+                public static final TransportVersion V_1_2_0 = def(1_002_0_00);
+                public static final TransportVersion V_1_2_1 = def(1_002_0_01);
+                public static final TransportVersion V_1_2_2 = def(1_002_0_02);
+                public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
+                public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
+                public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
+            }
+            """;
+
+        var expectedTransportVersions = """
+            public class TransportVersions {
+
+                public static final TransportVersion V_1_0_0 = def(1_000_0_00);
+
+                public static final TransportVersion V_1_1_0 = def(1_001_0_00);
+
+                public static final TransportVersion V_1_2_0 = def(1_002_0_00);
+
+                public static final TransportVersion V_1_2_1 = def(1_002_0_01);
+
+                public static final TransportVersion V_1_2_2 = def(1_002_0_02);
+
+                public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
+
+                public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
+
+                public static final TransportVersion NEXT_TRANSPORT_VERSION = def(1_005_0_00);
+
+                public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
+            }
+            """;
+
+        var unit = StaticJavaParser.parse(transportVersions);
+        var result = UpdateVersionsTask.addTransportVersionConstant(unit, "NEXT_TRANSPORT_VERSION", 1_005_0_00);
+
+        assertThat(result.isPresent(), is(true));
+        assertThat(result.get(), hasToString(expectedTransportVersions));
+    }
+
+    @Test
+    public void addTransportVersionPatch() throws Exception {
+        var transportVersions = """
+            public class TransportVersions {
+                public static final TransportVersion V_1_0_0 = def(1_000_0_00);
+                public static final TransportVersion V_1_1_0 = def(1_001_0_00);
+                public static final TransportVersion V_1_2_0 = def(1_002_0_00);
+                public static final TransportVersion V_1_2_1 = def(1_002_0_01);
+                public static final TransportVersion V_1_2_2 = def(1_002_0_02);
+                public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
+                public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
+                public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
+            }
+            """;
+
+        var expectedTransportVersions = """
+            public class TransportVersions {
+
+                public static final TransportVersion V_1_0_0 = def(1_000_0_00);
+
+                public static final TransportVersion V_1_1_0 = def(1_001_0_00);
+
+                public static final TransportVersion V_1_2_0 = def(1_002_0_00);
+
+                public static final TransportVersion V_1_2_1 = def(1_002_0_01);
+
+                public static final TransportVersion V_1_2_2 = def(1_002_0_02);
+
+                public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
+
+                public static final TransportVersion PATCH_TRANSPORT_VERSION = def(1_003_0_01);
+
+                public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
+
+                public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
+            }
+            """;
+
+        var unit = StaticJavaParser.parse(transportVersions);
+        var result = UpdateVersionsTask.addTransportVersionConstant(unit, "PATCH_TRANSPORT_VERSION", 1_003_0_01);
+
+        assertThat(result.isPresent(), is(true));
+        assertThat(result.get(), hasToString(expectedTransportVersions));
+    }
+
     private static Optional<FieldDeclaration> findFirstField(Node node, String name) {
         return node.findFirst(FieldDeclaration.class, f -> f.getVariable(0).getName().getIdentifier().equals(name));
     }
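Note: the new option goes through Integer.parseInt, so on the command line the version id must be plain digits (no underscores), in the '<constant>:<version id>' form the validation above enforces. Assuming the task keeps its existing updateVersions registration (the registration is not shown in this patch), an invocation would look like:

    ./gradlew updateVersions --add-transport-version=NEXT_TRANSPORT_VERSION:1005000

The constant is inserted after the entry with the greatest def(...) id below the given id, and the file is written back via LexicalPreservingPrinter so the surrounding formatting is preserved.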
diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties
index c041bd2dd2156..a5fe15e7f318c 100644
--- a/build-tools-internal/version.properties
+++ b/build-tools-internal/version.properties
@@ -17,7 +17,7 @@ jna = 5.12.1
 netty = 4.1.115.Final
 commons_lang3 = 3.9
 google_oauth_client = 1.34.1
-awsv1sdk = 1.12.270
+awsv1sdk = 1.12.746
 awsv2sdk = 2.28.13
 
 reactive_streams = 1.0.4
diff --git a/build-tools/settings.gradle b/build-tools/settings.gradle
index fce1f3b33d524..f429f73ddffc8 100644
--- a/build-tools/settings.gradle
+++ b/build-tools/settings.gradle
@@ -9,7 +9,9 @@
 pluginManagement {
   includeBuild "../build-conventions"
 }
-
+plugins {
+  id "com.gradle.develocity" version "3.18.1"
+}
 include 'reaper'
 
 dependencyResolutionManagement {
diff --git a/client/test/build.gradle b/client/test/build.gradle
index e39b7587b69d5..3b7f62fd8ef58 100644
--- a/client/test/build.gradle
+++ b/client/test/build.gradle
@@ -28,9 +28,9 @@ dependencies {
   api "org.hamcrest:hamcrest:${versions.hamcrest}"
 
   // mockito
-  api 'org.mockito:mockito-core:5.11.0'
-  api 'org.mockito:mockito-subclass:5.11.0'
-  api 'net.bytebuddy:byte-buddy:1.14.12'
+  api 'org.mockito:mockito-core:5.15.2'
+  api 'org.mockito:mockito-subclass:5.15.2'
+  api 'net.bytebuddy:byte-buddy:1.15.11'
   api 'org.objenesis:objenesis:3.3'
 }
 
diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java
index cc662bd747575..c6421d76392cf 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java
@@ -74,7 +74,7 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment
             keyStore.setFile(setting, Files.readAllBytes(file));
         }
 
-        keyStore.save(env.configFile(), getKeyStorePassword().getChars());
+        keyStore.save(env.configDir(), getKeyStorePassword().getChars());
     }
 
     @SuppressForbidden(reason = "file arg for cli")
diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java
index c01c18418858a..a7ea6dcf7ce74 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java
@@ -100,7 +100,7 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment
             }
         }
 
-        keyStore.save(env.configFile(), getKeyStorePassword().getChars());
+        keyStore.save(env.configDir(), getKeyStorePassword().getChars());
     }
 }
diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java
index 0380018d36cff..a8a75ac23c900 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java
@@ -39,14 +39,14 @@ public BaseKeyStoreCommand(String description, boolean keyStoreMustExist) {
     @Override
     public final void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception {
         try {
-            final Path configFile = env.configFile();
+            final Path configFile = env.configDir();
             keyStore = KeyStoreWrapper.load(configFile);
             if (keyStore == null) {
                 if (keyStoreMustExist) {
                     throw new UserException(
                         ExitCodes.DATA_ERROR,
                         "Elasticsearch keystore not found at ["
-                            + KeyStoreWrapper.keystorePath(env.configFile())
+                            + KeyStoreWrapper.keystorePath(env.configDir())
                             + "]. Use 'create' command to create one."
                     );
                 } else if (options.has(forceOption) == false) {
diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java
index 4dca3d538263a..9e4f70eee559d 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java
@@ -31,7 +31,7 @@ class ChangeKeyStorePasswordCommand extends BaseKeyStoreCommand {
     protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception {
         try (SecureString newPassword = readPassword(terminal, true)) {
             final KeyStoreWrapper keyStore = getKeyStore();
-            keyStore.save(env.configFile(), newPassword.getChars());
+            keyStore.save(env.configDir(), newPassword.getChars());
             terminal.println("Elasticsearch keystore password changed successfully.");
         } catch (SecurityException e) {
             throw new UserException(ExitCodes.DATA_ERROR, e.getMessage());
diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java
index a922c92f5f44b..ef561b08d9a50 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java
@@ -40,7 +40,7 @@ class CreateKeyStoreCommand extends KeyStoreAwareCommand {
     @Override
     public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception {
         try (SecureString password = options.has(passwordOption) ? readPassword(terminal, true) : new SecureString(new char[0])) {
-            Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile());
+            Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir());
             if (Files.exists(keystoreFile)) {
                 if (terminal.promptYesNo("An elasticsearch keystore already exists. Overwrite?", false) == false) {
Overwrite?", false) == false) { terminal.println("Exiting without creating keystore."); @@ -48,8 +48,8 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } } KeyStoreWrapper keystore = KeyStoreWrapper.create(); - keystore.save(env.configFile(), password.getChars()); - terminal.println("Created elasticsearch keystore in " + KeyStoreWrapper.keystorePath(env.configFile())); + keystore.save(env.configDir(), password.getChars()); + terminal.println("Created elasticsearch keystore in " + KeyStoreWrapper.keystorePath(env.configDir())); } catch (SecurityException e) { throw new UserException(ExitCodes.IO_ERROR, "Error creating the elasticsearch keystore."); } diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java index 0428d5dcf7df8..f0eaca1648b96 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java @@ -32,7 +32,7 @@ public class HasPasswordKeyStoreCommand extends KeyStoreAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - final Path configFile = env.configFile(); + final Path configFile = env.configDir(); final KeyStoreWrapper keyStore = KeyStoreWrapper.load(configFile); // We handle error printing here so we can respect the "--silent" flag diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java index 8a973c6d67f7d..fb1a2ad1df7f3 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java @@ -45,6 +45,6 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment } keyStore.remove(setting); } - keyStore.save(env.configFile(), getKeyStorePassword().getChars()); + keyStore.save(env.configDir(), getKeyStorePassword().getChars()); } } diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java index b7061d6153b80..bbbfbf81f7ed9 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java @@ -26,7 +26,7 @@ public class UpgradeKeyStoreCommand extends BaseKeyStoreCommand { @Override protected void executeCommand(final Terminal terminal, final OptionSet options, final Environment env) throws Exception { - KeyStoreWrapper.upgrade(getKeyStore(), env.configFile(), getKeyStorePassword().getChars()); + KeyStoreWrapper.upgrade(getKeyStore(), env.configDir(), getKeyStorePassword().getChars()); } } diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java 
index edd70e4e52f55..56706dd44f0c3 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java
@@ -46,14 +46,14 @@ private Path createRandomFile() throws IOException {
         for (int i = 0; i < length; ++i) {
             bytes[i] = randomByte();
         }
-        Path file = env.configFile().resolve(randomAlphaOfLength(16));
+        Path file = env.configDir().resolve(randomAlphaOfLength(16));
         Files.write(file, bytes);
         return file;
     }
 
     private void addFile(KeyStoreWrapper keystore, String setting, Path file, String password) throws Exception {
         keystore.setFile(setting, Files.readAllBytes(file));
-        keystore.save(env.configFile(), password.toCharArray());
+        keystore.save(env.configDir(), password.toCharArray());
     }
 
     public void testMissingCreateWithEmptyPasswordWhenPrompted() throws Exception {
@@ -77,7 +77,7 @@ public void testMissingNoCreate() throws Exception {
         terminal.addSecretInput(randomFrom("", "keystorepassword"));
         terminal.addTextInput("n"); // explicit no
         execute("foo");
-        assertNull(KeyStoreWrapper.load(env.configFile()));
+        assertNull(KeyStoreWrapper.load(env.configDir()));
     }
 
     public void testOverwritePromptDefault() throws Exception {
diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java
index 3de18e094104f..412624be1d506 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java
@@ -83,7 +83,7 @@ public void testMissingPromptCreateWithoutPasswordWithoutPromptIfForced() throws
     public void testMissingNoCreate() throws Exception {
         terminal.addTextInput("n"); // explicit no
         execute("foo");
-        assertNull(KeyStoreWrapper.load(env.configFile()));
+        assertNull(KeyStoreWrapper.load(env.configDir()));
     }
 
     public void testOverwritePromptDefault() throws Exception {
@@ -143,7 +143,7 @@ public void testForceNonExistent() throws Exception {
 
     public void testPromptForValue() throws Exception {
         String password = "keystorepassword";
-        KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+        KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
         terminal.addSecretInput(password);
         terminal.addSecretInput("secret value");
         execute("foo");
@@ -152,7 +152,7 @@ public void testPromptForValue() throws Exception {
 
     public void testPromptForMultipleValues() throws Exception {
         final String password = "keystorepassword";
-        KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+        KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
         terminal.addSecretInput(password);
         terminal.addSecretInput("bar1");
         terminal.addSecretInput("bar2");
@@ -165,7 +165,7 @@ public void testPromptForMultipleValues() throws Exception {
 
     public void testStdinShort() throws Exception {
         String password = "keystorepassword";
-        KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+        KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
         terminal.addSecretInput(password);
         setInput("secret value 1");
         execute("-x", "foo");
@@ -174,7 +174,7 @@ public void testStdinShort() throws Exception {
 
     public void testStdinLong() throws Exception {
         String password = "keystorepassword";
password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("secret value 2"); execute("--stdin", "foo"); @@ -183,7 +183,7 @@ public void testStdinLong() throws Exception { public void testStdinNoInput() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput(""); execute("-x", "foo"); @@ -192,7 +192,7 @@ public void testStdinNoInput() throws Exception { public void testStdinInputWithLineBreaks() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("Typedthisandhitenter\n"); execute("-x", "foo"); @@ -201,7 +201,7 @@ public void testStdinInputWithLineBreaks() throws Exception { public void testStdinInputWithCarriageReturn() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("Typedthisandhitenter\r"); execute("-x", "foo"); @@ -210,7 +210,7 @@ public void testStdinInputWithCarriageReturn() throws Exception { public void testStdinWithMultipleValues() throws Exception { final String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("bar1\nbar2\nbar3"); execute(randomFrom("-x", "--stdin"), "foo1", "foo2", "foo3"); @@ -221,7 +221,7 @@ public void testStdinWithMultipleValues() throws Exception { public void testAddUtf8String() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); final int stringSize = randomIntBetween(8, 16); try (CharArrayWriter secretChars = new CharArrayWriter(stringSize)) { diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java index 0fc76943f9d05..d93bc2466ed7b 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java @@ -42,7 +42,7 @@ public void setupEnv() throws IOException { public void testLoadSecureSettings() throws Exception { final char[] password = KeyStoreWrapperTests.getPossibleKeystorePassword(); - final Path configPath = env.configFile(); + final Path configPath = env.configDir(); final SecureString seed; try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { seed = KeyStoreWrapper.SEED_SETTING.get(Settings.builder().setSecureSettings(keyStoreWrapper).build()); diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java 
index 72a83a48b6344..74b8c634939fd 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java
@@ -48,7 +48,7 @@ public void testNotMatchingPasswords() throws Exception {
     public void testDefaultNotPromptForPassword() throws Exception {
         assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm());
         execute();
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         assertNotNull(KeyStoreWrapper.load(configDir));
     }
 
@@ -63,7 +63,7 @@ public void testPosix() throws Exception {
         } else {
             execute();
         }
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         assertNotNull(KeyStoreWrapper.load(configDir));
     }
 
@@ -79,13 +79,13 @@ public void testNotPosix() throws Exception {
         } else {
             execute();
         }
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         assertNotNull(KeyStoreWrapper.load(configDir));
     }
 
     public void testOverwrite() throws Exception {
         String password = getPossibleKeystorePassword();
-        Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile());
+        Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir());
         byte[] content = "not a keystore".getBytes(StandardCharsets.UTF_8);
         Files.write(keystoreFile, content);
 
@@ -110,6 +110,6 @@ public void testOverwrite() throws Exception {
         } else {
             execute();
         }
-        assertNotNull(KeyStoreWrapper.load(env.configFile()));
+        assertNotNull(KeyStoreWrapper.load(env.configDir()));
     }
 }
diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java
index 80edce4a20796..fcbe7b2226296 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java
@@ -77,11 +77,11 @@ KeyStoreWrapper createKeystore(String password, String... settings) throws Excep
     }
 
     void saveKeystore(KeyStoreWrapper keystore, String password) throws Exception {
-        keystore.save(env.configFile(), password.toCharArray());
+        keystore.save(env.configDir(), password.toCharArray());
     }
 
     KeyStoreWrapper loadKeystore(String password) throws Exception {
-        KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile());
+        KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configDir());
         keystore.decrypt(password.toCharArray());
         return keystore;
     }
diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java
index 5ab27bac3998a..ee3a53d5c3df7 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java
@@ -84,8 +84,8 @@ public void testFileSettingExhaustiveBytes() throws Exception {
             bytes[i] = (byte) i;
         }
         keystore.setFile("foo", bytes);
-        keystore.save(env.configFile(), password);
-        keystore = KeyStoreWrapper.load(env.configFile());
+        keystore.save(env.configDir(), password);
+        keystore = KeyStoreWrapper.load(env.configDir());
         keystore.decrypt(password);
         try (InputStream stream = keystore.getFile("foo")) {
             for (int i = 0; i < 256; ++i) {
@@ -114,8 +114,8 @@ public void testDecryptKeyStoreWithWrongPassword() throws Exception {
             invalidPassword[realPassword.length] = '#';
         }
         KeyStoreWrapper keystore = KeyStoreWrapper.create();
-        keystore.save(env.configFile(), realPassword);
-        final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configFile());
+        keystore.save(env.configDir(), realPassword);
+        final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configDir());
         final SecurityException exception = expectThrows(SecurityException.class, () -> loadedkeystore.decrypt(invalidPassword));
         if (inFipsJvm()) {
             assertThat(
@@ -133,8 +133,8 @@ public void testDecryptKeyStoreWithWrongPassword() throws Exception {
     public void testDecryptKeyStoreWithShortPasswordInFips() throws Exception {
         assumeTrue("This should run only in FIPS mode", inFipsJvm());
         KeyStoreWrapper keystore = KeyStoreWrapper.create();
-        keystore.save(env.configFile(), "alongenoughpassword".toCharArray());
-        final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configFile());
+        keystore.save(env.configDir(), "alongenoughpassword".toCharArray());
+        final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configDir());
         final GeneralSecurityException exception = expectThrows(
             GeneralSecurityException.class,
             () -> loadedkeystore.decrypt("shortpwd".toCharArray()) // shorter than 14 characters
@@ -147,7 +147,7 @@ public void testCreateKeyStoreWithShortPasswordInFips() throws Exception {
         KeyStoreWrapper keystore = KeyStoreWrapper.create();
         final GeneralSecurityException exception = expectThrows(
             GeneralSecurityException.class,
-            () -> keystore.save(env.configFile(), "shortpwd".toCharArray()) // shorter than 14 characters
+            () -> keystore.save(env.configDir(), "shortpwd".toCharArray()) // shorter than 14 characters
         );
         assertThat(exception.getMessage(), containsString("Error generating an encryption key from the provided password"));
     }
@@ -192,18 +192,18 @@ public void testUpgradeNoop() throws Exception {
         final char[] password = getPossibleKeystorePassword();
         KeyStoreWrapper keystore = KeyStoreWrapper.create();
         SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey());
-        keystore.save(env.configFile(), password);
+        keystore.save(env.configDir(), password);
         // upgrade does not overwrite seed
-        KeyStoreWrapper.upgrade(keystore, env.configFile(), password);
+        KeyStoreWrapper.upgrade(keystore, env.configDir(), password);
         assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString());
-        keystore = KeyStoreWrapper.load(env.configFile());
+        keystore = KeyStoreWrapper.load(env.configDir());
         keystore.decrypt(password);
         assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString());
     }
 
     public void testFailWhenCannotConsumeSecretStream() throws Exception {
         assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm());
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         try (
             Directory directory = newFSDirectory(configDir);
             IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT)
@@ -234,7 +234,7 @@ public void testFailWhenCannotConsumeSecretStream() throws Exception {
 
     public void testFailWhenCannotConsumeEncryptedBytesStream() throws Exception {
         assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm());
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         try (
             Directory directory = newFSDirectory(configDir);
             IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT)
@@ -266,7 +266,7 @@ public void testFailWhenCannotConsumeEncryptedBytesStream() throws Exception {
 
     public void testFailWhenSecretStreamNotConsumed() throws Exception {
         assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm());
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         try (
             Directory directory = newFSDirectory(configDir);
             IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT)
@@ -296,7 +296,7 @@ public void testFailWhenSecretStreamNotConsumed() throws Exception {
 
     public void testFailWhenEncryptedBytesStreamIsNotConsumed() throws Exception {
         assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm());
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         try (
             Directory directory = newFSDirectory(configDir);
             IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT)
@@ -359,11 +359,11 @@ public void testUpgradeAddsSeed() throws Exception {
         final char[] password = getPossibleKeystorePassword();
         KeyStoreWrapper keystore = KeyStoreWrapper.create();
         keystore.remove(KeyStoreWrapper.SEED_SETTING.getKey());
-        keystore.save(env.configFile(), password);
-        KeyStoreWrapper.upgrade(keystore, env.configFile(), password);
+        keystore.save(env.configDir(), password);
+        KeyStoreWrapper.upgrade(keystore, env.configDir(), password);
         SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey());
         assertNotNull(seed);
-        keystore = KeyStoreWrapper.load(env.configFile());
+        keystore = KeyStoreWrapper.load(env.configDir());
         keystore.decrypt(password);
         assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString());
     }
@@ -380,7 +380,7 @@ public void testIllegalSettingName() throws Exception {
 
     public void testBackcompatV4() throws Exception {
         assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm());
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         try (
             Directory directory = newFSDirectory(configDir);
             IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT)
@@ -421,10 +421,10 @@ public void testStringAndFileDistinction() throws Exception {
         final Path temp = createTempDir();
         Files.writeString(temp.resolve("file_setting"), "file_value", StandardCharsets.UTF_8);
         wrapper.setFile("file_setting", Files.readAllBytes(temp.resolve("file_setting")));
-        wrapper.save(env.configFile(), password);
+        wrapper.save(env.configDir(), password);
         wrapper.close();
 
-        final KeyStoreWrapper afterSave = KeyStoreWrapper.load(env.configFile());
+        final KeyStoreWrapper afterSave = KeyStoreWrapper.load(env.configDir());
         assertNotNull(afterSave);
         afterSave.decrypt(password);
         assertThat(afterSave.getSettingNames(), equalTo(Set.of("keystore.seed", "string_setting", "file_setting")));
@@ -510,8 +510,8 @@ public void testSerializationWhenLoadedFromFile() throws Exception {
 
         // testing with password and raw dataBytes[]
         final char[] password = getPossibleKeystorePassword();
-        wrapper.save(env.configFile(), password);
-        final KeyStoreWrapper fromFile = KeyStoreWrapper.load(env.configFile());
+        wrapper.save(env.configDir(), password);
+        final KeyStoreWrapper fromFile = KeyStoreWrapper.load(env.configDir());
         fromFile.decrypt(password);
         assertThat(fromFile.getSettingNames(), hasSize(2));
diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java
index bb533f32c7ac2..894b9d215a47f 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java
@@ -62,11 +62,11 @@ private void assertKeystoreUpgradeWithPassword(String file, int version) throws
     }
 
     private void assertKeystoreUpgrade(String file, int version, @Nullable String password) throws Exception {
-        final Path keystore = KeyStoreWrapper.keystorePath(env.configFile());
+        final Path keystore = KeyStoreWrapper.keystorePath(env.configDir());
         try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream(file); OutputStream os = Files.newOutputStream(keystore)) {
             is.transferTo(os);
         }
-        try (KeyStoreWrapper beforeUpgrade = KeyStoreWrapper.load(env.configFile())) {
+        try (KeyStoreWrapper beforeUpgrade = KeyStoreWrapper.load(env.configDir())) {
             assertNotNull(beforeUpgrade);
             assertThat(beforeUpgrade.getFormatVersion(), equalTo(version));
         }
@@ -77,7 +77,7 @@ private void assertKeystoreUpgrade(String file, int version, @Nullable String pa
         execute();
         terminal.reset();
 
-        try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configFile())) {
+        try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configDir())) {
             assertNotNull(afterUpgrade);
             assertThat(afterUpgrade.getFormatVersion(), equalTo(KeyStoreWrapper.CURRENT_VERSION));
             afterUpgrade.decrypt(password != null ? password.toCharArray() : new char[0]);
password.toCharArray() : new char[0]); @@ -87,6 +87,6 @@ private void assertKeystoreUpgrade(String file, int version, @Nullable String pa public void testKeystoreDoesNotExist() { final UserException e = expectThrows(UserException.class, this::execute); - assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configFile()) + "]"))); + assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configDir()) + "]"))); } } diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java index d443cf5e1e181..0803d24c3914f 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.env.Environment; import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.plugin.scanner.ClassReaders; import org.elasticsearch.plugin.scanner.NamedComponentScanner; import org.elasticsearch.plugins.Platforms; @@ -249,8 +250,8 @@ public void execute(List<InstallablePlugin> plugins) throws Exception { final List<Path> deleteOnFailure = new ArrayList<>(); deleteOnFailures.put(pluginId, deleteOnFailure); - final Path pluginZip = download(plugin, env.tmpFile()); - final Path extractedZip = unzip(pluginZip, env.pluginsFile()); + final Path pluginZip = download(plugin, env.tmpDir()); + final Path extractedZip = unzip(pluginZip, env.pluginsDir()); deleteOnFailure.add(extractedZip); final PluginDescriptor pluginDescriptor = installPlugin(plugin, extractedZip, deleteOnFailure); terminal.println(logPrefix + "Installed " + pluginDescriptor.getName()); @@ -868,14 +869,14 @@ private PluginDescriptor loadPluginInfo(Path pluginRoot) throws Exception { PluginsUtils.verifyCompatibility(info); // checking for existing version of the plugin - verifyPluginName(env.pluginsFile(), info.getName()); + verifyPluginName(env.pluginsDir(), info.getName()); - PluginsUtils.checkForFailedPluginRemovals(env.pluginsFile()); + PluginsUtils.checkForFailedPluginRemovals(env.pluginsDir()); terminal.println(VERBOSE, info.toString()); // check for jar hell before any copying - jarHellCheck(info, pluginRoot, env.pluginsFile(), env.modulesFile()); + jarHellCheck(info, pluginRoot, env.pluginsDir(), env.modulesDir()); if (info.isStable() && hasNamedComponentFile(pluginRoot) == false) { generateNameComponentFile(pluginRoot); @@ -922,10 +923,12 @@ void jarHellCheck(PluginDescriptor candidateInfo, Path candidateDir, Path plugin */ private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoot, List<Path> deleteOnFailure) throws Exception { final PluginDescriptor info = loadPluginInfo(tmpRoot); - PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpFile()); - if (pluginPolicy != null) { - Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpFile()); - PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch); + if (RuntimeVersionFeature.isSecurityManagerAvailable()) { + PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir()); + if (pluginPolicy != null) { + Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir()); +
PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch); + } } // Validate that the downloaded plugin's ID matches what we expect from the descriptor. The @@ -938,14 +941,14 @@ private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoo ); } - final Path destination = env.pluginsFile().resolve(info.getName()); + final Path destination = env.pluginsDir().resolve(info.getName()); deleteOnFailure.add(destination); installPluginSupportFiles( info, tmpRoot, - env.binFile().resolve(info.getName()), - env.configFile().resolve(info.getName()), + env.binDir().resolve(info.getName()), + env.configDir().resolve(info.getName()), deleteOnFailure ); movePlugin(tmpRoot, destination); diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java index fc578c81b24c9..f51a478fe2135 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java @@ -40,13 +40,13 @@ class ListPluginsCommand extends EnvironmentAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - if (Files.exists(env.pluginsFile()) == false) { - throw new IOException("Plugins directory missing: " + env.pluginsFile()); + if (Files.exists(env.pluginsDir()) == false) { + throw new IOException("Plugins directory missing: " + env.pluginsDir()); } - terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsFile()); + terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsDir()); final List<Path> plugins = new ArrayList<>(); - try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsDir())) { for (Path path : paths) { if (path.getFileName().toString().equals(ELASTICSEARCH_PLUGINS_YML_CACHE) == false) { plugins.add(path); @@ -61,7 +61,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce private static void printPlugin(Environment env, Terminal terminal, Path plugin, String prefix) throws IOException { terminal.println(Terminal.Verbosity.SILENT, prefix + plugin.getFileName().toString()); - PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsFile().resolve(plugin)); + PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsDir().resolve(plugin)); terminal.println(Terminal.Verbosity.VERBOSE, info.toString(prefix)); // When PluginDescriptor#getElasticsearchVersion returns a string, we can revisit the need diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java index a8f9e746a24e1..ac9c2b21788c6 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java @@ -93,7 +93,7 @@ private void ensurePluginsNotUsedByOtherPlugins(List<InstallablePlugin> plugins) // We build a new map where the keys are plugins that extend plugins // we want to remove and the values are the plugins we can't remove // because of this dependency - Map<String, List<String>> pluginDependencyMap =
PluginsUtils.getDependencyMapView(env.pluginsFile()); + Map<String, List<String>> pluginDependencyMap = PluginsUtils.getDependencyMapView(env.pluginsDir()); for (Map.Entry<String, List<String>> entry : pluginDependencyMap.entrySet()) { for (String extendedPlugin : entry.getValue()) { for (InstallablePlugin plugin : plugins) { @@ -121,9 +121,9 @@ private void ensurePluginsNotUsedByOtherPlugins(List<InstallablePlugin> plugins) private void checkCanRemove(InstallablePlugin plugin) throws UserException { String pluginId = plugin.getId(); - final Path pluginDir = env.pluginsFile().resolve(pluginId); - final Path pluginConfigDir = env.configFile().resolve(pluginId); - final Path removing = env.pluginsFile().resolve(".removing-" + pluginId); + final Path pluginDir = env.pluginsDir().resolve(pluginId); + final Path pluginConfigDir = env.configDir().resolve(pluginId); + final Path removing = env.pluginsDir().resolve(".removing-" + pluginId); /* * If the plugin does not exist and the plugin config does not exist, fail to the user that the plugin is not found, unless there's @@ -147,7 +147,7 @@ private void checkCanRemove(InstallablePlugin plugin) throws UserException { } } - final Path pluginBinDir = env.binFile().resolve(pluginId); + final Path pluginBinDir = env.binDir().resolve(pluginId); if (Files.exists(pluginBinDir)) { if (Files.isDirectory(pluginBinDir) == false) { throw new UserException(ExitCodes.IO_ERROR, "bin dir for " + pluginId + " is not a directory"); @@ -157,9 +157,9 @@ private void checkCanRemove(InstallablePlugin plugin) throws UserException { private void removePlugin(InstallablePlugin plugin) throws IOException { final String pluginId = plugin.getId(); - final Path pluginDir = env.pluginsFile().resolve(pluginId); - final Path pluginConfigDir = env.configFile().resolve(pluginId); - final Path removing = env.pluginsFile().resolve(".removing-" + pluginId); + final Path pluginDir = env.pluginsDir().resolve(pluginId); + final Path pluginConfigDir = env.configDir().resolve(pluginId); + final Path removing = env.pluginsDir().resolve(".removing-" + pluginId); terminal.println("-> removing [" + pluginId + "]..."); @@ -176,7 +176,7 @@ private void removePlugin(InstallablePlugin plugin) throws IOException { terminal.println(VERBOSE, "removing [" + pluginDir + "]"); } - final Path pluginBinDir = env.binFile().resolve(pluginId); + final Path pluginBinDir = env.binDir().resolve(pluginId); if (Files.exists(pluginBinDir)) { try (Stream<Path> paths = Files.list(pluginBinDir)) { pluginPaths.addAll(paths.toList()); diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java index d6d0619422770..6d77437bd71d5 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java @@ -61,7 +61,7 @@ public SyncPluginsAction(Terminal terminal, Environment env) { * @throws UserException if a plugins config file is found.
*/ public static void ensureNoConfigFile(Environment env) throws UserException { - final Path pluginsConfig = env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML); + final Path pluginsConfig = env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML); if (Files.exists(pluginsConfig)) { throw new UserException( ExitCodes.USAGE, @@ -79,16 +79,16 @@ public static void ensureNoConfigFile(Environment env) throws UserException { * @throws Exception if anything goes wrong */ public void execute() throws Exception { - final Path configPath = this.env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML); - final Path previousConfigPath = this.env.pluginsFile().resolve(ELASTICSEARCH_PLUGINS_YML_CACHE); + final Path configPath = this.env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML); + final Path previousConfigPath = this.env.pluginsDir().resolve(ELASTICSEARCH_PLUGINS_YML_CACHE); if (Files.exists(configPath) == false) { // The `PluginsManager` will have checked that this file exists before invoking the action. throw new PluginSyncException("Plugins config does not exist: " + configPath.toAbsolutePath()); } - if (Files.exists(env.pluginsFile()) == false) { - throw new PluginSyncException("Plugins directory missing: " + env.pluginsFile()); + if (Files.exists(env.pluginsDir()) == false) { + throw new PluginSyncException("Plugins directory missing: " + env.pluginsDir()); } // Parse descriptor file @@ -267,14 +267,14 @@ private List<PluginDescriptor> getExistingPlugins() throws PluginSyncException { final List<PluginDescriptor> plugins = new ArrayList<>(); try { - try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsDir())) { for (Path pluginPath : paths) { String filename = pluginPath.getFileName().toString(); if (filename.startsWith(".")) { continue; } - PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsFile().resolve(pluginPath)); + PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsDir().resolve(pluginPath)); plugins.add(info); // Check for a version mismatch, unless it's an official plugin since we can upgrade them.
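Taken together, the plugin CLI hunks above and below are one mechanical rename of the Environment path accessors: configFile, pluginsFile, binFile, modulesFile, tmpFile and logsFile become configDir, pluginsDir, binDir, modulesDir, tmpDir and logsDir, with the Path return types unchanged. A minimal sketch of the call-site pattern after the rename; the wrapper class and the home path are illustrative, while TestEnvironment and the path.home setting are the ones the tests in this patch already use:

    import java.nio.file.Files;
    import java.nio.file.Path;

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.env.Environment;
    import org.elasticsearch.env.TestEnvironment;

    class EnvironmentDirSketch {
        // Resolves the same paths the plugin CLI touches, via the renamed *Dir() accessors.
        static boolean pluginsConfigPresent() {
            Environment env = TestEnvironment.newEnvironment(
                Settings.builder().put("path.home", "/tmp/es-home").build() // hypothetical home directory
            );
            Path pluginsConfig = env.configDir().resolve("elasticsearch-plugins.yml");
            // mirrors the existence checks SyncPluginsAction#execute performs before syncing
            return Files.exists(env.pluginsDir()) && Files.exists(pluginsConfig);
        }
    }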
diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java index 88b24ab9ae614..a5dacebec69bc 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java @@ -37,7 +37,7 @@ public Command create() { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { var action = new SyncPluginsAction(terminal, env); - if (Files.exists(env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML)) == false) { + if (Files.exists(env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML)) == false) { return; } if (Build.current().type() != Build.Type.DOCKER) { diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java index d638534943ecd..d2c8d4adb4d1b 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java @@ -354,7 +354,7 @@ void installPlugins(final List<InstallablePlugin> plugins, final Path home, fina } void assertPlugin(String name, Path original, Environment environment) throws IOException { - assertPluginInternal(name, environment.pluginsFile(), original); + assertPluginInternal(name, environment.pluginsDir(), original); assertConfigAndBin(name, original, environment); assertInstallCleaned(environment); } @@ -395,7 +395,7 @@ void assertPluginInternal(String name, Path pluginsFile, Path originalPlugin) th void assertConfigAndBin(String name, Path original, Environment environment) throws IOException { if (Files.exists(original.resolve("bin"))) { - Path binDir = environment.binFile().resolve(name); + Path binDir = environment.binDir().resolve(name); assertTrue("bin dir exists", Files.exists(binDir)); assertTrue("bin is a dir", Files.isDirectory(binDir)); try (DirectoryStream<Path> stream = Files.newDirectoryStream(binDir)) { @@ -409,7 +409,7 @@ void assertConfigAndBin(String name, Path original, Environment environment) thr } } if (Files.exists(original.resolve("config"))) { - Path configDir = environment.configFile().resolve(name); + Path configDir = environment.configDir().resolve(name); assertTrue("config dir exists", Files.exists(configDir)); assertTrue("config is a dir", Files.isDirectory(configDir)); @@ -417,7 +417,7 @@ void assertConfigAndBin(String name, Path original, Environment environment) thr GroupPrincipal group = null; if (isPosix) { - PosixFileAttributes configAttributes = Files.getFileAttributeView(environment.configFile(), PosixFileAttributeView.class) + PosixFileAttributes configAttributes = Files.getFileAttributeView(environment.configDir(), PosixFileAttributeView.class) .readAttributes(); user = configAttributes.owner(); group = configAttributes.group(); @@ -446,7 +446,7 @@ void assertConfigAndBin(String name, Path original, Environment environment) thr } void assertInstallCleaned(Environment environment) throws IOException { - try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) { + try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsDir())) { for
(Path file : stream) { if (file.getFileName().toString().startsWith(".installing")) { fail("Installation dir still exists, " + file); @@ -549,7 +549,7 @@ public void testTransaction() throws Exception { () -> installPlugins(List.of(pluginZip, nonexistentPluginZip), env.v1()) ); assertThat(e.getMessage(), containsString("does-not-exist")); - final Path fakeInstallPath = env.v2().pluginsFile().resolve("fake"); + final Path fakeInstallPath = env.v2().pluginsDir().resolve("fake"); // fake should have been removed when the file not found exception occurred assertFalse(Files.exists(fakeInstallPath)); assertInstallCleaned(env.v2()); @@ -557,7 +557,7 @@ public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception { InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); - final Path removing = env.v2().pluginsFile().resolve(".removing-failed"); + final Path removing = env.v2().pluginsDir().resolve(".removing-failed"); Files.createDirectory(removing); final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip)); final String expected = Strings.format( @@ -603,11 +603,11 @@ public void testUnknownPlugin() { public void testPluginsDirReadOnly() throws Exception { assumeTrue("posix and filesystem", isPosix && isReal); - try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) { + try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsDir())) { pluginsAttrs.setPermissions(new HashSet<>()); InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip)); - assertThat(e.getMessage(), containsString(env.v2().pluginsFile().toString())); + assertThat(e.getMessage(), containsString(env.v2().pluginsDir().toString())); } assertInstallCleaned(env.v2()); } @@ -694,7 +694,7 @@ public void testBinConflict() throws Exception { Files.createFile(binDir.resolve("somescript")); InstallablePlugin pluginZip = createPluginZip("elasticsearch", pluginDir); FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip)); - assertThat(e.getMessage(), containsString(env.v2().binFile().resolve("elasticsearch").toString())); + assertThat(e.getMessage(), containsString(env.v2().binDir().resolve("elasticsearch").toString())); assertInstallCleaned(env.v2()); } @@ -704,7 +704,7 @@ public void testBinPermissions() throws Exception { Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); - try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) { + try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binDir())) { Set<PosixFilePermission> perms = binAttrs.getCopyPermissions(); // make sure at least one execute perm is missing, so we know we forced it during installation perms.remove(PosixFilePermission.GROUP_EXECUTE); @@ -734,7 +734,7 @@ public void testPluginPermissions() throws Exception { installPlugin(pluginZip); assertPlugin("fake", tempPluginDir, env.v2()); - final Path fake = env.v2().pluginsFile().resolve("fake"); + final Path fake = env.v2().pluginsDir().resolve("fake"); final Path resources = fake.resolve("resources"); final Path platform = fake.resolve("platform"); final Path platformName = platform.resolve("linux-x86_64"); @@ -784,7 +784,7 @@ public void testConfig() throws
Exception { } public void testExistingConfig() throws Exception { - Path envConfigDir = env.v2().configFile().resolve("fake"); + Path envConfigDir = env.v2().configDir().resolve("fake"); Files.createDirectories(envConfigDir); Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8)); Path configDir = pluginDir.resolve("config"); @@ -921,7 +921,7 @@ public void testPluginAlreadyInstalled() throws Exception { e.getMessage(), equalTo( "plugin directory [" - + env.v2().pluginsFile().resolve("fake") + + env.v2().pluginsDir().resolve("fake") + "] already exists; " + "if you need to update the plugin, uninstall it first using command 'remove fake'" ) @@ -1499,7 +1499,7 @@ private void assertPolicyConfirmation(Tuple<Path, Environment> pathEnvironmentTu assertThat(e.getMessage(), containsString("installation aborted by user")); assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning)); - try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsFile())) { + try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsDir())) { assertThat(fileStream.collect(Collectors.toList()), empty()); } @@ -1512,7 +1512,7 @@ private void assertPolicyConfirmation(Tuple<Path, Environment> pathEnvironmentTu e = expectThrows(UserException.class, () -> installPlugin(pluginZip)); assertThat(e.getMessage(), containsString("installation aborted by user")); assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning)); - try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsFile())) { + try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsDir())) { assertThat(fileStream.collect(Collectors.toList()), empty()); } } @@ -1566,7 +1566,7 @@ public void testStablePluginWithNamedComponentsFile() throws Exception { InstallablePlugin stablePluginZip = createStablePlugin("stable1", pluginDir, true); installPlugins(List.of(stablePluginZip), env.v1()); assertPlugin("stable1", pluginDir, env.v2()); - assertNamedComponentFile("stable1", env.v2().pluginsFile(), namedComponentsJSON()); + assertNamedComponentFile("stable1", env.v2().pluginsDir(), namedComponentsJSON()); } @SuppressWarnings("unchecked") @@ -1577,7 +1577,7 @@ public void testStablePluginWithoutNamedComponentsFile() throws Exception { installPlugins(List.of(stablePluginZip), env.v1()); assertPlugin("stable1", pluginDir, env.v2()); - assertNamedComponentFile("stable1", env.v2().pluginsFile(), namedComponentsJSON()); + assertNamedComponentFile("stable1", env.v2().pluginsDir(), namedComponentsJSON()); } public void testGetSemanticVersion() { diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java index 0064b8c4bc513..5249aeefc2f2d 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java @@ -65,7 +65,7 @@ private static void buildFakePlugin( final boolean hasNativeController ) throws IOException { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve(name), + env.pluginsDir().resolve(name), "description", description, "name", @@ -84,9 +84,9 @@ private static void buildFakePlugin( } public void testPluginsDirMissing() throws Exception { - Files.delete(env.pluginsFile()); + Files.delete(env.pluginsDir()); IOException e = expectThrows(IOException.class, ()
-> execute()); - assertEquals("Plugins directory missing: " + env.pluginsFile(), e.getMessage()); + assertEquals("Plugins directory missing: " + env.pluginsDir(), e.getMessage()); } public void testNoPlugins() throws Exception { @@ -112,7 +112,7 @@ public void testPluginWithVerbose() throws Exception { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin", "- Plugin information:", "Name: fake_plugin", @@ -134,7 +134,7 @@ public void testPluginWithNativeController() throws Exception { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", @@ -157,7 +157,7 @@ public void testPluginWithVerboseMultiplePlugins() throws Exception { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", @@ -193,14 +193,14 @@ public void testPluginWithoutVerboseMultiplePlugins() throws Exception { } public void testPluginWithoutDescriptorFile() throws Exception { - final Path pluginDir = env.pluginsFile().resolve("fake1"); + final Path pluginDir = env.pluginsDir().resolve("fake1"); Files.createDirectories(pluginDir); var e = expectThrows(IllegalStateException.class, () -> execute()); assertThat(e.getMessage(), equalTo("Plugin [fake1] is missing a descriptor properties file.")); } public void testPluginWithWrongDescriptorFile() throws Exception { - final Path pluginDir = env.pluginsFile().resolve("fake1"); + final Path pluginDir = env.pluginsDir().resolve("fake1"); PluginTestUtil.writePluginProperties(pluginDir, "description", "fake desc"); var e = expectThrows(IllegalArgumentException.class, () -> execute()); assertThat(e.getMessage(), startsWith("property [name] is missing for plugin")); @@ -208,7 +208,7 @@ public void testPluginWithWrongDescriptorFile() throws Exception { public void testExistingIncompatiblePlugin() throws Exception { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve("fake_plugin1"), + env.pluginsDir().resolve("fake_plugin1"), "description", "fake desc 1", "name", diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java index aabdd4aaceb9e..8338c395e5e4c 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java @@ -58,11 +58,11 @@ public void setUp() throws Exception { } void createPlugin(String name) throws IOException { - createPlugin(env.pluginsFile(), name, Version.CURRENT); + createPlugin(env.pluginsDir(), name, Version.CURRENT); } void createPlugin(String name, Version version) throws IOException { - createPlugin(env.pluginsFile(), name, version); + createPlugin(env.pluginsDir(), name, version); } void createPlugin(Path path, String name, Version version) throws IOException { @@ -98,7 +98,7 @@ static MockTerminal removePlugin(List<String> pluginIds, Path home, boolean purg } static void assertRemoveCleaned(Environment env) throws IOException { - try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> stream =
Files.newDirectoryStream(env.pluginsDir())) { for (Path file : stream) { if (file.getFileName().toString().startsWith(".removing")) { fail("Removal dir still exists, " + file); @@ -115,84 +115,84 @@ public void testMissing() throws Exception { public void testBasic() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("fake").resolve("subdir")); createPlugin("other"); removePlugin("fake", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertTrue(Files.exists(env.pluginsFile().resolve("other"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertTrue(Files.exists(env.pluginsDir().resolve("other"))); assertRemoveCleaned(env); } /** Check that multiple plugins can be removed at the same time. */ public void testRemoveMultiple() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("fake").resolve("subdir")); createPlugin("other"); - Files.createFile(env.pluginsFile().resolve("other").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("other").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("other").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("other").resolve("subdir")); removePlugin("fake", home, randomBoolean()); removePlugin("other", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertFalse(Files.exists(env.pluginsFile().resolve("other"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertFalse(Files.exists(env.pluginsDir().resolve("other"))); assertRemoveCleaned(env); } public void testBin() throws Exception { createPlugin("fake"); - Path binDir = env.binFile().resolve("fake"); + Path binDir = env.binDir().resolve("fake"); Files.createDirectories(binDir); Files.createFile(binDir.resolve("somescript")); removePlugin("fake", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertTrue(Files.exists(env.binDir().resolve("elasticsearch"))); assertFalse(Files.exists(binDir)); assertRemoveCleaned(env); } public void testBinNotDir() throws Exception { createPlugin("fake"); - Files.createFile(env.binFile().resolve("fake")); + Files.createFile(env.binDir().resolve("fake")); UserException e = expectThrows(UserException.class, () -> removePlugin("fake", home, randomBoolean())); assertThat(e.getMessage(), containsString("not a directory")); - assertTrue(Files.exists(env.pluginsFile().resolve("fake"))); // did not remove - assertTrue(Files.exists(env.binFile().resolve("fake"))); + assertTrue(Files.exists(env.pluginsDir().resolve("fake"))); // did not remove + assertTrue(Files.exists(env.binDir().resolve("fake"))); assertRemoveCleaned(env); } public void testConfigDirPreserved() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = 
env.configDir().resolve("fake"); Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); final MockTerminal terminal = removePlugin("fake", home, false); - assertTrue(Files.exists(env.configFile().resolve("fake"))); + assertTrue(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), containsString(expectedConfigDirPreservedMessage(configDir))); assertRemoveCleaned(env); } public void testPurgePluginExists() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); if (randomBoolean()) { Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); } final MockTerminal terminal = removePlugin("fake", home, true); - assertFalse(Files.exists(env.configFile().resolve("fake"))); + assertFalse(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); assertRemoveCleaned(env); } public void testPurgePluginDoesNotExist() throws Exception { - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); final MockTerminal terminal = removePlugin("fake", home, true); - assertFalse(Files.exists(env.configFile().resolve("fake"))); + assertFalse(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); assertRemoveCleaned(env); } @@ -203,8 +203,8 @@ public void testPurgeNothingExists() throws Exception { } public void testPurgeOnlyMarkerFileExists() throws Exception { - final Path configDir = env.configFile().resolve("fake"); - final Path removing = env.pluginsFile().resolve(".removing-fake"); + final Path configDir = env.configDir().resolve("fake"); + final Path removing = env.pluginsDir().resolve(".removing-fake"); Files.createFile(removing); final MockTerminal terminal = removePlugin("fake", home, randomBoolean()); assertFalse(Files.exists(removing)); @@ -213,7 +213,7 @@ public void testPurgeOnlyMarkerFileExists() throws Exception { public void testNoConfigDirPreserved() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); final MockTerminal terminal = removePlugin("fake", home, randomBoolean()); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); } @@ -250,8 +250,8 @@ public void testMissingPluginName() { public void testRemoveWhenRemovingMarker() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createFile(env.pluginsFile().resolve(".removing-fake")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve(".removing-fake")); removePlugin("fake", home, randomBoolean()); } @@ -262,10 +262,10 @@ public void testRemoveWhenRemovingMarker() throws Exception { public void testRemoveMigratedPluginsWhenInstalled() throws Exception { for (String id : List.of("repository-azure", "repository-gcs", "repository-s3")) { createPlugin(id); - Files.createFile(env.pluginsFile().resolve(id).resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve(id).resolve("plugin.jar")); final MockTerminal terminal = removePlugin(id, home, 
randomBoolean()); - assertThat(Files.exists(env.pluginsFile().resolve(id)), is(false)); + assertThat(Files.exists(env.pluginsDir().resolve(id)), is(false)); // This message shouldn't be printed if plugin was actually installed. assertThat(terminal.getErrorOutput(), not(containsString("plugin [" + id + "] is no longer a plugin"))); } @@ -288,11 +288,11 @@ public void testRemoveMigratedPluginsWhenNotInstalled() throws Exception { */ public void testRemoveRegularInstalledPluginAndMigratedUninstalledPlugin() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); final MockTerminal terminal = removePlugin(List.of("fake", "repository-s3"), home, randomBoolean()); - assertThat(Files.exists(env.pluginsFile().resolve("fake")), is(false)); + assertThat(Files.exists(env.pluginsDir().resolve("fake")), is(false)); assertThat(terminal.getErrorOutput(), containsString("plugin [repository-s3] is no longer a plugin")); } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java index 2d2336428a0a5..1a09736160956 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java @@ -55,10 +55,10 @@ public void setUp() throws Exception { Path home = createTempDir(); Settings settings = Settings.builder().put("path.home", home).build(); env = TestEnvironment.newEnvironment(settings); - Files.createDirectories(env.binFile()); - Files.createFile(env.binFile().resolve("elasticsearch")); - Files.createDirectories(env.configFile()); - Files.createDirectories(env.pluginsFile()); + Files.createDirectories(env.binDir()); + Files.createFile(env.binDir().resolve("elasticsearch")); + Files.createDirectories(env.configDir()); + Files.createDirectories(env.pluginsDir()); terminal = MockTerminal.create(); action = new SyncPluginsAction(terminal, env); @@ -78,7 +78,7 @@ public void test_ensureNoConfigFile_withoutConfig_doesNothing() throws Exception * then an exception is thrown. 
*/ public void test_ensureNoConfigFile_withConfig_throwsException() throws Exception { - Files.createFile(env.configFile().resolve("elasticsearch-plugins.yml")); + Files.createFile(env.configDir().resolve("elasticsearch-plugins.yml")); final UserException e = expectThrows(UserException.class, () -> SyncPluginsAction.ensureNoConfigFile(env)); assertThat(e.getMessage(), Matchers.matchesPattern("^Plugins config \\[.*] exists.*$")); @@ -354,7 +354,7 @@ private void createPlugin(String name) throws IOException { private void createPlugin(String name, String version) throws IOException { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve(name), + env.pluginsDir().resolve(name), "description", "dummy", "name", diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java index 9430cb598cf02..2ae58040437af 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java @@ -24,7 +24,7 @@ public class KeyStoreLoader implements SecureSettingsLoader { @Override public LoadedSecrets load(Environment environment, Terminal terminal) throws Exception { // See if we have a keystore already present - KeyStoreWrapper secureSettings = KeyStoreWrapper.load(environment.configFile()); + KeyStoreWrapper secureSettings = KeyStoreWrapper.load(environment.configDir()); // If there's no keystore or the keystore has no password, set an empty password var password = (secureSettings == null || secureSettings.hasPassword() == false) ? new SecureString(new char[0]) @@ -35,7 +35,7 @@ public LoadedSecrets load(Environment environment, Terminal terminal) throws Exc @Override public SecureSettings bootstrap(Environment environment, SecureString password) throws Exception { - return KeyStoreWrapper.bootstrap(environment.configFile(), () -> password); + return KeyStoreWrapper.bootstrap(environment.configDir(), () -> password); } @Override diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 22b62972befe4..be454350133eb 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -150,7 +150,7 @@ private void validateConfig(OptionSet options, Environment env) throws UserExcep throw new UserException(ExitCodes.USAGE, "Multiple --enrollment-token parameters are not allowed"); } - Path log4jConfig = env.configFile().resolve("log4j2.properties"); + Path log4jConfig = env.configDir().resolve("log4j2.properties"); if (Files.exists(log4jConfig) == false) { throw new UserException(ExitCodes.CONFIG, "Missing logging config file at " + log4jConfig); } @@ -239,7 +239,7 @@ private ServerArgs createArgs(OptionSet options, Environment env, SecureSettings } validatePidFile(pidFile); } - return new ServerArgs(daemonize, quiet, pidFile, secrets, env.settings(), env.configFile(), env.logsFile()); + return new ServerArgs(daemonize, quiet, pidFile, secrets, env.settings(), env.configDir(), env.logsDir()); } @Override diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java 
b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index 66ee712fcce95..2854d76c110d1 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -43,8 +43,8 @@ class WindowsServiceDaemon extends EnvironmentAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { // the Windows service daemon doesn't support secure settings implementations other than the keystore - try (var loadedSecrets = KeyStoreWrapper.bootstrap(env.configFile(), () -> new SecureString(new char[0]))) { - var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configFile(), env.logsFile()); + try (var loadedSecrets = KeyStoreWrapper.bootstrap(env.configDir(), () -> new SecureString(new char[0]))) { + var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configDir(), env.logsDir()); var tempDir = ServerProcessUtils.setupTempDir(processInfo); var jvmOptions = JvmOptionsParser.determineJvmOptions(args, processInfo, tempDir, new MachineDependentHeap()); var serverProcessBuilder = new ServerProcessBuilder().withTerminal(terminal) diff --git a/docs/changelog/117176.yaml b/docs/changelog/117176.yaml new file mode 100644 index 0000000000000..26e0d3635bc9e --- /dev/null +++ b/docs/changelog/117176.yaml @@ -0,0 +1,5 @@ +pr: 117176 +summary: Integrate IBM watsonx to Inference API for re-ranking task +area: Experiences +type: enhancement +issues: [] diff --git a/docs/changelog/117201.yaml b/docs/changelog/117201.yaml deleted file mode 100644 index f8a2be35c70a3..0000000000000 --- a/docs/changelog/117201.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117201 -summary: "Use `field_caps` native nested fields filtering" -area: ES|QL -type: bug -issues: - - 117054 diff --git a/docs/changelog/117504.yaml b/docs/changelog/117504.yaml deleted file mode 100644 index 91a62c61b88f2..0000000000000 --- a/docs/changelog/117504.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117504 -summary: Fix NPE caused by race condition in async search when minimise round trips - is true -area: Search -type: bug -issues: [] diff --git a/docs/changelog/119748.yaml b/docs/changelog/119748.yaml deleted file mode 100644 index 8b29fb7c1a647..0000000000000 --- a/docs/changelog/119748.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 119748 -summary: Issue S3 web identity token refresh call with sufficient permissions -area: Snapshot/Restore -type: bug -issues: - - 119747 diff --git a/docs/changelog/119886.yaml b/docs/changelog/119886.yaml new file mode 100644 index 0000000000000..8b866637ddc4a --- /dev/null +++ b/docs/changelog/119886.yaml @@ -0,0 +1,5 @@ +pr: 119886 +summary: Initial support for unmapped fields +area: ES|QL +type: feature +issues: [] diff --git a/docs/changelog/120014.yaml b/docs/changelog/120014.yaml deleted file mode 100644 index bef1f3ba49939..0000000000000 --- a/docs/changelog/120014.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120014 -summary: Fix potential file leak in ES816BinaryQuantizedVectorsWriter -area: Search -type: bug -issues: - - 119981 diff --git a/docs/changelog/120062.yaml b/docs/changelog/120062.yaml deleted file mode 100644 index 42e8d97f17444..0000000000000 --- a/docs/changelog/120062.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120062 -summary: Update Text Similarity Reranker to 
Properly Handle Aliases -area: Ranking -type: bug -issues: - - 119617 diff --git a/docs/changelog/120133.yaml b/docs/changelog/120133.yaml deleted file mode 100644 index 4ec88267a1bf8..0000000000000 --- a/docs/changelog/120133.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120133 -summary: Use approximation to advance matched queries -area: Search -type: bug -issues: - - 120130 diff --git a/docs/changelog/120256.yaml b/docs/changelog/120256.yaml deleted file mode 100644 index c4ee5ab1705c5..0000000000000 --- a/docs/changelog/120256.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 120256 -summary: Improve memory aspects of enrich cache -area: Ingest Node -type: enhancement -issues: - - 96050 - - 120021 diff --git a/docs/changelog/120355.yaml b/docs/changelog/120355.yaml new file mode 100644 index 0000000000000..fd335fe1f5892 --- /dev/null +++ b/docs/changelog/120355.yaml @@ -0,0 +1,5 @@ +pr: 120355 +summary: Ensure cluster string could be quoted +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/120483.yaml b/docs/changelog/120483.yaml deleted file mode 100644 index 20da3b9ab4e8d..0000000000000 --- a/docs/changelog/120483.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120483 -summary: Fix NPE on disabled API auth key cache -area: Authentication -type: bug -issues: [] diff --git a/docs/changelog/120590.yaml b/docs/changelog/120590.yaml deleted file mode 100644 index 56abe44fbce1e..0000000000000 --- a/docs/changelog/120590.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120590 -summary: Map `scope.name` as a dimension -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/120717.yaml b/docs/changelog/120717.yaml deleted file mode 100644 index c5609e7e3df5f..0000000000000 --- a/docs/changelog/120717.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120717 -summary: Fix LTR rescorer throws 'local model reference is null' on multi-shards index when explained is enabled -area: Ranking -type: bug -issues: - - 120739 diff --git a/docs/changelog/120781.yaml b/docs/changelog/120781.yaml deleted file mode 100644 index 67c7d90528d6e..0000000000000 --- a/docs/changelog/120781.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120781 -summary: Add back `keep_alive` to `async_search.submit` rest-api-spec -area: Search -type: bug -issues: [] diff --git a/docs/changelog/120809.yaml b/docs/changelog/120809.yaml deleted file mode 100644 index 30a3736dc93a4..0000000000000 --- a/docs/changelog/120809.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120809 -summary: LTR sometines throw `NullPointerException:` Cannot read field "approximation" - because "top" is null -area: Ranking -type: bug -issues: [] diff --git a/docs/changelog/120852.yaml b/docs/changelog/120852.yaml new file mode 100644 index 0000000000000..90a05aa860f3f --- /dev/null +++ b/docs/changelog/120852.yaml @@ -0,0 +1,5 @@ +pr: 120852 +summary: Correct line and column numbers of missing named parameters +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/120930.yaml b/docs/changelog/120930.yaml deleted file mode 100644 index 376edb7632a0b..0000000000000 --- a/docs/changelog/120930.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120930 -summary: Normalize negative scores for `text_similarity_reranker` retriever -area: Ranking -type: bug -issues: - - 120201 diff --git a/docs/changelog/121109.yaml b/docs/changelog/121109.yaml new file mode 100644 index 0000000000000..6492eccbf975a --- /dev/null +++ b/docs/changelog/121109.yaml @@ -0,0 +1,6 @@ +pr: 121109 +summary: Fix propagation of dynamic mapping parameter when applying `copy_to` +area: Mapping +type: bug +issues: + - 
113049 diff --git a/docs/changelog/121119.yaml b/docs/changelog/121119.yaml deleted file mode 100644 index ad05011affbb5..0000000000000 --- a/docs/changelog/121119.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121119 -summary: Revert "Reduce Data Loss in System Indices Migration" -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/121156.yaml b/docs/changelog/121156.yaml new file mode 100644 index 0000000000000..8f9c5ccdb38c9 --- /dev/null +++ b/docs/changelog/121156.yaml @@ -0,0 +1,5 @@ +pr: 121156 +summary: Remove redundant sorts from execution plan +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/121193.yaml b/docs/changelog/121193.yaml new file mode 100644 index 0000000000000..af45b0656265f --- /dev/null +++ b/docs/changelog/121193.yaml @@ -0,0 +1,18 @@ +pr: 121193 +summary: Enable LOOKUP JOIN in non-snapshot builds +area: ES|QL +type: enhancement +issues: + - 121185 +highlight: + title: Enable LOOKUP JOIN in non-snapshot builds + body: |- + This effectively releases LOOKUP JOIN into tech preview. Docs will + follow in a separate PR. + + - Enable the lexing/grammar for LOOKUP JOIN in non-snapshot builds. + - Remove the grammar for the unsupported `| JOIN ...` command (without `LOOKUP` as first keyword). The way the lexer modes work, otherwise we'd also have to enable `| JOIN ...` syntax on non-snapshot builds and would have to add additional validation to provide appropriate error messages. + - Remove grammar for `LOOKUP JOIN index AS ...` because qualifiers are not yet supported. Otherwise we'd have to put in additional validation as well to prevent such queries. + + Also fix https://github.com/elastic/elasticsearch/issues/121185 + notable: true diff --git a/docs/changelog/121196.yaml b/docs/changelog/121196.yaml new file mode 100644 index 0000000000000..f5168b1ea436c --- /dev/null +++ b/docs/changelog/121196.yaml @@ -0,0 +1,5 @@ +pr: 121196 +summary: Fix geoip databases index access after system feature migration +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/121325.yaml b/docs/changelog/121325.yaml new file mode 100644 index 0000000000000..9a9edc67d19fa --- /dev/null +++ b/docs/changelog/121325.yaml @@ -0,0 +1,5 @@ +pr: 121325 +summary: '`ReindexDataStreamIndex` bug in assertion caused by reference equality' +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/121396.yaml b/docs/changelog/121396.yaml new file mode 100644 index 0000000000000..1d77a8fbb0079 --- /dev/null +++ b/docs/changelog/121396.yaml @@ -0,0 +1,5 @@ +pr: 121396 +summary: Change format for Unified Chat +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/121552.yaml b/docs/changelog/121552.yaml new file mode 100644 index 0000000000000..c12e7615d1245 --- /dev/null +++ b/docs/changelog/121552.yaml @@ -0,0 +1,5 @@ +pr: 121552 +summary: Fix a bug in TOP +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/121556.yaml b/docs/changelog/121556.yaml new file mode 100644 index 0000000000000..d5fad5f37e5c8 --- /dev/null +++ b/docs/changelog/121556.yaml @@ -0,0 +1,5 @@ +pr: 121556 +summary: Enable New Semantic Text Format Only On Newly Created Indices +area: Mapping +type: bug +issues: [] diff --git a/docs/changelog/121568.yaml b/docs/changelog/121568.yaml new file mode 100644 index 0000000000000..80d769967dc2d --- /dev/null +++ b/docs/changelog/121568.yaml @@ -0,0 +1,6 @@ +pr: 121568 +summary: Analyze API to return 400 for wrong custom analyzer +area: Analysis +type: bug +issues: + - 121443 diff --git 
a/docs/changelog/121720.yaml b/docs/changelog/121720.yaml new file mode 100644 index 0000000000000..40dcfaeb770dd --- /dev/null +++ b/docs/changelog/121720.yaml @@ -0,0 +1,5 @@ +pr: 121720 +summary: Skip fetching _inference_fields field in legacy semantic_text format +area: Search +type: bug +issues: [] diff --git a/docs/changelog/121727.yaml b/docs/changelog/121727.yaml new file mode 100644 index 0000000000000..80c0a5eae4335 --- /dev/null +++ b/docs/changelog/121727.yaml @@ -0,0 +1,7 @@ +pr: 121727 +summary: Copy metrics and `default_metric` properties when downsampling `aggregate_metric_double` +area: Downsampling +type: bug +issues: + - 119696 + - 96076 diff --git a/docs/changelog/121731.yaml b/docs/changelog/121731.yaml new file mode 100644 index 0000000000000..9fc117610a490 --- /dev/null +++ b/docs/changelog/121731.yaml @@ -0,0 +1,21 @@ +pr: 121731 +summary: Remove TLSv1.1 from default protocols +area: TLS +type: breaking +issues: [] +breaking: + title: Remove TLSv1.1 from default protocols + area: Cluster and node setting + details: "TLSv1.1 is no longer enabled by default. Prior to version 9.0, Elasticsearch\ \ would attempt to enable TLSv1.1 if the JDK supported it. In most cases, including\ \ all cases where Elasticsearch 8 was running with the bundled JDK, the JDK would\ \ not support TLSv1.1, so that protocol would not be available in Elasticsearch.\ \ However, if Elasticsearch was running on an old JDK or a JDK that has been\ \ reconfigured to support TLSv1.1, then the protocol would automatically be available\ \ within Elasticsearch. As of Elasticsearch 9.0, this is no longer true. If you\ \ wish to enable TLSv1.1 then you must enable it within the JDK and also enable\ \ it within Elasticsearch by using the `ssl.supported_protocols` setting." + impact: "Most users will not be impacted.
If your Elasticsearch 8 cluster was using\ \ a custom JDK and you relied on TLSv1.1, then you will need to explicitly enable\ \ TLSv1.1 within Elasticsearch (as well as enabling it within your JDK)" + notable: false diff --git a/docs/changelog/121821.yaml b/docs/changelog/121821.yaml new file mode 100644 index 0000000000000..1e8edd09dcd9a --- /dev/null +++ b/docs/changelog/121821.yaml @@ -0,0 +1,6 @@ +pr: 121821 +summary: Fix get all inference endpoints not returning multiple endpoints sharing model + deployment +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/121843.yaml b/docs/changelog/121843.yaml new file mode 100644 index 0000000000000..85b19e317a09c --- /dev/null +++ b/docs/changelog/121843.yaml @@ -0,0 +1,6 @@ +pr: 121843 +summary: Fix async stop sometimes not properly collecting result +area: ES|QL +type: bug +issues: + - 121249 diff --git a/docs/changelog/121850.yaml b/docs/changelog/121850.yaml new file mode 100644 index 0000000000000..b6c5ba2e03fe8 --- /dev/null +++ b/docs/changelog/121850.yaml @@ -0,0 +1,5 @@ +pr: 121850 +summary: Take named parameters for identifier and pattern out of snapshot +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/121911.yaml b/docs/changelog/121911.yaml new file mode 100644 index 0000000000000..d30e96abc21c2 --- /dev/null +++ b/docs/changelog/121911.yaml @@ -0,0 +1,5 @@ +pr: 121911 +summary: Fix ENRICH validation for use of wildcards +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/122011.yaml b/docs/changelog/122011.yaml new file mode 100644 index 0000000000000..2cc9a37b01616 --- /dev/null +++ b/docs/changelog/122011.yaml @@ -0,0 +1,5 @@ +pr: 122011 +summary: Fix JOIN command validation (not supported) +area: EQL +type: bug +issues: [] diff --git a/docs/changelog/122074.yaml b/docs/changelog/122074.yaml new file mode 100644 index 0000000000000..21e171d0eb5e2 --- /dev/null +++ b/docs/changelog/122074.yaml @@ -0,0 +1,8 @@ +pr: 122074 +summary: When the Transform is configured to write to an alias as its destination index + and the delete_dest_index parameter is set to true, the Delete API will now + delete the write index backing the alias +area: Transform +type: bug +issues: + - 121913 diff --git a/docs/changelog/122199.yaml b/docs/changelog/122199.yaml new file mode 100644 index 0000000000000..172ae900bdabb --- /dev/null +++ b/docs/changelog/122199.yaml @@ -0,0 +1,5 @@ +pr: 122199 +summary: Fix issues that prevent using search-only snapshots for indices that use index sorting. This includes Logsdb and time series indices.
+area: Logs +type: bug +issues: [] diff --git a/docs/changelog/122246.yaml b/docs/changelog/122246.yaml new file mode 100644 index 0000000000000..c1e90f3423117 --- /dev/null +++ b/docs/changelog/122246.yaml @@ -0,0 +1,5 @@ +pr: 122246 +summary: Ensure removal of index blocks does not leave key with null value +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/122257.yaml b/docs/changelog/122257.yaml new file mode 100644 index 0000000000000..24078170eb6b6 --- /dev/null +++ b/docs/changelog/122257.yaml @@ -0,0 +1,5 @@ +pr: 122257 +summary: Revive inlinestats +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/122278.yaml b/docs/changelog/122278.yaml new file mode 100644 index 0000000000000..529d17c729c7f --- /dev/null +++ b/docs/changelog/122278.yaml @@ -0,0 +1,5 @@ +pr: 122278 +summary: Fix serialising the inference update request +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/122357.yaml b/docs/changelog/122357.yaml new file mode 100644 index 0000000000000..7648002c9356f --- /dev/null +++ b/docs/changelog/122357.yaml @@ -0,0 +1,6 @@ +pr: 122357 +summary: Handle search timeout in `SuggestPhase` +area: Search +type: bug +issues: + - 122186 diff --git a/docs/changelog/122417.yaml b/docs/changelog/122417.yaml new file mode 100644 index 0000000000000..f9e33df2a523c --- /dev/null +++ b/docs/changelog/122417.yaml @@ -0,0 +1,6 @@ +pr: 122417 +summary: Fix listener leak in exchange service +area: ES|QL +type: bug +issues: + - 122271 diff --git a/docs/changelog/122425.yaml b/docs/changelog/122425.yaml new file mode 100644 index 0000000000000..a0e590dcdc36c --- /dev/null +++ b/docs/changelog/122425.yaml @@ -0,0 +1,5 @@ +pr: 122425 +summary: Fix synthetic source bug that would mishandle nested `dense_vector` fields +area: Mapping +type: bug +issues: [] diff --git a/docs/changelog/122427.yaml b/docs/changelog/122427.yaml new file mode 100644 index 0000000000000..2444a0ec894ab --- /dev/null +++ b/docs/changelog/122427.yaml @@ -0,0 +1,5 @@ +pr: 122427 +summary: Improve size limiting string message +area: Infra/Core +type: enhancement +issues: [] diff --git a/docs/changelog/122431.yaml b/docs/changelog/122431.yaml new file mode 100644 index 0000000000000..cb72dabc40aa1 --- /dev/null +++ b/docs/changelog/122431.yaml @@ -0,0 +1,5 @@ +pr: 122431 +summary: Upgrade AWS SDK to v1.12.746 +area: Snapshot/Restore +type: upgrade +issues: [] diff --git a/docs/changelog/122496.yaml b/docs/changelog/122496.yaml new file mode 100644 index 0000000000000..37ce70977112b --- /dev/null +++ b/docs/changelog/122496.yaml @@ -0,0 +1,5 @@ +pr: 122496 +summary: Deduplicate `IngestStats` and `IngestStats.Stats` identity records when deserializing +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/122575.yaml b/docs/changelog/122575.yaml new file mode 100644 index 0000000000000..af72c81b9da8c --- /dev/null +++ b/docs/changelog/122575.yaml @@ -0,0 +1,6 @@ +pr: 122575 +summary: Return an empty suggestion when suggest phase times out +area: Suggesters +type: bug +issues: + - 122548 diff --git a/docs/changelog/122610.yaml b/docs/changelog/122610.yaml new file mode 100644 index 0000000000000..57977e703c06b --- /dev/null +++ b/docs/changelog/122610.yaml @@ -0,0 +1,5 @@ +pr: 122610 +summary: Canonicalize processor names and types in `IngestStats` +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/122640.yaml b/docs/changelog/122640.yaml new file mode 100644 index 0000000000000..f46fc55fc5396 --- /dev/null +++ b/docs/changelog/122640.yaml @@ 
-0,0 +1,5 @@ +pr: 122640 +summary: Fix redact processor arraycopy bug +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/122653.yaml b/docs/changelog/122653.yaml new file mode 100644 index 0000000000000..e36eb8debf10d --- /dev/null +++ b/docs/changelog/122653.yaml @@ -0,0 +1,6 @@ +pr: 122653 +summary: Knn vector rescoring to sort score docs +area: Vector Search +type: bug +issues: + - 119711 diff --git a/docs/internal/Versioning.md b/docs/internal/Versioning.md index f0f730f618259..474278e873922 100644 --- a/docs/internal/Versioning.md +++ b/docs/internal/Versioning.md @@ -35,19 +35,19 @@ Every change to the transport protocol is represented by a new transport version higher than all previous transport versions, which then becomes the highest version recognized by that build of Elasticsearch. The version ids are stored as constants in the `TransportVersions` class. -Each id has a standard pattern `M_NNN_SS_P`, where: +Each id has a standard pattern `M_NNN_S_PP`, where: * `M` is the major version * `NNN` is an incrementing id -* `SS` is used in subsidiary repos amending the default transport protocol -* `P` is used for patches and backports +* `S` is used in subsidiary repos amending the default transport protocol +* `PP` is used for patches and backports When you make a change to the serialization form of any object, you need to create a new sequential constant in `TransportVersions`, introduced in the same PR that adds the change, that increments the `NNN` component from the previous highest version, with other components set to zero. -For example, if the previous version number is `8_413_00_1`, -the next version number should be `8_414_00_0`. +For example, if the previous version number is `8_413_0_01`, +the next version number should be `8_414_0_00`. Once you have defined your constant, you then need to use it in serialization code. If the transport version is at or above the new id, @@ -166,7 +166,7 @@ also has that change, and knows about the patch backport ids and what they mean. Index version is a single incrementing version number for the index data format, metadata, and associated mappings. It is declared the same way as the -transport version - with the pattern `M_NNN_SS_P`, for the major version, version id, +transport version - with the pattern `M_NNN_S_PP`, for the major version, version id, subsidiary version id, and patch number respectively. Index version is stored in index metadata when an index is created, diff --git a/docs/reference/esql/functions/kibana/definition/abs.json b/docs/reference/esql/functions/kibana/definition/abs.json index e875d8200715d..888ebf6386401 100644 --- a/docs/reference/esql/functions/kibana/definition/abs.json +++ b/docs/reference/esql/functions/kibana/definition/abs.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "abs", "description" : "Returns the absolute value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/acos.json b/docs/reference/esql/functions/kibana/definition/acos.json index f86b1a5552a6f..5b6a5b023f48d 100644 --- a/docs/reference/esql/functions/kibana/definition/acos.json +++ b/docs/reference/esql/functions/kibana/definition/acos.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "acos", "description" : "Returns the arccosine of `n` as an angle, expressed in radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/asin.json b/docs/reference/esql/functions/kibana/definition/asin.json index 4063ffcba6140..293a66a21ab28 100644 --- a/docs/reference/esql/functions/kibana/definition/asin.json +++ b/docs/reference/esql/functions/kibana/definition/asin.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "asin", "description" : "Returns the arcsine of the input\nnumeric expression as an angle, expressed in radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/atan.json b/docs/reference/esql/functions/kibana/definition/atan.json index 1b2a3a1860bc2..afa380ca1d7fa 100644 --- a/docs/reference/esql/functions/kibana/definition/atan.json +++ b/docs/reference/esql/functions/kibana/definition/atan.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "atan", "description" : "Returns the arctangent of the input\nnumeric expression as an angle, expressed in radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/atan2.json b/docs/reference/esql/functions/kibana/definition/atan2.json index 9b67c07d8d73b..8e12198cb60ee 100644 --- a/docs/reference/esql/functions/kibana/definition/atan2.json +++ b/docs/reference/esql/functions/kibana/definition/atan2.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "atan2", "description" : "The angle between the positive x-axis and the ray from the\norigin to the point (x , y) in the Cartesian plane, expressed in radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/bit_length.json b/docs/reference/esql/functions/kibana/definition/bit_length.json index 0c75b76cdbbfb..25a032044ec9c 100644 --- a/docs/reference/esql/functions/kibana/definition/bit_length.json +++ b/docs/reference/esql/functions/kibana/definition/bit_length.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "bit_length", "description" : "Returns the bit length of a string.", "note" : "All strings are in UTF-8, so a single character can use multiple bytes.", diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json index f9c7f2f27d6f9..990f102bac16f 100644 --- a/docs/reference/esql/functions/kibana/definition/bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "grouping", "name" : "bucket", "description" : "Creates groups of values - buckets - out of a datetime or numeric input.\nThe size of the buckets can either be provided directly, or chosen based on a recommended count and values range.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/byte_length.json b/docs/reference/esql/functions/kibana/definition/byte_length.json index 60f439b9d8133..6d1a91813221b 100644 --- a/docs/reference/esql/functions/kibana/definition/byte_length.json +++ b/docs/reference/esql/functions/kibana/definition/byte_length.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "byte_length", "description" : "Returns the byte length of a string.", "note" : "All strings are in UTF-8, so a single character can use multiple bytes.", diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index 51693d9d30660..4e2d4187712e3 100644 --- a/docs/reference/esql/functions/kibana/definition/case.json +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "case", "description" : "Accepts pairs of conditions and values. The function returns the value that\nbelongs to the first condition that evaluates to `true`.\n\nIf the number of arguments is odd, the last argument is the default value which\nis returned when no condition matches. If the number of arguments is even, and\nno condition matches, the function returns `null`.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/categorize.json b/docs/reference/esql/functions/kibana/definition/categorize.json index ed5fa15232b85..1b7f20405756b 100644 --- a/docs/reference/esql/functions/kibana/definition/categorize.json +++ b/docs/reference/esql/functions/kibana/definition/categorize.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "grouping", "name" : "categorize", "description" : "Groups text messages into categories of similarly formatted text values.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/cbrt.json b/docs/reference/esql/functions/kibana/definition/cbrt.json index ce23551f75e5f..f146a864929a8 100644 --- a/docs/reference/esql/functions/kibana/definition/cbrt.json +++ b/docs/reference/esql/functions/kibana/definition/cbrt.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "cbrt", "description" : "Returns the cube root of a number. 
The input can be any numeric value, the return value is always a double.\nCube roots of infinities are null.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ceil.json b/docs/reference/esql/functions/kibana/definition/ceil.json index 15e7bafd1fa5d..4a9e24e9094e8 100644 --- a/docs/reference/esql/functions/kibana/definition/ceil.json +++ b/docs/reference/esql/functions/kibana/definition/ceil.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "ceil", "description" : "Round a number up to the nearest integer.", "note" : "This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to Math.ceil.", diff --git a/docs/reference/esql/functions/kibana/definition/cidr_match.json b/docs/reference/esql/functions/kibana/definition/cidr_match.json index dc5d6773f69e6..9e988623c0fd6 100644 --- a/docs/reference/esql/functions/kibana/definition/cidr_match.json +++ b/docs/reference/esql/functions/kibana/definition/cidr_match.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "cidr_match", "description" : "Returns true if the provided IP is contained in one of the provided CIDR blocks.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index c929323397c9b..a507aea3fd0d9 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "coalesce", "description" : "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/concat.json b/docs/reference/esql/functions/kibana/definition/concat.json index d3e9f0852bc13..b41ebba40b58e 100644 --- a/docs/reference/esql/functions/kibana/definition/concat.json +++ b/docs/reference/esql/functions/kibana/definition/concat.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "concat", "description" : "Concatenates two or more strings.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/cos.json b/docs/reference/esql/functions/kibana/definition/cos.json index 1b9ca2f1d4867..8922dd6db555c 100644 --- a/docs/reference/esql/functions/kibana/definition/cos.json +++ b/docs/reference/esql/functions/kibana/definition/cos.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "cos", "description" : "Returns the cosine of an angle.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/cosh.json b/docs/reference/esql/functions/kibana/definition/cosh.json index 77d9853d34e44..458f4ecf04554 100644 --- a/docs/reference/esql/functions/kibana/definition/cosh.json +++ b/docs/reference/esql/functions/kibana/definition/cosh.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "cosh", "description" : "Returns the hyperbolic cosine of a number.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_diff.json b/docs/reference/esql/functions/kibana/definition/date_diff.json index 2738ec8390226..dab7f7e0ef2e3 100644 --- a/docs/reference/esql/functions/kibana/definition/date_diff.json +++ b/docs/reference/esql/functions/kibana/definition/date_diff.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_diff", "description" : "Subtracts the `startTimestamp` from the `endTimestamp` and returns the difference in multiples of `unit`.\nIf `startTimestamp` is later than the `endTimestamp`, negative values are returned.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_extract.json b/docs/reference/esql/functions/kibana/definition/date_extract.json index 0ababf80d9137..1778d99dcf342 100644 --- a/docs/reference/esql/functions/kibana/definition/date_extract.json +++ b/docs/reference/esql/functions/kibana/definition/date_extract.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_extract", "description" : "Extracts parts of a date, like year, month, day, hour.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_format.json b/docs/reference/esql/functions/kibana/definition/date_format.json index f6f48e9df82b0..633e20444d771 100644 --- a/docs/reference/esql/functions/kibana/definition/date_format.json +++ b/docs/reference/esql/functions/kibana/definition/date_format.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_format", "description" : "Returns a string representation of a date, in the provided format.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_parse.json b/docs/reference/esql/functions/kibana/definition/date_parse.json index 77247b332c6f6..8e2f15efce829 100644 --- a/docs/reference/esql/functions/kibana/definition/date_parse.json +++ b/docs/reference/esql/functions/kibana/definition/date_parse.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_parse", "description" : "Returns a date by parsing the second argument using the format specified in the first argument.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json index cdda984a0ce7e..023298d0f8b53 100644 --- a/docs/reference/esql/functions/kibana/definition/date_trunc.json +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_trunc", "description" : "Rounds down a date to the closest interval.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/e.json b/docs/reference/esql/functions/kibana/definition/e.json index e5941b50c00a6..15bcb0572e2a8 100644 --- a/docs/reference/esql/functions/kibana/definition/e.json +++ b/docs/reference/esql/functions/kibana/definition/e.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "e", "description" : "Returns Euler's number.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ends_with.json b/docs/reference/esql/functions/kibana/definition/ends_with.json index 5c2f721c33442..eb2e0268214ad 100644 --- a/docs/reference/esql/functions/kibana/definition/ends_with.json +++ b/docs/reference/esql/functions/kibana/definition/ends_with.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "ends_with", "description" : "Returns a boolean that indicates whether a keyword string ends with another string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/exp.json b/docs/reference/esql/functions/kibana/definition/exp.json index 4df40ce467d80..186cde72c7999 100644 --- a/docs/reference/esql/functions/kibana/definition/exp.json +++ b/docs/reference/esql/functions/kibana/definition/exp.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "exp", "description" : "Returns the value of e raised to the power of the given number.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/floor.json b/docs/reference/esql/functions/kibana/definition/floor.json index 1d1c961bfe2b2..6e2deb3dce567 100644 --- a/docs/reference/esql/functions/kibana/definition/floor.json +++ b/docs/reference/esql/functions/kibana/definition/floor.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "floor", "description" : "Round a number down to the nearest integer.", "note" : "This is a noop for `long` (including unsigned) and `integer`.\nFor `double` this picks the closest `double` value to the integer\nsimilar to Math.floor.", diff --git a/docs/reference/esql/functions/kibana/definition/from_base64.json b/docs/reference/esql/functions/kibana/definition/from_base64.json index 7580b817031a5..a94f4a338cb08 100644 --- a/docs/reference/esql/functions/kibana/definition/from_base64.json +++ b/docs/reference/esql/functions/kibana/definition/from_base64.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "from_base64", "description" : "Decode a base64 string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index 077100317dfca..af78ca3d02b5d 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "greatest", "description" : "Returns the maximum value from multiple columns. This is similar to <>\nexcept it is intended to run on multiple columns at once.", "note" : "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`.", diff --git a/docs/reference/esql/functions/kibana/definition/hash.json b/docs/reference/esql/functions/kibana/definition/hash.json index dbf4a2542afc5..93ee99bf3e9c9 100644 --- a/docs/reference/esql/functions/kibana/definition/hash.json +++ b/docs/reference/esql/functions/kibana/definition/hash.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "hash", "description" : "Computes the hash of the input using various algorithms such as MD5, SHA, SHA-224, SHA-256, SHA-384, SHA-512.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/hypot.json b/docs/reference/esql/functions/kibana/definition/hypot.json index 06971f07a3585..a71f318a286b1 100644 --- a/docs/reference/esql/functions/kibana/definition/hypot.json +++ b/docs/reference/esql/functions/kibana/definition/hypot.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "hypot", "description" : "Returns the hypotenuse of two numbers. The input can be any numeric values, the return value is always a double.\nHypotenuses of infinities are null.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ip_prefix.json b/docs/reference/esql/functions/kibana/definition/ip_prefix.json index bbf0702b325e9..0d039a784a7bb 100644 --- a/docs/reference/esql/functions/kibana/definition/ip_prefix.json +++ b/docs/reference/esql/functions/kibana/definition/ip_prefix.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "ip_prefix", "description" : "Truncates an IP to a given prefix length.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/kql.json b/docs/reference/esql/functions/kibana/definition/kql.json index 440786ec63e77..ff0f854f20fa5 100644 --- a/docs/reference/esql/functions/kibana/definition/kql.json +++ b/docs/reference/esql/functions/kibana/definition/kql.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "kql", "description" : "Performs a KQL query. Returns true if the provided KQL query string matches the row.", "signatures" : [ @@ -30,7 +30,7 @@ } ], "examples" : [ - "FROM books \n| WHERE KQL(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" + "FROM books \n| WHERE KQL(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5" ], "preview" : true, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 18ec65c60f475..30f27b9be0b07 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "least", "description" : "Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/left.json b/docs/reference/esql/functions/kibana/definition/left.json index 9e226d6cde59b..b367dc9655ec5 100644 --- a/docs/reference/esql/functions/kibana/definition/left.json +++ b/docs/reference/esql/functions/kibana/definition/left.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "left", "description" : "Returns the substring that extracts 'length' chars from 'string' starting from the left.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/length.json b/docs/reference/esql/functions/kibana/definition/length.json index bc26acde744f5..c2eca63e4e1dd 100644 --- a/docs/reference/esql/functions/kibana/definition/length.json +++ b/docs/reference/esql/functions/kibana/definition/length.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "length", "description" : "Returns the character length of a string.", "note" : "All strings are in UTF-8, so a single character can use multiple bytes.", diff --git a/docs/reference/esql/functions/kibana/definition/locate.json b/docs/reference/esql/functions/kibana/definition/locate.json index 6278cb5e33a00..0322ec1945aa7 100644 --- a/docs/reference/esql/functions/kibana/definition/locate.json +++ b/docs/reference/esql/functions/kibana/definition/locate.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "locate", "description" : "Returns an integer that indicates the position of a keyword substring within another string.\nReturns `0` if the substring cannot be found.\nNote that string positions start from `1`.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/log.json b/docs/reference/esql/functions/kibana/definition/log.json index 369503c1b8e4a..c75349a89630d 100644 --- a/docs/reference/esql/functions/kibana/definition/log.json +++ b/docs/reference/esql/functions/kibana/definition/log.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "log", "description" : "Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double.\n\nLogs of zero, negative numbers, and base of one return `null` as well as a warning.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/log10.json b/docs/reference/esql/functions/kibana/definition/log10.json index a958e5caec8a8..6cc089ae49c4f 100644 --- a/docs/reference/esql/functions/kibana/definition/log10.json +++ b/docs/reference/esql/functions/kibana/definition/log10.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "log10", "description" : "Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double.\n\nLogs of 0 and negative numbers return `null` as well as a warning.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ltrim.json b/docs/reference/esql/functions/kibana/definition/ltrim.json index 6d992b9db7b2c..f0a58dd4f9aea 100644 --- a/docs/reference/esql/functions/kibana/definition/ltrim.json +++ b/docs/reference/esql/functions/kibana/definition/ltrim.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "ltrim", "description" : "Removes leading whitespaces from a string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json index 23a81ba34e387..4844382fe04cf 100644 --- a/docs/reference/esql/functions/kibana/definition/match.json +++ b/docs/reference/esql/functions/kibana/definition/match.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "match", "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on fields from the text family like <> and <>,\nas well as other field types like keyword, boolean, dates, and numeric types.\n\nMatch can use <> to specify additional options for the match query.\nAll <> are supported.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", "signatures" : [ @@ -20,10 +20,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -45,10 +45,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -70,10 +70,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -95,10 +95,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -120,10 +120,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -145,10 +145,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -170,10 +170,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -195,10 +195,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -220,10 +220,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -245,10 +245,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -270,10 +270,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -295,10 +295,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -320,10 +320,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
         }
       ],
       "variadic" : false,
@@ -345,10 +345,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -370,10 +370,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -395,10 +395,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -420,10 +420,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -445,10 +445,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -470,10 +470,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -495,10 +495,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -520,10 +520,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -545,10 +545,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -570,10 +570,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -595,10 +595,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -620,10 +620,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -645,10 +645,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -670,10 +670,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -695,10 +695,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -720,10 +720,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -731,7 +731,7 @@
     }
   ],
   "examples" : [
-    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;",
+    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5",
     "FROM books \n| WHERE MATCH(title, \"Hobbit Back Again\", {\"operator\": \"AND\"})\n| KEEP title;"
   ],
   "preview" : true,
diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json
index a67c6b0e45c4a..98f1a8d73d35e 100644
--- a/docs/reference/esql/functions/kibana/definition/match_operator.json
+++ b/docs/reference/esql/functions/kibana/definition/match_operator.json
@@ -529,7 +529,7 @@
     }
   ],
   "examples" : [
-    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
+    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5"
   ],
   "preview" : true,
   "snapshot_only" : false
diff --git a/docs/reference/esql/functions/kibana/definition/md5.json b/docs/reference/esql/functions/kibana/definition/md5.json
index 4d3a88e123ff4..b631f7816cb5f 100644
--- a/docs/reference/esql/functions/kibana/definition/md5.json
+++ b/docs/reference/esql/functions/kibana/definition/md5.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "md5",
   "description" : "Computes the MD5 hash of the input.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_append.json b/docs/reference/esql/functions/kibana/definition/mv_append.json
index 043625d9ea1e7..7cbcc678464c7 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_append.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_append.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_append",
   "description" : "Concatenates values of two multi-value fields.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_avg.json b/docs/reference/esql/functions/kibana/definition/mv_avg.json
index a25d38c909e66..65a32cba133ef 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_avg.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_avg.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_avg",
   "description" : "Converts a multivalued field into a single valued field containing the average of all of the values.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_concat.json b/docs/reference/esql/functions/kibana/definition/mv_concat.json
index 88b1107a9f401..6855525abfba5 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_concat.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_concat.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_concat",
   "description" : "Converts a multivalued string expression into a single valued column containing the concatenation of all values separated by a delimiter.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json
index 90ace2525f710..b82b7b382409d 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_count.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_count.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_count",
   "description" : "Converts a multivalued expression into a single valued column containing a count of the number of values.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
index 2fb5b9c61727f..fbce83189ef2b 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_dedupe",
   "description" : "Remove duplicate values from a multivalued field.",
   "note" : "`MV_DEDUPE` may, but won't always, sort the values in the column.",
diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json
index 552f568c9b171..32525a7c124f3 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_first.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_first.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_first",
   "description" : "Converts a multivalued expression into a single valued column containing the\nfirst value. This is most useful when reading from a function that emits\nmultivalued columns in a known order like <>.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json
index 78d7b348a6042..f11482b69824c 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_last.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_last.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_last",
   "description" : "Converts a multivalue expression into a single valued column containing the last\nvalue. This is most useful when reading from a function that emits multivalued\ncolumns in a known order like <>.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json
index a1e55c58cff70..65b8d801edabd 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_max.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_max.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_max",
   "description" : "Converts a multivalued expression into a single valued column containing the maximum value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_median.json b/docs/reference/esql/functions/kibana/definition/mv_median.json
index fe95e1999f6a9..3ba870023cb47 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_median.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_median.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_median",
   "description" : "Converts a multivalued field into a single valued field containing the median value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_median_absolute_deviation.json b/docs/reference/esql/functions/kibana/definition/mv_median_absolute_deviation.json
index af16e9ab0d3bb..e4beb343cd20d 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_median_absolute_deviation.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_median_absolute_deviation.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_median_absolute_deviation",
   "description" : "Converts a multivalued field into a single valued field containing the median absolute deviation.\n\nIt is calculated as the median of each data point's deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`.",
   "note" : "If the field has an even number of values, the medians will be calculated as the average of the middle two values. If the value is not a floating point number, the averages are rounded towards 0.",
diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json
index 7998ca4eda94e..ef36cee912d5c 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_min.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_min.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_min",
   "description" : "Converts a multivalued expression into a single valued column containing the minimum value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_percentile.json b/docs/reference/esql/functions/kibana/definition/mv_percentile.json
index 8e4cec705f48d..7835241ed68ba 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_percentile.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_percentile.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_percentile",
   "description" : "Converts a multivalued field into a single valued field containing the value at which a certain percentage of observed values occur.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_pseries_weighted_sum.json b/docs/reference/esql/functions/kibana/definition/mv_pseries_weighted_sum.json
index f96a6dfb5a43a..7935afe3338e3 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_pseries_weighted_sum.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_pseries_weighted_sum.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_pseries_weighted_sum",
   "description" : "Converts a multivalued expression into a single-valued column by multiplying every element on the input list by its corresponding term in P-Series and computing the sum.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json
index 5ad8f588cdc2b..f23c0d089d93b 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_slice.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_slice",
   "description" : "Returns a subset of the multivalued field using the start and end index values.\nThis is most useful when reading from a function that emits multivalued columns\nin a known order like <> or <>.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json
index 072c05743af33..17a8fb426755c 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_sort.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_sort",
   "description" : "Sorts a multivalued field in lexicographical order.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_sum.json b/docs/reference/esql/functions/kibana/definition/mv_sum.json
index 6ccbcec3c61b4..31b0e5e420b70 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_sum.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_sum.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_sum",
   "description" : "Converts a multivalued field into a single valued field containing the sum of all of the values.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_zip.json b/docs/reference/esql/functions/kibana/definition/mv_zip.json
index e292cd27ee738..fc573834054b7 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_zip.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_zip.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_zip",
   "description" : "Combines the values from two multivalued fields with a delimiter that joins them together.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/now.json b/docs/reference/esql/functions/kibana/definition/now.json
index 26ef362995185..42831c2faf497 100644
--- a/docs/reference/esql/functions/kibana/definition/now.json
+++ b/docs/reference/esql/functions/kibana/definition/now.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "now",
   "description" : "Returns current date and time.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/pi.json b/docs/reference/esql/functions/kibana/definition/pi.json
index 98d9451a2b929..0dd720dd69cb6 100644
--- a/docs/reference/esql/functions/kibana/definition/pi.json
+++ b/docs/reference/esql/functions/kibana/definition/pi.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "pi",
   "description" : "Returns Pi, the ratio of a circle's circumference to its diameter.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/pow.json b/docs/reference/esql/functions/kibana/definition/pow.json
index 35fe080d82f79..93dda492f087b 100644
--- a/docs/reference/esql/functions/kibana/definition/pow.json
+++ b/docs/reference/esql/functions/kibana/definition/pow.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "pow",
   "description" : "Returns the value of `base` raised to the power of `exponent`.",
   "note" : "It is still possible to overflow a double result here; in that case, null will be returned.",
diff --git a/docs/reference/esql/functions/kibana/definition/qstr.json b/docs/reference/esql/functions/kibana/definition/qstr.json
index 3b091bfe2e13b..b617f9f9246c6 100644
--- a/docs/reference/esql/functions/kibana/definition/qstr.json
+++ b/docs/reference/esql/functions/kibana/definition/qstr.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "qstr",
   "description" : "Performs a <>. Returns true if the provided query string matches the row.",
   "signatures" : [
@@ -30,7 +30,7 @@
     }
   ],
   "examples" : [
-    "FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
+    "FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5"
   ],
   "preview" : true,
   "snapshot_only" : false
diff --git a/docs/reference/esql/functions/kibana/definition/repeat.json b/docs/reference/esql/functions/kibana/definition/repeat.json
index 201484cf7aa6f..2eb739f0d0bc0 100644
--- a/docs/reference/esql/functions/kibana/definition/repeat.json
+++ b/docs/reference/esql/functions/kibana/definition/repeat.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "repeat",
   "description" : "Returns a string constructed by concatenating `string` with itself the specified `number` of times.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/replace.json b/docs/reference/esql/functions/kibana/definition/replace.json
index b5d1a1af7e8f4..b512019a9951a 100644
--- a/docs/reference/esql/functions/kibana/definition/replace.json
+++ b/docs/reference/esql/functions/kibana/definition/replace.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "replace",
   "description" : "The function substitutes in the string `str` any match of the regular expression `regex`\nwith the replacement string `newStr`.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/reverse.json b/docs/reference/esql/functions/kibana/definition/reverse.json
index 0652d9cfa6b15..2ebf63fa5787c 100644
--- a/docs/reference/esql/functions/kibana/definition/reverse.json
+++ b/docs/reference/esql/functions/kibana/definition/reverse.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "reverse",
   "description" : "Returns a new string representing the input string in reverse order.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/right.json b/docs/reference/esql/functions/kibana/definition/right.json
index 1211877a2f902..84408f082db5a 100644
--- a/docs/reference/esql/functions/kibana/definition/right.json
+++ b/docs/reference/esql/functions/kibana/definition/right.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "right",
   "description" : "Return the substring that extracts 'length' chars from 'str' starting from the right.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/round.json b/docs/reference/esql/functions/kibana/definition/round.json
index 4ef20aa162b42..a364dbbed0b5a 100644
--- a/docs/reference/esql/functions/kibana/definition/round.json
+++ b/docs/reference/esql/functions/kibana/definition/round.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "round",
   "description" : "Rounds a number to the specified number of decimal places.\nDefaults to 0, which returns the nearest integer. If the\nprecision is a negative number, rounds to the number of digits left\nof the decimal point.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/rtrim.json b/docs/reference/esql/functions/kibana/definition/rtrim.json
index 9c8a7578ed789..ef6c93f9fa172 100644
--- a/docs/reference/esql/functions/kibana/definition/rtrim.json
+++ b/docs/reference/esql/functions/kibana/definition/rtrim.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "rtrim",
   "description" : "Removes trailing whitespaces from a string.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sha1.json b/docs/reference/esql/functions/kibana/definition/sha1.json
index a6abb31368bb3..18fa2c33ae0d5 100644
--- a/docs/reference/esql/functions/kibana/definition/sha1.json
+++ b/docs/reference/esql/functions/kibana/definition/sha1.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sha1",
   "description" : "Computes the SHA1 hash of the input.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sha256.json b/docs/reference/esql/functions/kibana/definition/sha256.json
index 700425d485b61..7ad0e2c5500da 100644
--- a/docs/reference/esql/functions/kibana/definition/sha256.json
+++ b/docs/reference/esql/functions/kibana/definition/sha256.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sha256",
   "description" : "Computes the SHA256 hash of the input.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/signum.json b/docs/reference/esql/functions/kibana/definition/signum.json
index 0902826651eff..20667b8cb683e 100644
--- a/docs/reference/esql/functions/kibana/definition/signum.json
+++ b/docs/reference/esql/functions/kibana/definition/signum.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "signum",
   "description" : "Returns the sign of the given number.\nIt returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sin.json b/docs/reference/esql/functions/kibana/definition/sin.json
index 0d4f4e5c5cf13..efdf96a3b8d38 100644
--- a/docs/reference/esql/functions/kibana/definition/sin.json
+++ b/docs/reference/esql/functions/kibana/definition/sin.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sin",
   "description" : "Returns the sine of an angle.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sinh.json b/docs/reference/esql/functions/kibana/definition/sinh.json
index b20db3328775b..3ed3eaa23c9ca 100644
--- a/docs/reference/esql/functions/kibana/definition/sinh.json
+++ b/docs/reference/esql/functions/kibana/definition/sinh.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sinh",
   "description" : "Returns the hyperbolic sine of a number.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/space.json b/docs/reference/esql/functions/kibana/definition/space.json
index 7a819d3ccd213..cc1223de2cdf2 100644
--- a/docs/reference/esql/functions/kibana/definition/space.json
+++ b/docs/reference/esql/functions/kibana/definition/space.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "space",
   "description" : "Returns a string made of `number` spaces.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/split.json b/docs/reference/esql/functions/kibana/definition/split.json
index 862cf7a09f19c..87a7b0a0f87c6 100644
--- a/docs/reference/esql/functions/kibana/definition/split.json
+++ b/docs/reference/esql/functions/kibana/definition/split.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "split",
   "description" : "Split a single valued string into multiple strings.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sqrt.json b/docs/reference/esql/functions/kibana/definition/sqrt.json
index 11620a465c45f..6985cb20043c7 100644
--- a/docs/reference/esql/functions/kibana/definition/sqrt.json
+++ b/docs/reference/esql/functions/kibana/definition/sqrt.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sqrt",
   "description" : "Returns the square root of a number. The input can be any numeric value, the return value is always a double.\nSquare roots of negative numbers and infinities are null.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json
index 0c1b61c563402..7d78518f91da1 100644
--- a/docs/reference/esql/functions/kibana/definition/st_contains.json
+++ b/docs/reference/esql/functions/kibana/definition/st_contains.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_contains",
   "description" : "Returns whether the first geometry contains the second geometry.\nThis is the inverse of the <> function.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json
index 0c4f6e7c36f53..f0da69103ea68 100644
--- a/docs/reference/esql/functions/kibana/definition/st_disjoint.json
+++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_disjoint",
   "description" : "Returns whether the two geometries or geometry columns are disjoint.\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_distance.json b/docs/reference/esql/functions/kibana/definition/st_distance.json
index 2fb8f341d12f4..74492b6e30742 100644
--- a/docs/reference/esql/functions/kibana/definition/st_distance.json
+++ b/docs/reference/esql/functions/kibana/definition/st_distance.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_distance",
   "description" : "Computes the distance between two points.\nFor cartesian geometries, this is the pythagorean distance in the same units as the original coordinates.\nFor geographic geometries, this is the circular distance along the great circle in meters.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_envelope.json b/docs/reference/esql/functions/kibana/definition/st_envelope.json
index 6c00dda265ac7..83f4bad5b826f 100644
--- a/docs/reference/esql/functions/kibana/definition/st_envelope.json
+++ b/docs/reference/esql/functions/kibana/definition/st_envelope.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_envelope",
   "description" : "Determines the minimum bounding box of the supplied geometry.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json
index 51237a0b1cee1..b35df6711b338 100644
--- a/docs/reference/esql/functions/kibana/definition/st_intersects.json
+++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_intersects",
   "description" : "Returns true if two geometries intersect.\nThey intersect if they have any point in common, including their interior points\n(points along lines or within polygons).\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json
index d877cf4e8dfbc..3ab419683f021 100644
--- a/docs/reference/esql/functions/kibana/definition/st_within.json
+++ b/docs/reference/esql/functions/kibana/definition/st_within.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_within",
   "description" : "Returns whether the first geometry is within the second geometry.\nThis is the inverse of the <> function.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json
index f434f1467c0bf..27378edd25ff8 100644
--- a/docs/reference/esql/functions/kibana/definition/st_x.json
+++ b/docs/reference/esql/functions/kibana/definition/st_x.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_x",
   "description" : "Extracts the `x` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `longitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_xmax.json b/docs/reference/esql/functions/kibana/definition/st_xmax.json
index 7be22617c0992..c1223ecae7349 100644
--- a/docs/reference/esql/functions/kibana/definition/st_xmax.json
+++ b/docs/reference/esql/functions/kibana/definition/st_xmax.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_xmax",
   "description" : "Extracts the maximum value of the `x` coordinates from the supplied geometry.\nIf the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the maximum `longitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_xmin.json b/docs/reference/esql/functions/kibana/definition/st_xmin.json
index 8052fdb861cea..20c4bd8cca79f 100644
--- a/docs/reference/esql/functions/kibana/definition/st_xmin.json
+++ b/docs/reference/esql/functions/kibana/definition/st_xmin.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_xmin",
   "description" : "Extracts the minimum value of the `x` coordinates from the supplied geometry.\nIf the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the minimum `longitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json
index ca7d817a36ff0..42f37d0346b1f 100644
--- a/docs/reference/esql/functions/kibana/definition/st_y.json
+++ b/docs/reference/esql/functions/kibana/definition/st_y.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_y",
   "description" : "Extracts the `y` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `latitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_ymax.json b/docs/reference/esql/functions/kibana/definition/st_ymax.json
index 1a53f7388ea56..83a8070834e1a 100644
--- a/docs/reference/esql/functions/kibana/definition/st_ymax.json
+++ b/docs/reference/esql/functions/kibana/definition/st_ymax.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_ymax",
   "description" : "Extracts the maximum value of the `y` coordinates from the supplied geometry.\nIf the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the maximum `latitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_ymin.json b/docs/reference/esql/functions/kibana/definition/st_ymin.json
index e11722a8f9c07..21051d02cb6d3 100644
--- a/docs/reference/esql/functions/kibana/definition/st_ymin.json
+++ b/docs/reference/esql/functions/kibana/definition/st_ymin.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it.
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "st_ymin", "description" : "Extracts the minimum value of the `y` coordinates from the supplied geometry.\nIf the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the minimum `latitude` value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/starts_with.json b/docs/reference/esql/functions/kibana/definition/starts_with.json index 0e5dc076a5689..bad7fa1d34b69 100644 --- a/docs/reference/esql/functions/kibana/definition/starts_with.json +++ b/docs/reference/esql/functions/kibana/definition/starts_with.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "starts_with", "description" : "Returns a boolean that indicates whether a keyword string starts with another string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/substring.json b/docs/reference/esql/functions/kibana/definition/substring.json index 1998eb0428482..c6260c5040e58 100644 --- a/docs/reference/esql/functions/kibana/definition/substring.json +++ b/docs/reference/esql/functions/kibana/definition/substring.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "substring", "description" : "Returns a substring of a string, specified by a start position and an optional length.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/tan.json b/docs/reference/esql/functions/kibana/definition/tan.json index 76fab2ff6ab1e..af28eace4c591 100644 --- a/docs/reference/esql/functions/kibana/definition/tan.json +++ b/docs/reference/esql/functions/kibana/definition/tan.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "tan", "description" : "Returns the tangent of an angle.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/tanh.json b/docs/reference/esql/functions/kibana/definition/tanh.json index f56e053a3d4ee..a36edcc1e88f4 100644 --- a/docs/reference/esql/functions/kibana/definition/tanh.json +++ b/docs/reference/esql/functions/kibana/definition/tanh.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "tanh", "description" : "Returns the hyperbolic tangent of a number.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/tau.json b/docs/reference/esql/functions/kibana/definition/tau.json index 1dbb2e892ee60..b5090e8a39a81 100644 --- a/docs/reference/esql/functions/kibana/definition/tau.json +++ b/docs/reference/esql/functions/kibana/definition/tau.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "tau", "description" : "Returns the ratio of a circle's circumference to its radius.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/term.json b/docs/reference/esql/functions/kibana/definition/term.json index b0f129afd239c..1a0ea7bf8a4ee 100644 --- a/docs/reference/esql/functions/kibana/definition/term.json +++ b/docs/reference/esql/functions/kibana/definition/term.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "term", "description" : "Performs a Term query on the specified field. Returns true if the provided term matches the row.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_base64.json b/docs/reference/esql/functions/kibana/definition/to_base64.json index 84ace22073ab7..39014ceb9a5c2 100644 --- a/docs/reference/esql/functions/kibana/definition/to_base64.json +++ b/docs/reference/esql/functions/kibana/definition/to_base64.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_base64", "description" : "Encode a string to a base64 string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_boolean.json b/docs/reference/esql/functions/kibana/definition/to_boolean.json index 3a403caaace6c..d295c826f5767 100644 --- a/docs/reference/esql/functions/kibana/definition/to_boolean.json +++ b/docs/reference/esql/functions/kibana/definition/to_boolean.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_boolean", "description" : "Converts an input value to a boolean value.\nA string value of *true* will be case-insensitive converted to the Boolean *true*.\nFor anything else, including the empty string, the function will return *false*.\nThe numerical value of *0* will be converted to *false*, anything else will be converted to *true*.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json index 7f6e99f88b777..f7303457120f4 100644 --- a/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_cartesianpoint", "description" : "Converts an input value to a `cartesian_point` value.\nA string will only be successfully converted if it respects WKT Point format.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json index 284288068b415..6a08d531523ad 100644 --- a/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_cartesianshape", "description" : "Converts an input value to a `cartesian_shape` value.\nA string will only be successfully converted if it respects WKT format.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json index 210b9608f9eff..bc74476a4867c 100644 --- a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json +++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_date_nanos", "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).", "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z, attepting to convertvalues outside of that range will result in null with a warning.. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", diff --git a/docs/reference/esql/functions/kibana/definition/to_dateperiod.json b/docs/reference/esql/functions/kibana/definition/to_dateperiod.json index 2940a08a7a100..790c7ec92401c 100644 --- a/docs/reference/esql/functions/kibana/definition/to_dateperiod.json +++ b/docs/reference/esql/functions/kibana/definition/to_dateperiod.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_dateperiod", "description" : "Converts an input value into a `date_period` value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json index 8f9ecbd139d32..90c683cf3767f 100644 --- a/docs/reference/esql/functions/kibana/definition/to_datetime.json +++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_datetime", "description" : "Converts an input value to a date value.\nA string will only be successfully converted if it's respecting the format `yyyy-MM-dd'T'HH:mm:ss.SSS'Z'`.\nTo convert dates in other formats, use <>.", "note" : "Note that when converting from nanosecond resolution to millisecond resolution with this function, the nanosecond date is truncated, not rounded.", diff --git a/docs/reference/esql/functions/kibana/definition/to_degrees.json b/docs/reference/esql/functions/kibana/definition/to_degrees.json index 6aefca5c5d4bc..7b1cc51809704 100644 --- a/docs/reference/esql/functions/kibana/definition/to_degrees.json +++ b/docs/reference/esql/functions/kibana/definition/to_degrees.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_degrees", "description" : "Converts a number in radians to degrees.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_double.json b/docs/reference/esql/functions/kibana/definition/to_double.json index 27565e616d6ed..09d5341a62a1d 100644 --- a/docs/reference/esql/functions/kibana/definition/to_double.json +++ b/docs/reference/esql/functions/kibana/definition/to_double.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_double", "description" : "Converts an input value to a double value. If the input parameter is of a date type,\nits value will be interpreted as milliseconds since the Unix epoch,\nconverted to double. Boolean *true* will be converted to double *1.0*, *false* to *0.0*.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_geopoint.json b/docs/reference/esql/functions/kibana/definition/to_geopoint.json index e2ba492e39e9e..2c465fcfc2f8d 100644 --- a/docs/reference/esql/functions/kibana/definition/to_geopoint.json +++ b/docs/reference/esql/functions/kibana/definition/to_geopoint.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_geopoint", "description" : "Converts an input value to a `geo_point` value.\nA string will only be successfully converted if it respects WKT Point format.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_geoshape.json b/docs/reference/esql/functions/kibana/definition/to_geoshape.json index 53316cc0f7b84..dc05f12e6ee3e 100644 --- a/docs/reference/esql/functions/kibana/definition/to_geoshape.json +++ b/docs/reference/esql/functions/kibana/definition/to_geoshape.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_geoshape", "description" : "Converts an input value to a `geo_shape` value.\nA string will only be successfully converted if it respects WKT format.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_integer.json b/docs/reference/esql/functions/kibana/definition/to_integer.json index 93691ac4e94ef..0228d6baaf507 100644 --- a/docs/reference/esql/functions/kibana/definition/to_integer.json +++ b/docs/reference/esql/functions/kibana/definition/to_integer.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_integer", "description" : "Converts an input value to an integer value.\nIf the input parameter is of a date type, its value will be interpreted as milliseconds\nsince the Unix epoch, converted to integer.\nBoolean *true* will be converted to integer *1*, *false* to *0*.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_ip.json b/docs/reference/esql/functions/kibana/definition/to_ip.json index dfc8e97d283f9..4ec424442c2c1 100644 --- a/docs/reference/esql/functions/kibana/definition/to_ip.json +++ b/docs/reference/esql/functions/kibana/definition/to_ip.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_ip", "description" : "Converts an input string to an IP value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json index eb1ce7220c3f9..5cd920092473f 100644 --- a/docs/reference/esql/functions/kibana/definition/to_long.json +++ b/docs/reference/esql/functions/kibana/definition/to_long.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_long", "description" : "Converts an input value to a long value. If the input parameter is of a date type,\nits value will be interpreted as milliseconds since the Unix epoch, converted to long.\nBoolean *true* will be converted to long *1*, *false* to *0*.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_lower.json b/docs/reference/esql/functions/kibana/definition/to_lower.json index 07bb057fe080d..22fd8f4fc1b11 100644 --- a/docs/reference/esql/functions/kibana/definition/to_lower.json +++ b/docs/reference/esql/functions/kibana/definition/to_lower.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_lower", "description" : "Returns a new string representing the input string converted to lower case.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_radians.json b/docs/reference/esql/functions/kibana/definition/to_radians.json index 1b13ef22e23f0..638ef8ec13e8c 100644 --- a/docs/reference/esql/functions/kibana/definition/to_radians.json +++ b/docs/reference/esql/functions/kibana/definition/to_radians.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_radians", "description" : "Converts a number in degrees to radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json index 1c86e81b31136..40e9588b03f85 100644 --- a/docs/reference/esql/functions/kibana/definition/to_string.json +++ b/docs/reference/esql/functions/kibana/definition/to_string.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_string", "description" : "Converts an input value into a string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_timeduration.json b/docs/reference/esql/functions/kibana/definition/to_timeduration.json index eb7d4e0d5ccec..923aa2024f335 100644 --- a/docs/reference/esql/functions/kibana/definition/to_timeduration.json +++ b/docs/reference/esql/functions/kibana/definition/to_timeduration.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_timeduration", "description" : "Converts an input value into a `time_duration` value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json index 4a215b1ea97f3..f7725311b298a 100644 --- a/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json +++ b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_unsigned_long", "description" : "Converts an input value to an unsigned long value. If the input parameter is of a date type,\nits value will be interpreted as milliseconds since the Unix epoch, converted to unsigned long.\nBoolean *true* will be converted to unsigned long *1*, *false* to *0*.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_upper.json b/docs/reference/esql/functions/kibana/definition/to_upper.json index caa9d563b08b1..ac0494b1fb9ec 100644 --- a/docs/reference/esql/functions/kibana/definition/to_upper.json +++ b/docs/reference/esql/functions/kibana/definition/to_upper.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_upper", "description" : "Returns a new string representing the input string converted to upper case.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_version.json b/docs/reference/esql/functions/kibana/definition/to_version.json index a77fc250c3a36..41ad00dc20c9e 100644 --- a/docs/reference/esql/functions/kibana/definition/to_version.json +++ b/docs/reference/esql/functions/kibana/definition/to_version.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_version", "description" : "Converts an input string to a version value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/trim.json b/docs/reference/esql/functions/kibana/definition/trim.json index 45805b3bfb054..eb72d5d041d0f 100644 --- a/docs/reference/esql/functions/kibana/definition/trim.json +++ b/docs/reference/esql/functions/kibana/definition/trim.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "trim",
   "description" : "Removes leading and trailing whitespaces from a string.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/docs/kql.md b/docs/reference/esql/functions/kibana/docs/kql.md
index 0ba419c1cd032..14c914d57af91 100644
--- a/docs/reference/esql/functions/kibana/docs/kql.md
+++ b/docs/reference/esql/functions/kibana/docs/kql.md
@@ -10,5 +10,5 @@ FROM books
 | WHERE KQL("author: Faulkner")
 | KEEP book_no, author
 | SORT book_no
-| LIMIT 5;
+| LIMIT 5
 ```
diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md
index 6526d9e84168e..72132533ea82d 100644
--- a/docs/reference/esql/functions/kibana/docs/match.md
+++ b/docs/reference/esql/functions/kibana/docs/match.md
@@ -21,5 +21,5 @@ FROM books
 | WHERE MATCH(author, "Faulkner")
 | KEEP book_no, author
 | SORT book_no
-| LIMIT 5;
+| LIMIT 5
 ```
diff --git a/docs/reference/esql/functions/kibana/docs/match_operator.md b/docs/reference/esql/functions/kibana/docs/match_operator.md
index 0624329182f3a..59662b36b804f 100644
--- a/docs/reference/esql/functions/kibana/docs/match_operator.md
+++ b/docs/reference/esql/functions/kibana/docs/match_operator.md
@@ -18,5 +18,5 @@ FROM books
 | WHERE MATCH(author, "Faulkner")
 | KEEP book_no, author
 | SORT book_no
-| LIMIT 5;
+| LIMIT 5
 ```
diff --git a/docs/reference/esql/functions/kibana/docs/qstr.md b/docs/reference/esql/functions/kibana/docs/qstr.md
index 7df5a2fe08a9d..374854b805fee 100644
--- a/docs/reference/esql/functions/kibana/docs/qstr.md
+++ b/docs/reference/esql/functions/kibana/docs/qstr.md
@@ -10,5 +10,5 @@ FROM books
 | WHERE QSTR("author: Faulkner")
 | KEEP book_no, author
 | SORT book_no
-| LIMIT 5;
+| LIMIT 5
 ```
diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml
index 05fda8e0244de..1aed327c45796 100644
--- a/gradle/build.versions.toml
+++ b/gradle/build.versions.toml
@@ -11,7 +11,7 @@ apache-compress = "org.apache.commons:commons-compress:1.26.1"
 apache-rat = "org.apache.rat:apache-rat:0.11"
 asm = { group = "org.ow2.asm", name="asm", version.ref="asm" }
 asm-tree = { group = "org.ow2.asm", name="asm-tree", version.ref="asm" }
-bytebuddy = "net.bytebuddy:byte-buddy:1.14.12"
+bytebuddy = "net.bytebuddy:byte-buddy:1.15.11"
 checkstyle = "com.puppycrawl.tools:checkstyle:10.3"
 commons-codec = "commons-codec:commons-codec:1.11"
 commmons-io = "commons-io:commons-io:2.2"
diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index b1db86f3a7b28..00be18548cb4b 100644
--- a/gradle/verification-metadata.xml
+++ b/gradle/verification-metadata.xml
@@ -89,29 +89,29 @@
@@ -1724,9 +1724,9 @@
@@ -4118,14 +4118,14 @@
diff --git a/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java b/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java
new file mode 100644
index 0000000000000..5d3831881f285
--- /dev/null
+++ b/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.core;
+
+/**
+ * A {@link java.util.function.Supplier}-like interface which allows throwing checked exceptions.
+ */
+@FunctionalInterface
+public interface CheckedSupplier<T, E extends Exception> {
+    T get() throws E;
+}
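A short usage sketch (illustrative, not part of the patch): with the generic signature
CheckedSupplier<T, E extends Exception> defined above, callers can pass lambdas that throw
checked exceptions where a plain java.util.function.Supplier would not compile. The
CheckedSupplierUsage and loadConfig names are hypothetical.

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    import org.elasticsearch.core.CheckedSupplier;

    class CheckedSupplierUsage {
        static byte[] read(CheckedSupplier<byte[], IOException> loadConfig) throws IOException {
            return loadConfig.get(); // may throw IOException, unlike Supplier.get()
        }

        static void example() throws IOException {
            byte[] bytes = read(() -> Files.readAllBytes(Path.of("config.yml")));
            System.out.println(bytes.length + " bytes read");
        }
    }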
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.core; + +/** + * A {@link java.util.function.Supplier}-like interface which allows throwing checked exceptions. + */ +@FunctionalInterface +public interface CheckedSupplier { + T get() throws E; +} diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java index 5a7868325e20d..ffcc23e16d1f6 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java @@ -23,49 +23,96 @@ import java.io.IOException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; +import java.util.ArrayDeque; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; public class InstrumentationServiceImpl implements InstrumentationService { + private static final String OBJECT_INTERNAL_NAME = Type.getInternalName(Object.class); + @Override public Instrumenter newInstrumenter(Class clazz, Map methods) { return InstrumenterImpl.create(clazz, methods); } + private interface CheckerMethodVisitor { + void visit(Class currentClass, int access, String checkerMethodName, String checkerMethodDescriptor); + } + + private void visitClassAndSupers(Class checkerClass, CheckerMethodVisitor checkerMethodVisitor) throws ClassNotFoundException { + Set> visitedClasses = new HashSet<>(); + ArrayDeque> classesToVisit = new ArrayDeque<>(Collections.singleton(checkerClass)); + while (classesToVisit.isEmpty() == false) { + var currentClass = classesToVisit.remove(); + if (visitedClasses.contains(currentClass)) { + continue; + } + visitedClasses.add(currentClass); + + try { + var classFileInfo = InstrumenterImpl.getClassFileInfo(currentClass); + ClassReader reader = new ClassReader(classFileInfo.bytecodes()); + ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { + + @Override + public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { + super.visit(version, access, name, signature, superName, interfaces); + try { + if (OBJECT_INTERNAL_NAME.equals(superName) == false) { + classesToVisit.add(Class.forName(Type.getObjectType(superName).getClassName())); + } + for (var interfaceName : interfaces) { + classesToVisit.add(Class.forName(Type.getObjectType(interfaceName).getClassName())); + } + } catch (ClassNotFoundException e) { + throw new IllegalArgumentException("Cannot inspect checker class " + currentClass.getName(), e); + } + } + + @Override + public MethodVisitor visitMethod( + int access, + String checkerMethodName, + String checkerMethodDescriptor, + String signature, + String[] exceptions + ) { + var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, 
signature, exceptions); + checkerMethodVisitor.visit(currentClass, access, checkerMethodName, checkerMethodDescriptor); + return mv; + } + }; + reader.accept(visitor, 0); + } catch (IOException e) { + throw new ClassNotFoundException("Cannot find a definition for class [" + checkerClass.getName() + "]", e); + } + } + } + @Override - public Map lookupMethods(Class checkerClass) throws IOException { - var methodsToInstrument = new HashMap(); - var classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); - ClassReader reader = new ClassReader(classFileInfo.bytecodes()); - ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { - @Override - public MethodVisitor visitMethod( - int access, - String checkerMethodName, - String checkerMethodDescriptor, - String signature, - String[] exceptions - ) { - var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions); - if (checkerMethodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX)) { - var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); - var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); - - var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); - var checkMethod = new CheckMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); - - methodsToInstrument.put(methodToInstrument, checkMethod); - } - return mv; + public Map lookupMethods(Class checkerClass) throws ClassNotFoundException { + Map methodsToInstrument = new HashMap<>(); + + visitClassAndSupers(checkerClass, (currentClass, access, checkerMethodName, checkerMethodDescriptor) -> { + if (checkerMethodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX)) { + var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); + var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); + + var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); + var checkMethod = new CheckMethod(Type.getInternalName(currentClass), checkerMethodName, checkerParameterDescriptors); + methodsToInstrument.putIfAbsent(methodToInstrument, checkMethod); } - }; - reader.accept(visitor, 0); + }); + return methodsToInstrument; } @@ -73,15 +120,16 @@ public MethodVisitor visitMethod( @Override public InstrumentationInfo lookupImplementationMethod( Class targetSuperclass, - String methodName, + String targetMethodName, Class implementationClass, Class checkerClass, String checkMethodName, Class... 
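The visitClassAndSupers walk above is the heart of this change: checker methods are now
collected from the checker interface and everything it extends, breadth first, with a visited
set so diamond-shaped interface hierarchies are processed once, and putIfAbsent so the most
derived declaration wins. A minimal reflection-based sketch of the same traversal (illustrative
only; the real implementation reads bytecode with ASM's ClassReader rather than reflection):

    import java.util.ArrayDeque;
    import java.util.HashSet;
    import java.util.Set;
    import java.util.function.Consumer;

    final class HierarchyWalk {
        static void visitClassAndSupers(Class<?> start, Consumer<Class<?>> visitor) {
            Set<Class<?>> visited = new HashSet<>();
            ArrayDeque<Class<?>> queue = new ArrayDeque<>();
            queue.add(start);
            while (queue.isEmpty() == false) {
                Class<?> current = queue.remove();
                if (visited.add(current) == false) {
                    continue; // already visited via another path
                }
                visitor.accept(current);
                Class<?> superclass = current.getSuperclass();
                if (superclass != null && superclass != Object.class) {
                    queue.add(superclass); // stop at Object, as the ASM visitor does
                }
                for (Class<?> iface : current.getInterfaces()) {
                    queue.add(iface);
                }
            }
        }
    }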
parameterTypes ) throws NoSuchMethodException, ClassNotFoundException { - var targetMethod = targetSuperclass.getDeclaredMethod(methodName, parameterTypes); - validateTargetMethod(implementationClass, targetMethod); + var targetMethod = targetSuperclass.getDeclaredMethod(targetMethodName, parameterTypes); + var implementationMethod = implementationClass.getMethod(targetMethod.getName(), targetMethod.getParameterTypes()); + validateTargetMethod(implementationClass, targetMethod, implementationMethod); var checkerAdditionalArguments = Stream.of(Class.class, targetSuperclass); var checkMethodArgumentTypes = Stream.concat(checkerAdditionalArguments, Arrays.stream(parameterTypes)) @@ -90,33 +138,15 @@ public InstrumentationInfo lookupImplementationMethod( CheckMethod[] checkMethod = new CheckMethod[1]; - try { - InstrumenterImpl.ClassFileInfo classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); - ClassReader reader = new ClassReader(classFileInfo.bytecodes()); - ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { - @Override - public MethodVisitor visitMethod( - int access, - String methodName, - String methodDescriptor, - String signature, - String[] exceptions - ) { - var mv = super.visitMethod(access, methodName, methodDescriptor, signature, exceptions); - if (methodName.equals(checkMethodName)) { - var methodArgumentTypes = Type.getArgumentTypes(methodDescriptor); - if (Arrays.equals(methodArgumentTypes, checkMethodArgumentTypes)) { - var checkerParameterDescriptors = Arrays.stream(methodArgumentTypes).map(Type::getDescriptor).toList(); - checkMethod[0] = new CheckMethod(Type.getInternalName(checkerClass), methodName, checkerParameterDescriptors); - } - } - return mv; + visitClassAndSupers(checkerClass, (currentClass, access, methodName, methodDescriptor) -> { + if (methodName.equals(checkMethodName)) { + var methodArgumentTypes = Type.getArgumentTypes(methodDescriptor); + if (Arrays.equals(methodArgumentTypes, checkMethodArgumentTypes)) { + var checkerParameterDescriptors = Arrays.stream(methodArgumentTypes).map(Type::getDescriptor).toList(); + checkMethod[0] = new CheckMethod(Type.getInternalName(currentClass), methodName, checkerParameterDescriptors); } - }; - reader.accept(visitor, 0); - } catch (IOException e) { - throw new ClassNotFoundException("Cannot find a definition for class [" + checkerClass.getName() + "]", e); - } + } + }); if (checkMethod[0] == null) { throw new NoSuchMethodException( @@ -132,15 +162,15 @@ public MethodVisitor visitMethod( return new InstrumentationInfo( new MethodKey( - Type.getInternalName(implementationClass), - targetMethod.getName(), + Type.getInternalName(implementationMethod.getDeclaringClass()), + implementationMethod.getName(), Arrays.stream(parameterTypes).map(c -> Type.getType(c).getInternalName()).toList() ), checkMethod[0] ); } - private static void validateTargetMethod(Class implementationClass, Method targetMethod) { + private static void validateTargetMethod(Class implementationClass, Method targetMethod, Method implementationMethod) { if (targetMethod.getDeclaringClass().isAssignableFrom(implementationClass) == false) { throw new IllegalArgumentException( String.format( @@ -172,37 +202,26 @@ private static void validateTargetMethod(Class implementationClass, Method ta ) ); } - try { - var implementationMethod = implementationClass.getMethod(targetMethod.getName(), targetMethod.getParameterTypes()); - var methodModifiers = implementationMethod.getModifiers(); - if (Modifier.isAbstract(methodModifiers)) { - throw new 
IllegalArgumentException( - String.format( - Locale.ROOT, - "Not a valid instrumentation method: %s is abstract in %s", - targetMethod.getName(), - implementationClass.getName() - ) - ); - } - if (Modifier.isPublic(methodModifiers) == false) { - throw new IllegalArgumentException( - String.format( - Locale.ROOT, - "Not a valid instrumentation method: %s is not public in %s", - targetMethod.getName(), - implementationClass.getName() - ) - ); - } - } catch (NoSuchMethodException e) { - assert false - : String.format( + var methodModifiers = implementationMethod.getModifiers(); + if (Modifier.isAbstract(methodModifiers)) { + throw new IllegalArgumentException( + String.format( Locale.ROOT, - "Not a valid instrumentation method: %s cannot be found in %s", + "Not a valid instrumentation method: %s is abstract in %s", targetMethod.getName(), implementationClass.getName() - ); + ) + ); + } + if (Modifier.isPublic(methodModifiers) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is not public in %s", + targetMethod.getName(), + implementationClass.getName() + ) + ); } } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 06408941ac96e..b10c58afacb1e 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -152,14 +152,13 @@ public MethodVisitor visitMethod(int access, String name, String descriptor, Str if (isAnnotationPresent == false) { boolean isStatic = (access & ACC_STATIC) != 0; boolean isCtor = "".equals(name); - boolean hasReceiver = (isStatic || isCtor) == false; var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var instrumentationMethod = checkMethods.get(key); if (instrumentationMethod != null) { - // LOGGER.debug("Will instrument method {}", key); + // System.out.println("Will instrument method " + key); return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, isCtor, descriptor, instrumentationMethod); } else { - // LOGGER.trace("Will not instrument method {}", key); + // System.out.println("Will not instrument method " + key); } } return mv; diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java index 33c48fb8823ca..25689f0b8a636 100644 --- a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.test.ESTestCase; import org.objectweb.asm.Type; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -40,9 +39,16 @@ public void instanceMethod(int x, String y) {} abstract static class TestTargetBaseClass { abstract void instanceMethod(int x, String y); + + abstract void instanceMethod2(int x, String y); } - static class 
TestTargetImplementationClass extends TestTargetBaseClass { + abstract static class TestTargetIntermediateClass extends TestTargetBaseClass { + @Override + public void instanceMethod2(int x, String y) {} + } + + static class TestTargetImplementationClass extends TestTargetIntermediateClass { @Override public void instanceMethod(int x, String y) {} } @@ -55,6 +61,14 @@ interface TestChecker { void check$org_example_TestTargetClass$instanceMethodWithArgs(Class clazz, TestTargetClass that, int x, int y); } + interface TestCheckerDerived extends TestChecker { + void check$org_example_TestTargetClass$instanceMethodNoArgs(Class clazz, TestTargetClass that); + + void check$org_example_TestTargetClass$differentInstanceMethod(Class clazz, TestTargetClass that); + } + + interface TestCheckerDerived2 extends TestCheckerDerived, TestChecker {} + interface TestCheckerOverloads { void check$org_example_TestTargetClass$$staticMethodWithOverload(Class clazz, int x, int y); @@ -75,7 +89,9 @@ interface TestCheckerMixed { void checkInstanceMethodManual(Class clazz, TestTargetBaseClass that, int x, String y); } - public void testInstrumentationTargetLookup() throws IOException { + interface TestCheckerDerived3 extends TestCheckerMixed {} + + public void testInstrumentationTargetLookup() throws ClassNotFoundException { Map checkMethods = instrumentationService.lookupMethods(TestChecker.class); assertThat(checkMethods, aMapWithSize(3)); @@ -128,7 +144,7 @@ public void testInstrumentationTargetLookup() throws IOException { ); } - public void testInstrumentationTargetLookupWithOverloads() throws IOException { + public void testInstrumentationTargetLookupWithOverloads() throws ClassNotFoundException { Map checkMethods = instrumentationService.lookupMethods(TestCheckerOverloads.class); assertThat(checkMethods, aMapWithSize(2)); @@ -160,7 +176,76 @@ public void testInstrumentationTargetLookupWithOverloads() throws IOException { ); } - public void testInstrumentationTargetLookupWithCtors() throws IOException { + public void testInstrumentationTargetLookupWithDerivedClass() throws ClassNotFoundException { + Map checkMethods = instrumentationService.lookupMethods(TestCheckerDerived2.class); + + assertThat(checkMethods, aMapWithSize(4)); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$org_example_TestTargetClass$$staticMethod", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;") + ) + ) + ) + ); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "instanceMethodNoArgs", List.of())), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerDerived", + "check$org_example_TestTargetClass$instanceMethodNoArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;" + ) + ) + ) + ) + ); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "instanceMethodWithArgs", List.of("I", "I"))), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$org_example_TestTargetClass$instanceMethodWithArgs", + List.of( + 
"Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;", + "I", + "I" + ) + ) + ) + ) + ); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "differentInstanceMethod", List.of())), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerDerived", + "check$org_example_TestTargetClass$differentInstanceMethod", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;" + ) + ) + ) + ) + ); + } + + public void testInstrumentationTargetLookupWithCtors() throws ClassNotFoundException { Map checkMethods = instrumentationService.lookupMethods(TestCheckerCtors.class); assertThat(checkMethods, aMapWithSize(2)); @@ -192,7 +277,7 @@ public void testInstrumentationTargetLookupWithCtors() throws IOException { ); } - public void testInstrumentationTargetLookupWithExtraMethods() throws IOException { + public void testInstrumentationTargetLookupWithExtraMethods() throws ClassNotFoundException { Map checkMethods = instrumentationService.lookupMethods(TestCheckerMixed.class); assertThat(checkMethods, aMapWithSize(1)); @@ -287,6 +372,82 @@ public void testLookupImplementationMethodWithBaseClass() throws ClassNotFoundEx ); } + public void testLookupImplementationMethodWithInheritanceOnTarget() throws ClassNotFoundException, NoSuchMethodException { + var info = instrumentationService.lookupImplementationMethod( + TestTargetBaseClass.class, + "instanceMethod2", + TestTargetImplementationClass.class, + TestCheckerMixed.class, + "checkInstanceMethodManual", + int.class, + String.class + ); + + assertThat( + info.targetMethod(), + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetIntermediateClass", + "instanceMethod2", + List.of("I", "java/lang/String") + ) + ) + ); + assertThat( + info.checkMethod(), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "checkInstanceMethodManual", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetBaseClass;", + "I", + "Ljava/lang/String;" + ) + ) + ) + ); + } + + public void testLookupImplementationMethodWithInheritanceOnChecker() throws ClassNotFoundException, NoSuchMethodException { + var info = instrumentationService.lookupImplementationMethod( + TestTargetBaseClass.class, + "instanceMethod2", + TestTargetImplementationClass.class, + TestCheckerDerived3.class, + "checkInstanceMethodManual", + int.class, + String.class + ); + + assertThat( + info.targetMethod(), + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetIntermediateClass", + "instanceMethod2", + List.of("I", "java/lang/String") + ) + ) + ); + assertThat( + info.checkMethod(), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "checkInstanceMethodManual", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetBaseClass;", + "I", + "Ljava/lang/String;" + ) + ) + ) + ); + } + public void testParseCheckerMethodSignatureStaticMethod() { var methodKey = 
InstrumentationServiceImpl.parseCheckerMethodSignature( "check$org_example_TestClass$$staticMethod", diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index bde467d20f0aa..640e0d06cc4b9 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -10,6 +10,8 @@ package org.elasticsearch.entitlement.bridge; import java.io.File; +import java.io.FileFilter; +import java.io.FilenameFilter; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; @@ -35,6 +37,7 @@ import java.net.Socket; import java.net.SocketAddress; import java.net.SocketImplFactory; +import java.net.URI; import java.net.URL; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; @@ -48,16 +51,29 @@ import java.nio.channels.DatagramChannel; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.nio.channels.spi.SelectorProvider; import java.nio.charset.Charset; +import java.nio.file.AccessMode; +import java.nio.file.CopyOption; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.LinkOption; import java.nio.file.OpenOption; import java.nio.file.Path; +import java.nio.file.WatchEvent; +import java.nio.file.WatchService; +import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.UserPrincipal; import java.nio.file.spi.FileSystemProvider; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.TimeZone; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ForkJoinPool; import java.util.function.Consumer; import javax.net.ssl.HostnameVerifier; @@ -81,7 +97,7 @@ public interface EntitlementChecker { /// ///////////////// // - // ClassLoader ctor + // create class loaders // void check$java_lang_ClassLoader$(Class callerClass); @@ -90,22 +106,6 @@ public interface EntitlementChecker { void check$java_lang_ClassLoader$(Class callerClass, String name, ClassLoader parent); - /// ///////////////// - // - // SecureClassLoader ctor - // - - void check$java_security_SecureClassLoader$(Class callerClass); - - void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent); - - void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent); - - /// ///////////////// - // - // URLClassLoader constructors - // - void check$java_net_URLClassLoader$(Class callerClass, URL[] urls); void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent); @@ -116,6 +116,12 @@ public interface EntitlementChecker { void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory); + void check$java_security_SecureClassLoader$(Class callerClass); + + void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent); + + void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent); + /// ///////////////// // // "setFactory" methods @@ -143,6 +149,8 @@ public interface EntitlementChecker { // System Properties and similar // + void 
check$java_lang_System$$setProperties(Class callerClass, Properties props); + void check$java_lang_System$$setProperty(Class callerClass, String key, String value); void check$java_lang_System$$clearProperty(Class callerClass, String key); @@ -152,33 +160,33 @@ public interface EntitlementChecker { // JVM-wide state changes // - void check$java_lang_System$$setIn(Class callerClass, InputStream in); - - void check$java_lang_System$$setOut(Class callerClass, PrintStream out); + void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class callerClass); void check$java_lang_System$$setErr(Class callerClass, PrintStream err); - void check$java_lang_System$$setProperties(Class callerClass, Properties props); + void check$java_lang_System$$setIn(Class callerClass, InputStream in); + + void check$java_lang_System$$setOut(Class callerClass, PrintStream out); void check$java_lang_Runtime$addShutdownHook(Class callerClass, Runtime runtime, Thread hook); void check$java_lang_Runtime$removeShutdownHook(Class callerClass, Runtime runtime, Thread hook); - void check$jdk_tools_jlink_internal_Jlink$(Class callerClass); + void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class callerClass, Thread.UncaughtExceptionHandler ueh); - void check$jdk_tools_jlink_internal_Main$$run(Class callerClass, PrintWriter out, PrintWriter err, String... args); + void check$java_net_DatagramSocket$$setDatagramSocketImplFactory(Class callerClass, DatagramSocketImplFactory fac); - void check$jdk_vm_ci_services_JVMCIServiceLocator$$getProviders(Class callerClass, Class service); + void check$java_net_HttpURLConnection$$setFollowRedirects(Class callerClass, boolean set); - void check$jdk_vm_ci_services_Services$$load(Class callerClass, Class service); + void check$java_net_ServerSocket$$setSocketFactory(Class callerClass, SocketImplFactory fac); - void check$jdk_vm_ci_services_Services$$loadSingle(Class callerClass, Class service, boolean required); + void check$java_net_Socket$$setSocketImplFactory(Class callerClass, SocketImplFactory fac); - void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class callerClass); + void check$java_net_URL$$setURLStreamHandlerFactory(Class callerClass, URLStreamHandlerFactory fac); - void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class callerClass, Thread.UncaughtExceptionHandler ueh); + void check$java_net_URLConnection$$setFileNameMap(Class callerClass, FileNameMap map); - void check$java_util_spi_LocaleServiceProvider$(Class callerClass); + void check$java_net_URLConnection$$setContentHandlerFactory(Class callerClass, ContentHandlerFactory fac); void check$java_text_spi_BreakIteratorProvider$(Class callerClass); @@ -200,6 +208,8 @@ public interface EntitlementChecker { void check$java_util_spi_LocaleNameProvider$(Class callerClass); + void check$java_util_spi_LocaleServiceProvider$(Class callerClass); + void check$java_util_spi_TimeZoneNameProvider$(Class callerClass); void check$java_util_logging_LogManager$(Class callerClass); @@ -210,19 +220,17 @@ public interface EntitlementChecker { void check$java_util_TimeZone$$setDefault(Class callerClass, TimeZone zone); - void check$java_net_DatagramSocket$$setDatagramSocketImplFactory(Class callerClass, DatagramSocketImplFactory fac); - - void check$java_net_HttpURLConnection$$setFollowRedirects(Class callerClass, boolean set); + void check$jdk_tools_jlink_internal_Jlink$(Class callerClass); - void check$java_net_ServerSocket$$setSocketFactory(Class callerClass, 
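All of the check$ declarations being regrouped here follow one naming convention, which
parseCheckerMethodSignature (exercised in the tests above) decodes: after the check$ prefix
comes the target class with dots encoded as underscores, then $$ before a static method name,
a single $ before an instance method name, or a bare trailing $ for a constructor. A simplified
decoder sketch (illustrative; the real parser also derives parameter types and does not assume
class names are underscore-free):

    final class CheckNameDecoder {
        record Decoded(String targetClass, String method, boolean isStatic) {}

        static Decoded decode(String checkerMethodName) {
            String rest = checkerMethodName.substring("check$".length());
            boolean isStatic = rest.contains("$$");
            String[] parts = rest.split("\\$\\$?", 2); // class part, then method part (empty for a ctor)
            String targetClass = parts[0].replace('_', '.');
            String method = (parts.length > 1 && parts[1].isEmpty() == false) ? parts[1] : "<init>";
            return new Decoded(targetClass, method, isStatic);
        }
    }

    // decode("check$java_lang_System$$setProperty")   -> ("java.lang.System", "setProperty", static)
    // decode("check$java_net_DatagramSocket$connect") -> ("java.net.DatagramSocket", "connect", instance)
    // decode("check$java_lang_ClassLoader$")          -> ("java.lang.ClassLoader", "<init>", instance)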
SocketImplFactory fac); + void check$jdk_tools_jlink_internal_Main$$run(Class callerClass, PrintWriter out, PrintWriter err, String... args); - void check$java_net_Socket$$setSocketImplFactory(Class callerClass, SocketImplFactory fac); + void check$jdk_vm_ci_services_JVMCIServiceLocator$$getProviders(Class callerClass, Class service); - void check$java_net_URL$$setURLStreamHandlerFactory(Class callerClass, URLStreamHandlerFactory fac); + void check$jdk_vm_ci_services_Services$$load(Class callerClass, Class service); - void check$java_net_URLConnection$$setFileNameMap(Class callerClass, FileNameMap map); + void check$jdk_vm_ci_services_Services$$loadSingle(Class callerClass, Class service, boolean required); - void check$java_net_URLConnection$$setContentHandlerFactory(Class callerClass, ContentHandlerFactory fac); + void check$java_nio_charset_spi_CharsetProvider$(Class callerClass); /// ///////////////// // @@ -232,10 +240,6 @@ public interface EntitlementChecker { void check$java_net_ResponseCache$$setDefault(Class callerClass, ResponseCache rc); - void check$java_net_spi_InetAddressResolverProvider$(Class callerClass); - - void check$java_net_spi_URLStreamHandlerProvider$(Class callerClass); - void check$java_net_URL$(Class callerClass, String protocol, String host, int port, String file, URLStreamHandler handler); void check$java_net_URL$(Class callerClass, URL context, String spec, URLStreamHandler handler); @@ -246,14 +250,14 @@ public interface EntitlementChecker { void check$java_net_DatagramSocket$connect(Class callerClass, DatagramSocket that, SocketAddress addr); - void check$java_net_DatagramSocket$send(Class callerClass, DatagramSocket that, DatagramPacket p); - - void check$java_net_DatagramSocket$receive(Class callerClass, DatagramSocket that, DatagramPacket p); - void check$java_net_DatagramSocket$joinGroup(Class callerClass, DatagramSocket that, SocketAddress addr, NetworkInterface ni); void check$java_net_DatagramSocket$leaveGroup(Class callerClass, DatagramSocket that, SocketAddress addr, NetworkInterface ni); + void check$java_net_DatagramSocket$receive(Class callerClass, DatagramSocket that, DatagramPacket p); + + void check$java_net_DatagramSocket$send(Class callerClass, DatagramSocket that, DatagramPacket p); + void check$java_net_MulticastSocket$joinGroup(Class callerClass, MulticastSocket that, InetAddress addr); void check$java_net_MulticastSocket$joinGroup(Class callerClass, MulticastSocket that, SocketAddress addr, NetworkInterface ni); @@ -264,6 +268,10 @@ public interface EntitlementChecker { void check$java_net_MulticastSocket$send(Class callerClass, MulticastSocket that, DatagramPacket p, byte ttl); + void check$java_net_spi_InetAddressResolverProvider$(Class callerClass); + + void check$java_net_spi_URLStreamHandlerProvider$(Class callerClass); + // Binding/connecting ctor void check$java_net_ServerSocket$(Class callerClass, int port); @@ -421,6 +429,16 @@ public interface EntitlementChecker { void check$sun_nio_ch_DatagramChannelImpl$receive(Class callerClass, DatagramChannel that, ByteBuffer dst); + // providers (SPI) + + // protected constructors + void check$java_nio_channels_spi_SelectorProvider$(Class callerClass); + + void check$java_nio_channels_spi_AsynchronousChannelProvider$(Class callerClass); + + // provider methods (dynamic) + void checkSelectorProviderInheritedChannel(Class callerClass, SelectorProvider that); + /// ///////////////// // // Load native libraries @@ -495,24 +513,210 @@ public interface EntitlementChecker { // File access // - 
void check$java_util_Scanner$(Class callerClass, File source); + // old io (ie File) + void check$java_io_File$canExecute(Class callerClass, File file); - void check$java_util_Scanner$(Class callerClass, File source, String charsetName); + void check$java_io_File$canRead(Class callerClass, File file); - void check$java_util_Scanner$(Class callerClass, File source, Charset charset); + void check$java_io_File$canWrite(Class callerClass, File file); - void check$java_io_FileOutputStream$(Class callerClass, String name); + void check$java_io_File$createNewFile(Class callerClass, File file); - void check$java_io_FileOutputStream$(Class callerClass, String name, boolean append); + void check$java_io_File$$createTempFile(Class callerClass, String prefix, String suffix, File directory); + + void check$java_io_File$delete(Class callerClass, File file); + + void check$java_io_File$deleteOnExit(Class callerClass, File file); + + void check$java_io_File$exists(Class callerClass, File file); + + void check$java_io_File$isDirectory(Class callerClass, File file); + + void check$java_io_File$isFile(Class callerClass, File file); + + void check$java_io_File$isHidden(Class callerClass, File file); + + void check$java_io_File$lastModified(Class callerClass, File file); + + void check$java_io_File$length(Class callerClass, File file); + + void check$java_io_File$list(Class callerClass, File file); + + void check$java_io_File$list(Class callerClass, File file, FilenameFilter filter); + + void check$java_io_File$listFiles(Class callerClass, File file); + + void check$java_io_File$listFiles(Class callerClass, File file, FileFilter filter); + + void check$java_io_File$listFiles(Class callerClass, File file, FilenameFilter filter); + + void check$java_io_File$mkdir(Class callerClass, File file); + + void check$java_io_File$mkdirs(Class callerClass, File file); + + void check$java_io_File$renameTo(Class callerClass, File file, File dest); + + void check$java_io_File$setExecutable(Class callerClass, File file, boolean executable); + + void check$java_io_File$setExecutable(Class callerClass, File file, boolean executable, boolean ownerOnly); + + void check$java_io_File$setLastModified(Class callerClass, File file, long time); + + void check$java_io_File$setReadable(Class callerClass, File file, boolean readable); + + void check$java_io_File$setReadable(Class callerClass, File file, boolean readable, boolean ownerOnly); + + void check$java_io_File$setReadOnly(Class callerClass, File file); + + void check$java_io_File$setWritable(Class callerClass, File file, boolean writable); + + void check$java_io_File$setWritable(Class callerClass, File file, boolean writable, boolean ownerOnly); void check$java_io_FileOutputStream$(Class callerClass, File file); void check$java_io_FileOutputStream$(Class callerClass, File file, boolean append); + void check$java_io_FileOutputStream$(Class callerClass, String name); + + void check$java_io_FileOutputStream$(Class callerClass, String name, boolean append); + + void check$java_util_Scanner$(Class callerClass, File source); + + void check$java_util_Scanner$(Class callerClass, File source, String charsetName); + + void check$java_util_Scanner$(Class callerClass, File source, Charset charset); + + // nio + void check$java_nio_file_Files$$getOwner(Class callerClass, Path path, LinkOption... 
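/*
 * Reviewer note: the java.io.File block above mirrors File's public API one to one. A sketch
 * of the expected call protocol (an assumption -- the bytecode instrumentation itself lives
 * in the entitlement agent, outside this hunk): before the real method body runs, the
 * instrumented JDK method invokes the matching checker entry, conceptually
 *
 *   // inside an instrumented File#delete:
 *   checker.check$java_io_File$delete(callerClass, this);  // throws if the caller is not entitled
 *   // ... original delete logic proceeds only if no exception was thrown
 */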
options); + void check$java_nio_file_Files$$probeContentType(Class callerClass, Path path); void check$java_nio_file_Files$$setOwner(Class callerClass, Path path, UserPrincipal principal); - // hand-wired methods + // file system providers + void check$java_nio_file_spi_FileSystemProvider$(Class callerClass); + + void checkNewFileSystem(Class callerClass, FileSystemProvider that, URI uri, Map env); + + void checkNewFileSystem(Class callerClass, FileSystemProvider that, Path path, Map env); + void checkNewInputStream(Class callerClass, FileSystemProvider that, Path path, OpenOption... options); + + void checkNewOutputStream(Class callerClass, FileSystemProvider that, Path path, OpenOption... options); + + void checkNewFileChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + FileAttribute... attrs + ); + + void checkNewAsynchronousFileChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + ExecutorService executor, + FileAttribute... attrs + ); + + void checkNewByteChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + FileAttribute... attrs + ); + + void checkNewDirectoryStream(Class callerClass, FileSystemProvider that, Path dir, DirectoryStream.Filter filter); + + void checkCreateDirectory(Class callerClass, FileSystemProvider that, Path dir, FileAttribute... attrs); + + void checkCreateSymbolicLink(Class callerClass, FileSystemProvider that, Path link, Path target, FileAttribute... attrs); + + void checkCreateLink(Class callerClass, FileSystemProvider that, Path link, Path existing); + + void checkDelete(Class callerClass, FileSystemProvider that, Path path); + + void checkDeleteIfExists(Class callerClass, FileSystemProvider that, Path path); + + void checkReadSymbolicLink(Class callerClass, FileSystemProvider that, Path link); + + void checkCopy(Class callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options); + + void checkMove(Class callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options); + + void checkIsSameFile(Class callerClass, FileSystemProvider that, Path path, Path path2); + + void checkIsHidden(Class callerClass, FileSystemProvider that, Path path); + + void checkGetFileStore(Class callerClass, FileSystemProvider that, Path path); + + void checkCheckAccess(Class callerClass, FileSystemProvider that, Path path, AccessMode... modes); + + void checkGetFileAttributeView(Class callerClass, FileSystemProvider that, Path path, Class type, LinkOption... options); + + void checkReadAttributes(Class callerClass, FileSystemProvider that, Path path, Class type, LinkOption... options); + + void checkReadAttributes(Class callerClass, FileSystemProvider that, Path path, String attributes, LinkOption... options); + + void checkReadAttributesIfExists(Class callerClass, FileSystemProvider that, Path path, Class type, LinkOption... options); + + void checkSetAttribute(Class callerClass, FileSystemProvider that, Path path, String attribute, Object value, LinkOption... options); + + void checkExists(Class callerClass, FileSystemProvider that, Path path, LinkOption... 
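/*
 * Reviewer note: unlike the check$... entries above, these checkXxx(Class callerClass,
 * FileSystemProvider that, ...) methods take the provider instance explicitly. The
 * "// provider methods (dynamic)" comment earlier in this interface suggests why: SPI calls
 * dispatch through an arbitrary provider subclass, so they cannot be bound by the static
 * check$Class$method naming scheme and are hand-wired against the spi base class instead,
 * with "that" identifying the concrete provider.
 */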
options); + + // file store + void checkGetFileStoreAttributeView(Class callerClass, FileStore that, Class type); + + void checkGetAttribute(Class callerClass, FileStore that, String attribute); + + void checkGetBlockSize(Class callerClass, FileStore that); + + void checkGetTotalSpace(Class callerClass, FileStore that); + + void checkGetUnallocatedSpace(Class callerClass, FileStore that); + + void checkGetUsableSpace(Class callerClass, FileStore that); + + void checkIsReadOnly(Class callerClass, FileStore that); + + void checkName(Class callerClass, FileStore that); + + void checkType(Class callerClass, FileStore that); + + // path + void checkPathToRealPath(Class callerClass, Path that, LinkOption... options); + + void checkPathRegister(Class callerClass, Path that, WatchService watcher, WatchEvent.Kind... events); + + void checkPathRegister( + Class callerClass, + Path that, + WatchService watcher, + WatchEvent.Kind[] events, + WatchEvent.Modifier... modifiers + ); + + //////////////////// + // + // Thread management + // + + void check$java_lang_Thread$start(Class callerClass, Thread thread); + + void check$java_lang_Thread$setDaemon(Class callerClass, Thread thread, boolean on); + + void check$java_lang_ThreadGroup$setDaemon(Class callerClass, ThreadGroup threadGroup, boolean daemon); + + void check$java_util_concurrent_ForkJoinPool$setParallelism(Class callerClass, ForkJoinPool forkJoinPool, int size); + + void check$java_lang_Thread$setName(Class callerClass, Thread thread, String name); + + void check$java_lang_Thread$setPriority(Class callerClass, Thread thread, int newPriority); + + void check$java_lang_Thread$setUncaughtExceptionHandler(Class callerClass, Thread thread, Thread.UncaughtExceptionHandler ueh); + + void check$java_lang_ThreadGroup$setMaxPriority(Class callerClass, ThreadGroup threadGroup, int pri); } diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java index 24d7472e07c65..58bafdc47a0bd 100644 --- a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java +++ b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java @@ -14,17 +14,47 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.nio.file.attribute.UserPrincipal; +import java.security.SecureRandom; +@SuppressForbidden(reason = "Exposes forbidden APIs for testing purposes") public final class EntitledActions { private EntitledActions() {} - @SuppressForbidden(reason = "Exposes forbidden APIs for testing purposes") - static void System_clearProperty(String key) { - System.clearProperty(key); + private static final SecureRandom random = new SecureRandom(); + + private static final Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir")); + + private static Path readDir() { + return testRootDir.resolve("read_dir"); + } + + private static Path readWriteDir() { + return testRootDir.resolve("read_write_dir"); } public static UserPrincipal getFileOwner(Path path) throws IOException { return Files.getOwner(path); } + + public static void createFile(Path path) throws IOException { + Files.createFile(path); + } + + public static Path createTempFileForRead() throws IOException { + return Files.createFile(readDir().resolve("entitlements-" 
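/*
 * Reviewer note (inference from the module split): these helpers live in the *entitled* QA
 * plugin on purpose. The test plugin calls EntitledActions to set up fixtures (temp files,
 * directories, symlinks) so that the setup operations themselves do not trip the very file
 * checks under test.
 */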
+ random.nextLong() + ".tmp")); + } + + public static Path createTempFileForWrite() throws IOException { + return Files.createFile(readWriteDir().resolve("entitlements-" + random.nextLong() + ".tmp")); + } + + public static Path createTempDirectoryForWrite() throws IOException { + return Files.createDirectory(readWriteDir().resolve("entitlements-dir-" + random.nextLong())); + } + + public static Path createTempSymbolicLink() throws IOException { + return Files.createSymbolicLink(readDir().resolve("entitlements-link-" + random.nextLong()), readWriteDir()); + } } diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java index 7a60d92ecc552..cec48ac168678 100644 --- a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java +++ b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java @@ -15,7 +15,7 @@ import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.Plugin; -import static org.elasticsearch.entitlement.qa.entitled.EntitledActions.System_clearProperty; +import java.util.concurrent.atomic.AtomicBoolean; public class EntitledPlugin extends Plugin implements ExtensiblePlugin { @@ -28,11 +28,19 @@ public static void selfTest() { selfTestNotEntitled(); } - private static final String SELF_TEST_PROPERTY = "org.elasticsearch.entitlement.qa.selfTest"; - private static void selfTestEntitled() { logger.debug("selfTestEntitled"); - System_clearProperty(SELF_TEST_PROPERTY); + AtomicBoolean threadRan = new AtomicBoolean(false); + try { + Thread testThread = new Thread(() -> threadRan.set(true), "testThread"); + testThread.start(); + testThread.join(); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + if (threadRan.get() == false) { + throw new AssertionError("Self-test thread did not run"); + } } private static void selfTestNotEntitled() { diff --git a/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml b/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml deleted file mode 100644 index 81acd4c467f94..0000000000000 --- a/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml +++ /dev/null @@ -1,4 +0,0 @@ -org.elasticsearch.entitlement.qa.entitled: - - write_system_properties: - properties: - - org.elasticsearch.entitlement.qa.selfTest diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java index 6564e0eed41e1..ca03014634076 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java @@ -17,11 +17,38 @@ import java.net.DatagramSocketImpl; import java.net.InetAddress; import java.net.NetworkInterface; +import java.net.ProtocolFamily; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketAddress; import java.net.SocketException; import java.net.SocketImpl; +import java.net.URI; +import java.nio.channels.AsynchronousChannelGroup; +import 
java.nio.channels.AsynchronousServerSocketChannel; +import java.nio.channels.AsynchronousSocketChannel; +import java.nio.channels.DatagramChannel; +import java.nio.channels.Pipe; +import java.nio.channels.SeekableByteChannel; +import java.nio.channels.ServerSocketChannel; +import java.nio.channels.SocketChannel; +import java.nio.channels.spi.AbstractSelector; +import java.nio.channels.spi.AsynchronousChannelProvider; +import java.nio.channels.spi.SelectorProvider; +import java.nio.charset.Charset; +import java.nio.charset.spi.CharsetProvider; +import java.nio.file.AccessMode; +import java.nio.file.CopyOption; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.FileSystem; +import java.nio.file.LinkOption; +import java.nio.file.OpenOption; +import java.nio.file.Path; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.FileAttributeView; +import java.nio.file.spi.FileSystemProvider; import java.security.cert.Certificate; import java.text.BreakIterator; import java.text.Collator; @@ -35,8 +62,12 @@ import java.text.spi.DateFormatSymbolsProvider; import java.text.spi.DecimalFormatSymbolsProvider; import java.text.spi.NumberFormatProvider; +import java.util.Iterator; import java.util.Locale; import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ThreadFactory; import java.util.spi.CalendarDataProvider; import java.util.spi.CalendarNameProvider; import java.util.spi.CurrencyNameProvider; @@ -486,4 +517,163 @@ protected void connect(InetAddress address, int port) throws SocketException {} private static RuntimeException unexpected() { return new IllegalStateException("This method isn't supposed to be called"); } + + static class DummySelectorProvider extends SelectorProvider { + @Override + public DatagramChannel openDatagramChannel() throws IOException { + return null; + } + + @Override + public DatagramChannel openDatagramChannel(ProtocolFamily family) throws IOException { + return null; + } + + @Override + public Pipe openPipe() throws IOException { + return null; + } + + @Override + public AbstractSelector openSelector() throws IOException { + return null; + } + + @Override + public ServerSocketChannel openServerSocketChannel() throws IOException { + return null; + } + + @Override + public SocketChannel openSocketChannel() throws IOException { + return null; + } + } + + static class DummyAsynchronousChannelProvider extends AsynchronousChannelProvider { + @Override + public AsynchronousChannelGroup openAsynchronousChannelGroup(int nThreads, ThreadFactory threadFactory) throws IOException { + return null; + } + + @Override + public AsynchronousChannelGroup openAsynchronousChannelGroup(ExecutorService executor, int initialSize) throws IOException { + return null; + } + + @Override + public AsynchronousServerSocketChannel openAsynchronousServerSocketChannel(AsynchronousChannelGroup group) throws IOException { + return null; + } + + @Override + public AsynchronousSocketChannel openAsynchronousSocketChannel(AsynchronousChannelGroup group) throws IOException { + return null; + } + } + + static class DummyCharsetProvider extends CharsetProvider { + @Override + public Iterator charsets() { + return null; + } + + @Override + public Charset charsetForName(String charsetName) { + return null; + } + } + + static class DummyFileSystemProvider extends FileSystemProvider { + @Override + public String getScheme() { + return 
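/*
 * Reviewer note: these Dummy* providers exist only so the QA plugin can reach the protected
 * SPI constructors guarded by the new check$...spi...$ entries; every override is a stub
 * (null or no-op) because the instances are never meant to be used beyond construction.
 */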
""; + } + + @Override + public FileSystem newFileSystem(URI uri, Map env) throws IOException { + return null; + } + + @Override + public FileSystem getFileSystem(URI uri) { + return null; + } + + @Override + public Path getPath(URI uri) { + return null; + } + + @Override + public SeekableByteChannel newByteChannel(Path path, Set options, FileAttribute... attrs) + throws IOException { + return null; + } + + @Override + public DirectoryStream newDirectoryStream(Path dir, DirectoryStream.Filter filter) throws IOException { + return null; + } + + @Override + public void createDirectory(Path dir, FileAttribute... attrs) throws IOException { + + } + + @Override + public void delete(Path path) throws IOException { + + } + + @Override + public void copy(Path source, Path target, CopyOption... options) throws IOException { + + } + + @Override + public void move(Path source, Path target, CopyOption... options) throws IOException { + + } + + @Override + public boolean isSameFile(Path path, Path path2) throws IOException { + return false; + } + + @Override + public boolean isHidden(Path path) throws IOException { + return false; + } + + @Override + public FileStore getFileStore(Path path) throws IOException { + return null; + } + + @Override + public void checkAccess(Path path, AccessMode... modes) throws IOException { + + } + + @Override + public V getFileAttributeView(Path path, Class type, LinkOption... options) { + return null; + } + + @Override + public A readAttributes(Path path, Class type, LinkOption... options) throws IOException { + return null; + } + + @Override + public Map readAttributes(Path path, String attributes, LinkOption... options) throws IOException { + return Map.of(); + } + + @Override + public void setAttribute(Path path, String attribute, Object value, LinkOption... options) throws IOException { + + } + } } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java new file mode 100644 index 0000000000000..a4b9767c4c64f --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface EntitlementTest { + enum ExpectedAccess { + PLUGINS, + ES_MODULES_ONLY, + SERVER_ONLY, + ALWAYS_DENIED + } + + ExpectedAccess expectedAccess(); + + int fromJavaVersion() default -1; +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java index 6e15ff4d0cdd1..9a6c59e204728 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.qa.entitled.EntitledActions; +import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; @@ -22,59 +23,230 @@ import java.nio.file.attribute.UserPrincipal; import java.util.Scanner; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; + @SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") class FileCheckActions { - private static Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir")); + static Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir")); - private static Path readDir() { + static Path readDir() { return testRootDir.resolve("read_dir"); } - private static Path readWriteDir() { + static Path readWriteDir() { return testRootDir.resolve("read_write_dir"); } - private static Path readFile() { + static Path readFile() { return testRootDir.resolve("read_file"); } - private static Path readWriteFile() { + static Path readWriteFile() { return testRootDir.resolve("read_write_file"); } + @EntitlementTest(expectedAccess = PLUGINS) + static void fileCanExecute() throws IOException { + readFile().toFile().canExecute(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileCanRead() throws IOException { + readFile().toFile().canRead(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileCanWrite() throws IOException { + readFile().toFile().canWrite(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileCreateNewFile() throws IOException { + readWriteDir().resolve("new_file").toFile().createNewFile(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileCreateTempFile() throws IOException { + File.createTempFile("prefix", "suffix", readWriteDir().toFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileDelete() throws IOException { + Path toDelete = readWriteDir().resolve("to_delete"); + EntitledActions.createFile(toDelete); + toDelete.toFile().delete(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileDeleteOnExit() throws IOException { + Path toDelete = readWriteDir().resolve("to_delete_on_exit"); + EntitledActions.createFile(toDelete); + toDelete.toFile().deleteOnExit(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileExists() throws IOException { + readFile().toFile().exists(); + } + + 
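// Reviewer note: the methods below all follow the contract implied by the new @EntitlementTest
// annotation -- a static, no-argument method whose annotation declares who may perform the
// operation, optionally gated on a minimum Java version via fromJavaVersion. A hypothetical
// new test would look like (sketch only, the name and body are illustrative):
//
//   @EntitlementTest(expectedAccess = PLUGINS, fromJavaVersion = 22)
//   static void someJava22OnlyFileCheck() throws IOException {
//       readFile().toFile().canRead();  // any single guarded operation
//   }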
@EntitlementTest(expectedAccess = PLUGINS) + static void fileIsDirectory() throws IOException { + readFile().toFile().isDirectory(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileIsFile() throws IOException { + readFile().toFile().isFile(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileIsHidden() throws IOException { + readFile().toFile().isHidden(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileLastModified() throws IOException { + readFile().toFile().lastModified(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileLength() throws IOException { + readFile().toFile().length(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileList() throws IOException { + readDir().toFile().list(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileListWithFilter() throws IOException { + readDir().toFile().list((dir, name) -> true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileListFiles() throws IOException { + readDir().toFile().listFiles(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileListFilesWithFileFilter() throws IOException { + readDir().toFile().listFiles(pathname -> true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileListFilesWithFilenameFilter() throws IOException { + readDir().toFile().listFiles((dir, name) -> true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileMkdir() throws IOException { + Path mkdir = readWriteDir().resolve("mkdir"); + mkdir.toFile().mkdir(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileMkdirs() throws IOException { + Path mkdir = readWriteDir().resolve("mkdirs"); + mkdir.toFile().mkdirs(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileRenameTo() throws IOException { + Path toRename = readWriteDir().resolve("to_rename"); + EntitledActions.createFile(toRename); + toRename.toFile().renameTo(readWriteDir().resolve("renamed").toFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetExecutable() throws IOException { + readWriteFile().toFile().setExecutable(false); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetExecutableOwner() throws IOException { + readWriteFile().toFile().setExecutable(false, false); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetLastModified() throws IOException { + readWriteFile().toFile().setLastModified(System.currentTimeMillis()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetReadable() throws IOException { + readWriteFile().toFile().setReadable(true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetReadableOwner() throws IOException { + readWriteFile().toFile().setReadable(true, false); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetReadOnly() throws IOException { + Path readOnly = readWriteDir().resolve("read_only"); + EntitledActions.createFile(readOnly); + readOnly.toFile().setReadOnly(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetWritable() throws IOException { + readWriteFile().toFile().setWritable(true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetWritableOwner() throws IOException { + readWriteFile().toFile().setWritable(true, false); + } + + @EntitlementTest(expectedAccess = PLUGINS) static void createScannerFile() throws FileNotFoundException { new Scanner(readFile().toFile()); 
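        // Reviewer note: a constructor check -- new Scanner(File) opens the file during
        // construction, which is why EntitlementChecker declares check$java_util_Scanner$
        // constructor entries rather than method entries for these cases.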
} + @EntitlementTest(expectedAccess = PLUGINS) static void createScannerFileWithCharset() throws IOException { new Scanner(readFile().toFile(), StandardCharsets.UTF_8); } + @EntitlementTest(expectedAccess = PLUGINS) static void createScannerFileWithCharsetName() throws FileNotFoundException { new Scanner(readFile().toFile(), "UTF-8"); } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamString() throws IOException { new FileOutputStream(readWriteFile().toString()).close(); } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamStringWithAppend() throws IOException { new FileOutputStream(readWriteFile().toString(), false).close(); } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamFile() throws IOException { new FileOutputStream(readWriteFile().toFile()).close(); } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamFileWithAppend() throws IOException { new FileOutputStream(readWriteFile().toFile(), false).close(); } + @EntitlementTest(expectedAccess = PLUGINS) + static void filesGetOwner() throws IOException { + Files.getOwner(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) static void filesProbeContentType() throws IOException { Files.probeContentType(readFile()); } + @EntitlementTest(expectedAccess = PLUGINS) static void filesSetOwner() throws IOException { UserPrincipal owner = EntitledActions.getFileOwner(readWriteFile()); Files.setOwner(readWriteFile(), owner); // set to existing owner, just trying to execute the method diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileStoreActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileStoreActions.java new file mode 100644 index 0000000000000..0c8026ea9fee4 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileStoreActions.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.attribute.FileStoreAttributeView; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; + +class FileStoreActions { + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkGetFileStoreAttributeView() throws IOException { + Files.getFileStore(FileCheckActions.readWriteFile()).getFileStoreAttributeView(FileStoreAttributeView.class); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetAttribute() throws IOException { + try { + Files.getFileStore(FileCheckActions.readFile()).getAttribute("zfs:compression"); + } catch (UnsupportedOperationException e) { + // It's OK if the attribute view is not available or it does not support reading the attribute + } + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetBlockSize() throws IOException { + Files.getFileStore(FileCheckActions.readWriteFile()).getBlockSize(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetTotalSpace() throws IOException { + Files.getFileStore(FileCheckActions.readWriteFile()).getTotalSpace(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetUnallocatedSpace() throws IOException { + Files.getFileStore(FileCheckActions.readWriteFile()).getUnallocatedSpace(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetUsableSpace() throws IOException { + Files.getFileStore(FileCheckActions.readFile()).getUsableSpace(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkIsReadOnly() throws IOException { + Files.getFileStore(FileCheckActions.readFile()).isReadOnly(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkName() throws IOException { + Files.getFileStore(FileCheckActions.readFile()).name(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkType() throws IOException { + Files.getFileStore(FileCheckActions.readFile()).type(); + } + + private FileStoreActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java index 50980bc230f55..5b3265c5496ba 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java @@ -12,7 +12,7 @@ class LoadNativeLibrariesCheckActions { static void runtimeLoad() { try { - Runtime.getRuntime().load("libSomeLibFile.so"); + Runtime.getRuntime().load(FileCheckActions.readDir().resolve("libSomeLibFile.so").toString()); } catch (UnsatisfiedLinkError ignored) { // The library does not exist, so we expect to fail loading it } @@ -20,7 +20,7 @@ static void runtimeLoad() { static void systemLoad() { try { - System.load("libSomeLibFile.so"); + System.load(FileCheckActions.readDir().resolve("libSomeLibFile.so").toString()); } catch (UnsatisfiedLinkError ignored) { // The library does not exist, so we expect to fail loading it } diff --git 
a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/ManageThreadsActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/ManageThreadsActions.java new file mode 100644 index 0000000000000..53f17faf06998 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/ManageThreadsActions.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.SuppressForbidden; + +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.atomic.AtomicBoolean; + +import static java.lang.Thread.currentThread; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; + +@SuppressForbidden(reason = "testing entitlements") +@SuppressWarnings("unused") // used via reflection +class ManageThreadsActions { + private ManageThreadsActions() {} + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$start() throws InterruptedException { + AtomicBoolean threadRan = new AtomicBoolean(false); + Thread thread = new Thread(() -> threadRan.set(true), "test"); + thread.start(); + thread.join(); + assert threadRan.get(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$setDaemon() { + new Thread().setDaemon(true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_ThreadGroup$setDaemon() { + currentThread().getThreadGroup().setDaemon(currentThread().getThreadGroup().isDaemon()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_util_concurrent_ForkJoinPool$setParallelism() { + ForkJoinPool.commonPool().setParallelism(ForkJoinPool.commonPool().getParallelism()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$setName() { + currentThread().setName(currentThread().getName()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$setPriority() { + currentThread().setPriority(currentThread().getPriority()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$setUncaughtExceptionHandler() { + currentThread().setUncaughtExceptionHandler(currentThread().getUncaughtExceptionHandler()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_ThreadGroup$setMaxPriority() { + currentThread().getThreadGroup().setMaxPriority(currentThread().getThreadGroup().getMaxPriority()); + } + +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NativeActions.java similarity index 83% rename from libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java rename to 
libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NativeActions.java index 0a69f7255a200..d731f850e0f4d 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NativeActions.java @@ -30,9 +30,12 @@ import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_LONG; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; -class VersionSpecificNativeChecks { +class NativeActions { + @EntitlementTest(expectedAccess = SERVER_ONLY) static void enableNativeAccess() throws Exception { ModuleLayer parent = ModuleLayer.boot(); @@ -49,16 +52,19 @@ static void enableNativeAccess() throws Exception { controller.enableNativeAccess(targetModule.get()); } + @EntitlementTest(expectedAccess = PLUGINS) static void addressLayoutWithTargetLayout() { AddressLayout addressLayout = ADDRESS.withoutTargetLayout(); addressLayout.withTargetLayout(MemoryLayout.sequenceLayout(Long.MAX_VALUE, ValueLayout.JAVA_BYTE)); } + @EntitlementTest(expectedAccess = PLUGINS) static void linkerDowncallHandle() { Linker linker = Linker.nativeLinker(); linker.downcallHandle(FunctionDescriptor.of(JAVA_LONG, ADDRESS)); } + @EntitlementTest(expectedAccess = PLUGINS) static void linkerDowncallHandleWithAddress() { Linker linker = Linker.nativeLinker(); linker.downcallHandle(linker.defaultLookup().find("strlen").get(), FunctionDescriptor.of(JAVA_LONG, ADDRESS)); @@ -68,12 +74,13 @@ static int callback() { return 0; } + @EntitlementTest(expectedAccess = PLUGINS) static void linkerUpcallStub() throws NoSuchMethodException { Linker linker = Linker.nativeLinker(); MethodHandle mh = null; try { - mh = MethodHandles.lookup().findStatic(VersionSpecificNativeChecks.class, "callback", MethodType.methodType(int.class)); + mh = MethodHandles.lookup().findStatic(NativeActions.class, "callback", MethodType.methodType(int.class)); } catch (IllegalAccessException e) { assert false; } @@ -82,32 +89,37 @@ static void linkerUpcallStub() throws NoSuchMethodException { linker.upcallStub(mh, callbackDescriptor, Arena.ofAuto()); } + @EntitlementTest(expectedAccess = PLUGINS) static void memorySegmentReinterpret() { Arena arena = Arena.ofAuto(); MemorySegment segment = arena.allocate(100); segment.reinterpret(50); } + @EntitlementTest(expectedAccess = PLUGINS) static void memorySegmentReinterpretWithCleanup() { Arena arena = Arena.ofAuto(); MemorySegment segment = arena.allocate(100); segment.reinterpret(Arena.ofAuto(), s -> {}); } + @EntitlementTest(expectedAccess = PLUGINS) static void memorySegmentReinterpretWithSizeAndCleanup() { Arena arena = Arena.ofAuto(); MemorySegment segment = arena.allocate(100); segment.reinterpret(50, Arena.ofAuto(), s -> {}); } + @EntitlementTest(expectedAccess = PLUGINS) static void symbolLookupWithPath() { try { - SymbolLookup.libraryLookup(Path.of("/foo/bar/libFoo.so"), Arena.ofAuto()); + SymbolLookup.libraryLookup(FileCheckActions.readDir().resolve("libFoo.so"), Arena.ofAuto()); } catch (IllegalArgumentException e) { // IllegalArgumentException is thrown if path does not point to a valid library (and it does not) } } + @EntitlementTest(expectedAccess = PLUGINS) static void symbolLookupWithName() { try { 
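            // Reviewer note: same pattern as the other native checks -- the lookup is expected
            // to fail (no library named "foo" exists), but the entitlement check fires before
            // the lookup failure, so the check is exercised either way.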
SymbolLookup.libraryLookup("foo", Arena.ofAuto()); diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioFileSystemActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioFileSystemActions.java new file mode 100644 index 0000000000000..9dc36bda840e5 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioFileSystemActions.java @@ -0,0 +1,230 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.entitlement.qa.entitled.EntitledActions; + +import java.io.IOException; +import java.net.URI; +import java.nio.file.FileSystemException; +import java.nio.file.FileSystems; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileOwnerAttributeView; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; + +class NioFileSystemActions { + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void createFileSystemProvider() { + new DummyImplementations.DummyFileSystemProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkNewFileSystemFromUri() throws IOException { + try (var fs = FileSystems.getDefault().provider().newFileSystem(URI.create("/dummy/path"), Map.of())) {} + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkNewFileSystemFromPath() { + var fs = FileSystems.getDefault().provider(); + try (var newFs = fs.newFileSystem(Path.of("/dummy/path"), Map.of())) {} catch (IOException e) { + // When entitled, we expect to throw IOException, as the path is not valid - we don't really want to create a FS + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewInputStream() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var is = fs.newInputStream(FileCheckActions.readFile())) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewOutputStream() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var os = fs.newOutputStream(FileCheckActions.readWriteFile())) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewFileChannelRead() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var fc = fs.newFileChannel(FileCheckActions.readFile(), Set.of(StandardOpenOption.READ))) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewFileChannelWrite() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var fc = fs.newFileChannel(FileCheckActions.readWriteFile(), 
Set.of(StandardOpenOption.WRITE))) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewAsynchronousFileChannel() throws IOException { + var fs = FileSystems.getDefault().provider(); + try ( + var fc = fs.newAsynchronousFileChannel( + FileCheckActions.readWriteFile(), + Set.of(StandardOpenOption.WRITE), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ) + ) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewByteChannel() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var bc = fs.newByteChannel(FileCheckActions.readWriteFile(), Set.of(StandardOpenOption.WRITE))) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewDirectoryStream() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var bc = fs.newDirectoryStream(FileCheckActions.readDir(), entry -> false)) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCreateDirectory() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + fs.createDirectory(directory.resolve("subdir")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCreateSymbolicLink() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + try { + fs.createSymbolicLink(directory.resolve("link"), FileCheckActions.readFile()); + } catch (UnsupportedOperationException | FileSystemException e) { + // OK not to implement symbolic link in the filesystem + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCreateLink() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + try { + fs.createLink(directory.resolve("link"), FileCheckActions.readFile()); + } catch (UnsupportedOperationException | FileSystemException e) { + // OK not to implement symbolic link in the filesystem + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkDelete() throws IOException { + var fs = FileSystems.getDefault().provider(); + var file = EntitledActions.createTempFileForWrite(); + fs.delete(file); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkDeleteIfExists() throws IOException { + var fs = FileSystems.getDefault().provider(); + var file = EntitledActions.createTempFileForWrite(); + fs.deleteIfExists(file); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkReadSymbolicLink() throws IOException { + var fs = FileSystems.getDefault().provider(); + var link = EntitledActions.createTempSymbolicLink(); + fs.readSymbolicLink(link); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCopy() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + fs.copy(FileCheckActions.readFile(), directory.resolve("copied")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkMove() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + var file = EntitledActions.createTempFileForWrite(); + fs.move(file, directory.resolve("moved")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkIsSameFile() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.isSameFile(FileCheckActions.readWriteFile(), FileCheckActions.readFile()); + 
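        // Reviewer note: these tests deliberately call through FileSystems.getDefault().provider()
        // rather than the java.nio.file.Files facade, so they hit the hand-wired provider-level
        // checkXxx(Class, FileSystemProvider, ...) entry points added to EntitlementChecker.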
} + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkIsHidden() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.isHidden(FileCheckActions.readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkGetFileStore() throws IOException { + var fs = FileSystems.getDefault().provider(); + var file = EntitledActions.createTempFileForRead(); + var store = fs.getFileStore(file); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCheckAccess() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.checkAccess(FileCheckActions.readFile()); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkGetFileAttributeView() { + var fs = FileSystems.getDefault().provider(); + fs.getFileAttributeView(FileCheckActions.readFile(), FileOwnerAttributeView.class); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkReadAttributesWithClass() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.readAttributes(FileCheckActions.readFile(), BasicFileAttributes.class); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkReadAttributesWithString() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.readAttributes(FileCheckActions.readFile(), "*"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkReadAttributesIfExists() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.readAttributesIfExists(FileCheckActions.readFile(), BasicFileAttributes.class); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkSetAttribute() throws IOException { + var fs = FileSystems.getDefault().provider(); + var file = EntitledActions.createTempFileForWrite(); + try { + fs.setAttribute(file, "dos:hidden", true); + } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) { + // OK if the file does not have/does not support the attribute + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkExists() { + var fs = FileSystems.getDefault().provider(); + fs.exists(FileCheckActions.readFile()); + } + + private NioFileSystemActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/PathActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/PathActions.java new file mode 100644 index 0000000000000..5ccb0fa87ebda --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/PathActions.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import java.io.IOException; +import java.nio.file.FileSystems; +import java.nio.file.LinkOption; +import java.nio.file.WatchEvent; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; + +class PathActions { + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkToRealPath() throws IOException { + FileCheckActions.readFile().toRealPath(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkToRealPathNoFollow() throws IOException { + FileCheckActions.readFile().toRealPath(LinkOption.NOFOLLOW_LINKS); + } + + @SuppressWarnings("rawtypes") + @EntitlementTest(expectedAccess = PLUGINS) + static void checkRegister() throws IOException { + try (var watchService = FileSystems.getDefault().newWatchService()) { + FileCheckActions.readFile().register(watchService, new WatchEvent.Kind[0]); + } catch (IllegalArgumentException e) { + // intentionally no events registered + } + } + + @SuppressWarnings("rawtypes") + @EntitlementTest(expectedAccess = PLUGINS) + static void checkRegisterWithModifiers() throws IOException { + try (var watchService = FileSystems.getDefault().newWatchService()) { + FileCheckActions.readFile().register(watchService, new WatchEvent.Kind[0], new WatchEvent.Modifier[0]); + } catch (IllegalArgumentException e) { + // intentionally no events registered + } + } +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index 9b8cae1b72d29..8c0b8d18612f2 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -13,18 +13,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyBreakIteratorProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCalendarDataProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCalendarNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCollatorProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCurrencyNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDateFormatProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDateFormatSymbolsProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDecimalFormatSymbolsProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyLocaleNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyLocaleServiceProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyNumberFormatProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyTimeZoneNameProvider; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; @@ -33,6 +21,9 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import 
java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.HttpURLConnection; @@ -51,9 +42,12 @@ import java.net.URLStreamHandler; import java.net.spi.URLStreamHandlerProvider; import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -61,6 +55,7 @@ import javax.net.ssl.SSLContext; import static java.util.Map.entry; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.alwaysDenied; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.deniedToPlugins; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.forPlugins; @@ -69,7 +64,6 @@ @SuppressWarnings("unused") public class RestEntitlementsCheckAction extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestEntitlementsCheckAction.class); - public static final Thread NO_OP_SHUTDOWN_HOOK = new Thread(() -> {}, "Shutdown hook for testing"); record CheckAction(CheckedRunnable action, boolean isAlwaysDeniedToPlugins, Integer fromJavaVersion) { /** @@ -88,143 +82,160 @@ static CheckAction alwaysDenied(CheckedRunnable action) { } } - private static final Map checkActions = Stream.>of( - entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)), - entry("runtime_halt", deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)), - entry("system_exit", deniedToPlugins(RestEntitlementsCheckAction::systemExit)), - entry("create_classloader", forPlugins(RestEntitlementsCheckAction::createClassLoader)), - entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)), - entry("processBuilder_startPipeline", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline)), - entry("set_https_connection_properties", forPlugins(RestEntitlementsCheckAction::setHttpsConnectionProperties)), - entry("set_default_ssl_socket_factory", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLSocketFactory)), - entry("set_default_hostname_verifier", alwaysDenied(RestEntitlementsCheckAction::setDefaultHostnameVerifier)), - entry("set_default_ssl_context", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLContext)), - entry("system_setIn", alwaysDenied(RestEntitlementsCheckAction::system$$setIn)), - entry("system_setOut", alwaysDenied(RestEntitlementsCheckAction::system$$setOut)), - entry("system_setErr", alwaysDenied(RestEntitlementsCheckAction::system$$setErr)), - entry("runtime_addShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$addShutdownHook)), - entry("runtime_removeShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$$removeShutdownHook)), - entry( - "thread_setDefaultUncaughtExceptionHandler", - alwaysDenied(RestEntitlementsCheckAction::thread$$setDefaultUncaughtExceptionHandler) + private static final Map checkActions = Stream.of( + Stream.>of( + entry("create_classloader", forPlugins(RestEntitlementsCheckAction::createClassLoader)), + entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)), + entry("processBuilder_startPipeline", 
deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline)), + entry("set_https_connection_properties", forPlugins(RestEntitlementsCheckAction::setHttpsConnectionProperties)), + entry("set_default_ssl_socket_factory", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLSocketFactory)), + entry("set_default_hostname_verifier", alwaysDenied(RestEntitlementsCheckAction::setDefaultHostnameVerifier)), + entry("set_default_ssl_context", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLContext)), + entry( + "thread_setDefaultUncaughtExceptionHandler", + alwaysDenied(RestEntitlementsCheckAction::thread$$setDefaultUncaughtExceptionHandler) + ), + entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), + + entry("locale_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultLocale)), + entry("locale_setDefaultForCategory", alwaysDenied(WritePropertiesCheckActions::setDefaultLocaleForCategory)), + entry("timeZone_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultTimeZone)), + + entry("system_setProperty", forPlugins(WritePropertiesCheckActions::setSystemProperty)), + entry("system_clearProperty", forPlugins(WritePropertiesCheckActions::clearSystemProperty)), + entry("system_setSystemProperties", alwaysDenied(WritePropertiesCheckActions::setSystemProperties)), + + // This group is a bit nasty: if entitlements don't prevent these, then networking is + // irreparably borked for the remainder of the test run. + entry( + "datagramSocket_setDatagramSocketImplFactory", + alwaysDenied(RestEntitlementsCheckAction::datagramSocket$$setDatagramSocketImplFactory) + ), + entry("httpURLConnection_setFollowRedirects", alwaysDenied(RestEntitlementsCheckAction::httpURLConnection$$setFollowRedirects)), + entry("serverSocket_setSocketFactory", alwaysDenied(RestEntitlementsCheckAction::serverSocket$$setSocketFactory)), + entry("socket_setSocketImplFactory", alwaysDenied(RestEntitlementsCheckAction::socket$$setSocketImplFactory)), + entry("url_setURLStreamHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::url$$setURLStreamHandlerFactory)), + entry("urlConnection_setFileNameMap", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setFileNameMap)), + entry( + "urlConnection_setContentHandlerFactory", + alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setContentHandlerFactory) + ), + + entry("proxySelector_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultProxySelector)), + entry("responseCache_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultResponseCache)), + entry( + "createInetAddressResolverProvider", + new CheckAction(VersionSpecificNetworkChecks::createInetAddressResolverProvider, true, 18) + ), + entry("createURLStreamHandlerProvider", alwaysDenied(RestEntitlementsCheckAction::createURLStreamHandlerProvider)), + entry("createURLWithURLStreamHandler", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler)), + entry("createURLWithURLStreamHandler2", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler2)), + entry("datagram_socket_bind", forPlugins(RestEntitlementsCheckAction::bindDatagramSocket)), + entry("datagram_socket_connect", forPlugins(RestEntitlementsCheckAction::connectDatagramSocket)), + entry("datagram_socket_send", forPlugins(RestEntitlementsCheckAction::sendDatagramSocket)), + entry("datagram_socket_receive", forPlugins(RestEntitlementsCheckAction::receiveDatagramSocket)), + entry("datagram_socket_join_group", 
forPlugins(RestEntitlementsCheckAction::joinGroupDatagramSocket)), + entry("datagram_socket_leave_group", forPlugins(RestEntitlementsCheckAction::leaveGroupDatagramSocket)), + + entry("create_socket_with_proxy", forPlugins(NetworkAccessCheckActions::createSocketWithProxy)), + entry("socket_bind", forPlugins(NetworkAccessCheckActions::socketBind)), + entry("socket_connect", forPlugins(NetworkAccessCheckActions::socketConnect)), + entry("server_socket_bind", forPlugins(NetworkAccessCheckActions::serverSocketBind)), + entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)), + + entry("url_open_connection_proxy", forPlugins(NetworkAccessCheckActions::urlOpenConnectionWithProxy)), + entry("http_client_send", forPlugins(VersionSpecificNetworkChecks::httpClientSend)), + entry("http_client_send_async", forPlugins(VersionSpecificNetworkChecks::httpClientSendAsync)), + entry("create_ldap_cert_store", forPlugins(NetworkAccessCheckActions::createLDAPCertStore)), + + entry("server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::serverSocketChannelBind)), + entry("server_socket_channel_bind_backlog", forPlugins(NetworkAccessCheckActions::serverSocketChannelBindWithBacklog)), + entry("server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::serverSocketChannelAccept)), + entry("asynchronous_server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBind)), + entry( + "asynchronous_server_socket_channel_bind_backlog", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBindWithBacklog) + ), + entry( + "asynchronous_server_socket_channel_accept", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAccept) + ), + entry( + "asynchronous_server_socket_channel_accept_with_handler", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAcceptWithHandler) + ), + entry("socket_channel_bind", forPlugins(NetworkAccessCheckActions::socketChannelBind)), + entry("socket_channel_connect", forPlugins(NetworkAccessCheckActions::socketChannelConnect)), + entry("asynchronous_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelBind)), + entry("asynchronous_socket_channel_connect", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnect)), + entry( + "asynchronous_socket_channel_connect_with_completion", + forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnectWithCompletion) + ), + entry("datagram_channel_bind", forPlugins(NetworkAccessCheckActions::datagramChannelBind)), + entry("datagram_channel_connect", forPlugins(NetworkAccessCheckActions::datagramChannelConnect)), + entry("datagram_channel_send", forPlugins(NetworkAccessCheckActions::datagramChannelSend)), + entry("datagram_channel_receive", forPlugins(NetworkAccessCheckActions::datagramChannelReceive)), + + entry("runtime_load", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoad)), + entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), + entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), + entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)) + + // MAINTENANCE NOTE: Please don't add any more entries to this map. + // Put new tests into their own "Actions" class using the @EntitlementTest annotation. 
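Per the maintenance note above, new checks are no longer added to this map by hand; they are discovered reflectively from dedicated "Actions" classes (see getTestEntries below and the new SpiActions/SystemActions files later in this patch). A minimal sketch of what such a class looks like — the class name and method body are hypothetical, while the annotation and ExpectedAccess usage mirror the classes in this patch:

```java
package org.elasticsearch.entitlement.qa.test;

import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS;

// Hypothetical example of the new test style: one static, zero-argument method
// per checked operation, annotated with @EntitlementTest. getTestEntries(...)
// registers each such method as a CheckAction keyed by the method name.
class ExampleCheckActions {
    @EntitlementTest(expectedAccess = PLUGINS)
    static void exampleEntitledOperation() throws Exception {
        // exercise exactly one entitlement-checked JDK call here
    }

    private ExampleCheckActions() {}
}
```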
), - entry("localeServiceProvider", alwaysDenied(RestEntitlementsCheckAction::localeServiceProvider$)), - entry("breakIteratorProvider", alwaysDenied(RestEntitlementsCheckAction::breakIteratorProvider$)), - entry("collatorProvider", alwaysDenied(RestEntitlementsCheckAction::collatorProvider$)), - entry("dateFormatProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatProvider$)), - entry("dateFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatSymbolsProvider$)), - entry("decimalFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::decimalFormatSymbolsProvider$)), - entry("numberFormatProvider", alwaysDenied(RestEntitlementsCheckAction::numberFormatProvider$)), - entry("calendarDataProvider", alwaysDenied(RestEntitlementsCheckAction::calendarDataProvider$)), - entry("calendarNameProvider", alwaysDenied(RestEntitlementsCheckAction::calendarNameProvider$)), - entry("currencyNameProvider", alwaysDenied(RestEntitlementsCheckAction::currencyNameProvider$)), - entry("localeNameProvider", alwaysDenied(RestEntitlementsCheckAction::localeNameProvider$)), - entry("timeZoneNameProvider", alwaysDenied(RestEntitlementsCheckAction::timeZoneNameProvider$)), - entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), - - entry("locale_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultLocale)), - entry("locale_setDefaultForCategory", alwaysDenied(WritePropertiesCheckActions::setDefaultLocaleForCategory)), - entry("timeZone_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultTimeZone)), - - entry("system_setProperty", forPlugins(WritePropertiesCheckActions::setSystemProperty)), - entry("system_clearProperty", forPlugins(WritePropertiesCheckActions::clearSystemProperty)), - entry("system_setSystemProperties", alwaysDenied(WritePropertiesCheckActions::setSystemProperties)), - - // This group is a bit nasty: if entitlements don't prevent these, then networking is - // irreparably borked for the remainder of the test run. 
- entry( - "datagramSocket_setDatagramSocketImplFactory", - alwaysDenied(RestEntitlementsCheckAction::datagramSocket$$setDatagramSocketImplFactory) - ), - entry("httpURLConnection_setFollowRedirects", alwaysDenied(RestEntitlementsCheckAction::httpURLConnection$$setFollowRedirects)), - entry("serverSocket_setSocketFactory", alwaysDenied(RestEntitlementsCheckAction::serverSocket$$setSocketFactory)), - entry("socket_setSocketImplFactory", alwaysDenied(RestEntitlementsCheckAction::socket$$setSocketImplFactory)), - entry("url_setURLStreamHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::url$$setURLStreamHandlerFactory)), - entry("urlConnection_setFileNameMap", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setFileNameMap)), - entry("urlConnection_setContentHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setContentHandlerFactory)), - - entry("proxySelector_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultProxySelector)), - entry("responseCache_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultResponseCache)), - entry( - "createInetAddressResolverProvider", - new CheckAction(VersionSpecificNetworkChecks::createInetAddressResolverProvider, true, 18) - ), - entry("createURLStreamHandlerProvider", alwaysDenied(RestEntitlementsCheckAction::createURLStreamHandlerProvider)), - entry("createURLWithURLStreamHandler", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler)), - entry("createURLWithURLStreamHandler2", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler2)), - entry("datagram_socket_bind", forPlugins(RestEntitlementsCheckAction::bindDatagramSocket)), - entry("datagram_socket_connect", forPlugins(RestEntitlementsCheckAction::connectDatagramSocket)), - entry("datagram_socket_send", forPlugins(RestEntitlementsCheckAction::sendDatagramSocket)), - entry("datagram_socket_receive", forPlugins(RestEntitlementsCheckAction::receiveDatagramSocket)), - entry("datagram_socket_join_group", forPlugins(RestEntitlementsCheckAction::joinGroupDatagramSocket)), - entry("datagram_socket_leave_group", forPlugins(RestEntitlementsCheckAction::leaveGroupDatagramSocket)), - - entry("create_socket_with_proxy", forPlugins(NetworkAccessCheckActions::createSocketWithProxy)), - entry("socket_bind", forPlugins(NetworkAccessCheckActions::socketBind)), - entry("socket_connect", forPlugins(NetworkAccessCheckActions::socketConnect)), - entry("server_socket_bind", forPlugins(NetworkAccessCheckActions::serverSocketBind)), - entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)), - - entry("url_open_connection_proxy", forPlugins(NetworkAccessCheckActions::urlOpenConnectionWithProxy)), - entry("http_client_send", forPlugins(VersionSpecificNetworkChecks::httpClientSend)), - entry("http_client_send_async", forPlugins(VersionSpecificNetworkChecks::httpClientSendAsync)), - entry("create_ldap_cert_store", forPlugins(NetworkAccessCheckActions::createLDAPCertStore)), - - entry("server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::serverSocketChannelBind)), - entry("server_socket_channel_bind_backlog", forPlugins(NetworkAccessCheckActions::serverSocketChannelBindWithBacklog)), - entry("server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::serverSocketChannelAccept)), - entry("asynchronous_server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBind)), - entry( - "asynchronous_server_socket_channel_bind_backlog", - 
forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBindWithBacklog) - ), - entry("asynchronous_server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAccept)), - entry( - "asynchronous_server_socket_channel_accept_with_handler", - forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAcceptWithHandler) - ), - entry("socket_channel_bind", forPlugins(NetworkAccessCheckActions::socketChannelBind)), - entry("socket_channel_connect", forPlugins(NetworkAccessCheckActions::socketChannelConnect)), - entry("asynchronous_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelBind)), - entry("asynchronous_socket_channel_connect", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnect)), - entry( - "asynchronous_socket_channel_connect_with_completion", - forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnectWithCompletion) - ), - entry("datagram_channel_bind", forPlugins(NetworkAccessCheckActions::datagramChannelBind)), - entry("datagram_channel_connect", forPlugins(NetworkAccessCheckActions::datagramChannelConnect)), - entry("datagram_channel_send", forPlugins(NetworkAccessCheckActions::datagramChannelSend)), - entry("datagram_channel_receive", forPlugins(NetworkAccessCheckActions::datagramChannelReceive)), - - entry("runtime_load", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoad)), - entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), - entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), - entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)), - entry("enable_native_access", new CheckAction(VersionSpecificNativeChecks::enableNativeAccess, false, 22)), - entry("address_target_layout", new CheckAction(VersionSpecificNativeChecks::addressLayoutWithTargetLayout, false, 22)), - entry("donwncall_handle", new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandle, false, 22)), - entry("donwncall_handle_with_address", new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandleWithAddress, false, 22)), - entry("upcall_stub", new CheckAction(VersionSpecificNativeChecks::linkerUpcallStub, false, 22)), - entry("reinterpret", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpret, false, 22)), - entry("reinterpret_cleanup", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithCleanup, false, 22)), - entry( - "reinterpret_size_cleanup", - new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithSizeAndCleanup, false, 22) - ), - entry("symbol_lookup_name", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithName, false, 22)), - entry("symbol_lookup_path", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithPath, false, 22)), - entry("create_scanner", forPlugins(FileCheckActions::createScannerFile)), - entry("create_scanner_with_charset", forPlugins(FileCheckActions::createScannerFileWithCharset)), - entry("create_scanner_with_charset_name", forPlugins(FileCheckActions::createScannerFileWithCharsetName)), - entry("create_file_output_stream_string", forPlugins(FileCheckActions::createFileOutputStreamString)), - entry("create_file_output_stream_string_with_append", forPlugins(FileCheckActions::createFileOutputStreamStringWithAppend)), - entry("create_file_output_stream_file", forPlugins(FileCheckActions::createFileOutputStreamFile)), - 
entry("create_file_output_stream_file_with_append", forPlugins(FileCheckActions::createFileOutputStreamFileWithAppend)), - entry("files_probe_content_type", forPlugins(FileCheckActions::filesProbeContentType)), - entry("files_set_owner", forPlugins(FileCheckActions::filesSetOwner)) + getTestEntries(FileCheckActions.class), + getTestEntries(FileStoreActions.class), + getTestEntries(ManageThreadsActions.class), + getTestEntries(NativeActions.class), + getTestEntries(NioFileSystemActions.class), + getTestEntries(PathActions.class), + getTestEntries(SpiActions.class), + getTestEntries(SystemActions.class) ) + .flatMap(Function.identity()) .filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion()) - .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.toUnmodifiableMap(Entry::getKey, Entry::getValue)); + + @SuppressForbidden(reason = "Need package private methods so we don't have to make them all public") + private static Method[] getDeclaredMethods(Class clazz) { + return clazz.getDeclaredMethods(); + } + + private static Stream> getTestEntries(Class actionsClass) { + List> entries = new ArrayList<>(); + for (var method : getDeclaredMethods(actionsClass)) { + var testAnnotation = method.getAnnotation(EntitlementTest.class); + if (testAnnotation == null) { + continue; + } + if (Modifier.isStatic(method.getModifiers()) == false) { + throw new AssertionError("Entitlement test method [" + method + "] must be static"); + } + if (method.getParameterTypes().length != 0) { + throw new AssertionError("Entitlement test method [" + method + "] must not have parameters"); + } + + CheckedRunnable runnable = () -> { + try { + method.invoke(null); + } catch (IllegalAccessException e) { + throw new AssertionError(e); + } catch (InvocationTargetException e) { + if (e.getCause() instanceof Exception exc) { + throw exc; + } else { + throw new AssertionError(e); + } + } + }; + boolean deniedToPlugins = testAnnotation.expectedAccess() != PLUGINS; + Integer fromJavaVersion = testAnnotation.fromJavaVersion() == -1 ? 
null : testAnnotation.fromJavaVersion(); + entries.add(entry(method.getName(), new CheckAction(runnable, deniedToPlugins, fromJavaVersion))); + } + return entries.stream(); + } private static void createURLStreamHandlerProvider() { var x = new URLStreamHandlerProvider() { @@ -275,21 +286,6 @@ private static void setDefaultSSLSocketFactory() { HttpsURLConnection.setDefaultSSLSocketFactory(new DummyImplementations.DummySSLSocketFactory()); } - @SuppressForbidden(reason = "Specifically testing Runtime.exit") - private static void runtimeExit() { - Runtime.getRuntime().exit(123); - } - - @SuppressForbidden(reason = "Specifically testing Runtime.halt") - private static void runtimeHalt() { - Runtime.getRuntime().halt(123); - } - - @SuppressForbidden(reason = "Specifically testing System.exit") - private static void systemExit() { - System.exit(123); - } - private static void createClassLoader() throws IOException { try (var classLoader = new URLClassLoader("test", new URL[0], RestEntitlementsCheckAction.class.getClassLoader())) { logger.info("Created URLClassLoader [{}]", classLoader.getName()); @@ -308,80 +304,10 @@ private static void setHttpsConnectionProperties() { new DummyImplementations.DummyHttpsURLConnection().setSSLSocketFactory(new DummyImplementations.DummySSLSocketFactory()); } - private static void system$$setIn() { - System.setIn(System.in); - } - - @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") - private static void system$$setOut() { - System.setOut(System.out); - } - - @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") - private static void system$$setErr() { - System.setErr(System.err); - } - - private static void runtime$addShutdownHook() { - Runtime.getRuntime().addShutdownHook(NO_OP_SHUTDOWN_HOOK); - } - - private static void runtime$$removeShutdownHook() { - Runtime.getRuntime().removeShutdownHook(NO_OP_SHUTDOWN_HOOK); - } - private static void thread$$setDefaultUncaughtExceptionHandler() { Thread.setDefaultUncaughtExceptionHandler(Thread.getDefaultUncaughtExceptionHandler()); } - private static void localeServiceProvider$() { - new DummyLocaleServiceProvider(); - } - - private static void breakIteratorProvider$() { - new DummyBreakIteratorProvider(); - } - - private static void collatorProvider$() { - new DummyCollatorProvider(); - } - - private static void dateFormatProvider$() { - new DummyDateFormatProvider(); - } - - private static void dateFormatSymbolsProvider$() { - new DummyDateFormatSymbolsProvider(); - } - - private static void decimalFormatSymbolsProvider$() { - new DummyDecimalFormatSymbolsProvider(); - } - - private static void numberFormatProvider$() { - new DummyNumberFormatProvider(); - } - - private static void calendarDataProvider$() { - new DummyCalendarDataProvider(); - } - - private static void calendarNameProvider$() { - new DummyCalendarNameProvider(); - } - - private static void currencyNameProvider$() { - new DummyCurrencyNameProvider(); - } - - private static void localeNameProvider$() { - new DummyLocaleNameProvider(); - } - - private static void timeZoneNameProvider$() { - new DummyTimeZoneNameProvider(); - } - private static void logManager$() { new java.util.logging.LogManager() { }; @@ -470,7 +396,7 @@ public static Set getCheckActionsAllowedInPlugins() { return checkActions.entrySet() .stream() .filter(kv -> kv.getValue().isAlwaysDeniedToPlugins() == false) - .map(Map.Entry::getKey) + .map(Entry::getKey) .collect(Collectors.toSet()); } @@ -503,7 
+429,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli return channel -> { logger.info("Calling check action [{}]", actionName); checkAction.action().run(); + logger.debug("Check action [{}] returned", actionName); channel.sendResponse(new RestResponse(RestStatus.OK, Strings.format("Succesfully executed action [%s]", actionName))); }; } + } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java new file mode 100644 index 0000000000000..a335964c6fa81 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import java.io.IOException; +import java.nio.channels.Channel; +import java.nio.channels.spi.SelectorProvider; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; + +class SpiActions { + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createBreakIteratorProvider() { + new DummyImplementations.DummyBreakIteratorProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCollatorProvider() { + new DummyImplementations.DummyCollatorProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDateFormatProvider() { + new DummyImplementations.DummyDateFormatProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDateFormatSymbolsProvider() { + new DummyImplementations.DummyDateFormatSymbolsProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDecimalFormatSymbolsProvider() { + new DummyImplementations.DummyDecimalFormatSymbolsProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createNumberFormatProvider() { + new DummyImplementations.DummyNumberFormatProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCalendarDataProvider() { + new DummyImplementations.DummyCalendarDataProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCalendarNameProvider() { + new DummyImplementations.DummyCalendarNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCurrencyNameProvider() { + new DummyImplementations.DummyCurrencyNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createLocaleNameProvider() { + new DummyImplementations.DummyLocaleNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createTimeZoneNameProvider() { + new DummyImplementations.DummyTimeZoneNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createLocaleServiceProvider() { + new DummyImplementations.DummyLocaleServiceProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + 
static void getInheritedChannel() throws IOException { + Channel channel = null; + try { + channel = SelectorProvider.provider().inheritedChannel(); + } finally { + if (channel != null) { + channel.close(); + } + } + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createSelectorProvider() { + new DummyImplementations.DummySelectorProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createAsynchronousChannelProvider() { + new DummyImplementations.DummyAsynchronousChannelProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCharsetProvider() { + new DummyImplementations.DummyCharsetProvider(); + } + + private SpiActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java new file mode 100644 index 0000000000000..4df1b1dd26d61 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.SuppressForbidden; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; + +class SystemActions { + + @SuppressForbidden(reason = "Specifically testing Runtime.exit") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void runtimeExit() { + Runtime.getRuntime().exit(123); + } + + @SuppressForbidden(reason = "Specifically testing Runtime.halt") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void runtimeHalt() { + Runtime.getRuntime().halt(123); + } + + @SuppressForbidden(reason = "Specifically testing System.exit") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void systemExit() { + System.exit(123); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetIn() { + System.setIn(System.in); + } + + @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetOut() { + System.setOut(System.out); + } + + @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetErr() { + System.setErr(System.err); + } + + private static final Thread NO_OP_SHUTDOWN_HOOK = new Thread(() -> {}, "Shutdown hook for testing"); + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void runtimeAddShutdownHook() { + Runtime.getRuntime().addShutdownHook(NO_OP_SHUTDOWN_HOOK); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void runtimeRemoveShutdownHook() { + Runtime.getRuntime().removeShutdownHook(NO_OP_SHUTDOWN_HOOK); + } + + private SystemActions() {} +} diff --git 
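For context on how classes like SystemActions above are consumed: getTestEntries (earlier in this patch) reflects over each class and registers one CheckAction per annotated method, keyed by the method name. A hedged sketch of the equivalent hand-written registrations that this reflective discovery replaces — illustrative only, not part of the patch:

```java
// Roughly what getTestEntries(SystemActions.class) produces: any method whose
// expectedAccess is not PLUGINS becomes a "denied to plugins" check action,
// and fromJavaVersion() == -1 is translated to a null version bound.
Map<String, CheckAction> derived = Map.ofEntries(
    entry("runtimeExit", deniedToPlugins(SystemActions::runtimeExit)),        // SERVER_ONLY
    entry("systemSetIn", alwaysDenied(SystemActions::systemSetIn)),           // ALWAYS_DENIED
    entry("runtimeAddShutdownHook", alwaysDenied(SystemActions::runtimeAddShutdownHook))
);
```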
a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java deleted file mode 100644 index cb84c9bd9042d..0000000000000 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.entitlement.qa.test; - -class VersionSpecificNativeChecks { - - static void enableNativeAccess() throws Exception {} - - static void addressLayoutWithTargetLayout() {} - - static void linkerDowncallHandle() {} - - static void linkerDowncallHandleWithAddress() {} - - static void linkerUpcallStub() throws NoSuchMethodException {} - - static void memorySegmentReinterpret() {} - - static void memorySegmentReinterpretWithCleanup() {} - - static void memorySegmentReinterpretWithSizeAndCleanup() {} - - static void symbolLookupWithPath() {} - - static void symbolLookupWithName() {} -} diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java index 487f692ef4488..bd88c23fc5b91 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.entitlement.qa.EntitlementsTestRule.PolicyBuilder; import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; @@ -22,23 +23,30 @@ public abstract class AbstractEntitlementsIT extends ESRestTestCase { - static final EntitlementsTestRule.PolicyBuilder ALLOWED_TEST_ENTITLEMENTS = (builder, tempDir) -> { + static final PolicyBuilder ALLOWED_TEST_ENTITLEMENTS = (builder, tempDir) -> { builder.value("create_class_loader"); builder.value("set_https_connection_properties"); builder.value("inbound_network"); builder.value("outbound_network"); builder.value("load_native_libraries"); + builder.value("manage_threads"); builder.value( Map.of( "write_system_properties", Map.of("properties", List.of("es.entitlements.checkSetSystemProperty", "es.entitlements.checkClearSystemProperty")) ) ); - - builder.value(Map.of("file", Map.of("path", tempDir.resolve("read_dir"), "mode", "read"))); - builder.value(Map.of("file", Map.of("path", tempDir.resolve("read_write_dir"), "mode", "read_write"))); - builder.value(Map.of("file", Map.of("path", tempDir.resolve("read_file"), "mode", "read"))); - builder.value(Map.of("file", Map.of("path", tempDir.resolve("read_write_file"), "mode", "read_write"))); + builder.value( + Map.of( + "files", + List.of( + Map.of("path", tempDir.resolve("read_dir"), "mode", "read"), + Map.of("path", 
tempDir.resolve("read_write_dir"), "mode", "read_write"), + Map.of("path", tempDir.resolve("read_file"), "mode", "read"), + Map.of("path", tempDir.resolve("read_write_file"), "mode", "read_write") + ) + ) + ); }; private final String actionName; diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java index 33d5eeca595ab..9dc1028148a31 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java @@ -26,9 +26,27 @@ import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.List; +import java.util.Map; class EntitlementsTestRule implements TestRule { + // entitlements that test methods may use, see EntitledActions + private static final PolicyBuilder ENTITLED_POLICY = (builder, tempDir) -> { + builder.value("manage_threads"); + builder.value( + Map.of( + "files", + List.of( + Map.of("path", tempDir.resolve("read_dir"), "mode", "read"), + Map.of("path", tempDir.resolve("read_write_dir"), "mode", "read_write"), + Map.of("path", tempDir.resolve("read_file"), "mode", "read"), + Map.of("path", tempDir.resolve("read_write_file"), "mode", "read_write") + ) + ) + ); + }; + interface PolicyBuilder { void build(XContentBuilder builder, Path tempDir) throws IOException; } @@ -51,11 +69,13 @@ protected void before() throws Throwable { } }; cluster = ElasticsearchCluster.local() - .module("entitled") + .module("entitled", spec -> buildEntitlements(spec, "org.elasticsearch.entitlement.qa.entitled", ENTITLED_POLICY)) .module("entitlement-test-plugin", spec -> setupEntitlements(spec, modular, policyBuilder)) .systemProperty("es.entitlements.enabled", "true") .systemProperty("es.entitlements.testdir", () -> testDir.getRoot().getAbsolutePath()) .setting("xpack.security.enabled", "false") + // Logs in libs/entitlement/qa/build/test-results/javaRestTest/TEST-org.elasticsearch.entitlement.qa.EntitlementsXXX.xml + // .setting("logger.org.elasticsearch.entitlement", "DEBUG") .build(); ruleChain = RuleChain.outerRule(testDir).around(tempDirSetup).around(cluster); } @@ -65,29 +85,30 @@ public Statement apply(Statement statement, Description description) { return ruleChain.apply(statement, description); } - private void setupEntitlements(PluginInstallSpec spec, boolean modular, PolicyBuilder policyBuilder) { - String moduleName = modular ? 
"org.elasticsearch.entitlement.qa.test" : "ALL-UNNAMED"; - if (policyBuilder != null) { - spec.withEntitlementsOverride(old -> { - try { - try (var builder = YamlXContent.contentBuilder()) { - builder.startObject(); - builder.field(moduleName); - builder.startArray(); + private void buildEntitlements(PluginInstallSpec spec, String moduleName, PolicyBuilder policyBuilder) { + spec.withEntitlementsOverride(old -> { + try (var builder = YamlXContent.contentBuilder()) { + builder.startObject(); + builder.field(moduleName); + builder.startArray(); - policyBuilder.build(builder, testDir.getRoot().toPath()); - builder.endArray(); - builder.endObject(); + policyBuilder.build(builder, testDir.getRoot().toPath()); + builder.endArray(); + builder.endObject(); - String policy = Strings.toString(builder); - System.out.println("Using entitlement policy:\n" + policy); - return Resource.fromString(policy); - } + String policy = Strings.toString(builder); + System.out.println("Using entitlement policy for module " + moduleName + ":\n" + policy); + return Resource.fromString(policy); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); + } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - }); + private void setupEntitlements(PluginInstallSpec spec, boolean modular, PolicyBuilder policyBuilder) { + String moduleName = modular ? "org.elasticsearch.entitlement.qa.test" : "ALL-UNNAMED"; + if (policyBuilder != null) { + buildEntitlements(spec, moduleName, policyBuilder); } if (modular == false) { diff --git a/libs/entitlement/src/main/java/module-info.java b/libs/entitlement/src/main/java/module-info.java index c0959f212558a..5c8441bcecb9c 100644 --- a/libs/entitlement/src/main/java/module-info.java +++ b/libs/entitlement/src/main/java/module-info.java @@ -19,6 +19,7 @@ exports org.elasticsearch.entitlement.runtime.api; exports org.elasticsearch.entitlement.runtime.policy; + exports org.elasticsearch.entitlement.runtime.policy.entitlements to org.elasticsearch.server; exports org.elasticsearch.entitlement.instrumentation; exports org.elasticsearch.entitlement.bootstrap to org.elasticsearch.server; exports org.elasticsearch.entitlement.initialization to java.base; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 496a28a448381..364c81bf2d263 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -14,6 +14,7 @@ import com.sun.tools.attach.AttachNotSupportedException; import com.sun.tools.attach.VirtualMachine; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; @@ -22,6 +23,7 @@ import org.elasticsearch.logging.Logger; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; @@ -31,10 +33,23 @@ public class EntitlementBootstrap { - public record BootstrapArgs(Map pluginPolicies, Function, String> pluginResolver) { + public record BootstrapArgs( + Map pluginPolicies, + Function, String> pluginResolver, + Path[] dataDirs, + Path configDir, + Path tempDir, + Path 
logsDir + ) { public BootstrapArgs { requireNonNull(pluginPolicies); requireNonNull(pluginResolver); + requireNonNull(dataDirs); + if (dataDirs.length == 0) { + throw new IllegalArgumentException("must provide at least one data directory"); + } + requireNonNull(configDir); + requireNonNull(tempDir); } } @@ -50,13 +65,24 @@ public static BootstrapArgs bootstrapArgs() { * * @param pluginPolicies a map holding policies for plugins (and modules), by plugin (or module) name. * @param pluginResolver a functor to map a Java Class to the plugin it belongs to (the plugin name). + * @param dataDirs data directories for Elasticsearch + * @param configDir the config directory for Elasticsearch + * @param tempDir the temp directory for Elasticsearch + * @param logsDir the log directory for Elasticsearch */ - public static void bootstrap(Map pluginPolicies, Function, String> pluginResolver) { + public static void bootstrap( + Map pluginPolicies, + Function, String> pluginResolver, + Path[] dataDirs, + Path configDir, + Path tempDir, + Path logsDir + ) { logger.debug("Loading entitlement agent"); if (EntitlementBootstrap.bootstrapArgs != null) { throw new IllegalStateException("plugin data is already set"); } - EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(pluginPolicies, pluginResolver); + EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(pluginPolicies, pluginResolver, dataDirs, configDir, tempDir, logsDir); exportInitializationToAgent(); loadAgent(findAgentJar()); selfTest(); @@ -123,44 +149,31 @@ private static String findAgentJar() { * @throws IllegalStateException if the entitlements system can't prevent an unauthorized action of our choosing */ private static void selfTest() { - ensureCannotStartProcess(); - ensureCanCreateTempFile(); + ensureCannotStartProcess(ProcessBuilder::start); + // Try again with reflection + ensureCannotStartProcess(EntitlementBootstrap::reflectiveStartProcess); } - private static void ensureCannotStartProcess() { + private static void ensureCannotStartProcess(CheckedConsumer startProcess) { try { // The command doesn't matter; it doesn't even need to exist - new ProcessBuilder("").start(); + startProcess.accept(new ProcessBuilder("")); } catch (NotEntitledException e) { logger.debug("Success: Entitlement protection correctly prevented process creation"); return; - } catch (IOException e) { + } catch (Exception e) { throw new IllegalStateException("Failed entitlement protection self-test", e); } throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted"); } - /** - * Originally {@code Security.selfTest}. - */ - @SuppressForbidden(reason = "accesses jvm default tempdir as a self-test") - private static void ensureCanCreateTempFile() { + private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception { try { - Path p = Files.createTempFile(null, null); - p.toFile().deleteOnExit(); - - // Make an effort to clean up the file immediately; also, deleteOnExit leaves the file if the JVM exits abnormally. 
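The expanded bootstrap(...) signature above now requires callers to supply the key filesystem locations up front. A hedged sketch of a call site — the paths, policies map, and resolver are all illustrative, and the actual caller lives in server bootstrap code outside this patch:

```java
// Hypothetical invocation of the new bootstrap signature; every value here is
// illustrative. dataDirs must be non-empty, per the BootstrapArgs validation.
Map<String, Policy> pluginPolicies = Map.of();            // assumed already built
Path[] dataDirs = { Path.of("/var/lib/elasticsearch") };
EntitlementBootstrap.bootstrap(
    pluginPolicies,
    cls -> "example-plugin",                 // hypothetical Class -> plugin-name resolver
    dataDirs,
    Path.of("/etc/elasticsearch"),           // configDir
    Path.of("/tmp/elasticsearch"),           // tempDir
    Path.of("/var/log/elasticsearch")        // logsDir
);
```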
- try { - Files.delete(p); - } catch (IOException ignored) { - // Can be caused by virus scanner - } - } catch (NotEntitledException e) { - throw new IllegalStateException("Entitlement protection self-test was incorrectly forbidden", e); - } catch (Exception e) { - throw new IllegalStateException("Unable to perform entitlement protection self-test", e); + var start = ProcessBuilder.class.getMethod("start"); + start.invoke(pb); + } catch (InvocationTargetException e) { + throw (Exception) e.getCause(); } - logger.debug("Success: Entitlement protection correctly permitted temp file creation"); } private static final Logger logger = LogManager.getLogger(EntitlementBootstrap.class); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 5b80afa521e57..93b417e732a6f 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.initialization; +import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.internal.provider.ProviderLocator; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.entitlement.bridge.EntitlementChecker; @@ -18,30 +19,52 @@ import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; -import org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement; -import org.elasticsearch.entitlement.runtime.policy.Entitlement; -import org.elasticsearch.entitlement.runtime.policy.ExitVMEntitlement; -import org.elasticsearch.entitlement.runtime.policy.InboundNetworkEntitlement; -import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; -import org.elasticsearch.entitlement.runtime.policy.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.PathLookup; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import org.elasticsearch.entitlement.runtime.policy.Scope; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.FileData; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadStoreAttributesEntitlement; import java.lang.instrument.Instrumentation; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import java.net.URI; +import 
java.nio.channels.spi.SelectorProvider; +import java.nio.file.AccessMode; +import java.nio.file.CopyOption; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; import java.nio.file.FileSystems; +import java.nio.file.LinkOption; import java.nio.file.OpenOption; import java.nio.file.Path; +import java.nio.file.WatchEvent; +import java.nio.file.WatchService; +import java.nio.file.attribute.FileAttribute; import java.nio.file.spi.FileSystemProvider; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ; +import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ_WRITE; /** * Called by the agent during {@code agentmain} to configure the entitlement system, @@ -57,6 +80,11 @@ public class EntitlementInitialization { private static ElasticsearchEntitlementChecker manager; + interface InstrumentationInfoFactory { + InstrumentationService.InstrumentationInfo of(String methodName, Class... parameterTypes) throws ClassNotFoundException, + NoSuchMethodException; + } + // Note: referenced by bridge reflectively public static EntitlementChecker checker() { return manager; @@ -66,24 +94,29 @@ public static EntitlementChecker checker() { public static void initialize(Instrumentation inst) throws Exception { manager = initChecker(); - Map checkMethods = new HashMap<>(INSTRUMENTATION_SERVICE.lookupMethods(EntitlementChecker.class)); + var latestCheckerInterface = getVersionSpecificCheckerClass(EntitlementChecker.class); - var fileSystemProviderClass = FileSystems.getDefault().provider().getClass(); + Map checkMethods = new HashMap<>(INSTRUMENTATION_SERVICE.lookupMethods(latestCheckerInterface)); Stream.of( - INSTRUMENTATION_SERVICE.lookupImplementationMethod( - FileSystemProvider.class, - "newInputStream", - fileSystemProviderClass, - EntitlementChecker.class, - "checkNewInputStream", - Path.class, - OpenOption[].class + fileSystemProviderChecks(), + fileStoreChecks(), + pathChecks(), + Stream.of( + INSTRUMENTATION_SERVICE.lookupImplementationMethod( + SelectorProvider.class, + "inheritedChannel", + SelectorProvider.provider().getClass(), + EntitlementChecker.class, + "checkSelectorProviderInheritedChannel" + ) ) - ).forEach(instrumentation -> checkMethods.put(instrumentation.targetMethod(), instrumentation.checkMethod())); + ) + .flatMap(Function.identity()) + .forEach(instrumentation -> checkMethods.put(instrumentation.targetMethod(), instrumentation.checkMethod())); var classesToTransform = checkMethods.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); - Instrumenter instrumenter = INSTRUMENTATION_SERVICE.newInstrumenter(EntitlementChecker.class, checkMethods); + Instrumenter instrumenter = INSTRUMENTATION_SERVICE.newInstrumenter(latestCheckerInterface, checkMethods); inst.addTransformer(new Transformer(instrumenter, classesToTransform), true); inst.retransformClasses(findClassesToRetransform(inst.getAllLoadedClasses(), classesToTransform)); } @@ -99,7 +132,10 @@ private static Class[] findClassesToRetransform(Class[] loadedClasses, Set } private static PolicyManager createPolicyManager() { - Map pluginPolicies = 
EntitlementBootstrap.bootstrapArgs().pluginPolicies(); + EntitlementBootstrap.BootstrapArgs bootstrapArgs = EntitlementBootstrap.bootstrapArgs(); + Map pluginPolicies = bootstrapArgs.pluginPolicies(); + var pathLookup = new PathLookup(getUserHome(), bootstrapArgs.configDir(), bootstrapArgs.dataDirs(), bootstrapArgs.tempDir()); + Path logsDir = EntitlementBootstrap.bootstrapArgs().logsDir(); // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it var serverPolicy = new Policy( @@ -111,42 +147,238 @@ private static PolicyManager createPolicyManager() { "org.elasticsearch.server", List.of( new ExitVMEntitlement(), + new ReadStoreAttributesEntitlement(), new CreateClassLoaderEntitlement(), new InboundNetworkEntitlement(), new OutboundNetworkEntitlement(), - new LoadNativeLibrariesEntitlement() + new LoadNativeLibrariesEntitlement(), + new ManageThreadsEntitlement(), + new FilesEntitlement( + Stream.concat( + Stream.of( + FileData.ofPath(bootstrapArgs.tempDir(), READ_WRITE), + FileData.ofPath(bootstrapArgs.configDir(), READ), + FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE), + // OS release on Linux + FileData.ofPath(Path.of("/etc/os-release"), READ), + FileData.ofPath(Path.of("/etc/system-release"), READ), + FileData.ofPath(Path.of("/usr/lib/os-release"), READ), + // read max virtual memory areas + FileData.ofPath(Path.of("/proc/sys/vm/max_map_count"), READ), + FileData.ofPath(Path.of("/proc/meminfo"), READ), + // load averages on Linux + FileData.ofPath(Path.of("/proc/loadavg"), READ), + // control group stats on Linux. cgroup v2 stats are in an unpredictable + // location under `/sys/fs/cgroup`, so unfortunately we have to allow + // read access to the entire directory hierarchy. + FileData.ofPath(Path.of("/proc/self/cgroup"), READ), + FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ), + // io stats on Linux + FileData.ofPath(Path.of("/proc/self/mountinfo"), READ), + FileData.ofPath(Path.of("/proc/diskstats"), READ) + ), + Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ)) + ).toList() + ) ) ), new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())), - new Scope("org.apache.lucene.core", List.of(new LoadNativeLibrariesEntitlement())), - new Scope("org.elasticsearch.nativeaccess", List.of(new LoadNativeLibrariesEntitlement())) + new Scope( + "org.apache.lucene.core", + List.of( + new LoadNativeLibrariesEntitlement(), + new ManageThreadsEntitlement(), + new FilesEntitlement( + Stream.concat( + Stream.of(FileData.ofPath(bootstrapArgs.configDir(), READ)), + Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ_WRITE)) + ).toList() + ) + ) + ), + new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())), + new Scope( + "org.elasticsearch.nativeaccess", + List.of( + new LoadNativeLibrariesEntitlement(), + new FilesEntitlement(List.of(FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE))) + ) + ) ) ); // agents run without a module, so this is a special hack for the apm agent // this should be removed once https://github.com/elastic/elasticsearch/issues/109335 is completed - List agentEntitlements = List.of(new CreateClassLoaderEntitlement()); + List agentEntitlements = List.of(new CreateClassLoaderEntitlement(), new ManageThreadsEntitlement()); var resolver = EntitlementBootstrap.bootstrapArgs().pluginResolver(); - return 
new PolicyManager(serverPolicy, agentEntitlements, pluginPolicies, resolver, AGENTS_PACKAGE_NAME, ENTITLEMENTS_MODULE); + return new PolicyManager( + serverPolicy, + agentEntitlements, + pluginPolicies, + resolver, + AGENTS_PACKAGE_NAME, + ENTITLEMENTS_MODULE, + pathLookup + ); } - private static ElasticsearchEntitlementChecker initChecker() { - final PolicyManager policyManager = createPolicyManager(); + private static Path getUserHome() { + String userHome = System.getProperty("user.home"); + if (userHome == null) { + throw new IllegalStateException("user.home system property is required"); + } + return PathUtils.get(userHome); + } + + private static Stream fileSystemProviderChecks() throws ClassNotFoundException, + NoSuchMethodException { + var fileSystemProviderClass = FileSystems.getDefault().provider().getClass(); + + var instrumentation = new InstrumentationInfoFactory() { + @Override + public InstrumentationService.InstrumentationInfo of(String methodName, Class... parameterTypes) + throws ClassNotFoundException, NoSuchMethodException { + return INSTRUMENTATION_SERVICE.lookupImplementationMethod( + FileSystemProvider.class, + methodName, + fileSystemProviderClass, + EntitlementChecker.class, + "check" + Character.toUpperCase(methodName.charAt(0)) + methodName.substring(1), + parameterTypes + ); + } + }; + return Stream.of( + instrumentation.of("newFileSystem", URI.class, Map.class), + instrumentation.of("newFileSystem", Path.class, Map.class), + instrumentation.of("newInputStream", Path.class, OpenOption[].class), + instrumentation.of("newOutputStream", Path.class, OpenOption[].class), + instrumentation.of("newFileChannel", Path.class, Set.class, FileAttribute[].class), + instrumentation.of("newAsynchronousFileChannel", Path.class, Set.class, ExecutorService.class, FileAttribute[].class), + instrumentation.of("newByteChannel", Path.class, Set.class, FileAttribute[].class), + instrumentation.of("newDirectoryStream", Path.class, DirectoryStream.Filter.class), + instrumentation.of("createDirectory", Path.class, FileAttribute[].class), + instrumentation.of("createSymbolicLink", Path.class, Path.class, FileAttribute[].class), + instrumentation.of("createLink", Path.class, Path.class), + instrumentation.of("delete", Path.class), + instrumentation.of("deleteIfExists", Path.class), + instrumentation.of("readSymbolicLink", Path.class), + instrumentation.of("copy", Path.class, Path.class, CopyOption[].class), + instrumentation.of("move", Path.class, Path.class, CopyOption[].class), + instrumentation.of("isSameFile", Path.class, Path.class), + instrumentation.of("isHidden", Path.class), + instrumentation.of("getFileStore", Path.class), + instrumentation.of("checkAccess", Path.class, AccessMode[].class), + instrumentation.of("getFileAttributeView", Path.class, Class.class, LinkOption[].class), + instrumentation.of("readAttributes", Path.class, Class.class, LinkOption[].class), + instrumentation.of("readAttributes", Path.class, String.class, LinkOption[].class), + instrumentation.of("readAttributesIfExists", Path.class, Class.class, LinkOption[].class), + instrumentation.of("setAttribute", Path.class, String.class, Object.class, LinkOption[].class), + instrumentation.of("exists", Path.class, LinkOption[].class) + ); + } + + private static Stream fileStoreChecks() { + var fileStoreClasses = StreamSupport.stream(FileSystems.getDefault().getFileStores().spliterator(), false) + .map(FileStore::getClass) + .distinct(); + return fileStoreClasses.flatMap(fileStoreClass -> { + var instrumentation = new 
InstrumentationInfoFactory() { + @Override + public InstrumentationService.InstrumentationInfo of(String methodName, Class... parameterTypes) + throws ClassNotFoundException, NoSuchMethodException { + return INSTRUMENTATION_SERVICE.lookupImplementationMethod( + FileStore.class, + methodName, + fileStoreClass, + EntitlementChecker.class, + "check" + Character.toUpperCase(methodName.charAt(0)) + methodName.substring(1), + parameterTypes + ); + } + }; + + try { + return Stream.of( + instrumentation.of("getFileStoreAttributeView", Class.class), + instrumentation.of("getAttribute", String.class), + instrumentation.of("getBlockSize"), + instrumentation.of("getTotalSpace"), + instrumentation.of("getUnallocatedSpace"), + instrumentation.of("getUsableSpace"), + instrumentation.of("isReadOnly"), + instrumentation.of("name"), + instrumentation.of("type") + + ); + } catch (NoSuchMethodException | ClassNotFoundException e) { + throw new RuntimeException(e); + } + }); + } + + private static Stream pathChecks() { + var pathClasses = StreamSupport.stream(FileSystems.getDefault().getRootDirectories().spliterator(), false) + .map(Path::getClass) + .distinct(); + return pathClasses.flatMap(pathClass -> { + InstrumentationInfoFactory instrumentation = (String methodName, Class... parameterTypes) -> INSTRUMENTATION_SERVICE + .lookupImplementationMethod( + Path.class, + methodName, + pathClass, + EntitlementChecker.class, + "checkPath" + Character.toUpperCase(methodName.charAt(0)) + methodName.substring(1), + parameterTypes + ); + + try { + return Stream.of( + instrumentation.of("toRealPath", LinkOption[].class), + instrumentation.of("register", WatchService.class, WatchEvent.Kind[].class), + instrumentation.of("register", WatchService.class, WatchEvent.Kind[].class, WatchEvent.Modifier[].class) + ); + } catch (NoSuchMethodException | ClassNotFoundException e) { + throw new RuntimeException(e); + } + }); + } + + /** + * Returns the "most recent" checker class compatible with the current runtime Java version. + * For checkers, we have (optionally) version-specific classes, each with a prefix (e.g. Java23). + * The mapping cannot be automatic, as it depends on the actual presence of these classes in the final Jar (see + * the various mainXX source sets). + */ + private static Class getVersionSpecificCheckerClass(Class baseClass) { + String packageName = baseClass.getPackageName(); + String baseClassName = baseClass.getSimpleName(); int javaVersion = Runtime.version().feature(); + final String classNamePrefix; if (javaVersion >= 23) { + // All Java versions from 23 onwards will be able to use the checks in the Java23EntitlementChecker interface and implementation classNamePrefix = "Java23"; } else { + // For any other Java version, the basic EntitlementChecker interface and implementation contain all the supported checks classNamePrefix = ""; } - final String className = "org.elasticsearch.entitlement.runtime.api." + classNamePrefix + "ElasticsearchEntitlementChecker"; + final String className = packageName + "." 
+ classNamePrefix + baseClassName; Class clazz; try { clazz = Class.forName(className); } catch (ClassNotFoundException e) { - throw new AssertionError("entitlement lib cannot find entitlement impl", e); + throw new AssertionError("entitlement lib cannot find entitlement class " + className, e); } + return clazz; + } + + private static ElasticsearchEntitlementChecker initChecker() { + final PolicyManager policyManager = createPolicyManager(); + + final Class clazz = getVersionSpecificCheckerClass(ElasticsearchEntitlementChecker.class); + Constructor constructor; try { constructor = clazz.getConstructor(PolicyManager.class); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java index 79673418eb321..ece51a8414b70 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java @@ -9,7 +9,6 @@ package org.elasticsearch.entitlement.instrumentation; -import java.io.IOException; import java.util.Map; /** @@ -23,7 +22,7 @@ record InstrumentationInfo(MethodKey targetMethod, CheckMethod checkMethod) {} Instrumenter newInstrumenter(Class clazz, Map methods); - Map lookupMethods(Class clazz) throws IOException; + Map lookupMethods(Class clazz) throws ClassNotFoundException; InstrumentationInfo lookupImplementationMethod( Class targetSuperclass, diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index d1318845c22f4..aba0ab57feb22 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -14,6 +14,9 @@ import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import java.io.File; +import java.io.FileFilter; +import java.io.FilenameFilter; +import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; @@ -40,6 +43,7 @@ import java.net.Socket; import java.net.SocketAddress; import java.net.SocketImplFactory; +import java.net.URI; import java.net.URL; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; @@ -53,16 +57,31 @@ import java.nio.channels.DatagramChannel; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.nio.channels.spi.SelectorProvider; import java.nio.charset.Charset; +import java.nio.file.AccessMode; +import java.nio.file.CopyOption; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.Files; +import java.nio.file.LinkOption; import java.nio.file.OpenOption; import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.nio.file.WatchEvent; +import java.nio.file.WatchService; +import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.UserPrincipal; import java.nio.file.spi.FileSystemProvider; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.TimeZone; +import 
java.util.concurrent.ExecutorService; +import java.util.concurrent.ForkJoinPool; import java.util.function.Consumer; import javax.net.ssl.HostnameVerifier; @@ -84,6 +103,11 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { this.policyManager = policyManager; } + /// ///////////////// + // + // Exit the JVM process + // + @Override public void check$java_lang_Runtime$exit(Class callerClass, Runtime runtime, int status) { policyManager.checkExitVM(callerClass); @@ -99,6 +123,11 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkExitVM(callerClass); } + /// ///////////////// + // + // create class loaders + // + @Override public void check$java_lang_ClassLoader$(Class callerClass) { policyManager.checkCreateClassLoader(callerClass); @@ -115,51 +144,85 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { } @Override - public void check$java_security_SecureClassLoader$(Class callerClass) { + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent) { + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent) { + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls) { + public void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent) { + public void check$java_net_URLClassLoader$( + Class callerClass, + String name, + URL[] urls, + ClassLoader parent, + URLStreamHandlerFactory factory + ) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) { + public void check$java_security_SecureClassLoader$(Class callerClass) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent) { + public void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$( + public void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent) { + policyManager.checkCreateClassLoader(callerClass); + } + + /// ///////////////// + // + // "setFactory" methods + // + + @Override + public void check$javax_net_ssl_HttpsURLConnection$setSSLSocketFactory( Class callerClass, - String name, - URL[] urls, - ClassLoader parent, - URLStreamHandlerFactory factory + HttpsURLConnection connection, + SSLSocketFactory sf ) { - policyManager.checkCreateClassLoader(callerClass); + policyManager.checkSetHttpsConnectionProperties(callerClass); } + @Override + public void check$javax_net_ssl_HttpsURLConnection$$setDefaultSSLSocketFactory(Class callerClass, 
SSLSocketFactory sf) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void check$javax_net_ssl_HttpsURLConnection$$setDefaultHostnameVerifier(Class callerClass, HostnameVerifier hv) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void check$javax_net_ssl_SSLContext$$setDefault(Class callerClass, SSLContext context) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + /// ///////////////// + // + // Process creation + // + @Override public void check$java_lang_ProcessBuilder$start(Class callerClass, ProcessBuilder processBuilder) { policyManager.checkStartProcess(callerClass); @@ -170,6 +233,31 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkStartProcess(callerClass); } + /// ///////////////// + // + // System Properties and similar + // + + @Override + public void check$java_lang_System$$clearProperty(Class callerClass, String key) { + policyManager.checkWriteProperty(callerClass, key); + } + + @Override + public void check$java_lang_System$$setProperties(Class callerClass, Properties props) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void check$java_lang_System$$setProperty(Class callerClass, String key, String value) { + policyManager.checkWriteProperty(callerClass, key); + } + + /// ///////////////// + // + // JVM-wide state changes + // + @Override public void check$java_lang_System$$setIn(Class callerClass, InputStream in) { policyManager.checkChangeJVMGlobalState(callerClass); @@ -221,27 +309,17 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { } @Override - public void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class callerClass) { + public void check$java_nio_charset_spi_CharsetProvider$(Class callerClass) { policyManager.checkChangeJVMGlobalState(callerClass); } @Override - public void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class callerClass, Thread.UncaughtExceptionHandler ueh) { + public void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class callerClass) { policyManager.checkChangeJVMGlobalState(callerClass); } @Override - public void check$java_lang_System$$clearProperty(Class callerClass, String key) { - policyManager.checkWriteProperty(callerClass, key); - } - - @Override - public void check$java_lang_System$$setProperty(Class callerClass, String key, String value) { - policyManager.checkWriteProperty(callerClass, key); - } - - @Override - public void check$java_lang_System$$setProperties(Class callerClass, Properties props) { + public void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class callerClass, Thread.UncaughtExceptionHandler ueh) { policyManager.checkChangeJVMGlobalState(callerClass); } @@ -360,29 +438,10 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkChangeJVMGlobalState(callerClass); } - @Override - public void check$javax_net_ssl_HttpsURLConnection$setSSLSocketFactory( - Class callerClass, - HttpsURLConnection connection, - SSLSocketFactory sf - ) { - policyManager.checkSetHttpsConnectionProperties(callerClass); - } - - @Override - public void check$javax_net_ssl_HttpsURLConnection$$setDefaultSSLSocketFactory(Class callerClass, SSLSocketFactory sf) { - policyManager.checkChangeJVMGlobalState(callerClass); - } - - @Override - public void check$javax_net_ssl_HttpsURLConnection$$setDefaultHostnameVerifier(Class callerClass, HostnameVerifier 
hv) { - policyManager.checkChangeJVMGlobalState(callerClass); - } - - @Override - public void check$javax_net_ssl_SSLContext$$setDefault(Class callerClass, SSLContext context) { - policyManager.checkChangeJVMGlobalState(callerClass); - } + /// ///////////////// + // + // Network access + // @Override public void check$java_net_ProxySelector$$setDefault(Class callerClass, ProxySelector ps) { @@ -766,9 +825,24 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkInboundNetworkAccess(callerClass); } + @Override + public void check$java_nio_channels_spi_SelectorProvider$(Class callerClass) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void check$java_nio_channels_spi_AsynchronousChannelProvider$(Class callerClass) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void checkSelectorProviderInheritedChannel(Class callerClass, SelectorProvider that) { + policyManager.checkChangeNetworkHandling(callerClass); + } + @Override public void check$java_lang_Runtime$load(Class callerClass, Runtime that, String filename) { - // TODO: check filesystem entitlement READ + policyManager.checkFileRead(callerClass, Path.of(filename)); policyManager.checkLoadingNativeLibraries(callerClass); } @@ -779,7 +853,7 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { @Override public void check$java_lang_System$$load(Class callerClass, String filename) { - // TODO: check filesystem entitlement READ + policyManager.checkFileRead(callerClass, Path.of(filename)); policyManager.checkLoadingNativeLibraries(callerClass); } @@ -863,7 +937,7 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { @Override public void check$java_lang_foreign_SymbolLookup$$libraryLookup(Class callerClass, Path path, Arena arena) { - // TODO: check filesystem entitlement READ + policyManager.checkFileRead(callerClass, path); policyManager.checkLoadingNativeLibraries(callerClass); } @@ -873,22 +947,160 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { ModuleLayer.Controller that, Module target ) { - policyManager.checkLoadingNativeLibraries(callerClass); + policyManager.checkChangeJVMGlobalState(callerClass); } + /// ///////////////// + // + // File access + // + + // old io (ie File) + @Override - public void check$java_util_Scanner$(Class callerClass, File source) { - policyManager.checkFileRead(callerClass, source); + public void check$java_io_File$canExecute(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); } @Override - public void check$java_util_Scanner$(Class callerClass, File source, String charsetName) { - policyManager.checkFileRead(callerClass, source); + public void check$java_io_File$canRead(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); } @Override - public void check$java_util_Scanner$(Class callerClass, File source, Charset charset) { - policyManager.checkFileRead(callerClass, source); + public void check$java_io_File$canWrite(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$createNewFile(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$$createTempFile(Class callerClass, String prefix, String suffix, File directory) { + policyManager.checkFileWrite(callerClass, directory); + } + + @Override + public void 
check$java_io_File$delete(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$deleteOnExit(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$exists(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$isDirectory(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$isFile(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$isHidden(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$lastModified(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$length(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$list(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$list(Class callerClass, File file, FilenameFilter filter) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$listFiles(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$listFiles(Class callerClass, File file, FileFilter filter) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$listFiles(Class callerClass, File file, FilenameFilter filter) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_File$mkdir(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$mkdirs(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$renameTo(Class callerClass, File file, File dest) { + policyManager.checkFileRead(callerClass, file); + policyManager.checkFileWrite(callerClass, dest); + } + + @Override + public void check$java_io_File$setExecutable(Class callerClass, File file, boolean executable) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setExecutable(Class callerClass, File file, boolean executable, boolean ownerOnly) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setLastModified(Class callerClass, File file, long time) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setReadable(Class callerClass, File file, boolean readable) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setReadable(Class callerClass, File file, boolean readable, boolean ownerOnly) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setReadOnly(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setWritable(Class callerClass, File file, boolean writable) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override 
+ public void check$java_io_File$setWritable(Class callerClass, File file, boolean writable, boolean ownerOnly) { + policyManager.checkFileWrite(callerClass, file); } @Override @@ -911,6 +1123,28 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkFileWrite(callerClass, file); } + @Override + public void check$java_util_Scanner$(Class callerClass, File source) { + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void check$java_util_Scanner$(Class callerClass, File source, String charsetName) { + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void check$java_util_Scanner$(Class callerClass, File source, Charset charset) { + policyManager.checkFileRead(callerClass, source); + } + + // nio + + @Override + public void check$java_nio_file_Files$$getOwner(Class callerClass, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + @Override public void check$java_nio_file_Files$$probeContentType(Class callerClass, Path path) { policyManager.checkFileRead(callerClass, path); @@ -921,8 +1155,328 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkFileWrite(callerClass, path); } + // file system providers + + @Override + public void check$java_nio_file_spi_FileSystemProvider$(Class callerClass) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void checkNewFileSystem(Class callerClass, FileSystemProvider that, URI uri, Map env) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void checkNewFileSystem(Class callerClass, FileSystemProvider that, Path path, Map env) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + @Override public void checkNewInputStream(Class callerClass, FileSystemProvider that, Path path, OpenOption... options) { - // TODO: policyManger.checkFileSystemRead(path); + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkNewOutputStream(Class callerClass, FileSystemProvider that, Path path, OpenOption... options) { + policyManager.checkFileWrite(callerClass, path); + } + + private static boolean isOpenForWrite(Set options) { + return options.contains(StandardOpenOption.WRITE) + || options.contains(StandardOpenOption.APPEND) + || options.contains(StandardOpenOption.CREATE) + || options.contains(StandardOpenOption.CREATE_NEW) + || options.contains(StandardOpenOption.DELETE_ON_CLOSE); + } + + @Override + public void checkNewFileChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + FileAttribute... attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void checkNewAsynchronousFileChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + ExecutorService executor, + FileAttribute... attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void checkNewByteChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + FileAttribute... 
attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void checkNewDirectoryStream( + Class callerClass, + FileSystemProvider that, + Path dir, + DirectoryStream.Filter filter + ) { + policyManager.checkFileRead(callerClass, dir); + } + + @Override + public void checkCreateDirectory(Class callerClass, FileSystemProvider that, Path dir, FileAttribute... attrs) { + policyManager.checkFileWrite(callerClass, dir); + } + + @Override + public void checkCreateSymbolicLink(Class callerClass, FileSystemProvider that, Path link, Path target, FileAttribute... attrs) { + policyManager.checkFileWrite(callerClass, link); + policyManager.checkFileRead(callerClass, target); + } + + @Override + public void checkCreateLink(Class callerClass, FileSystemProvider that, Path link, Path existing) { + policyManager.checkFileWrite(callerClass, link); + policyManager.checkFileRead(callerClass, existing); + } + + @Override + public void checkDelete(Class callerClass, FileSystemProvider that, Path path) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void checkDeleteIfExists(Class callerClass, FileSystemProvider that, Path path) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void checkReadSymbolicLink(Class callerClass, FileSystemProvider that, Path link) { + policyManager.checkFileRead(callerClass, link); + } + + @Override + public void checkCopy(Class callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options) { + policyManager.checkFileWrite(callerClass, target); + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void checkMove(Class callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options) { + policyManager.checkFileWrite(callerClass, target); + policyManager.checkFileWrite(callerClass, source); + } + + @Override + public void checkIsSameFile(Class callerClass, FileSystemProvider that, Path path, Path path2) { + policyManager.checkFileRead(callerClass, path); + policyManager.checkFileRead(callerClass, path2); + } + + @Override + public void checkIsHidden(Class callerClass, FileSystemProvider that, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkGetFileStore(Class callerClass, FileSystemProvider that, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkCheckAccess(Class callerClass, FileSystemProvider that, Path path, AccessMode... modes) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkGetFileAttributeView(Class callerClass, FileSystemProvider that, Path path, Class type, LinkOption... options) { + policyManager.checkGetFileAttributeView(callerClass); + } + + @Override + public void checkReadAttributes(Class callerClass, FileSystemProvider that, Path path, Class type, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkReadAttributes(Class callerClass, FileSystemProvider that, Path path, String attributes, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkReadAttributesIfExists( + Class callerClass, + FileSystemProvider that, + Path path, + Class type, + LinkOption... 
options + ) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkSetAttribute( + Class callerClass, + FileSystemProvider that, + Path path, + String attribute, + Object value, + LinkOption... options + ) { + policyManager.checkFileWrite(callerClass, path); + + } + + @Override + public void checkExists(Class callerClass, FileSystemProvider that, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + // Thread management + + @Override + public void check$java_lang_Thread$start(Class callerClass, Thread thread) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_Thread$setDaemon(Class callerClass, Thread thread, boolean on) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_ThreadGroup$setDaemon(Class callerClass, ThreadGroup threadGroup, boolean daemon) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_util_concurrent_ForkJoinPool$setParallelism(Class callerClass, ForkJoinPool forkJoinPool, int size) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_Thread$setName(Class callerClass, Thread thread, String name) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_Thread$setPriority(Class callerClass, Thread thread, int newPriority) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_Thread$setUncaughtExceptionHandler( + Class callerClass, + Thread thread, + Thread.UncaughtExceptionHandler ueh + ) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_ThreadGroup$setMaxPriority(Class callerClass, ThreadGroup threadGroup, int pri) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void checkGetFileStoreAttributeView(Class callerClass, FileStore that, Class type) { + policyManager.checkWriteStoreAttributes(callerClass); + } + + @Override + public void checkGetAttribute(Class callerClass, FileStore that, String attribute) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkGetBlockSize(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkGetTotalSpace(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkGetUnallocatedSpace(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkGetUsableSpace(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkIsReadOnly(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkName(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkType(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkPathToRealPath(Class callerClass, Path that, LinkOption... 
options) { + boolean followLinks = true; + for (LinkOption option : options) { + if (option == LinkOption.NOFOLLOW_LINKS) { + followLinks = false; + } + } + if (followLinks) { + try { + policyManager.checkFileRead(callerClass, Files.readSymbolicLink(that)); + } catch (IOException | UnsupportedOperationException e) { + // that is not a link, or unrelated IOException or unsupported + } + } + policyManager.checkFileRead(callerClass, that); + } + + @Override + public void checkPathRegister(Class callerClass, Path that, WatchService watcher, WatchEvent.Kind... events) { + policyManager.checkFileRead(callerClass, that); + } + + @Override + public void checkPathRegister( + Class callerClass, + Path that, + WatchService watcher, + WatchEvent.Kind[] events, + WatchEvent.Modifier... modifiers + ) { + policyManager.checkFileRead(callerClass, that); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java index 768babdb840f5..fef7b5d11aff0 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java @@ -9,6 +9,8 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -20,7 +22,7 @@ * using this annotation is considered parseable as part of a policy file * for entitlements. */ -@Target(ElementType.CONSTRUCTOR) +@Target({ ElementType.CONSTRUCTOR, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) public @interface ExternalEntitlement { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java index 55813df28b6f8..46ee46c7b30c5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -9,32 +9,42 @@ package org.elasticsearch.entitlement.runtime.policy; -import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; -import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Objects; -final class FileAccessTree { - static final FileAccessTree EMPTY = new FileAccessTree(List.of()); +import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; + +public final class FileAccessTree { + + private static final String FILE_SEPARATOR = getDefaultFileSystem().getSeparator(); private final String[] readPaths; private final String[] writePaths; - FileAccessTree(List fileEntitlements) { + private FileAccessTree(FilesEntitlement filesEntitlement, PathLookup pathLookup) { List readPaths = new ArrayList<>(); List writePaths = new ArrayList<>(); - for (FileEntitlement fileEntitlement : fileEntitlements) { - var mode = fileEntitlement.mode(); - if (mode == FileEntitlement.Mode.READ_WRITE) { - writePaths.add(fileEntitlement.path()); - } - readPaths.add(fileEntitlement.path()); + for (FilesEntitlement.FileData fileData : 
filesEntitlement.filesData()) { + var mode = fileData.mode(); + var paths = fileData.resolvePaths(pathLookup); + paths.forEach(path -> { + var normalized = normalizePath(path); + if (mode == FilesEntitlement.Mode.READ_WRITE) { + writePaths.add(normalized); + } + readPaths.add(normalized); + }); } + // everything has access to the temp dir + readPaths.add(pathLookup.tempDir().toString()); + writePaths.add(pathLookup.tempDir().toString()); + readPaths.sort(String::compareTo); writePaths.sort(String::compareTo); @@ -42,25 +52,25 @@ final class FileAccessTree { this.writePaths = writePaths.toArray(new String[0]); } - boolean canRead(Path path) { - return checkPath(normalize(path), readPaths); + public static FileAccessTree of(FilesEntitlement filesEntitlement, PathLookup pathLookup) { + return new FileAccessTree(filesEntitlement, pathLookup); } - @SuppressForbidden(reason = "Explicitly checking File apis") - boolean canRead(File file) { - return checkPath(normalize(file.toPath()), readPaths); + boolean canRead(Path path) { + return checkPath(normalizePath(path), readPaths); } boolean canWrite(Path path) { - return checkPath(normalize(path), writePaths); - } - - @SuppressForbidden(reason = "Explicitly checking File apis") - boolean canWrite(File file) { - return checkPath(normalize(file.toPath()), writePaths); + return checkPath(normalizePath(path), writePaths); } - private static String normalize(Path path) { + /** + * @return the "canonical" form of the given {@code path}, to be used for entitlement checks. + */ + static String normalizePath(Path path) { + // Note that toAbsolutePath produces paths separated by the default file separator, + // so on Windows, if the given path uses forward slashes, this consistently + // converts it to backslashes. return path.toAbsolutePath().normalize().toString(); } @@ -71,7 +81,7 @@ private static boolean checkPath(String path, String[] paths) { int ndx = Arrays.binarySearch(paths, path); if (ndx < -1) { String maybeParent = paths[-ndx - 2]; - return path.startsWith(maybeParent); + return path.startsWith(maybeParent) && path.startsWith(FILE_SEPARATOR, maybeParent.length()); } return ndx >= 0; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java deleted file mode 100644 index 4bd1dc10c85bb..0000000000000 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.entitlement.runtime.policy; - -import java.nio.file.Paths; - -/** - * Describes a file entitlement with a path and mode. 
- */ -public record FileEntitlement(String path, Mode mode) implements Entitlement { - - public enum Mode { - READ, - READ_WRITE - } - - public FileEntitlement { - path = normalizePath(path); - } - - private static String normalizePath(String path) { - return Paths.get(path).toAbsolutePath().normalize().toString(); - } - - private static Mode parseMode(String mode) { - if (mode.equals("read")) { - return Mode.READ; - } else if (mode.equals("read_write")) { - return Mode.READ_WRITE; - } else { - throw new PolicyValidationException("invalid mode: " + mode + ", valid values: [read, read_write]"); - } - } - - @ExternalEntitlement(parameterNames = { "path", "mode" }, esModulesOnly = false) - public FileEntitlement(String path, String mode) { - this(path, parseMode(mode)); - } -} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java new file mode 100644 index 0000000000000..5790e7245aad9 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.nio.file.Path; + +public record PathLookup(Path homeDir, Path configDir, Path[] dataDirs, Path tempDir) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index b08db25a2430b..33ccf6fb05c9c 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -13,6 +13,17 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadStoreAttributesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import 
org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -41,24 +52,24 @@ public class PolicyManager { private static final Logger logger = LogManager.getLogger(PolicyManager.class); - record ModuleEntitlements(Map, List> entitlementsByType, FileAccessTree fileAccess) { - public static final ModuleEntitlements NONE = new ModuleEntitlements(Map.of(), FileAccessTree.EMPTY); + static final String UNKNOWN_COMPONENT_NAME = "(unknown)"; + static final String SERVER_COMPONENT_NAME = "(server)"; + static final String APM_AGENT_COMPONENT_NAME = "(APM agent)"; + + /** + * @param componentName the plugin name; or else one of the special component names + * like {@link #SERVER_COMPONENT_NAME} or {@link #APM_AGENT_COMPONENT_NAME}. + */ + record ModuleEntitlements( + String componentName, + Map, List> entitlementsByType, + FileAccessTree fileAccess + ) { ModuleEntitlements { entitlementsByType = Map.copyOf(entitlementsByType); } - public static ModuleEntitlements from(List entitlements) { - var fileEntitlements = entitlements.stream() - .filter(e -> e.getClass().equals(FileEntitlement.class)) - .map(e -> (FileEntitlement) e) - .toList(); - return new ModuleEntitlements( - entitlements.stream().collect(groupingBy(Entitlement::getClass)), - new FileAccessTree(fileEntitlements) - ); - } - public boolean hasEntitlement(Class entitlementClass) { return entitlementsByType.containsKey(entitlementClass); } @@ -72,12 +83,34 @@ public Stream getEntitlements(Class entitlementCla } } + // pkg private for testing + ModuleEntitlements defaultEntitlements(String componentName) { + return new ModuleEntitlements(componentName, Map.of(), defaultFileAccess); + } + + // pkg private for testing + ModuleEntitlements policyEntitlements(String componentName, List entitlements) { + FilesEntitlement filesEntitlement = FilesEntitlement.EMPTY; + for (Entitlement entitlement : entitlements) { + if (entitlement instanceof FilesEntitlement) { + filesEntitlement = (FilesEntitlement) entitlement; + } + } + return new ModuleEntitlements( + componentName, + entitlements.stream().collect(groupingBy(Entitlement::getClass)), + FileAccessTree.of(filesEntitlement, pathLookup) + ); + } + final Map moduleEntitlementsMap = new ConcurrentHashMap<>(); - protected final Map> serverEntitlements; - protected final List agentEntitlements; - protected final Map>> pluginsEntitlements; + private final Map> serverEntitlements; + private final List apmAgentEntitlements; + private final Map>> pluginsEntitlements; private final Function, String> pluginResolver; + private final PathLookup pathLookup; + private final FileAccessTree defaultFileAccess; public static final String ALL_UNNAMED = "ALL-UNNAMED"; @@ -97,9 +130,9 @@ private static Set findSystemModules() { } /** - * The package name containing agent classes. + * The package name containing classes from the APM agent. */ - private final String agentsPackageName; + private final String apmAgentPackageName; /** * Frames originating from this module are ignored in the permission logic. 
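
Aside for readers following the PolicyManager hunks above: the new ModuleEntitlements record keys a module's entitlements by their concrete class (via groupingBy), so a flag check like hasEntitlement and a parameterized check like getEntitlements each become a single map operation instead of a list scan. Below is a minimal, self-contained Java sketch of that lookup pattern; the entitlement types and the Demo class are simplified stand-ins for illustration, not the actual Elasticsearch classes.

import java.util.List;
import java.util.Map;
import java.util.stream.Stream;

import static java.util.stream.Collectors.groupingBy;

// Simplified stand-ins for the entitlement types in the patch.
interface Entitlement {}

record OutboundNetworkEntitlement() implements Entitlement {}

record WriteSystemPropertiesEntitlement(List<String> properties) implements Entitlement {}

// Mirrors the lookup pattern of PolicyManager.ModuleEntitlements: entitlements are grouped
// by concrete class once, so every later check is a plain map lookup rather than a list scan.
record ModuleEntitlements(Map<Class<? extends Entitlement>, List<Entitlement>> entitlementsByType) {

    static ModuleEntitlements of(List<Entitlement> entitlements) {
        return new ModuleEntitlements(entitlements.stream().collect(groupingBy(Entitlement::getClass)));
    }

    boolean hasEntitlement(Class<? extends Entitlement> entitlementClass) {
        return entitlementsByType.containsKey(entitlementClass);
    }

    <E extends Entitlement> Stream<E> getEntitlements(Class<E> entitlementClass) {
        return entitlementsByType.getOrDefault(entitlementClass, List.of()).stream().map(entitlementClass::cast);
    }
}

public class ModuleEntitlementsDemo {
    public static void main(String[] args) {
        var entitlements = ModuleEntitlements.of(
            List.of(new OutboundNetworkEntitlement(), new WriteSystemPropertiesEntitlement(List.of("user.timezone")))
        );
        // Flag-style check, as in checkFlagEntitlement:
        System.out.println(entitlements.hasEntitlement(OutboundNetworkEntitlement.class)); // true
        // Parameterized check, as in checkWriteProperty:
        System.out.println(
            entitlements.getEntitlements(WriteSystemPropertiesEntitlement.class)
                .anyMatch(e -> e.properties().contains("user.timezone")) // true
        );
    }
}

The real record additionally carries the component name and a FileAccessTree, as the hunks above show; this sketch isolates only the by-type lookup.
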
@@ -108,25 +141,28 @@ private static Set findSystemModules() { public PolicyManager( Policy serverPolicy, - List agentEntitlements, + List apmAgentEntitlements, Map pluginPolicies, Function, String> pluginResolver, - String agentsPackageName, - Module entitlementsModule + String apmAgentPackageName, + Module entitlementsModule, + PathLookup pathLookup ) { this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(serverPolicy)); - this.agentEntitlements = agentEntitlements; + this.apmAgentEntitlements = apmAgentEntitlements; this.pluginsEntitlements = requireNonNull(pluginPolicies).entrySet() .stream() .collect(toUnmodifiableMap(Map.Entry::getKey, e -> buildScopeEntitlementsMap(e.getValue()))); this.pluginResolver = pluginResolver; - this.agentsPackageName = agentsPackageName; + this.apmAgentPackageName = apmAgentPackageName; this.entitlementsModule = entitlementsModule; + this.pathLookup = requireNonNull(pathLookup); + this.defaultFileAccess = FileAccessTree.of(FilesEntitlement.EMPTY, pathLookup); for (var e : serverEntitlements.entrySet()) { - validateEntitlementsPerModule("server", e.getKey(), e.getValue()); + validateEntitlementsPerModule(SERVER_COMPONENT_NAME, e.getKey(), e.getValue()); } - validateEntitlementsPerModule("agent", "unnamed", agentEntitlements); + validateEntitlementsPerModule(APM_AGENT_COMPONENT_NAME, "unnamed", apmAgentEntitlements); for (var p : pluginsEntitlements.entrySet()) { for (var m : p.getValue().entrySet()) { validateEntitlementsPerModule(p.getKey(), m.getKey(), m.getValue()); @@ -138,45 +174,28 @@ private static Map> buildScopeEntitlementsMap(Policy p return policy.scopes().stream().collect(toUnmodifiableMap(Scope::moduleName, Scope::entitlements)); } - private static void validateEntitlementsPerModule(String sourceName, String moduleName, List entitlements) { - Set> flagEntitlements = new HashSet<>(); + private static void validateEntitlementsPerModule(String componentName, String moduleName, List entitlements) { + Set> found = new HashSet<>(); for (var e : entitlements) { - if (e instanceof FileEntitlement) { - continue; - } - if (flagEntitlements.contains(e.getClass())) { + if (found.contains(e.getClass())) { throw new IllegalArgumentException( - "[" - + sourceName - + "] using module [" - + moduleName - + "] found duplicate flag entitlements [" - + e.getClass().getName() - + "]" + "[" + componentName + "] using module [" + moduleName + "] found duplicate entitlement [" + e.getClass().getName() + "]" ); } - flagEntitlements.add(e.getClass()); + found.add(e.getClass()); } } public void checkStartProcess(Class callerClass) { - neverEntitled(callerClass, "start process"); + neverEntitled(callerClass, () -> "start process"); } - private void neverEntitled(Class callerClass, String operationDescription) { - var requestingClass = requestingClass(callerClass); - if (isTriviallyAllowed(requestingClass)) { - return; - } + public void checkWriteStoreAttributes(Class callerClass) { + neverEntitled(callerClass, () -> "change file store attributes"); + } - throw new NotEntitledException( - Strings.format( - "Not entitled: caller [%s], module [%s], operation [%s]", - callerClass, - requestingClass.getModule() == null ? 
"" : requestingClass.getModule().getName(), - operationDescription - ) - ); + public void checkReadStoreAttributes(Class callerClass) { + checkEntitlementPresent(callerClass, ReadStoreAttributesEntitlement.class); } /** @@ -189,11 +208,12 @@ private void neverEntitled(Class callerClass, Supplier operationDescr return; } - throw new NotEntitledException( + notEntitled( Strings.format( - "Not entitled: caller [%s], module [%s], operation [%s]", - callerClass, - requestingClass.getModule() == null ? "" : requestingClass.getModule().getName(), + "Not entitled: component [%s], module [%s], class [%s], operation [%s]", + getEntitlements(requestingClass).componentName(), + requestingClass.getModule().getName(), + requestingClass, operationDescription.get() ) ); @@ -212,17 +232,19 @@ public void checkSetHttpsConnectionProperties(Class callerClass) { } public void checkChangeJVMGlobalState(Class callerClass) { - neverEntitled(callerClass, () -> { - // Look up the check$ method to compose an informative error message. - // This way, we don't need to painstakingly describe every individual global-state change. - Optional checkMethodName = StackWalker.getInstance() - .walk( - frames -> frames.map(StackFrame::getMethodName) - .dropWhile(not(methodName -> methodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX))) - .findFirst() - ); - return checkMethodName.map(this::operationDescription).orElse("change JVM global state"); - }); + neverEntitled(callerClass, () -> walkStackForCheckMethodName().orElse("change JVM global state")); + } + + private Optional walkStackForCheckMethodName() { + // Look up the check$ method to compose an informative error message. + // This way, we don't need to painstakingly describe every individual global-state change. + return StackWalker.getInstance() + .walk( + frames -> frames.map(StackFrame::getMethodName) + .dropWhile(not(methodName -> methodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX))) + .findFirst() + ) + .map(this::operationDescription); } /** @@ -232,31 +254,9 @@ public void checkChangeNetworkHandling(Class callerClass) { checkChangeJVMGlobalState(callerClass); } - /** - * Check for operations that can access sensitive network information, e.g. 
secrets, tokens or SSL sessions - */ - public void checkReadSensitiveNetworkInformation(Class callerClass) { - neverEntitled(callerClass, "access sensitive network information"); - } - @SuppressForbidden(reason = "Explicitly checking File apis") public void checkFileRead(Class callerClass, File file) { - var requestingClass = requestingClass(callerClass); - if (isTriviallyAllowed(requestingClass)) { - return; - } - - ModuleEntitlements entitlements = getEntitlements(requestingClass); - if (entitlements.fileAccess().canRead(file) == false) { - throw new NotEntitledException( - Strings.format( - "Not entitled: caller [%s], module [%s], entitlement [file], operation [read], path [%s]", - callerClass, - requestingClass.getModule(), - file - ) - ); - } + checkFileRead(callerClass, file.toPath()); } public void checkFileRead(Class callerClass, Path path) { @@ -267,11 +267,12 @@ public void checkFileRead(Class callerClass, Path path) { ModuleEntitlements entitlements = getEntitlements(requestingClass); if (entitlements.fileAccess().canRead(path) == false) { - throw new NotEntitledException( + notEntitled( Strings.format( - "Not entitled: caller [%s], module [%s], entitlement [file], operation [read], path [%s]", - callerClass, - requestingClass.getModule(), + "Not entitled: component [%s], module [%s], class [%s], entitlement [file], operation [read], path [%s]", + entitlements.componentName(), + requestingClass.getModule().getName(), + requestingClass, path ) ); @@ -280,22 +281,7 @@ public void checkFileRead(Class callerClass, Path path) { @SuppressForbidden(reason = "Explicitly checking File apis") public void checkFileWrite(Class callerClass, File file) { - var requestingClass = requestingClass(callerClass); - if (isTriviallyAllowed(requestingClass)) { - return; - } - - ModuleEntitlements entitlements = getEntitlements(requestingClass); - if (entitlements.fileAccess().canWrite(file) == false) { - throw new NotEntitledException( - Strings.format( - "Not entitled: caller [%s], module [%s], entitlement [file], operation [write], path [%s]", - callerClass, - requestingClass.getModule(), - file - ) - ); - } + checkFileWrite(callerClass, file.toPath()); } public void checkFileWrite(Class callerClass, Path path) { @@ -306,17 +292,27 @@ public void checkFileWrite(Class callerClass, Path path) { ModuleEntitlements entitlements = getEntitlements(requestingClass); if (entitlements.fileAccess().canWrite(path) == false) { - throw new NotEntitledException( + notEntitled( Strings.format( - "Not entitled: caller [%s], module [%s], entitlement [file], operation [write], path [%s]", - callerClass, - requestingClass.getModule(), + "Not entitled: component [%s], module [%s], class [%s], entitlement [file], operation [write], path [%s]", + entitlements.componentName(), + requestingClass.getModule().getName(), + requestingClass, path ) ); } } + /** + * Invoked when we try to get an arbitrary {@code FileAttributeView} class. Such a class can modify attributes, like owner etc.; + * we could think about introducing checks for each of the operations, but for now we over-approximate this and simply deny when it is + * used directly. + */ + public void checkGetFileAttributeView(Class callerClass) { + neverEntitled(callerClass, () -> "get file attribute view"); + } + /** * Check for operations that can access sensitive network information, e.g. 
secrets, tokens or SSL sessions */ @@ -344,30 +340,33 @@ public void checkAllNetworkAccess(Class<?> callerClass) { } var classEntitlements = getEntitlements(requestingClass); - if (classEntitlements.hasEntitlement(InboundNetworkEntitlement.class) == false) { - throw new NotEntitledException( - Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [inbound_network]", - requestingClass, - requestingClass.getModule().getName() - ) - ); - } + checkFlagEntitlement(classEntitlements, InboundNetworkEntitlement.class, requestingClass); + checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass); + } - if (classEntitlements.hasEntitlement(OutboundNetworkEntitlement.class) == false) { - throw new NotEntitledException( + private static void checkFlagEntitlement( + ModuleEntitlements classEntitlements, + Class<? extends Entitlement> entitlementClass, + Class<?> requestingClass + ) { + if (classEntitlements.hasEntitlement(entitlementClass) == false) { + notEntitled( Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [outbound_network]", + "Not entitled: component [%s], module [%s], class [%s], entitlement [%s]", + classEntitlements.componentName(), + requestingClass.getModule().getName(), requestingClass, - requestingClass.getModule().getName() + PolicyParser.getEntitlementTypeName(entitlementClass) ) ); } logger.debug( () -> Strings.format( - "Entitled: class [%s], module [%s], entitlements [inbound_network, outbound_network]", + "Entitled: component [%s], module [%s], class [%s], entitlement [%s]", + classEntitlements.componentName(), + requestingClass.getModule().getName(), requestingClass, - requestingClass.getModule().getName() + PolicyParser.getEntitlementTypeName(entitlementClass) ) ); } @@ -382,50 +381,40 @@ public void checkWriteProperty(Class<?> callerClass, String property) { if (entitlements.getEntitlements(WriteSystemPropertiesEntitlement.class).anyMatch(e -> e.properties().contains(property))) { logger.debug( () -> Strings.format( - "Entitled: class [%s], module [%s], entitlement [write_system_properties], property [%s]", - requestingClass, + "Entitled: component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]", + entitlements.componentName(), requestingClass.getModule().getName(), + requestingClass, property ) ); return; } - throw new NotEntitledException( + notEntitled( Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [write_system_properties], property [%s]", - requestingClass, + "Not entitled: component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]", + entitlements.componentName(), requestingClass.getModule().getName(), + requestingClass, property ) ); } + private static void notEntitled(String message) { + throw new NotEntitledException(message); + } + + public void checkManageThreadsEntitlement(Class<?> callerClass) { + checkEntitlementPresent(callerClass, ManageThreadsEntitlement.class); + } + private void checkEntitlementPresent(Class<?> callerClass, Class<? extends Entitlement> entitlementClass) { var requestingClass = requestingClass(callerClass); if (isTriviallyAllowed(requestingClass)) { return; } - - ModuleEntitlements entitlements = getEntitlements(requestingClass); - if (entitlements.hasEntitlement(entitlementClass)) { - logger.debug( - () -> Strings.format( - "Entitled: class [%s], module [%s], entitlement [%s]", - requestingClass, - requestingClass.getModule().getName(), - PolicyParser.getEntitlementTypeName(entitlementClass) - ) - ); - return; - } - 
throw new NotEntitledException( - Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [%s]", - requestingClass, - requestingClass.getModule().getName(), - PolicyParser.getEntitlementTypeName(entitlementClass) - ) - ); + checkFlagEntitlement(getEntitlements(requestingClass), entitlementClass, requestingClass); } ModuleEntitlements getEntitlements(Class<?> requestingClass) { @@ -435,45 +424,44 @@ ModuleEntitlements getEntitlements(Class<?> requestingClass) { private ModuleEntitlements computeEntitlements(Class<?> requestingClass) { Module requestingModule = requestingClass.getModule(); if (isServerModule(requestingModule)) { - return getModuleScopeEntitlements(requestingClass, serverEntitlements, requestingModule.getName(), "server"); + return getModuleScopeEntitlements(serverEntitlements, requestingModule.getName(), SERVER_COMPONENT_NAME); } // plugins var pluginName = pluginResolver.apply(requestingClass); if (pluginName != null) { var pluginEntitlements = pluginsEntitlements.get(pluginName); - if (pluginEntitlements != null) { + if (pluginEntitlements == null) { + return defaultEntitlements(pluginName); + } else { final String scopeName; if (requestingModule.isNamed() == false) { scopeName = ALL_UNNAMED; } else { scopeName = requestingModule.getName(); } - return getModuleScopeEntitlements(requestingClass, pluginEntitlements, scopeName, pluginName); + return getModuleScopeEntitlements(pluginEntitlements, scopeName, pluginName); } } - if (requestingModule.isNamed() == false && requestingClass.getPackageName().startsWith(agentsPackageName)) { - // agents are the only thing running non-modular in the system classloader - return ModuleEntitlements.from(agentEntitlements); + if (requestingModule.isNamed() == false && requestingClass.getPackageName().startsWith(apmAgentPackageName)) { + // The APM agent is the only thing running non-modular in the system classloader + return policyEntitlements(APM_AGENT_COMPONENT_NAME, apmAgentEntitlements); } - logger.warn("No applicable entitlement policy for class [{}]", requestingClass.getName()); - return ModuleEntitlements.NONE; + return defaultEntitlements(UNKNOWN_COMPONENT_NAME); } private ModuleEntitlements getModuleScopeEntitlements( - Class<?> callerClass, Map<String, List<Entitlement>> scopeEntitlements, String moduleName, - String component + String componentName ) { var entitlements = scopeEntitlements.get(moduleName); if (entitlements == null) { - logger.warn("No applicable entitlement policy for [{}], module [{}], class [{}]", component, moduleName, callerClass); - return ModuleEntitlements.NONE; + return defaultEntitlements(componentName); } - return ModuleEntitlements.from(entitlements); + return policyEntitlements(componentName, entitlements); } private static boolean isServerModule(Module requestingModule) { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java index 95437027239b0..9698b9e86704a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -9,6 +9,16 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import 
org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteAllSystemPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -19,6 +29,8 @@ import java.io.UncheckedIOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -35,20 +47,22 @@ */ public class PolicyParser { - private static final Map<String, Class<?>> EXTERNAL_ENTITLEMENTS = Stream.of( - FileEntitlement.class, + private static final Map<String, Class<? extends Entitlement>> EXTERNAL_ENTITLEMENTS = Stream.of( CreateClassLoaderEntitlement.class, - SetHttpsConnectionPropertiesEntitlement.class, - OutboundNetworkEntitlement.class, + FilesEntitlement.class, InboundNetworkEntitlement.class, - WriteSystemPropertiesEntitlement.class, - LoadNativeLibrariesEntitlement.class + LoadNativeLibrariesEntitlement.class, + ManageThreadsEntitlement.class, + OutboundNetworkEntitlement.class, + SetHttpsConnectionPropertiesEntitlement.class, + WriteAllSystemPropertiesEntitlement.class, + WriteSystemPropertiesEntitlement.class ).collect(Collectors.toUnmodifiableMap(PolicyParser::getEntitlementTypeName, Function.identity())); protected final XContentParser policyParser; protected final String policyName; private final boolean isExternalPlugin; - private final Map<String, Class<?>> externalEntitlements; + private final Map<String, Class<? extends Entitlement>> externalEntitlements; static String getEntitlementTypeName(Class<? extends Entitlement> entitlementClass) { var entitlementClassName = entitlementClass.getSimpleName(); @@ -71,8 +85,12 @@ public PolicyParser(InputStream inputStream, String policyName, boolean isExtern } // package private for tests - PolicyParser(InputStream inputStream, String policyName, boolean isExternalPlugin, Map<String, Class<?>> externalEntitlements) - throws IOException { + PolicyParser( + InputStream inputStream, + String policyName, + boolean isExternalPlugin, + Map<String, Class<? extends Entitlement>> externalEntitlements + ) throws IOException { this.policyParser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, Objects.requireNonNull(inputStream)); this.policyName = policyName; this.isExternalPlugin = isExternalPlugin; @@ -139,6 +157,7 @@ protected Entitlement parseEntitlement(String scopeName, String entitlementType) } Constructor<?> entitlementConstructor = null; + Method entitlementMethod = null; ExternalEntitlement entitlementMetadata = null; for (var ctor : entitlementClass.getConstructors()) { var metadata = ctor.getAnnotation(ExternalEntitlement.class); @@ -153,8 +172,27 @@ protected Entitlement parseEntitlement(String scopeName, String entitlementType) entitlementConstructor = ctor; entitlementMetadata = metadata; } - } + for (var method : entitlementClass.getMethods()) { + var 
metadata = method.getAnnotation(ExternalEntitlement.class); + if (metadata != null) { + if (Modifier.isStatic(method.getModifiers()) == false) { + throw new IllegalStateException( + "entitlement class [" + entitlementClass.getName() + "] has non-static method annotated with ExternalEntitlement" + ); + } + if (entitlementMetadata != null) { + throw new IllegalStateException( + "entitlement class [" + + entitlementClass.getName() + + "] has more than one constructor and/or method annotated with ExternalEntitlement" + ); + } + entitlementMethod = method; + entitlementMetadata = metadata; + } + } + if (entitlementMetadata == null) { throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); } @@ -163,40 +201,53 @@ protected Entitlement parseEntitlement(String scopeName, String entitlementType) throw newPolicyParserException("entitlement type [" + entitlementType + "] is allowed only on modules"); } - Class<?>[] parameterTypes = entitlementConstructor.getParameterTypes(); + Class<?>[] parameterTypes = entitlementConstructor != null + ? entitlementConstructor.getParameterTypes() + : entitlementMethod.getParameterTypes(); String[] parametersNames = entitlementMetadata.parameterNames(); + Object[] parameterValues = new Object[parameterTypes.length]; if (parameterTypes.length != 0 || parametersNames.length != 0) { - if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { - throw newPolicyParserException(scopeName, entitlementType, "expected entitlement parameters"); - } - } + if (policyParser.nextToken() == XContentParser.Token.START_OBJECT) { + Map<String, Object> parsedValues = policyParser.map(); - Map<String, Object> parsedValues = policyParser.map(); - - Object[] parameterValues = new Object[parameterTypes.length]; - for (int parameterIndex = 0; parameterIndex < parameterTypes.length; ++parameterIndex) { - String parameterName = parametersNames[parameterIndex]; - Object parameterValue = parsedValues.remove(parameterName); - if (parameterValue == null) { - throw newPolicyParserException(scopeName, entitlementType, "missing entitlement parameter [" + parameterName + "]"); - } - Class<?> parameterType = parameterTypes[parameterIndex]; - if (parameterType.isAssignableFrom(parameterValue.getClass()) == false) { - throw newPolicyParserException( - scopeName, - entitlementType, - "unexpected parameter type [" + parameterType.getSimpleName() + "] for entitlement parameter [" + parameterName + "]" - ); + for (int parameterIndex = 0; parameterIndex < parameterTypes.length; ++parameterIndex) { + String parameterName = parametersNames[parameterIndex]; + Object parameterValue = parsedValues.remove(parameterName); + if (parameterValue == null) { + throw newPolicyParserException(scopeName, entitlementType, "missing entitlement parameter [" + parameterName + "]"); + } + Class<?> parameterType = parameterTypes[parameterIndex]; + if (parameterType.isAssignableFrom(parameterValue.getClass()) == false) { + throw newPolicyParserException( + scopeName, + entitlementType, + "unexpected parameter type [" + + parameterType.getSimpleName() + + "] for entitlement parameter [" + + parameterName + + "]" + ); + } + parameterValues[parameterIndex] = parameterValue; + } + if (parsedValues.isEmpty() == false) { + throw newPolicyParserException(scopeName, entitlementType, "extraneous entitlement parameter(s) " + parsedValues); + } + } else if (policyParser.currentToken() == XContentParser.Token.START_ARRAY) { + List<Object> parsedValues = policyParser.list(); + parameterValues[0] = parsedValues; + } else { + throw 
newPolicyParserException(scopeName, entitlementType, "expected entitlement parameters"); } - parameterValues[parameterIndex] = parameterValue; - } - if (parsedValues.isEmpty() == false) { - throw newPolicyParserException(scopeName, entitlementType, "extraneous entitlement parameter(s) " + parsedValues); } try { - return (Entitlement) entitlementConstructor.newInstance(parameterValues); + if (entitlementConstructor != null) { + return (Entitlement) entitlementConstructor.newInstance(parameterValues); + } else { + return (Entitlement) entitlementMethod.invoke(null, parameterValues); + } } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { if (e.getCause() instanceof PolicyValidationException piae) { throw newPolicyParserException(startLocation, scopeName, entitlementType, piae); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java index a2bc49d99b44f..5f21db011884d 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java @@ -15,13 +15,13 @@ * parser is able to wrap this exception with a line/character number for * additional useful error information. */ -class PolicyValidationException extends RuntimeException { +public class PolicyValidationException extends RuntimeException { - PolicyValidationException(String message) { + public PolicyValidationException(String message) { super(message); } - PolicyValidationException(String message, Throwable cause) { + public PolicyValidationException(String message, Throwable cause) { super(message, cause); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java index 55e257797d603..6342a155da940 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java @@ -9,6 +9,8 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; + import java.util.List; import java.util.Objects; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java similarity index 81% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java index 55e4b66595642..4b7137f8c7cd6 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; public record CreateClassLoaderEntitlement() implements Entitlement { @ExternalEntitlement diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java index 5b53c399cc1b7..996b8a19ac8b0 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.Policy; /** * Marker interface to ensure that only {@link Entitlement} are diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java similarity index 90% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java index e5c836ea22b20..470277c482461 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; /** * Internal policy type (not-parseable -- not available to plugins). diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java new file mode 100644 index 0000000000000..3e4b9b22d4ce8 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; +import org.elasticsearch.entitlement.runtime.policy.PathLookup; +import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Stream; + +/** + * Describes a file entitlement with a path and mode. + */ +public record FilesEntitlement(List<FileData> filesData) implements Entitlement { + + public static final FilesEntitlement EMPTY = new FilesEntitlement(List.of()); + + public enum Mode { + READ, + READ_WRITE + } + + public enum BaseDir { + CONFIG, + DATA, + HOME + } + + public sealed interface FileData { + + Stream<Path> resolvePaths(PathLookup pathLookup); + + Mode mode(); + + static FileData ofPath(Path path, Mode mode) { + assert path.isAbsolute(); + return new AbsolutePathFileData(path, mode); + } + + static FileData ofRelativePath(Path relativePath, BaseDir baseDir, Mode mode) { + assert relativePath.isAbsolute() == false; + return new RelativePathFileData(relativePath, baseDir, mode); + } + } + + private record AbsolutePathFileData(Path path, Mode mode) implements FileData { + @Override + public Stream<Path> resolvePaths(PathLookup pathLookup) { + return Stream.of(path); + } + } + + private record RelativePathFileData(Path relativePath, BaseDir baseDir, Mode mode) implements FileData { + + @Override + public Stream<Path> resolvePaths(PathLookup pathLookup) { + Objects.requireNonNull(pathLookup); + switch (baseDir) { + case CONFIG: + return Stream.of(pathLookup.configDir().resolve(relativePath)); + case DATA: + return Arrays.stream(pathLookup.dataDirs()).map(d -> d.resolve(relativePath)); + case HOME: + return Stream.of(pathLookup.homeDir().resolve(relativePath)); + default: + throw new IllegalArgumentException(); + } + } + } + + private static Mode parseMode(String mode) { + if (mode.equals("read")) { + return Mode.READ; + } else if (mode.equals("read_write")) { + return Mode.READ_WRITE; + } else { + throw new PolicyValidationException("invalid mode: " + mode + ", valid values: [read, read_write]"); + } + } + + private static BaseDir parseBaseDir(String baseDir) { + return switch (baseDir) { + case "config" -> BaseDir.CONFIG; + case "data" -> BaseDir.DATA; + case "home" -> BaseDir.HOME; + default -> throw new PolicyValidationException( + "invalid relative directory: " + baseDir + ", valid values: [config, data, home]" + ); + }; + } + + @ExternalEntitlement(parameterNames = { "paths" }, esModulesOnly = false) + @SuppressWarnings("unchecked") + public static FilesEntitlement build(List<Object> paths) { + if (paths == null || paths.isEmpty()) { + throw new PolicyValidationException("must specify at least one path"); + } + List<FileData> filesData = new ArrayList<>(); + for (Object object : paths) { + Map<String, String> file = new HashMap<>((Map<String, String>) object); + String pathAsString = file.remove("path"); + String relativePathAsString = file.remove("relative_path"); + String relativeTo = file.remove("relative_to"); + String mode = file.remove("mode"); + + if (file.isEmpty() == false) { + throw new PolicyValidationException("unknown key(s) [" + file + "] in a listed file for files entitlement"); + } + if (mode == null) { + throw new PolicyValidationException("files entitlement must contain 'mode' for every listed file"); + } + if (pathAsString != null && relativePathAsString != 
null) { + throw new PolicyValidationException("a files entitlement entry cannot contain both 'path' and 'relative_path'"); + } + + if (relativePathAsString != null) { + if (relativeTo == null) { + throw new PolicyValidationException("files entitlement with a 'relative_path' must specify 'relative_to'"); + } + final BaseDir baseDir = parseBaseDir(relativeTo); + + Path relativePath = Path.of(relativePathAsString); + if (relativePath.isAbsolute()) { + throw new PolicyValidationException("'relative_path' [" + relativePathAsString + "] must be relative"); + } + filesData.add(FileData.ofRelativePath(relativePath, baseDir, parseMode(mode))); + } else if (pathAsString != null) { + Path path = Path.of(pathAsString); + if (path.isAbsolute() == false) { + throw new PolicyValidationException("'path' [" + pathAsString + "] must be absolute"); + } + filesData.add(FileData.ofPath(path, parseMode(mode))); + } else { + throw new PolicyValidationException("files entitlement must contain either 'path' or 'relative_path' for every entry"); + } + } + return new FilesEntitlement(filesData); + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java index 482d4e5100c0b..7c00a53cc16cb 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * Describes an entitlement for inbound network actions (listen/accept/receive) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java index 9a840c4e3e32e..b297685876925 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow loading native libraries diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ManageThreadsEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ManageThreadsEntitlement.java new file mode 100644 index 0000000000000..c75ccf26d1432 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ManageThreadsEntitlement.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; + +public record ManageThreadsEntitlement() implements Entitlement { + @ExternalEntitlement(esModulesOnly = false) + public ManageThreadsEntitlement {} +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java index 50d9a47f580e5..dbdd6840f2ebe 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * Describes an entitlement for outbound network actions (connect/send) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadStoreAttributesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadStoreAttributesEntitlement.java new file mode 100644 index 0000000000000..ccb84c4a68c97 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadStoreAttributesEntitlement.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +/** + * Describes an entitlement for reading file store attributes (e.g. disk space) + */ +public record ReadStoreAttributesEntitlement() implements Entitlement {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java similarity index 84% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java index bb2f65def9e18..abfcfdf18db20 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow setting properties to a single Https connection after this has been created diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java index f0d1d14177332..f0b02e82d3cb5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow writing all properties such as system properties. 
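The hunks above move every entitlement type into the org.elasticsearch.entitlement.runtime.policy.entitlements package, and the new FilesEntitlement replaces the old single-path file entitlement with a list of entries that are either absolute (path) or anchored to a base directory (relative_path plus relative_to: config, data, or home). A minimal sketch of how one relative entry resolves, using only the build and resolvePaths methods shown in the FilesEntitlement hunk above; the directory locations here are invented for illustration:

```java
import org.elasticsearch.entitlement.runtime.policy.PathLookup;
import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement;

import java.nio.file.Path;
import java.util.List;
import java.util.Map;

class FilesEntitlementSketch {
    public static void main(String[] args) {
        // One list entry, in the same shape the YAML parser hands to FilesEntitlement.build.
        FilesEntitlement entitlement = FilesEntitlement.build(
            List.of(Map.of("relative_path", "logs", "relative_to", "data", "mode", "read_write"))
        );
        // Hypothetical installation layout; the four arguments mirror the PathLookup
        // usage in the tests later in this diff (home, config, data dirs, temp).
        PathLookup lookup = new PathLookup(
            Path.of("/home/es"),
            Path.of("/etc/es"),
            new Path[] { Path.of("/data1"), Path.of("/data2") },
            Path.of("/tmp")
        );
        // A "data"-relative entry fans out to every data directory:
        // prints /data1/logs and /data2/logs.
        entitlement.filesData().forEach(fd -> fd.resolvePaths(lookup).forEach(System.out::println));
    }
}
```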
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java similarity index 86% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java index 654ebbda9dab3..b7818bb14030b 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; import java.util.List; import java.util.Set; diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java index 1521c80341b9d..37b2bfb19f819 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -9,12 +9,17 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; import static org.hamcrest.Matchers.is; public class FileAccessTreeTests extends ESTestCase { @@ -30,34 +35,45 @@ private static Path path(String s) { return root.resolve(s); } + private static final PathLookup TEST_PATH_LOOKUP = new PathLookup( + Path.of("/home"), + Path.of("/config"), + new Path[] { Path.of("/data1"), Path.of("/data2") }, + Path.of("/tmp") + ); + public void testEmpty() { - var tree = new FileAccessTree(List.of()); + var tree = accessTree(FilesEntitlement.EMPTY); assertThat(tree.canRead(path("path")), is(false)); assertThat(tree.canWrite(path("path")), is(false)); } public void testRead() { - var tree = new FileAccessTree(List.of(entitlement("foo", "read"))); + var tree = accessTree(entitlement("foo", "read")); assertThat(tree.canRead(path("foo")), is(true)); assertThat(tree.canRead(path("foo/subdir")), is(true)); + assertThat(tree.canRead(path("food")), is(false)); assertThat(tree.canWrite(path("foo")), is(false)); + assertThat(tree.canWrite(path("food")), is(false)); assertThat(tree.canRead(path("before")), is(false)); assertThat(tree.canRead(path("later")), is(false)); } public void testWrite() { - var tree = new FileAccessTree(List.of(entitlement("foo", "read_write"))); + var tree = accessTree(entitlement("foo", "read_write")); assertThat(tree.canWrite(path("foo")), is(true)); assertThat(tree.canWrite(path("foo/subdir")), is(true)); + assertThat(tree.canWrite(path("food")), is(false)); assertThat(tree.canRead(path("foo")), is(true)); + 
assertThat(tree.canRead(path("food")), is(false)); assertThat(tree.canWrite(path("before")), is(false)); assertThat(tree.canWrite(path("later")), is(false)); } public void testTwoPaths() { - var tree = new FileAccessTree(List.of(entitlement("foo", "read"), entitlement("bar", "read"))); + var tree = accessTree(entitlement("foo", "read", "bar", "read")); assertThat(tree.canRead(path("a")), is(false)); assertThat(tree.canRead(path("bar")), is(true)); assertThat(tree.canRead(path("bar/subdir")), is(true)); @@ -68,22 +84,110 @@ public void testTwoPaths() { } public void testReadWriteUnderRead() { - var tree = new FileAccessTree(List.of(entitlement("foo", "read"), entitlement("foo/bar", "read_write"))); + var tree = accessTree(entitlement("foo", "read", "foo/bar", "read_write")); assertThat(tree.canRead(path("foo")), is(true)); assertThat(tree.canWrite(path("foo")), is(false)); assertThat(tree.canRead(path("foo/bar")), is(true)); assertThat(tree.canWrite(path("foo/bar")), is(true)); } + public void testReadWithRelativePath() { + for (var dir : List.of("config", "home")) { + var tree = accessTree(entitlement(Map.of("relative_path", "foo", "mode", "read", "relative_to", dir))); + assertThat(tree.canRead(path("foo")), is(false)); + + assertThat(tree.canRead(path("/" + dir + "/foo")), is(true)); + + assertThat(tree.canRead(path("/" + dir + "/foo/subdir")), is(true)); + assertThat(tree.canRead(path("/" + dir + "/food")), is(false)); + assertThat(tree.canWrite(path("/" + dir + "/foo")), is(false)); + + assertThat(tree.canRead(path("/" + dir)), is(false)); + assertThat(tree.canRead(path("/" + dir + "/before")), is(false)); + assertThat(tree.canRead(path("/" + dir + "/later")), is(false)); + } + } + + public void testWriteWithRelativePath() { + for (var dir : List.of("config", "home")) { + var tree = accessTree(entitlement(Map.of("relative_path", "foo", "mode", "read_write", "relative_to", dir))); + assertThat(tree.canWrite(path("/" + dir + "/foo")), is(true)); + assertThat(tree.canWrite(path("/" + dir + "/foo/subdir")), is(true)); + assertThat(tree.canWrite(path("/" + dir)), is(false)); + assertThat(tree.canWrite(path("/" + dir + "/food")), is(false)); + assertThat(tree.canRead(path("/" + dir + "/foo")), is(true)); + assertThat(tree.canRead(path("/" + dir)), is(false)); + + assertThat(tree.canWrite(path("/" + dir)), is(false)); + assertThat(tree.canWrite(path("/" + dir + "/before")), is(false)); + assertThat(tree.canWrite(path("/" + dir + "/later")), is(false)); + } + } + + public void testMultipleDataDirs() { + var tree = accessTree(entitlement(Map.of("relative_path", "foo", "mode", "read_write", "relative_to", "data"))); + assertThat(tree.canWrite(path("/data1/foo")), is(true)); + assertThat(tree.canWrite(path("/data2/foo")), is(true)); + assertThat(tree.canWrite(path("/data3/foo")), is(false)); + assertThat(tree.canWrite(path("/data1/foo/subdir")), is(true)); + assertThat(tree.canWrite(path("foo")), is(false)); + assertThat(tree.canWrite(path("/data1/food")), is(false)); + assertThat(tree.canRead(path("/data1/foo")), is(true)); + assertThat(tree.canRead(path("/data2/foo")), is(true)); + assertThat(tree.canRead(path("foo")), is(false)); + + assertThat(tree.canWrite(path("/data1")), is(false)); + assertThat(tree.canWrite(path("/data2")), is(false)); + assertThat(tree.canWrite(path("/config/before")), is(false)); + assertThat(tree.canWrite(path("/config/later")), is(false)); + } + public void testNormalizePath() { - var tree = new FileAccessTree(List.of(entitlement("foo/../bar", "read"))); + var tree = 
accessTree(entitlement("foo/../bar", "read")); assertThat(tree.canRead(path("foo/../bar")), is(true)); assertThat(tree.canRead(path("foo")), is(false)); assertThat(tree.canRead(path("")), is(false)); } - FileEntitlement entitlement(String path, String mode) { - Path p = path(path); - return new FileEntitlement(p.toString(), mode); + public void testForwardSlashes() { + String sep = getDefaultFileSystem().getSeparator(); + var tree = accessTree(entitlement("a/b", "read", "m" + sep + "n", "read")); + + // Native separators work + assertThat(tree.canRead(path("a" + sep + "b")), is(true)); + assertThat(tree.canRead(path("m" + sep + "n")), is(true)); + + // Forward slashes also work + assertThat(tree.canRead(path("a/b")), is(true)); + assertThat(tree.canRead(path("m/n")), is(true)); + } + + public void testTempDirAccess() { + Path tempDir = createTempDir(); + var tree = FileAccessTree.of( + FilesEntitlement.EMPTY, + new PathLookup(Path.of("/home"), Path.of("/config"), new Path[] { Path.of("/data1"), Path.of("/data2") }, tempDir) + ); + assertThat(tree.canRead(tempDir), is(true)); + assertThat(tree.canWrite(tempDir), is(true)); + } + + FileAccessTree accessTree(FilesEntitlement entitlement) { + return FileAccessTree.of(entitlement, TEST_PATH_LOOKUP); + } + + static FilesEntitlement entitlement(String... values) { + List<Object> filesData = new ArrayList<>(); + for (int i = 0; i < values.length; i += 2) { + Map<String, String> fileData = new HashMap<>(); + fileData.put("path", path(values[i]).toString()); + fileData.put("mode", values[i + 1]); + filesData.add(fileData); + } + return FilesEntitlement.build(filesData); + } + + static FilesEntitlement entitlement(Map<String, String> value) { + return FilesEntitlement.build(List.of(value)); } } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index 6854ef54ca5f0..a4322ece247b7 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -12,6 +12,9 @@ import org.elasticsearch.entitlement.runtime.policy.PolicyManager.ModuleEntitlements; import org.elasticsearch.entitlement.runtime.policy.agent.TestAgent; import org.elasticsearch.entitlement.runtime.policy.agent.inner.TestInnerAgent; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.compiler.InMemoryJavaCompiler; import org.elasticsearch.test.jar.JarUtils; @@ -31,6 +34,7 @@ import static java.util.Map.entry; import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.SERVER_COMPONENT_NAME; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @@ -49,6 +53,13 @@ public class PolicyManagerTests extends ESTestCase { */ private static Module NO_ENTITLEMENTS_MODULE; + private static final PathLookup TEST_PATH_LOOKUP = new PathLookup( + Path.of("/user/home"), + Path.of("/config"), + new Path[] { Path.of("/data1/"), Path.of("/data2") }, + Path.of("/temp") + ); + 
@BeforeClass public static void beforeClass() { try { @@ -57,7 +68,6 @@ public static void beforeClass() { } catch (Exception e) { throw new IllegalStateException(e); } - } public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { @@ -67,16 +77,21 @@ public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { Map.of("plugin1", createPluginPolicy("plugin.module")), c -> "plugin1", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals("No policy for the unnamed module", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals( + "No policy for the unnamed module", + policyManager.defaultEntitlements("plugin1"), + policyManager.getEntitlements(callerClass) + ); - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(Map.of(requestingModule, policyManager.defaultEntitlements("plugin1")), policyManager.moduleEntitlementsMap); } public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { @@ -86,16 +101,17 @@ public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { Map.of(), c -> "plugin1", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals("No policy for this plugin", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals("No policy for this plugin", policyManager.defaultEntitlements("plugin1"), policyManager.getEntitlements(callerClass)); - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(Map.of(requestingModule, policyManager.defaultEntitlements("plugin1")), policyManager.moduleEntitlementsMap); } public void testGetEntitlementsFailureIsCached() { @@ -105,21 +121,22 @@ public void testGetEntitlementsFailureIsCached() { Map.of(), c -> "plugin1", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(policyManager.defaultEntitlements("plugin1"), policyManager.getEntitlements(callerClass)); + assertEquals(Map.of(requestingModule, policyManager.defaultEntitlements("plugin1")), policyManager.moduleEntitlementsMap); // A second time - assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals(policyManager.defaultEntitlements("plugin1"), policyManager.getEntitlements(callerClass)); // Nothing new in the map - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(Map.of(requestingModule, policyManager.defaultEntitlements("plugin1")), policyManager.moduleEntitlementsMap); } public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { @@ -129,7 +146,8 @@ public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), 
c -> "plugin2", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); // Any class from the current module (unnamed) will do @@ -146,7 +164,8 @@ public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotF Map.of(), c -> null, TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); // Tests do not run modular, so we cannot use a server class. @@ -156,9 +175,16 @@ public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotF var mockServerClass = ModuleLayer.boot().findLoader("jdk.httpserver").loadClass("com.sun.net.httpserver.HttpServer"); var requestingModule = mockServerClass.getModule(); - assertEquals("No policy for this module in server", ModuleEntitlements.NONE, policyManager.getEntitlements(mockServerClass)); + assertEquals( + "No policy for this module in server", + policyManager.defaultEntitlements(SERVER_COMPONENT_NAME), + policyManager.getEntitlements(mockServerClass) + ); - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals( + Map.of(requestingModule, policyManager.defaultEntitlements(SERVER_COMPONENT_NAME)), + policyManager.moduleEntitlementsMap + ); } public void testGetEntitlementsReturnsEntitlementsForServerModule() throws ClassNotFoundException { @@ -168,7 +194,8 @@ public void testGetEntitlementsReturnsEntitlementsForServerModule() throws Class Map.of(), c -> null, TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); // Tests do not run modular, so we cannot use a server class. @@ -193,7 +220,8 @@ public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOExc Map.of("mock-plugin", createPluginPolicy("org.example.plugin")), c -> "mock-plugin", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); var layer = createLayerForJar(jar, "org.example.plugin"); @@ -212,7 +240,8 @@ public void testGetEntitlementsResultIsCached() { Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), c -> "plugin2", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); // Any class from the current module (unnamed) will do @@ -235,7 +264,6 @@ public void testRequestingClassFastPath() throws IOException, ClassNotFoundExcep } public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoundException { - var agentsClass = new TestAgent(); var entitlementsClass = makeClassInItsOwnModule(); // A class in the entitlements library itself var requestingClass = makeClassInItsOwnModule(); // This guy is always the right answer var instrumentedClass = makeClassInItsOwnModule(); // The class that called the check method @@ -269,9 +297,10 @@ public void testAgentsEntitlements() throws IOException, ClassNotFoundException createEmptyTestServerPolicy(), List.of(new CreateClassLoaderEntitlement()), Map.of(), - c -> "test", + c -> c.getPackageName().startsWith(TEST_AGENTS_PACKAGE_NAME) ? 
null : "test", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ); ModuleEntitlements agentsEntitlements = policyManager.getEntitlements(TestAgent.class); assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); @@ -286,7 +315,7 @@ public void testAgentsEntitlements() throws IOException, ClassNotFoundException } } - public void testDuplicateFlagEntitlements() { + public void testDuplicateEntitlements() { IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> new PolicyManager( @@ -298,12 +327,12 @@ public void testDuplicateFlagEntitlements() { Map.of(), c -> "test", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ) ); assertEquals( - "[server] using module [test] found duplicate flag entitlements " - + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + "[(server)] using module [test] found duplicate entitlement " + "[" + CreateClassLoaderEntitlement.class.getName() + "]", iae.getMessage() ); @@ -315,12 +344,12 @@ public void testDuplicateFlagEntitlements() { Map.of(), c -> "test", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ) ); assertEquals( - "[agent] using module [unnamed] found duplicate flag entitlements " - + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + "[(APM agent)] using module [unnamed] found duplicate entitlement " + "[" + CreateClassLoaderEntitlement.class.getName() + "]", iae.getMessage() ); @@ -337,10 +366,11 @@ public void testDuplicateFlagEntitlements() { new Scope( "test", List.of( - new FileEntitlement("/test/path", FileEntitlement.Mode.READ), + FilesEntitlement.EMPTY, new CreateClassLoaderEntitlement(), - new FileEntitlement("/test/test", FileEntitlement.Mode.READ), - new CreateClassLoaderEntitlement() + new FilesEntitlement( + List.of(FilesEntitlement.FileData.ofPath(Path.of("/tmp/test"), FilesEntitlement.Mode.READ)) + ) ) ) ) @@ -348,24 +378,34 @@ ), c -> "plugin1", TEST_AGENTS_PACKAGE_NAME, - NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP ) ); assertEquals( - "[plugin1] using module [test] found duplicate flag entitlements " - + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + "[plugin1] using module [test] found duplicate entitlement " + "[" + FilesEntitlement.class.getName() + "]", iae.getMessage() ); } - private static Class<?> makeClassInItsOwnModule() throws IOException, ClassNotFoundException { - final Path home = createTempDir(); - Path jar = createMockPluginJar(home); - var layer = createLayerForJar(jar, "org.example.plugin"); - return layer.findLoader("org.example.plugin").loadClass("q.B"); + /** + * If the plugin resolver tells us a class is in a plugin, don't conclude that it's in an agent. 
+ */ + public void testPluginResolverOverridesAgents() { + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(new CreateClassLoaderEntitlement()), + Map.of(), + c -> "test", // Insist that the class is in a plugin + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE, + TEST_PATH_LOOKUP + ); + ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(TestAgent.class); + assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); } - private static Class<?> makeClassInItsOwnUnnamedModule() throws IOException, ClassNotFoundException { + private static Class<?> makeClassInItsOwnModule() throws IOException, ClassNotFoundException { final Path home = createTempDir(); Path jar = createMockPluginJar(home); var layer = createLayerForJar(jar, "org.example.plugin"); @@ -373,7 +413,15 @@ private static Class<?> makeClassInItsOwnUnnamedModule() throws IOException, Cla } private static PolicyManager policyManager(String agentsPackageName, Module entitlementsModule) { - return new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "test", agentsPackageName, entitlementsModule); + return new PolicyManager( + createEmptyTestServerPolicy(), + List.of(), + Map.of(), + c -> "test", + agentsPackageName, + entitlementsModule, + TEST_PATH_LOOKUP + ); } private static Policy createEmptyTestServerPolicy() { @@ -391,7 +439,12 @@ private static Policy createPluginPolicy(String... pluginModules) { .map( name -> new Scope( name, - List.of(new FileEntitlement("/test/path", FileEntitlement.Mode.READ), new CreateClassLoaderEntitlement()) + List.of( + new FilesEntitlement( + List.of(FilesEntitlement.FileData.ofPath(Path.of("/test/path"), FilesEntitlement.Mode.READ)) + ), + new CreateClassLoaderEntitlement() + ) ) ) .toList() diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java index cc8043990930d..924864d57b1cf 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java @@ -40,22 +40,83 @@ public void testEntitlementDoesNotExist() { public void testEntitlementMissingParameter() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - - file: {} + - files: + - path: test-path """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( - "[2:12] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " - + "for entitlement type [file]: missing entitlement parameter [path]", + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [files]: files entitlement must contain 'mode' for every listed file", ppe.getMessage() ); + } + + public void testEntitlementMissingDependentParameter() { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - files: + - relative_path: test-path + mode: read + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); + assertEquals( + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope 
[entitlement-module-name] " + + "for entitlement type [files]: files entitlement with a 'relative_path' must specify 'relative_to'", + ppe.getMessage() + ); + } + + public void testEntitlementRelativePathWhenAbsolute() { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - files: + - path: test-path + mode: read + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); + assertEquals( + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [files]: 'path' [test-path] must be absolute", + ppe.getMessage() + ); + } - ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + public void testEntitlementAbsolutePathWhenRelative() { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - files: + - relative_path: /test-path + relative_to: data + mode: read + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); + assertEquals( + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [files]: 'relative_path' [/test-path] must be relative", + ppe.getMessage() + ); + } + + public void testEntitlementMutuallyExclusiveParameters() { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - files: + - relative_path: test-path + path: test-path + mode: read + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); + assertEquals( + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [files]: a files entitlement entry cannot contain both 'path' and 'relative_path'", + ppe.getMessage() + ); + } + + public void testEntitlementAtLeastOneParameter() { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - - file: - path: test-path + - files: + - mode: read """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( - "[4:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " - + "for entitlement type [file]: missing entitlement parameter [mode]", + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [files]: files entitlement must contain either 'path' or 'relative_path' for every entry", ppe.getMessage() ); } @@ -63,14 +124,14 @@ public void testEntitlementMissingParameter() { public void testEntitlementExtraneousParameter() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - - file: - path: test-path - mode: read - extra: test + - files: + - path: test-path + mode: read + extra: test """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( - "[6:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " - + "for entitlement type [file]: extraneous entitlement parameter(s) {extra=test}", + "[2:5] policy parsing error for [test-failure-policy.yaml] in 
scope [entitlement-module-name] " + + "for entitlement type [files]: unknown key(s) [{extra=test}] in a listed file for files entitlement", ppe.getMessage() ); } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index 191b3afcdc674..b27a29978eec7 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -9,6 +9,14 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; @@ -32,6 +40,35 @@ public ManyConstructorsEntitlement(String s) {} public ManyConstructorsEntitlement(int i) {} } + public static class ManyMethodsEntitlement implements Entitlement { + @ExternalEntitlement + public static ManyMethodsEntitlement create(String s) { + return new ManyMethodsEntitlement(); + } + + @ExternalEntitlement + public static ManyMethodsEntitlement create(int i) { + return new ManyMethodsEntitlement(); + } + } + + public static class ConstructorAndMethodEntitlement implements Entitlement { + @ExternalEntitlement + public static ConstructorAndMethodEntitlement create(String s) { + return new ConstructorAndMethodEntitlement(s); + } + + @ExternalEntitlement + public ConstructorAndMethodEntitlement(String s) {} + } + + public static class NonStaticMethodEntitlement implements Entitlement { + @ExternalEntitlement + public NonStaticMethodEntitlement create() { + return new NonStaticMethodEntitlement(); + } + } + public void testGetEntitlementTypeName() { assertEquals("create_class_loader", PolicyParser.getEntitlementTypeName(CreateClassLoaderEntitlement.class)); @@ -47,7 +84,12 @@ public void testPolicyBuilder() throws IOException { .parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", "read_write")))) + List.of( + new Scope( + "entitlement-module-name", + List.of(FilesEntitlement.build(List.of(Map.of("path", "/test/path/to/file", "mode", "read_write")))) + ) + ) ); assertEquals(expected, parsedPolicy); } @@ -57,11 +99,92 @@ public void testPolicyBuilderOnExternalPlugin() throws IOException { .parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", "read_write")))) + List.of( + new Scope( + "entitlement-module-name", + List.of(FilesEntitlement.build(List.of(Map.of("path", "/test/path/to/file", "mode", "read_write")))) + ) + ) ); assertEquals(expected, parsedPolicy); } 
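The test methods that follow exercise the parser's new dispatch rule from earlier in this diff: an entitlement type may carry @ExternalEntitlement on exactly one public constructor or one public static factory method, and when the factory takes a single parameter the parser passes a YAML list straight through as List<Object> (this is how files is built). A hypothetical type written against that contract, patterned on ManyMethodsEntitlement below; the name and conversion logic are invented for illustration:

```java
import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement;

import java.util.List;

// Hypothetical example type, not part of this change: a list-valued entitlement
// built through an annotated static factory instead of an annotated constructor.
public record ExampleDirsEntitlement(List<String> dirs) implements Entitlement {

    @ExternalEntitlement(parameterNames = { "dirs" }, esModulesOnly = false)
    public static ExampleDirsEntitlement build(List<Object> dirs) {
        // The parser hands over the raw YAML list; real code would validate each
        // element and throw PolicyValidationException on bad input.
        return new ExampleDirsEntitlement(dirs.stream().map(String::valueOf).toList());
    }
}
```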
+ public void testParseFiles() throws IOException { + Policy policyWithOnePath = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - files: + - path: "/test/path/to/file" + mode: "read_write" + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); + Policy expected = new Policy( + "test-policy.yaml", + List.of( + new Scope( + "entitlement-module-name", + List.of(FilesEntitlement.build(List.of(Map.of("path", "/test/path/to/file", "mode", "read_write")))) + ) + ) + ); + assertEquals(expected, policyWithOnePath); + + Policy policyWithTwoPaths = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - files: + - path: "/test/path/to/file" + mode: "read_write" + - path: "/test/path/to/read-dir/" + mode: "read" + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); + expected = new Policy( + "test-policy.yaml", + List.of( + new Scope( + "entitlement-module-name", + List.of( + FilesEntitlement.build( + List.of( + Map.of("path", "/test/path/to/file", "mode", "read_write"), + Map.of("path", "/test/path/to/read-dir/", "mode", "read") + ) + ) + ) + ) + ) + ); + assertEquals(expected, policyWithTwoPaths); + + Policy policyWithMultiplePathsAndBaseDir = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - files: + - relative_path: "test/path/to/file" + relative_to: "data" + mode: "read_write" + - relative_path: "test/path/to/read-dir/" + relative_to: "config" + mode: "read" + - path: "/path/to/file" + mode: "read_write" + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); + expected = new Policy( + "test-policy.yaml", + List.of( + new Scope( + "entitlement-module-name", + List.of( + FilesEntitlement.build( + List.of( + Map.of("relative_path", "test/path/to/file", "mode", "read_write", "relative_to", "data"), + Map.of("relative_path", "test/path/to/read-dir/", "mode", "read", "relative_to", "config"), + Map.of("path", "/path/to/file", "mode", "read_write") + ) + ) + ) + ) + ) + ); + assertEquals(expected, policyWithMultiplePathsAndBaseDir); + } + public void testParseNetwork() throws IOException { Policy parsedPolicy = new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: @@ -166,4 +289,60 @@ public void testMultipleConstructorsAnnotated() throws IOException { ) ); } + + public void testMultipleMethodsAnnotated() throws IOException { + var parser = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - many_methods + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", true, Map.of("many_methods", ManyMethodsEntitlement.class)); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + "[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$ManyMethodsEntitlement]" + + " has more than one constructor and/or method annotated with ExternalEntitlement" + ) + ); + } + + public void testConstructorAndMethodAnnotated() throws IOException { + var parser = new PolicyParser( + new ByteArrayInputStream(""" + entitlement-module-name: + - constructor_and_method + """.getBytes(StandardCharsets.UTF_8)), + "test-policy.yaml", + true, + Map.of("constructor_and_method", ConstructorAndMethodEntitlement.class) + ); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + 
"[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$ConstructorAndMethodEntitlement]" + + " has more than one constructor and/or method annotated with ExternalEntitlement" + ) + ); + } + + public void testNonStaticMethodAnnotated() throws IOException { + var parser = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - non_static + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", true, Map.of("non_static", NonStaticMethodEntitlement.class)); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + "[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$NonStaticMethodEntitlement]" + + " has non-static method annotated with ExternalEntitlement" + ) + ); + } } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java new file mode 100644 index 0000000000000..511299ba73c6e --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.PathLookup; +import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; +import org.elasticsearch.test.ESTestCase; + +import java.nio.file.Path; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ_WRITE; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.is; + +public class FilesEntitlementTests extends ESTestCase { + + public void testEmptyBuild() { + PolicyValidationException pve = expectThrows(PolicyValidationException.class, () -> FilesEntitlement.build(List.of())); + assertEquals("must specify at least one path", pve.getMessage()); + pve = expectThrows(PolicyValidationException.class, () -> FilesEntitlement.build(null)); + assertEquals("must specify at least one path", pve.getMessage()); + } + + public void testInvalidRelativeDirectory() { + var ex = expectThrows( + PolicyValidationException.class, + () -> FilesEntitlement.build(List.of((Map.of("relative_path", "foo", "mode", "read", "relative_to", "bar")))) + ); + assertThat(ex.getMessage(), is("invalid relative directory: bar, valid values: [config, data, home]")); + } + + public void testFileDataRelativeWithEmptyDirectory() { + var fileData = FilesEntitlement.FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE); + var dataDirs = fileData.resolvePaths( + new PathLookup(Path.of("/home"), Path.of("/config"), new Path[] { Path.of("/data1/"), Path.of("/data2") }, Path.of("/temp")) + ); + assertThat(dataDirs.toList(), contains(Path.of("/data1/"), Path.of("/data2"))); + } +} diff --git 
a/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml b/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml index bbb926ccdd37d..2b5a4cfa783fe 100644 --- a/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml +++ b/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml @@ -1,4 +1,4 @@ entitlement-module-name: - - file: - path: "test/path/to/file" - mode: "read_write" + - files: + - path: "/test/path/to/file" + mode: "read_write" diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java index 5c1789f3aa66b..c83c9a6234a77 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java @@ -11,11 +11,15 @@ public class NativeAccessUtil { /** - * Enables native access for the provided module. No-op for JDK 21 or before. + * Enables native access for the provided module. + * We need to have this adapter even if the method is available in JDK 21, as it was in preview. + * Available to JDK 22+, required for JDK 24+ when using --illegal-native-access=deny */ - public static void enableNativeAccess(ModuleLayer.Controller controller, Module module) {} + public static void enableNativeAccess(ModuleLayer.Controller controller, Module module) { + controller.enableNativeAccess(module); + } public static boolean isNativeAccessEnabled(Module module) { - return true; + return module.isNativeAccessEnabled(); } } diff --git a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java deleted file mode 100644 index 34776407f759e..0000000000000 --- a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.nativeaccess; - -public class NativeAccessUtil { - /** - * Enables native access for the provided module. 
Available to JDK 22+, required for JDK 24+ when using --illegal-native-access=deny - */ - public static void enableNativeAccess(ModuleLayer.Controller controller, Module module) { - controller.enableNativeAccess(module); - } - - public static boolean isNativeAccessEnabled(Module module) { - return module.isNativeAccessEnabled(); - } -} diff --git a/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java b/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java index 79bfaec6a6d11..3e1815a74fbfe 100644 --- a/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java +++ b/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java @@ -13,8 +13,6 @@ import java.nio.file.Path; import java.security.KeyStore; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Set; @@ -25,7 +23,6 @@ import javax.net.ssl.TrustManagerFactory; import static org.elasticsearch.common.ssl.KeyStoreUtil.inferKeyStoreType; -import static org.elasticsearch.common.ssl.SslConfiguration.ORDERED_PROTOCOL_ALGORITHM_MAP; import static org.elasticsearch.common.ssl.SslConfigurationKeys.CERTIFICATE; import static org.elasticsearch.common.ssl.SslConfigurationKeys.CERTIFICATE_AUTHORITIES; import static org.elasticsearch.common.ssl.SslConfigurationKeys.CIPHERS; @@ -63,11 +60,7 @@ */ public abstract class SslConfigurationLoader { - static final List<String> DEFAULT_PROTOCOLS = Collections.unmodifiableList( - ORDERED_PROTOCOL_ALGORITHM_MAP.containsKey("TLSv1.3") - ? Arrays.asList("TLSv1.3", "TLSv1.2", "TLSv1.1") - : Arrays.asList("TLSv1.2", "TLSv1.1") - ); + static final List<String> DEFAULT_PROTOCOLS = List.of("TLSv1.3", "TLSv1.2"); private static final List<String> JDK12_CIPHERS = List.of( // TLSv1.3 cipher has PFS, AEAD, hardware support diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index 6add1b0ac4a13..abd482d8298ef 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; import java.util.Collections; @@ -573,7 +574,15 @@ private void rebucket() { long[] mergeMap = new long[Math.toIntExact(oldOrds.size())]; bucketOrds = new LongKeyedBucketOrds.FromMany(bigArrays()); success = true; - for (long owningBucketOrd = 0; owningBucketOrd <= oldOrds.maxOwningBucketOrd(); owningBucketOrd++) { + long maxOwning = oldOrds.maxOwningBucketOrd(); + for (long owningBucketOrd = 0; owningBucketOrd <= maxOwning; owningBucketOrd++) { + /* + * Check for cancellation during this tight loop: it can take a while, and the standard + * cancellation checks don't get a chance to run because the loop is so tight.
+ */ + if (context.isCancelled()) { + throw new TaskCancelledException("cancelled"); + } LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = oldOrds.ordsEnum(owningBucketOrd); Rounding.Prepared preparedRounding = preparedRoundings[roundingIndexFor(owningBucketOrd)]; while (ordsEnum.next()) { diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java index 74c1f3c16278f..2eb21cfc09650 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java @@ -36,7 +36,7 @@ public void testNoData() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Collections.singletonList("field") ); - InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft).noReductionCancellation()); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); } @@ -54,7 +54,7 @@ public void testUnmapped() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Collections.singletonList("bogus") ); - InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft).noReductionCancellation()); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); } @@ -88,7 +88,7 @@ public void testTwoFields() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Arrays.asList(fieldA, fieldB) ); - InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ftA, ftB)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ftA, ftB).noReductionCancellation()); multiPassStats.assertNearlyEqual(stats); assertTrue(MatrixAggregationInspectionHelper.hasValue(stats)); } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 173e1eeef60a2..0c8821f29dbf1 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -20,7 +20,7 @@ esplugin { restResources { restApi { - include '_common', 'indices', 'index', 'cluster', 'search', 'nodes', 'bulk', 'termvectors', 'explain', 'count' + include '_common', 'indices', 'index', 'cluster', 'search', 'nodes', 'bulk', 'termvectors', 'explain', 'count', 'capabilities' } } diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java index 40a7b64bc91e7..a385db95d882a 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java @@ -207,7 +207,7 @@ public void testSynonymsInMultiplexerUpdateable() throws FileNotFoundException, public void testUpdateableSynonymsRejectedAtIndexTime() throws FileNotFoundException, IOException { String synonymsFileName = "synonyms.txt"; setupResourceFile(synonymsFileName, 
"foo, baz"); - Path configDir = node().getEnvironment().configFile(); + Path configDir = node().getEnvironment().configDir(); if (Files.exists(configDir) == false) { Files.createDirectory(configDir); } @@ -319,7 +319,7 @@ public void testKeywordMarkerUpdateable() throws IOException { } private Path setupResourceFile(String fileName, String... content) throws IOException { - Path configDir = node().getEnvironment().configFile(); + Path configDir = node().getEnvironment().configDir(); if (Files.exists(configDir) == false) { Files.createDirectory(configDir); } diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java index 8209d9f543a31..06f19c0d60dba 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java @@ -57,7 +57,7 @@ public void testSynonymsWithPreview() throws FileNotFoundException, IOException, } private void testSynonymsUpdate(boolean preview) throws FileNotFoundException, IOException, InterruptedException { - Path config = internalCluster().getInstance(Environment.class).configFile(); + Path config = internalCluster().getInstance(Environment.class).configDir(); String synonymsFileName = "synonyms.txt"; Path synonymsFile = config.resolve(synonymsFileName); writeFile(synonymsFile, "foo, baz"); @@ -106,7 +106,7 @@ public void testSynonymsUpdateInvalid() throws IOException { final String synonymsFileName = "synonyms.txt"; final String fieldName = "field"; - Path config = internalCluster().getInstance(Environment.class).configFile(); + Path config = internalCluster().getInstance(Environment.class).configDir(); Path synonymsFile = config.resolve(synonymsFileName); writeFile(synonymsFile, "foo, baz"); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index e091f0175009e..92e2b3085cc29 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -40,7 +40,7 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW throw new IllegalArgumentException("hyphenation_patterns_path is a required setting."); } - Path hyphenationPatternsFile = env.configFile().resolve(hyphenationPatternsPath); + Path hyphenationPatternsFile = env.configDir().resolve(hyphenationPatternsPath); try { InputStream in = Files.newInputStream(hyphenationPatternsFile); diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml index 971f530cebeb5..24e04174cd1e4 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml @@ -59,3 +59,28 @@ - match: { detail.tokenizer.tokens.0.token: ABc } - match: { 
detail.tokenfilters.0.name: lowercase } - match: { detail.tokenfilters.0.tokens.0.token: abc } + +--- +"Custom analyzer is not buildable": + - requires: + test_runner_features: [ capabilities ] + reason: This capability is required to run the test + capabilities: + - method: GET + path: /_analyze + capabilities: [ wrong_custom_analyzer_returns_400 ] + + - do: + catch: bad_request + indices.analyze: + body: + text: the foxes jumping quickly + tokenizer: + standard + filter: + type: hunspell + locale: en_US + + - match: { status: 400 } + - match: { error.type: illegal_argument_exception } + - match: { error.reason: "Can not build a custom analyzer" } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index 8026ec641d040..506c107b382a1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -21,6 +21,10 @@ public class DataStreamFeatures implements FeatureSpecification { public static final NodeFeature DATA_STREAM_FAILURE_STORE_TSDB_FIX = new NodeFeature("data_stream.failure_store.tsdb_fix"); + public static final NodeFeature DOWNSAMPLE_AGGREGATE_DEFAULT_METRIC_FIX = new NodeFeature( + "data_stream.downsample.default_aggregate_metric_fix" + ); + @Override public Set<NodeFeature> getFeatures() { return Set.of(); @@ -28,6 +32,6 @@ public Set<NodeFeature> getFeatures() { @Override public Set<NodeFeature> getTestFeatures() { - return Set.of(DATA_STREAM_FAILURE_STORE_TSDB_FIX); + return Set.of(DATA_STREAM_FAILURE_STORE_TSDB_FIX, DOWNSAMPLE_AGGREGATE_DEFAULT_METRIC_FIX); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java index 3d08be1f24a42..c150f64e8cc4a 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java @@ -248,7 +248,7 @@ public void setup() throws Exception { MetadataCreateIndexService createIndexService; { Environment env = mock(Environment.class); - when(env.sharedDataFile()).thenReturn(null); + when(env.sharedDataDir()).thenReturn(null); AllocationService allocationService = mock(AllocationService.class); when(allocationService.reroute(any(ClusterState.class), any(String.class), any())).then(i -> i.getArguments()[0]); when(allocationService.getShardRoutingRoleStrategy()).thenReturn(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY); diff --git a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java index 22f2a9fa394fb..68c6a5c826b34 100644 --- a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java +++ b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java @@ -49,7 +49,8 @@ private static ElasticsearchCluster createCluster() { .feature(FAILURE_STORE_ENABLED) .setting("xpack.security.enabled", "true") .keystore("bootstrap.password", "x-pack-test-password") - .user("x_pack_rest_user",
"x-pack-test-password") + .systemProperty("es.queryable_built_in_roles_enabled", "false"); if (initTestSeed().nextBoolean()) { clusterBuilder.setting("xpack.license.self_generated.type", "trial"); } diff --git a/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index 02d85ef0ecfbf..1b8bf67ff6fec 100644 --- a/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ b/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -18,9 +18,11 @@ import org.apache.tika.parser.ParserDecorator; import org.elasticsearch.SpecialPermission; import org.elasticsearch.bootstrap.FilePermissionUtils; +import org.elasticsearch.core.Booleans; import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.jdk.RuntimeVersionFeature; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -122,15 +124,22 @@ static String parse(final byte content[], final Metadata metadata, final int lim // apply additional containment for parsers, this is intersected with the current permissions // its hairy, but worth it so we don't have some XML flaw reading random crap from the FS - private static final AccessControlContext RESTRICTED_CONTEXT = new AccessControlContext( - new ProtectionDomain[] { new ProtectionDomain(null, getRestrictedPermissions()) } - ); + private static final AccessControlContext RESTRICTED_CONTEXT = isUsingSecurityManager() + ? new AccessControlContext(new ProtectionDomain[] { new ProtectionDomain(null, getRestrictedPermissions()) }) + : null; + + private static boolean isUsingSecurityManager() { + boolean entitlementsEnabled = Booleans.parseBoolean(System.getProperty("es.entitlements.enabled"), false) + || RuntimeVersionFeature.isSecurityManagerAvailable() == false; + return entitlementsEnabled == false; + } // compute some minimal permissions for parsers. 
they only get r/w access to the java temp directory, // the ability to load some resources from JARs, and read sysprops @SuppressForbidden(reason = "adds access to tmp directory") static PermissionCollection getRestrictedPermissions() { Permissions perms = new Permissions(); + // property/env access needed for parsing perms.add(new PropertyPermission("*", "read")); perms.add(new RuntimePermission("getenv.TIKA_CONFIG")); diff --git a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java index 6a28a09dc7c6b..0ba3b4ebb69f5 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java +++ b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; @@ -119,10 +120,16 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { // before the upgrade, Kibana should work assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndex)); + + // as should a normal get * + assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndex)); } else { // after the upgrade, but before the migration, Kibana should work assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndex)); + // as should a normal get * + assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndex)); + // migrate the system features and give the cluster a moment to settle Request migrateSystemFeatures = new Request("POST", "/_migration/system_features"); assertOK(client().performRequest(migrateSystemFeatures)); @@ -132,9 +139,10 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { assertBusy(() -> testIndexGeoDoc()); // after the migration, Kibana should work - if (useSecurity == false) { // BUT IT DOESN'T if security is enabled - assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndexReindexed)); - } + assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndexReindexed)); + + // as should a normal get * + assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndexReindexed)); Request disableDownloader = new Request("PUT", "/_cluster/settings"); disableDownloader.setJsonEntity(""" @@ -163,7 +171,7 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { @SuppressWarnings("unchecked") private void testDatabasesLoaded() throws IOException { Request getTaskState = new Request("GET", "/_cluster/state"); - ObjectPath state = ObjectPath.createFromResponse(client().performRequest(getTaskState)); + ObjectPath state = ObjectPath.createFromResponse(assertOK(client().performRequest(getTaskState))); List tasks = state.evaluate("metadata.persistent_tasks.tasks"); // Short-circuit to avoid using steams if the list is empty @@ -189,7 +197,10 @@ private void testDatabasesLoaded() throws IOException { private void testCatIndices(List indexNames, @Nullable 
List additionalIndexNames) throws IOException { Request catIndices = new Request("GET", "_cat/indices/*?s=index&h=index&expand_wildcards=all"); - String response = EntityUtils.toString(client().performRequest(catIndices).getEntity()); + // the cat APIs can sometimes 404, erroneously + // see https://github.com/elastic/elasticsearch/issues/104371 + setIgnoredErrorResponseCodes(catIndices, RestStatus.NOT_FOUND); + String response = EntityUtils.toString(assertOK(client().performRequest(catIndices)).getEntity()); List indices = List.of(response.trim().split("\\s+")); if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { @@ -208,11 +219,27 @@ private void testIndexGeoDoc() throws IOException { assertOK(client().performRequest(putDoc)); Request getDoc = new Request("GET", "/my-index-00001/_doc/my_id"); - ObjectPath doc = ObjectPath.createFromResponse(client().performRequest(getDoc)); + ObjectPath doc = ObjectPath.createFromResponse(assertOK(client().performRequest(getDoc))); assertNull(doc.evaluate("_source.tags")); assertEquals("Sweden", doc.evaluate("_source.geo.country_name")); } + private void testGetStar(List indexNames, @Nullable List additionalIndexNames) throws IOException { + Request getStar = new Request("GET", "*?expand_wildcards=all"); + getStar.setOptions( + RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors + ); + Response response = assertOK(client().performRequest(getStar)); + + if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { + indexNames = new ArrayList<>(indexNames); // recopy into a mutable list + indexNames.addAll(additionalIndexNames); + } + + Map map = responseAsMap(response); + assertThat(map.keySet(), is(new HashSet<>(indexNames))); + } + private void testGetStarAsKibana(List indexNames, @Nullable List additionalIndexNames) throws IOException { Request getStar = new Request("GET", "*?expand_wildcards=all"); getStar.setOptions( @@ -220,8 +247,7 @@ private void testGetStarAsKibana(List indexNames, @Nullable List .addHeader("X-elastic-product-origin", "kibana") .setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors ); - Response response = client().performRequest(getStar); - assertOK(response); + Response response = assertOK(client().performRequest(getStar)); if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { indexNames = new ArrayList<>(indexNames); // recopy into a mutable list diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 4d70a83c6752a..3bcc9f4355349 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -664,7 +664,7 @@ private List getGeoIpTmpDirs() throws IOException { .map(DiscoveryNode::getId) .collect(Collectors.toSet()); // All nodes share the same geoip base dir in the shared tmp dir: - Path geoipBaseTmpDir = internalCluster().getDataNodeInstance(Environment.class).tmpFile().resolve("geoip-databases"); + Path geoipBaseTmpDir = internalCluster().getDataNodeInstance(Environment.class).tmpDir().resolve("geoip-databases"); assertThat(Files.exists(geoipBaseTmpDir), is(true)); final List geoipTmpDirs; try (Stream files = 
Files.list(geoipBaseTmpDir)) { @@ -676,7 +676,7 @@ private List getGeoIpTmpDirs() throws IOException { private void setupDatabasesInConfigDirectory() throws Exception { StreamSupport.stream(internalCluster().getInstances(Environment.class).spliterator(), false) - .map(Environment::configFile) + .map(Environment::configDir) .map(path -> path.resolve("ingest-geoip")) .distinct() .forEach(path -> { @@ -704,7 +704,7 @@ private void setupDatabasesInConfigDirectory() throws Exception { private void deleteDatabasesInConfigDirectory() throws Exception { StreamSupport.stream(internalCluster().getInstances(Environment.class).spliterator(), false) - .map(Environment::configFile) + .map(Environment::configDir) .map(path -> path.resolve("ingest-geoip")) .distinct() .forEach(path -> { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java index 3d2b54b04695f..289008236a852 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java @@ -42,7 +42,7 @@ final class ConfigDatabases implements Closeable { private final ConcurrentMap configDatabases; ConfigDatabases(Environment environment, GeoIpCache cache) { - this(environment.configFile().resolve("ingest-geoip"), cache); + this(environment.configDir().resolve("ingest-geoip"), cache); } ConfigDatabases(Path geoipConfigDir, GeoIpCache cache) { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java index 940231b12c894..13958254b9020 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java @@ -114,7 +114,7 @@ public final class DatabaseNodeService implements IpDatabaseProvider { ClusterService clusterService ) { this( - environment.tmpFile(), + environment.tmpDir(), new OriginSettingClient(client, IngestService.INGEST_ORIGIN), cache, new ConfigDatabases(environment, cache), diff --git a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java index 4d71417ec982c..53f8e5c4ea7d1 100644 --- a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java +++ b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java @@ -41,7 +41,7 @@ public class IngestUserAgentPlugin extends Plugin implements IngestPlugin { @Override public Map getProcessors(Processor.Parameters parameters) { - Path userAgentConfigDirectory = parameters.env.configFile().resolve("ingest-user-agent"); + Path userAgentConfigDirectory = parameters.env.configDir().resolve("ingest-user-agent"); if (Files.exists(userAgentConfigDirectory) == false && Files.isDirectory(userAgentConfigDirectory)) { throw new IllegalStateException( diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index bc1cd30ad45bf..c327ba49e6d1c 100644 --- 
a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -423,7 +423,7 @@ public void testResultSizeLimit() throws IOException { ex.getCause().getCause(), allOf( instanceOf(SizeLimitingStringWriter.SizeLimitExceededException.class), - transformedMatch(Throwable::getMessage, endsWith("has exceeded the size limit [1024]")) + transformedMatch(Throwable::getMessage, endsWith("has size [1030] which exceeds the size limit [1024]")) ) ); } diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 05cd906f61160..8ea52de5a23a8 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -167,3 +167,7 @@ if (OS.current() == OS.WINDOWS) { } } } + +tasks.named("yamlRestCompatTestTransform").configure { task -> + task.skipTest("reindex/20_validation/specifying size fails", "size is rejected in 9.0") +} diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java index 04d8bae9fda2f..fc60b6373d285 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java @@ -97,6 +97,7 @@ public void setUpCluster() { // Use a single thread pool for writes so we can enforce a consistent ordering internalCluster().startDataOnlyNode(Settings.builder().put("thread_pool.write.size", 1).build()); internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); + ensureStableCluster(3); } public void testUpdateByQuery() throws Exception { diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java index 860d63000f124..84e45024b69ff 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java @@ -9,14 +9,17 @@ package org.elasticsearch.migration; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -28,6 +31,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.AssociatedIndexDescriptor; import 
org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.SystemIndexPlugin; @@ -50,6 +54,10 @@ import java.util.function.BiConsumer; import java.util.function.Function; +import static java.util.Collections.emptySet; +import static java.util.Collections.singletonList; +import static java.util.Collections.unmodifiableSet; +import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -255,12 +263,18 @@ protected void assertIndexHasCorrectProperties( assertThat(thisIndexStats.getTotal().getDocs().getCount(), is((long) INDEX_DOC_COUNT)); } - public static class TestPlugin extends Plugin implements SystemIndexPlugin { + public static class TestPlugin extends Plugin implements SystemIndexPlugin, ActionPlugin { public final AtomicReference<Function<ClusterState, Map<String, Object>>> preMigrationHook = new AtomicReference<>(); public final AtomicReference<BiConsumer<ClusterState, Map<String, Object>>> postMigrationHook = new AtomicReference<>(); + private final BlockingActionFilter blockingActionFilter; public TestPlugin() { + blockingActionFilter = new BlockingActionFilter(); + } + @Override + public List<ActionFilter> getActionFilters() { + return singletonList(blockingActionFilter); } @Override @@ -299,5 +313,26 @@ public void indicesMigrationComplete( postMigrationHook.get().accept(clusterService.state(), preUpgradeMetadata); listener.onResponse(true); } + + public static class BlockingActionFilter extends org.elasticsearch.action.support.ActionFilter.Simple { + private Set<String> blockedActions = emptySet(); + + @Override + protected boolean apply(String action, ActionRequest request, ActionListener<?> listener) { + if (blockedActions.contains(action)) { + throw new ElasticsearchException("force exception on [" + action + "]"); + } + return true; + } + + @Override + public int order() { + return 0; + } + + public void blockActions(String...
actions) { + blockedActions = unmodifiableSet(newHashSet(actions)); + } + } } } diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index cdf817a6b17b8..ee95ce5513820 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -17,11 +17,14 @@ import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeRequest; import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeResponse; import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -36,10 +39,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.migration.AbstractFeatureMigrationIntegTest.TestPlugin.BlockingActionFilter; import org.elasticsearch.painless.PainlessPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.upgrades.FeatureMigrationResults; import org.elasticsearch.upgrades.SingleFeatureMigrationResult; @@ -272,6 +277,60 @@ public void testMigrateIndexWithWriteBlock() throws Exception { }); } + @AwaitsFix(bugUrl = "ES-10666") // This test uncovered an existing issue + public void testIndexBlockIsRemovedWhenAliasRequestFails() throws Exception { + createSystemIndexForDescriptor(INTERNAL_UNMANAGED); + ensureGreen(); + + // Block the alias request to simulate a failure + InternalTestCluster internalTestCluster = internalCluster(); + ActionFilters actionFilters = internalTestCluster.getInstance(ActionFilters.class, internalTestCluster.getMasterName()); + BlockingActionFilter blockingActionFilter = null; + for (ActionFilter filter : actionFilters.filters()) { + if (filter instanceof BlockingActionFilter) { + blockingActionFilter = (BlockingActionFilter) filter; + break; + } + } + assertNotNull("BlockingActionFilter should exist", blockingActionFilter); + blockingActionFilter.blockActions(TransportIndicesAliasesAction.NAME); + + // Start the migration + client().execute(PostFeatureUpgradeAction.INSTANCE, new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT)).get(); + + // Wait till the migration fails + assertBusy(() -> { + GetFeatureUpgradeStatusResponse statusResp = client().execute( + GetFeatureUpgradeStatusAction.INSTANCE, + new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT) + ).get(); + logger.info(Strings.toString(statusResp)); + 
assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.ERROR)); + }); + + // Get the settings to see if the write block was removed + var allsettings = client().admin().indices().prepareGetSettings(INTERNAL_UNMANAGED.getIndexPattern()).get().getIndexToSettings(); + var internalUnmanagedOldIndexSettings = allsettings.get(".int-unman-old"); + var writeBlock = internalUnmanagedOldIndexSettings.get(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey()); + assertThat("Write block on old index should be removed on migration ERROR status", writeBlock, equalTo("false")); + + // Unblock the alias request + blockingActionFilter.blockActions(); + + // Retry the migration + client().execute(PostFeatureUpgradeAction.INSTANCE, new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT)).get(); + + // Ensure that the migration is successful after the alias request is unblocked + assertBusy(() -> { + GetFeatureUpgradeStatusResponse statusResp = client().execute( + GetFeatureUpgradeStatusAction.INSTANCE, + new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT) + ).get(); + logger.info(Strings.toString(statusResp)); + assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); + }); + } + public void testMigrationWillRunAfterError() throws Exception { createSystemIndexForDescriptor(INTERNAL_MANAGED); diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java index 914311e1190c1..ceba20570e7e5 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java @@ -106,7 +106,7 @@ protected List getSettingAsList(String key) throws Exception { return settings.getAsList(key); } }; - configuration = loader.load(environment.configFile()); + configuration = loader.load(environment.configDir()); reload(); final FileChangesListener listener = new FileChangesListener() { diff --git a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml index df557f9944253..394e5e38d9f59 100644 --- a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,3 @@ ALL-UNNAMED: + - manage_threads - outbound_network diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml index c96414e46f7e7..cb155bc2219ab 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml @@ -107,7 +107,7 @@ body: { "text": "test" } - do: - catch: /invalid parameter \[size\], use \[max_docs\] instead/ + catch: /(invalid parameter \[size\], use \[max_docs\] instead|unknown field \[size\])/ reindex: body: source: diff --git a/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml index 74197fb3ed9ae..f22076b360b6a 100644 --- a/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,8 @@ io.netty.common: - outbound_network + - 
manage_threads + - files: + - path: "/etc/os-release" + mode: "read" + - path: "/usr/lib/os-release" + mode: "read" diff --git a/modules/repository-s3/build.gradle b/modules/repository-s3/build.gradle index 1db83b9e9bc42..db42e4c7ecaae 100644 --- a/modules/repository-s3/build.gradle +++ b/modules/repository-s3/build.gradle @@ -139,15 +139,6 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.log.Hierarchy', 'org.apache.log.Logger', 'javax.jms.Message', - 'software.amazon.ion.IonReader', - 'software.amazon.ion.IonSystem', - 'software.amazon.ion.IonType', - 'software.amazon.ion.IonWriter', - 'software.amazon.ion.Timestamp', - 'software.amazon.ion.system.IonBinaryWriterBuilder', - 'software.amazon.ion.system.IonSystemBuilder', - 'software.amazon.ion.system.IonTextWriterBuilder', - 'software.amazon.ion.system.IonWriterBuilder', // We don't use the kms dependency 'com.amazonaws.services.kms.AWSKMS', 'com.amazonaws.services.kms.AWSKMSClient', diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index a8a6986ccbb7a..f1369bae6e306 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -369,7 +369,7 @@ static class CustomWebIdentityTokenCredentialsProvider implements AWSCredentials } // Make sure that a readable symlink to the token file exists in the plugin config directory // AWS_WEB_IDENTITY_TOKEN_FILE exists but we only use Web Identity Tokens if a corresponding symlink exists and is readable - Path webIdentityTokenFileSymlink = environment.configFile().resolve(WEB_IDENTITY_TOKEN_FILE_LOCATION); + Path webIdentityTokenFileSymlink = environment.configDir().resolve(WEB_IDENTITY_TOKEN_FILE_LOCATION); if (Files.exists(webIdentityTokenFileSymlink) == false) { LOGGER.warn( "Cannot use AWS Web Identity Tokens: AWS_WEB_IDENTITY_TOKEN_FILE is defined but no corresponding symlink exists " diff --git a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml index df557f9944253..394e5e38d9f59 100644 --- a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,3 @@ ALL-UNNAMED: + - manage_threads - outbound_network diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java index 69fd0c0f5d6a7..2698eb718ded0 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java @@ -65,7 +65,7 @@ private static Environment getEnvironment() throws IOException { Files.createDirectory(configDirectory.resolve("repository-s3")); Files.writeString(configDirectory.resolve("repository-s3/aws-web-identity-token-file"), "YXdzLXdlYi1pZGVudGl0eS10b2tlbi1maWxl"); Environment environment = Mockito.mock(Environment.class); - Mockito.when(environment.configFile()).thenReturn(configDirectory); + Mockito.when(environment.configDir()).thenReturn(configDirectory); return environment; } 
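[Editorial note] The configFile() -> configDir() renames in this test and in S3Service above all touch the same web-identity-token convention: defining AWS_WEB_IDENTITY_TOKEN_FILE is not enough on its own, the operator must also link the token into the plugin's config directory. A condensed sketch of the guard, assuming WEB_IDENTITY_TOKEN_FILE_LOCATION resolves to the "repository-s3/aws-web-identity-token-file" path this test creates (only the existence check is quoted verbatim in the hunk above; the early return is paraphrased):

```java
// Paraphrased from S3Service.CustomWebIdentityTokenCredentialsProvider.
Path webIdentityTokenFileSymlink = environment.configDir().resolve(WEB_IDENTITY_TOKEN_FILE_LOCATION);
if (Files.exists(webIdentityTokenFileSymlink) == false) {
    LOGGER.warn(
        "Cannot use AWS Web Identity Tokens: AWS_WEB_IDENTITY_TOKEN_FILE is defined "
            + "but no corresponding symlink exists in the config directory"
    );
    return; // fall back to the regular credential chain
}
```

This is also why the mocked Environment in getEnvironment() now stubs configDir() rather than configFile(): the provider resolves the symlink against that directory.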
@@ -212,7 +212,7 @@ public void testPickUpNewWebIdentityTokenWhenItsChanged() throws Exception { latch.countDown(); } }); - Files.writeString(environment.configFile().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); + Files.writeString(environment.configDir().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); safeAwait(latch); assertCredentials(awsCredentialsProvider.getCredentials()); diff --git a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java index 51a223315644a..eca846f955bfd 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java @@ -158,7 +158,7 @@ private URL checkURL(URL urlToCheck) { if (normalizedUrl == null) { String logMessage = "The specified url [{}] doesn't start with any repository paths specified by the " + "path.repo setting or by {} setting: [{}] "; - logger.warn(logMessage, urlToCheck, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles()); + logger.warn(logMessage, urlToCheck, ALLOWED_URLS_SETTING.getKey(), environment.repoDirs()); String exceptionMessage = "file url [" + urlToCheck + "] doesn't match any of the locations specified by path.repo or " diff --git a/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml b/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml index eb772a06423a3..7a3f2c11d69ba 100644 --- a/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,6 +1,13 @@ io.netty.transport: - inbound_network - outbound_network + - manage_threads io.netty.common: - inbound_network - outbound_network + - manage_threads + - files: + - path: "/etc/os-release" + mode: "read" + - path: "/usr/lib/os-release" + mode: "read" diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java index d87889c6a2416..c59b1cc1d0a4a 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java @@ -109,6 +109,7 @@ public void testThrottleLargeCompositeMessage() { Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks + extraChunkSize ); int splitOffset = randomIntBetween(0, messageBytes.length); + int lastChunkSizeOfTheFirstSplit = splitOffset % Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE; final BytesReference message = CompositeBytesReference.of( new BytesArray(messageBytes, 0, splitOffset), new BytesArray(messageBytes, splitOffset, messageBytes.length - splitOffset) @@ -120,7 +121,9 @@ public void testThrottleLargeCompositeMessage() { assertFalse(promise.isDone()); embeddedChannel.flush(); assertTrue(promise.isDone()); - assertThat(seen, hasSize(oneOf(fullSizeChunks, fullSizeChunks + 1))); + // If the extra chunk size is greater than the last chunk size for the first half of the split, it means we will need to send + // (extraChunkSize - lastChunkSizeOfTheFirstSplit) bytes as the very last chunk of the entire message. 
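+ // [Editorial worked example for the bound asserted below, using illegal-to-quote
+ // illustrative numbers rather than the real Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE:
+ // let MAX = 100, fullSizeChunks = 3, extraChunkSize = 40, so messageBytes.length = 340.
+ //   splitOffset = 250: lastChunkSizeOfTheFirstSplit = 250 % 100 = 50; since
+ //     40 > 50 is false, the assertion admits oneOf(3, 4) chunks.
+ //   splitOffset = 320: lastChunkSizeOfTheFirstSplit = 320 % 100 = 20; since
+ //     40 > 20 is true, the extra tail write is admitted too: oneOf(3, 5).]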
+ assertThat(seen, hasSize(oneOf(fullSizeChunks, fullSizeChunks + 1 + (extraChunkSize > lastChunkSizeOfTheFirstSplit ? 1 : 0)))); assertTrue(capturingHandler.didWriteAfterThrottled); assertBufferEquals(Unpooled.compositeBuffer().addComponents(true, seen), message); } diff --git a/muted-tests.yml b/muted-tests.yml index 6093166a0d061..958ecba63bf6b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -46,9 +46,6 @@ tests: - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test81JavaOptsInJvmOptions issue: https://github.com/elastic/elasticsearch/issues/113313 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=mtermvectors/10_basic/Tests catching other exceptions per item} - issue: https://github.com/elastic/elasticsearch/issues/113325 - class: org.elasticsearch.xpack.transform.integration.TransformIT method: testStopWaitForCheckpoint issue: https://github.com/elastic/elasticsearch/issues/106113 @@ -125,15 +122,9 @@ tests: - class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT method: test {p0=search.highlight/50_synthetic_source/text multi unified from vectors} issue: https://github.com/elastic/elasticsearch/issues/117815 -- class: org.elasticsearch.xpack.esql.plugin.ClusterRequestTests - method: testFallbackIndicesOptions - issue: https://github.com/elastic/elasticsearch/issues/117937 - class: org.elasticsearch.xpack.ml.integration.RegressionIT method: testTwoJobsWithSameRandomizeSeedUseSameTrainingSet issue: https://github.com/elastic/elasticsearch/issues/117805 -- class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT - method: testCancelRequestWhenFailingFetchingPages - issue: https://github.com/elastic/elasticsearch/issues/118193 - class: org.elasticsearch.packaging.test.ArchiveTests method: test44AutoConfigurationNotTriggeredOnNotWriteableConfDir issue: https://github.com/elastic/elasticsearch/issues/118208 @@ -149,8 +140,6 @@ tests: - class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests method: testSearcherId issue: https://github.com/elastic/elasticsearch/issues/118374 -- class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT - issue: https://github.com/elastic/elasticsearch/issues/118238 - class: org.elasticsearch.xpack.ccr.rest.ShardChangesRestIT method: testShardChangesNoOperation issue: https://github.com/elastic/elasticsearch/issues/118800 @@ -159,9 +148,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/118914 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryRunAsIT issue: https://github.com/elastic/elasticsearch/issues/115727 -- class: org.elasticsearch.xpack.esql.action.EsqlNodeFailureIT - method: testFailureLoadingFields - issue: https://github.com/elastic/elasticsearch/issues/118000 - class: org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapperTests method: testCartesianBoundsBlockLoader issue: https://github.com/elastic/elasticsearch/issues/119201 @@ -176,9 +162,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/117740 - class: org.elasticsearch.xpack.security.authc.ldap.MultiGroupMappingIT issue: https://github.com/elastic/elasticsearch/issues/119599 -- class: org.elasticsearch.search.profile.dfs.DfsProfilerIT - method: testProfileDfs - issue: https://github.com/elastic/elasticsearch/issues/119711 - class: org.elasticsearch.multi_cluster.MultiClusterYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/119983 - class: org.elasticsearch.xpack.test.rest.XPackRestIT @@ -210,30 +193,16 @@ tests: 
issue: https://github.com/elastic/elasticsearch/issues/120482 - class: org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeToCharProcessorTests issue: https://github.com/elastic/elasticsearch/issues/120575 -- class: org.elasticsearch.index.reindex.BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests - method: testReindex - issue: https://github.com/elastic/elasticsearch/issues/120605 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testMultipleInferencesTriggeringDownloadAndDeploy issue: https://github.com/elastic/elasticsearch/issues/120668 - class: org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests issue: https://github.com/elastic/elasticsearch/issues/119882 -- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncEnrichStopIT - method: testEnrichAfterStop - issue: https://github.com/elastic/elasticsearch/issues/120757 -- class: org.elasticsearch.search.fieldcaps.FieldCapabilitiesIT - issue: https://github.com/elastic/elasticsearch/issues/120772 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120810 - class: org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT issue: https://github.com/elastic/elasticsearch/issues/116126 -- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT - method: test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression} - issue: https://github.com/elastic/elasticsearch/issues/120890 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=ml/inference_crud/*} - issue: https://github.com/elastic/elasticsearch/issues/120816 - class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT method: testAuthenticateShouldNotFallThroughInCaseOfFailure issue: https://github.com/elastic/elasticsearch/issues/120902 @@ -243,9 +212,6 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test140CgroupOsStatsAreAvailable issue: https://github.com/elastic/elasticsearch/issues/120914 -- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT - method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias} - issue: https://github.com/elastic/elasticsearch/issues/120920 - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT method: testReservedStatePersistsOnRestart issue: https://github.com/elastic/elasticsearch/issues/120923 @@ -261,86 +227,26 @@ tests: - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithQuery issue: https://github.com/elastic/elasticsearch/issues/120994 -- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT - method: test {p0=data_stream/80_resolve_index_data_streams/Resolve index with hidden and closed indices} - issue: https://github.com/elastic/elasticsearch/issues/120965 -- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT - method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias with filter} - issue: https://github.com/elastic/elasticsearch/issues/121014 -- class: org.elasticsearch.xpack.esql.parser.StatementParserTests - method: testNamedFunctionArgumentInMap - issue: https://github.com/elastic/elasticsearch/issues/121020 -- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - method: testSuggestProfilesWithName - issue: 
https://github.com/elastic/elasticsearch/issues/121022 -- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - method: testProfileAPIsWhenIndexNotCreated - issue: https://github.com/elastic/elasticsearch/issues/121096 -- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - method: testGetProfiles - issue: https://github.com/elastic/elasticsearch/issues/121101 -- class: org.elasticsearch.xpack.security.authc.service.ServiceAccountSingleNodeTests - method: testAuthenticateWithServiceFileToken - issue: https://github.com/elastic/elasticsearch/issues/120988 -- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - method: testUpdateProfileData - issue: https://github.com/elastic/elasticsearch/issues/121108 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} issue: https://github.com/elastic/elasticsearch/issues/120950 - class: org.elasticsearch.xpack.shutdown.AllocationFailuresResetOnShutdownIT method: testResetAllocationFailuresOnNodeShutdown issue: https://github.com/elastic/elasticsearch/issues/121129 -- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests - method: testActivateProfileForJWT - issue: https://github.com/elastic/elasticsearch/issues/120983 -- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT - method: test {yaml=cluster.health/20_request_timeout/cluster health request timeout waiting for active shards} - issue: https://github.com/elastic/elasticsearch/issues/121130 -- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - method: testProfileIndexAutoCreation - issue: https://github.com/elastic/elasticsearch/issues/120987 - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT method: testFileSettingsReprocessedOnRestartWithoutVersionChange issue: https://github.com/elastic/elasticsearch/issues/120964 -- class: org.elasticsearch.xpack.ml.integration.ClassificationIT - method: testWithOnlyTrainingRowsAndTrainingPercentIsFifty_DependentVariableIsKeyword - issue: https://github.com/elastic/elasticsearch/issues/120071 -- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - method: testGetUsersWithProfileUidWhenProfileIndexDoesNotExists - issue: https://github.com/elastic/elasticsearch/issues/121179 - class: org.elasticsearch.xpack.ml.integration.PyTorchModelIT issue: https://github.com/elastic/elasticsearch/issues/121165 -- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - method: testSetEnabled - issue: https://github.com/elastic/elasticsearch/issues/121183 -- class: org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests - issue: https://github.com/elastic/elasticsearch/issues/121185 -- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT - method: test {yaml=cat.aliases/10_basic/Simple alias} - issue: https://github.com/elastic/elasticsearch/issues/121186 -- class: org.elasticsearch.xpack.ml.integration.ClassificationIT - method: testWithDatastreams - issue: https://github.com/elastic/elasticsearch/issues/121236 -- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT - method: test {yaml=nodes.stats/11_indices_metrics/Metric - blank for indices mappings} - issue: https://github.com/elastic/elasticsearch/issues/121238 -- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT - method: test {yaml=indices.get_alias/10_basic/Get 
aliases via /_alias/_all} - issue: https://github.com/elastic/elasticsearch/issues/121242 -- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT - method: test {yaml=cluster.stats/10_basic/Sparse vector stats} - issue: https://github.com/elastic/elasticsearch/issues/121246 -- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT - method: testCrossClusterAsyncQueryStop - issue: https://github.com/elastic/elasticsearch/issues/121249 - class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=transform/*} + issue: https://github.com/elastic/elasticsearch/issues/120816 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/*} issue: https://github.com/elastic/elasticsearch/issues/120816 - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=0} issue: https://github.com/elastic/elasticsearch/issues/121253 -- class: org.elasticsearch.lucene.FullClusterRestartLuceneIndexCompatibilityIT - issue: https://github.com/elastic/elasticsearch/issues/121257 - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/121269 - class: org.elasticsearch.upgrades.VectorSearchIT @@ -357,23 +263,26 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_357} issue: https://github.com/elastic/elasticsearch/issues/121287 -- class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT - method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} - issue: https://github.com/elastic/elasticsearch/issues/115475 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/index-modules/slowlog/line_102} issue: https://github.com/elastic/elasticsearch/issues/121288 -- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT - method: test {yaml=indices.get_alias/10_basic/Get aliases via /*/_alias/} - issue: https://github.com/elastic/elasticsearch/issues/121290 -- class: org.elasticsearch.xpack.inference.action.TransportInferenceActionTests - method: testRerouting_HandlesTransportException_FromOtherNode - issue: https://github.com/elastic/elasticsearch/issues/121292 -- class: org.elasticsearch.xpack.inference.action.TransportInferenceActionTests - method: testRerouting_ToOtherNode - issue: https://github.com/elastic/elasticsearch/issues/121293 -- class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests - issue: https://github.com/elastic/elasticsearch/issues/121294 +- class: org.elasticsearch.env.NodeEnvironmentTests + method: testGetBestDowngradeVersion + issue: https://github.com/elastic/elasticsearch/issues/121316 +- class: org.elasticsearch.index.engine.ShuffleForcedMergePolicyTests + method: testDiagnostics + issue: https://github.com/elastic/elasticsearch/issues/121336 +- class: org.elasticsearch.search.CrossClusterSearchUnavailableClusterIT + method: testSearchSkipUnavailable + issue: https://github.com/elastic/elasticsearch/issues/121497 +- class: org.elasticsearch.compute.operator.exchange.ExchangeServiceTests + method: testExchangeSourceContinueOnFailure + issue: https://github.com/elastic/elasticsearch/issues/122408 +- class: org.elasticsearch.repositories.blobstore.testkit.analyze.MinioRepositoryAnalysisRestIT + method: testRepositoryAnalysis + issue: https://github.com/elastic/elasticsearch/issues/122670 +- class: 
org.elasticsearch.telemetry.apm.ApmAgentSettingsIT + issue: https://github.com/elastic/elasticsearch/issues/122546 # Examples: # diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java index fe0b3a00b2bbb..6854984c49c26 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java @@ -51,7 +51,7 @@ public IcuCollationTokenFilterFactory(IndexSettings indexSettings, Environment e if (rules != null) { Exception failureToResolve = null; try { - rules = Streams.copyToString(Files.newBufferedReader(environment.configFile().resolve(rules), Charset.forName("UTF-8"))); + rules = Streams.copyToString(Files.newBufferedReader(environment.configDir().resolve(rules), Charset.forName("UTF-8"))); } catch (IOException | SecurityException | InvalidPathException e) { failureToResolve = e; } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java index c66d25ffa2f3b..4a0ead6a893e8 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java @@ -99,7 +99,7 @@ public RuleBasedBreakIterator getBreakIterator(int script) { // parse a single RBBi rule file private static BreakIterator parseRules(String filename, Environment env) throws IOException { - final Path path = env.configFile().resolve(filename); + final Path path = env.configDir().resolve(filename); String rules = Files.readAllLines(path).stream().filter((v) -> v.startsWith("#") == false).collect(Collectors.joining("\n")); return new RuleBasedBreakIterator(rules.toString()); diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index a824eda6a4620..6cf5997c24a8a 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -106,15 +106,6 @@ tasks.named("thirdPartyAudit").configure { // classes are missing 'com.amazonaws.jmespath.JmesPathExpression', 'com.amazonaws.jmespath.ObjectMapperSingleton', - 'software.amazon.ion.IonReader', - 'software.amazon.ion.IonSystem', - 'software.amazon.ion.IonType', - 'software.amazon.ion.IonWriter', - 'software.amazon.ion.Timestamp', - 'software.amazon.ion.system.IonBinaryWriterBuilder', - 'software.amazon.ion.system.IonSystemBuilder', - 'software.amazon.ion.system.IonTextWriterBuilder', - 'software.amazon.ion.system.IonWriterBuilder', 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', 'org.apache.avalon.framework.logger.Logger', diff --git a/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml index df557f9944253..394e5e38d9f59 100644 --- a/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml +++ b/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,3 @@ ALL-UNNAMED: + - manage_threads - outbound_network diff --git a/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java 
b/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java index 243201f632c8f..a085973e82b0a 100644 --- a/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java +++ b/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java @@ -70,7 +70,7 @@ public class ExampleCustomSettingsConfig { public ExampleCustomSettingsConfig(final Environment environment) { // Elasticsearch config directory - final Path configDir = environment.configFile(); + final Path configDir = environment.configDir(); // Resolve the plugin's custom settings file final Path customSettingsYamlFile = configDir.resolve("custom-settings/custom.yml"); diff --git a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java index 523fdc51f6423..5c4580cac4f27 100644 --- a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java +++ b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse.Indices; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -85,10 +86,13 @@ public void authorizeClusterAction(RequestInfo requestInfo, AuthorizationInfo au } @Override - public void authorizeIndexAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, - AsyncSupplier indicesAsyncSupplier, - Map aliasOrIndexLookup, - ActionListener listener) { + public void authorizeIndexAction( + RequestInfo requestInfo, + AuthorizationInfo authorizationInfo, + AsyncSupplier indicesAsyncSupplier, + Metadata metadata, + ActionListener listener + ) { if (isSuperuser(requestInfo.getAuthentication().getEffectiveSubject().getUser())) { indicesAsyncSupplier.getAsync(ActionListener.wrap(resolvedIndices -> { Map indexAccessControlMap = new HashMap<>(); diff --git a/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java b/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java index dbf797e3d0899..d57af86531865 100644 --- a/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java +++ b/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexAbstraction.ConcreteIndex; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.ESTestCase; @@ -31,6 +32,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import 
java.util.stream.Stream; import static org.hamcrest.Matchers.is; @@ -117,12 +119,13 @@ public void testAuthorizeClusterAction() { public void testAuthorizeIndexAction() { CustomAuthorizationEngine engine = new CustomAuthorizationEngine(); - Map indicesMap = new HashMap<>(); - indicesMap.put("index", new ConcreteIndex(IndexMetadata.builder("index") - .settings(Settings.builder().put("index.version.created", IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .build(), null)); + Metadata metadata = Metadata.builder().put(IndexMetadata.builder("index") + .settings(Settings.builder().put("index.version.created", IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .build(), + false + ).build(); // authorized { RequestInfo requestInfo = @@ -136,7 +139,7 @@ public void testAuthorizeIndexAction() { PlainActionFuture resultFuture = new PlainActionFuture<>(); engine.authorizeIndexAction(requestInfo, authzInfo, listener -> listener.onResponse(new ResolvedIndices(Collections.singletonList("index"), Collections.emptyList())), - indicesMap, resultFuture); + metadata, resultFuture); IndexAuthorizationResult result = resultFuture.actionGet(); assertThat(result.isGranted(), is(true)); IndicesAccessControl indicesAccessControl = result.getIndicesAccessControl(); @@ -156,7 +159,7 @@ public void testAuthorizeIndexAction() { PlainActionFuture resultFuture = new PlainActionFuture<>(); engine.authorizeIndexAction(requestInfo, authzInfo, listener -> listener.onResponse(new ResolvedIndices(Collections.singletonList("index"), Collections.emptyList())), - indicesMap, resultFuture); + metadata, resultFuture); IndexAuthorizationResult result = resultFuture.actionGet(); assertThat(result.isGranted(), is(false)); IndicesAccessControl indicesAccessControl = result.getIndicesAccessControl(); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java index ce6acd79a0bb9..e74d1a87959f2 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java @@ -81,7 +81,7 @@ class HdfsSecurityContext { * Expects keytab file to exist at {@code $CONFIG_DIR$/repository-hdfs/krb5.keytab} */ static Path locateKeytabFile(Environment environment) { - Path keytabPath = environment.configFile().resolve("repository-hdfs").resolve("krb5.keytab"); + Path keytabPath = environment.configDir().resolve("repository-hdfs").resolve("krb5.keytab"); try { if (Files.exists(keytabPath) == false) { throw new RuntimeException("Could not locate keytab at [" + keytabPath + "]."); diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml index b5020dc1b7468..30e61739a0633 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml +++ b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,5 +1,7 @@ ALL-UNNAMED: + - manage_threads - outbound_network + - load_native_libraries - write_system_properties: properties: - hadoop.home.dir diff --git a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java 
b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 780f3994ce627..d912ccbe07454 100644 --- a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -101,8 +101,8 @@ private static MockTransportService startTransport( TransportSearchAction.TYPE.name(), EsExecutors.DIRECT_EXECUTOR_SERVICE, SearchRequest::new, - (request, channel, task) -> channel.sendResponse( - new SearchResponse( + (request, channel, task) -> { + var searchResponse = new SearchResponse( SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), InternalAggregations.EMPTY, null, @@ -117,8 +117,13 @@ private static MockTransportService startTransport( 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY - ) - ) + ); + try { + channel.sendResponse(searchResponse); + } finally { + searchResponse.decRef(); + } + } ); newService.registerRequestHandler( ClusterStateAction.NAME, diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java index 08d7e3b45702b..bc8308f48e52d 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java @@ -103,23 +103,23 @@ public void testEnvironmentPaths() throws Exception { // check that all directories got permissions: // bin file: ro - assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.binDir().toString(), "read,readlink"), permissions); // lib file: ro - assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.libDir().toString(), "read,readlink"), permissions); // modules file: ro - assertExactPermissions(new FilePermission(environment.modulesFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.modulesDir().toString(), "read,readlink"), permissions); // config file: ro - assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.configDir().toString(), "read,readlink"), permissions); // plugins: ro - assertExactPermissions(new FilePermission(environment.pluginsFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.pluginsDir().toString(), "read,readlink"), permissions); // data paths: r/w - for (Path dataPath : environment.dataFiles()) { + for (Path dataPath : environment.dataDirs()) { assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions); } - assertExactPermissions(new FilePermission(environment.sharedDataFile().toString(), "read,readlink,write,delete"), permissions); + assertExactPermissions(new FilePermission(environment.sharedDataDir().toString(), "read,readlink,write,delete"), permissions); // logs: r/w - assertExactPermissions(new FilePermission(environment.logsFile().toString(), "read,readlink,write,delete"), permissions); + assertExactPermissions(new 
FilePermission(environment.logsDir().toString(), "read,readlink,write,delete"), permissions); // temp dir: r/w assertExactPermissions(new FilePermission(fakeTmpDir.toString(), "read,readlink,write,delete"), permissions); } diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index a5e1041dab279..8c3acfb12d201 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.search.SearchFeatures; import org.elasticsearch.test.NotEqualMessageBuilder; @@ -628,13 +629,14 @@ public void testRollover() throws Exception { ) ); - // assertBusy to work around https://github.com/elastic/elasticsearch/issues/104371 - assertBusy( - () -> assertThat( - EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices?v&error_trace")).getEntity()), - containsString("testrollover-000002") - ) - ); + assertBusy(() -> { + Request catIndices = new Request("GET", "/_cat/indices?v&error_trace"); + // the cat APIs can sometimes 404, erroneously + // see https://github.com/elastic/elasticsearch/issues/104371 + setIgnoredErrorResponseCodes(catIndices, RestStatus.NOT_FOUND); + Response response = assertOK(client().performRequest(catIndices)); + assertThat(EntityUtils.toString(response.getEntity()), containsString("testrollover-000002")); + }); } Request countRequest = new Request("POST", "/" + index + "-*/_search"); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java index 9bb5b7e944389..1f30505e00104 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java @@ -12,7 +12,9 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.InputStreamEntity; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MetadataIndexStateService; @@ -27,6 +29,7 @@ import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.xcontent.XContentType; import org.hamcrest.Matcher; import org.junit.After; @@ -161,8 +164,21 @@ protected static boolean isFullyUpgradedTo(Version version) throws Exception { } protected static Version indexVersion(String indexName) throws Exception { - var response = assertOK(client().performRequest(new 
Request("GET", "/" + indexName + "/_settings"))); - int id = Integer.parseInt(createFromResponse(response).evaluate(indexName + ".settings.index.version.created")); + return indexVersion(indexName, false); + } + + protected static Version indexVersion(String indexName, boolean ignoreWarnings) throws Exception { + Request request = new Request("GET", "/" + indexName + "/_settings"); + request.addParameter("flat_settings", "true"); + if (ignoreWarnings) { + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.setWarningsHandler(WarningsHandler.PERMISSIVE); + request.setOptions(options); + } + var response = assertOK(client().performRequest(request)); + ObjectPath fromResponse = createFromResponse(response); + Map settings = fromResponse.evaluateExact(indexName, "settings"); + int id = Integer.parseInt((String) settings.get("index.version.created")); return new Version((byte) ((id / 1000000) % 100), (byte) ((id / 10000) % 100), (byte) ((id / 100) % 100)); } diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java index f37fca16a4b78..501a46deca9d1 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java @@ -11,8 +11,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.translog.Translog; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -184,7 +182,6 @@ public void testClosedIndexUpgrade() throws Exception { Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), randomFrom(Translog.Durability.values())) .build() ); indexDocs(index, numDocs); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSystemIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSystemIndexCompatibilityIT.java new file mode 100644 index 0000000000000..985a073bd6034 --- /dev/null +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSystemIndexCompatibilityIT.java @@ -0,0 +1,154 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.lucene; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.util.Version; +import org.elasticsearch.test.rest.ObjectPath; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class FullClusterRestartSystemIndexCompatibilityIT extends FullClusterRestartIndexCompatibilityTestCase { + + static { + clusterConfig = config -> config.setting("xpack.license.self_generated.type", "trial"); + } + + public FullClusterRestartSystemIndexCompatibilityIT(Version version) { + super(version); + } + + // we need a place to store async_search ids across cluster restarts + private static Map<String, String> async_search_ids = new HashMap<>(3); + + /** + * 1. Creates an index on N-2 and performs an async_search on it whose results are kept in the system index + * 2. After the upgrade to N-1 (latest), performs a system index migration step and write-blocks the index + * 3. On N, checks that async search results are still retrievable and that we can write to the system index + */ + public void testAsyncSearchIndexMigration() throws Exception { + final String index = suffix("index"); + final String asyncSearchIndex = ".async-search"; + final int numDocs = 2431; + + final Request asyncSearchRequest = new Request("POST", "/" + index + "/_async_search?size=100&keep_on_completion=true"); + + if (isFullyUpgradedTo(VERSION_MINUS_2)) { + createIndex( + client(), + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) + .build() + ); + indexDocs(index, numDocs); + ensureGreen(index); + + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n-2_id"); + ensureGreen(asyncSearchIndex); + + assertAsyncSearchHitCount(asyncId, numDocs); + assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 1)); + assertThat(indexVersion(asyncSearchIndex, true), equalTo(VERSION_MINUS_2)); + return; + } + + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + // check the .async-search index is readable + assertThat(indexVersion(asyncSearchIndex, true), equalTo(VERSION_MINUS_2)); + assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs); + + // migrate system indices + Request migrateRequest = new Request("POST", "/_migration/system_features"); + assertThat( + ObjectPath.createFromResponse(client().performRequest(migrateRequest)).evaluate("features.0.feature_name"), + equalTo("async_search") + ); + assertBusy(() -> { + Request checkMigrateProgress = new Request("GET", "/_migration/system_features"); + try { + assertFalse( + ObjectPath.createFromResponse(client().performRequest(checkMigrateProgress)) + .evaluate("migration_status") + .equals("IN_PROGRESS") + ); + } catch (IOException e) { + throw new AssertionError("System feature migration failed", e); + } + }); + + // check search results from the n-2 search are still readable + assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs); + + // perform a new async search and check it's readable + String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n-1_id"); + assertAsyncSearchHitCount(asyncId, 
numDocs); + assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 2)); + + // in order to move to the current version we need a write block on the n-2 index + addIndexBlock(index, IndexMetadata.APIBlock.WRITE); + } + + if (isFullyUpgradedTo(VERSION_CURRENT)) { + assertThat(indexVersion(index, true), equalTo(VERSION_MINUS_2)); + assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs); + assertAsyncSearchHitCount(async_search_ids.get("n-1_id"), numDocs); + + // check the system index is still writeable + String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n_id"); + assertAsyncSearchHitCount(asyncId, numDocs); + assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 3)); + } + + } + + private static String searchAsyncAndStoreId(Request asyncSearchRequest, String asyncIdName) throws IOException { + ObjectPath resp = ObjectPath.createFromResponse(client().performRequest(asyncSearchRequest)); + String asyncId = resp.evaluate("id"); + assertNotNull(asyncId); + async_search_ids.put(asyncIdName, asyncId); + return asyncId; + } + + private static void assertAsyncSearchHitCount(String asyncId, int numDocs) throws IOException { + var asyncGet = new Request("GET", "/_async_search/" + asyncId); + ObjectPath resp = ObjectPath.createFromResponse(client().performRequest(asyncGet)); + assertEquals(Integer.valueOf(numDocs), resp.evaluate("response.hits.total.value")); + } + + /** + * Assert that the index in question contains the given number of documents + */ + private static void assertDocCountNoWarnings(RestClient client, String indexName, long docCount) throws IOException { + Request countReq = new Request("GET", "/" + indexName + "/_count"); + RequestOptions.Builder options = countReq.getOptions().toBuilder(); + options.setWarningsHandler(WarningsHandler.PERMISSIVE); + countReq.setOptions(options); + ObjectPath resp = ObjectPath.createFromResponse(client.performRequest(countReq)); + assertEquals( + "expected " + docCount + " documents but got a different count", + docCount, + Long.parseLong(resp.evaluate("count").toString()) + ); + } +} diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java index 12374cf623a8c..7b9e2d64bbae4 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java @@ -13,8 +13,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.translog.Translog; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -189,11 +187,7 @@ public void testClosedIndexUpgrade() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), randomFrom(Translog.Durability.values())) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); 
indexDocs(index, numDocs); return; diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 168493eb52f60..5c26a744b2fbf 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -80,8 +80,8 @@ public void testNoControllerSpawn() throws IOException { Environment environment = TestEnvironment.newEnvironment(settings); // This plugin will NOT have a controller daemon - Path plugin = environment.modulesFile().resolve("a_plugin"); - Files.createDirectories(environment.modulesFile()); + Path plugin = environment.modulesDir().resolve("a_plugin"); + Files.createDirectories(environment.modulesDir()); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -111,8 +111,8 @@ public void testNoControllerSpawn() throws IOException { * Two plugins - one with a controller daemon and one without. */ public void testControllerSpawn() throws Exception { - assertControllerSpawns(Environment::pluginsFile, false); - assertControllerSpawns(Environment::modulesFile, true); + assertControllerSpawns(Environment::pluginsDir, false); + assertControllerSpawns(Environment::modulesDir, true); } private void assertControllerSpawns(final Function pluginsDirFinder, boolean expectSpawn) throws Exception { @@ -131,8 +131,8 @@ private void assertControllerSpawns(final Function pluginsDir // this plugin will have a controller daemon Path plugin = pluginsDirFinder.apply(environment).resolve("test_plugin"); - Files.createDirectories(environment.modulesFile()); - Files.createDirectories(environment.pluginsFile()); + Files.createDirectories(environment.modulesDir()); + Files.createDirectories(environment.pluginsDir()); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -217,7 +217,7 @@ public void testControllerSpawnWithIncorrectDescriptor() throws IOException { Environment environment = TestEnvironment.newEnvironment(settings); - Path plugin = environment.modulesFile().resolve("test_plugin"); + Path plugin = environment.modulesDir().resolve("test_plugin"); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -250,10 +250,10 @@ public void testSpawnerHandlingOfDesktopServicesStoreFiles() throws IOException final Environment environment = TestEnvironment.newEnvironment(settings); - Files.createDirectories(environment.modulesFile()); - Files.createDirectories(environment.pluginsFile()); + Files.createDirectories(environment.modulesDir()); + Files.createDirectories(environment.pluginsDir()); - final Path desktopServicesStore = environment.modulesFile().resolve(".DS_Store"); + final Path desktopServicesStore = environment.modulesDir().resolve(".DS_Store"); Files.createFile(desktopServicesStore); final Spawner spawner = new Spawner(); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 30367bf55d8cc..c0f12f95269ef 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -82,8 +82,7 @@ private void 
addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), randomDoubleProcessorCount(), ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - null + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ) ) .toList(); @@ -94,8 +93,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)), ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - null + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ); }).toList(); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java index 98572de6618ee..d0ec946cc1895 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java @@ -18,7 +18,6 @@ import java.util.List; import java.util.Map; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class SourceModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { @@ -83,20 +82,11 @@ public void testConfigureStoredSourceWhenIndexIsCreatedLegacy() throws IOExcepti private void assertDeprecationWarningForTemplate(String templateName) throws IOException { var request = new Request("GET", "/_migration/deprecations"); var response = entityAsMap(client().performRequest(request)); - if (response.containsKey("templates")) { - // Check the newer version of the deprecation API that contains the templates section - Map issuesByTemplate = (Map) response.get("templates"); - assertThat(issuesByTemplate.containsKey(templateName), equalTo(true)); - var templateIssues = (List) issuesByTemplate.get(templateName); - assertThat(((Map) templateIssues.getFirst()).get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - } else { - // Bwc version with 8.18 until https://github.com/elastic/elasticsearch/pull/120505/ gets backported, clean up after backport - var nodeSettings = (Map) ((List) response.get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") - ); - } + assertThat(response.containsKey("templates"), equalTo(true)); + Map issuesByTemplate = (Map) response.get("templates"); + assertThat(issuesByTemplate.containsKey(templateName), equalTo(true)); + var templateIssue = (Map) ((List) issuesByTemplate.get(templateName)).getFirst(); + assertThat(templateIssue.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING_TITLE)); + assertThat(templateIssue.get("details"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); } } diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java index b1e28de1a5264..683990d51d4a8 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java +++ 
b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java @@ -10,7 +10,6 @@ package org.elasticsearch.http.snapshots; import org.apache.http.client.methods.HttpGet; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; @@ -37,7 +36,6 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -516,10 +514,9 @@ private static GetSnapshotsResponse sortedWithLimit( true, (args) -> new GetSnapshotsResponse( (List) args[0], - (Map) args[1], - (String) args[2], - args[3] == null ? UNKNOWN_COUNT : (int) args[3], - args[4] == null ? UNKNOWN_COUNT : (int) args[4] + (String) args[1], + args[2] == null ? UNKNOWN_COUNT : (int) args[2], + args[3] == null ? UNKNOWN_COUNT : (int) args[3] ) ); @@ -529,11 +526,6 @@ private static GetSnapshotsResponse sortedWithLimit( (p, c) -> SnapshotInfoUtils.snapshotInfoFromXContent(p), new ParseField("snapshots") ); - GET_SNAPSHOT_PARSER.declareObject( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> p.map(HashMap::new, ElasticsearchException::fromXContent), - new ParseField("failures") - ); GET_SNAPSHOT_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("next")); GET_SNAPSHOT_PARSER.declareIntOrNull(ConstructingObjectParser.optionalConstructorArg(), UNKNOWN_COUNT, new ParseField("total")); GET_SNAPSHOT_PARSER.declareIntOrNull(ConstructingObjectParser.optionalConstructorArg(), UNKNOWN_COUNT, new ParseField("remaining")); diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 2452265e336a9..8012379f9732d 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -100,4 +100,12 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("logsdb/10_settings/routing path allowed in logs mode with routing on sort fields", "Unknown feature routing.logsb_route_on_sort_fields") + task.skipTest( + "cluster.desired_nodes/10_basic/Test delete desired nodes with node_version generates a warning", + "node_version warning is removed in 9.0" + ) + task.skipTest( + "cluster.desired_nodes/10_basic/Test update desired nodes with node_version generates a warning", + "node_version warning is removed in 9.0" + ) }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml index 1d1aa524ffb21..a45146a4e147a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml @@ -59,61 +59,6 @@ teardown: - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb" } } - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: 
"128gb", storage: "1tb" } } --- -"Test update desired nodes with node_version generates a warning": - - skip: - reason: "contains is a newly added assertion" - features: ["contains", "allowed_warnings"] - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." - - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - match: - $body: - history_id: "test" - version: 1 - nodes: - - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 2 - body: - nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.name": "instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." - - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - - match: { history_id: "test" } - - match: { version: 2 } - - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } } ---- "Test update move to a new history id": - skip: reason: "contains is a newly added assertion" @@ -199,46 +144,6 @@ teardown: _internal.get_desired_nodes: {} - match: { status: 404 } --- -"Test delete desired nodes with node_version generates a warning": - - skip: - features: allowed_warnings - - do: - cluster.state: {} - - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." 
- - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - match: - $body: - history_id: "test" - version: 1 - nodes: - - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - - do: - _internal.delete_desired_nodes: {} - - - do: - catch: missing - _internal.get_desired_nodes: {} - - match: { status: 404 } ---- "Test update desired nodes is idempotent": - skip: reason: "contains is a newly added assertion" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml index d1c492caf9b48..dc476147c9601 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml @@ -2008,3 +2008,143 @@ create index with use_synthetic_source: flush: false - gt: { test.store_size_in_bytes: 0 } - is_false: test.fields._recovery_source +--- +"Nested synthetic source with indexed dense vectors": + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ synthetic_nested_dense_vector_bug_fix ] + reason: "Requires synthetic source bugfix for dense vectors in nested objects" + - do: + indices.create: + index: nested_dense_vector_synthetic_test + body: + mappings: + properties: + parent: + type: nested + properties: + vector: + type: dense_vector + index: true + similarity: l2_norm + text: + type: text + settings: + index: + mapping: + source: + mode: synthetic + - do: + index: + index: nested_dense_vector_synthetic_test + id: 0 + refresh: true + body: { "parent": [ { "vector": [ 1, 2 ],"text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + - do: + index: + index: nested_dense_vector_synthetic_test + id: 1 + refresh: true + body: { "parent": [ { "text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + - do: + index: + index: nested_dense_vector_synthetic_test + id: 2 + refresh: true + body: { "parent": [ { "vector": [ 1, 2 ] }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + + - do: + search: + index: nested_dense_vector_synthetic_test + body: + query: + match_all: {} + + - match: { hits.hits.0._source.parent.0.vector: [ 1.0, 2.0 ] } + - match: { hits.hits.0._source.parent.0.text: "foo" } + - match: { hits.hits.0._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.0._source.parent.1.text: "bar" } + - is_false: hits.hits.1._source.parent.0.vector + - match: { hits.hits.1._source.parent.0.text: "foo" } + - match: { hits.hits.1._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.1._source.parent.1.text: "bar" } + - match: {hits.hits.2._source.parent.0.vector: [ 1.0, 2.0 ] } + - is_false: hits.hits.2._source.parent.0.text + - match: { hits.hits.2._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.2._source.parent.1.text: "bar" } +--- +"Nested synthetic source with un-indexed dense vectors": + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ synthetic_nested_dense_vector_bug_fix ] + reason: "Requires synthetic source bugfix for dense vectors in nested objects" + - do: + indices.create: + index: nested_dense_vector_synthetic_test + body: + mappings: + properties: + parent: + type: nested + properties: + vector: + type: 
dense_vector + index: false + text: + type: text + settings: + index: + mapping: + source: + mode: synthetic + - do: + index: + index: nested_dense_vector_synthetic_test + id: 0 + refresh: true + body: { "parent": [ { "vector": [ 1, 2 ],"text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + - do: + index: + index: nested_dense_vector_synthetic_test + id: 1 + refresh: true + body: { "parent": [ { "text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + - do: + index: + index: nested_dense_vector_synthetic_test + id: 2 + refresh: true + body: { "parent": [ { "vector": [ 1, 2 ] }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + + - do: + search: + index: nested_dense_vector_synthetic_test + body: + query: + match_all: {} + + - match: { hits.hits.0._source.parent.0.vector: [ 1.0, 2.0 ] } + - match: { hits.hits.0._source.parent.0.text: "foo" } + - match: { hits.hits.0._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.0._source.parent.1.text: "bar" } + - is_false: hits.hits.1._source.parent.0.vector + - match: { hits.hits.1._source.parent.0.text: "foo" } + - match: { hits.hits.1._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.1._source.parent.1.text: "bar" } + - match: {hits.hits.2._source.parent.0.vector: [ 1.0, 2.0 ] } + - is_false: hits.hits.2._source.parent.0.text + - match: { hits.hits.2._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.2._source.parent.1.text: "bar" } + diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml new file mode 100644 index 0000000000000..58f09ec71ad61 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml @@ -0,0 +1,41 @@ +--- +copy_to from object with dynamic strict to dynamic field: + - requires: + cluster_features: ["mapper.copy_to.dynamic_handling"] + reason: requires a fix + + - do: + indices.create: + index: test + body: + mappings: + properties: + one: + dynamic: strict + properties: + k: + type: keyword + copy_to: two.k + + - do: + index: + index: test + id: 1 + refresh: true + body: + one: + k: "hey" + + - do: + search: + index: test + body: + docvalue_fields: [ "two.k.keyword" ] + + - match: + hits.hits.0._source: + one: + k: "hey" + - match: + hits.hits.0.fields: + two.k.keyword: [ "hey" ] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/10_basic.yml index fbfb06dcda9a1..c28b947b112a8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/10_basic.yml @@ -3,6 +3,7 @@ setup: - do: indices.create: index: testidx + wait_for_active_shards: all body: mappings: properties: @@ -80,6 +81,7 @@ setup: - do: indices.create: index: testidx2 + wait_for_active_shards: all - do: indices.put_alias: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml index 73e54532ca152..0a2da6e14a6ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml @@ -57,7 +57,7 @@ setup: 
another_vector: [-0.5, 11.0, 0, 12] - do: - indices.refresh: {} + indices.flush: { } # For added test reliability, pending the resolution of https://github.com/elastic/elasticsearch/issues/109416. - do: @@ -66,10 +66,6 @@ setup: index: int4_flat - do: indices.refresh: {} - - do: - indices.forcemerge: - max_num_segments: 1 - index: int4_flat --- "kNN search only": - do: @@ -203,13 +199,14 @@ setup: num_candidates: 3 k: 3 field: vector - similarity: 10.3 + # Set high allowed similarity, reduce once we can update underlying quantization algo + similarity: 110 query_vector: [-0.5, 90.0, -10, 14.8] - - length: {hits.hits: 1} + - is_true: hits.hits.0 - - match: {hits.hits.0._id: "2"} - - match: {hits.hits.0.fields.name.0: "moose.jpg"} + #- match: {hits.hits.0._id: "2"} + #- match: {hits.hits.0.fields.name.0: "moose.jpg"} --- "Vector similarity with filter only": - do: @@ -221,7 +218,8 @@ setup: num_candidates: 3 k: 3 field: vector - similarity: 11 + # Set high allowed similarity, reduce once we can update underlying quantization algo + similarity: 110 query_vector: [-0.5, 90.0, -10, 14.8] filter: {"term": {"name": "moose.jpg"}} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index a5aa39f5feb1e..83e79ff7f45a8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -85,7 +85,7 @@ public void testMissingKeystoreFile() throws Exception { final Environment environment = internalCluster().getInstance(Environment.class); final AtomicReference reloadSettingsError = new AtomicReference<>(); // keystore file should be missing for this test case - Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configFile())); + Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configDir())); final int initialReloadCount = mockReloadablePlugin.getReloadCount(); final CountDownLatch latch = new CountDownLatch(1); executeReloadSecureSettings(Strings.EMPTY_ARRAY, emptyPassword(), new ActionListener<>() { @@ -129,10 +129,10 @@ public void testInvalidKeystoreFile() throws Exception { final int initialReloadCount = mockReloadablePlugin.getReloadCount(); // invalid "keystore" file should be present in the config dir try (InputStream keystore = ReloadSecureSettingsIT.class.getResourceAsStream("invalid.txt.keystore")) { - if (Files.exists(environment.configFile()) == false) { - Files.createDirectory(environment.configFile()); + if (Files.exists(environment.configDir()) == false) { + Files.createDirectory(environment.configDir()); } - Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configFile()), StandardCopyOption.REPLACE_EXISTING); + Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configDir()), StandardCopyOption.REPLACE_EXISTING); } final CountDownLatch latch = new CountDownLatch(1); executeReloadSecureSettings(Strings.EMPTY_ARRAY, emptyPassword(), new ActionListener<>() { @@ -363,7 +363,7 @@ public void testInvalidKeyInSettings() throws Exception { try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { keyStoreWrapper.setString(VALID_SECURE_SETTING_NAME, new char[0]); - keyStoreWrapper.save(environment.configFile(), new char[0], false); + keyStoreWrapper.save(environment.configDir(), new char[0], false); } PlainActionFuture actionFuture = new 
PlainActionFuture<>(); @@ -374,7 +374,7 @@ public void testInvalidKeyInSettings() throws Exception { try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { assertThat(keyStoreWrapper, notNullValue()); keyStoreWrapper.setString("some.setting.that.does.not.exist", new char[0]); - keyStoreWrapper.save(environment.configFile(), new char[0], false); + keyStoreWrapper.save(environment.configDir(), new char[0], false); } actionFuture = new PlainActionFuture<>(); @@ -432,7 +432,7 @@ public void onFailure(Exception e) { private SecureSettings writeEmptyKeystore(Environment environment, char[] password) throws Exception { final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); - keyStoreWrapper.save(environment.configFile(), password, false); + keyStoreWrapper.save(environment.configDir(), password, false); return keyStoreWrapper; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java new file mode 100644 index 0000000000000..0a29b99ca6fdc --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.admin.indices.mapping.put; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.MockLog; +import org.elasticsearch.test.junit.annotations.TestLogging; + +import static org.hamcrest.Matchers.equalTo; + +public class PutMappingIT extends ESSingleNodeTestCase { + + @TestLogging( + reason = "testing DEBUG logging", + value = "org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction:DEBUG" + ) + public void testFailureLogging() { + final var indexName = randomIdentifier(); + createIndex(indexName); + final var fieldName = randomIdentifier(); + safeGet(client().execute(TransportPutMappingAction.TYPE, new PutMappingRequest(indexName).source(fieldName, "type=keyword"))); + MockLog.assertThatLogger( + () -> assertThat( + asInstanceOf( + IllegalArgumentException.class, + safeAwaitFailure( + AcknowledgedResponse.class, + l -> client().execute( + TransportPutMappingAction.TYPE, + new PutMappingRequest(indexName).source(fieldName, "type=long"), + l + ) + ) + ).getMessage(), + equalTo("mapper [" + fieldName + "] cannot be changed from type [keyword] to [long]") + ), + TransportPutMappingAction.class, + new MockLog.SeenEventExpectation( + "failure message", + TransportPutMappingAction.class.getCanonicalName(), + Level.DEBUG, + "failed to put mappings on indices [[" + indexName + ) + ); + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java index 90e3196d76378..56ac96d592f49 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java @@ -179,28 +179,35 @@ public void testNodeRemovalFromRedClusterWithTimeout() throws Exception { // make it red! internalCluster().stopNode(node1); ensureRed(indexName); + CountDownLatch stallPrevalidateShardPathActionLatch = new CountDownLatch(1); MockTransportService.getInstance(node2) .addRequestHandlingBehavior(TransportPrevalidateShardPathAction.ACTION_NAME + "[n]", (handler, request, channel, task) -> { logger.info("drop the check shards request"); + safeAwait(stallPrevalidateShardPathActionLatch); + handler.messageReceived(request, channel, task); }); - PrevalidateNodeRemovalRequest req = PrevalidateNodeRemovalRequest.builder() - .setNames(node2) - .build(TEST_REQUEST_TIMEOUT) - .masterNodeTimeout(TimeValue.timeValueSeconds(1)) - .timeout(TimeValue.timeValueSeconds(1)); - PrevalidateNodeRemovalResponse resp = client().execute(PrevalidateNodeRemovalAction.INSTANCE, req).get(); - assertFalse("prevalidation result should return false", resp.getPrevalidation().isSafe()); - String node2Id = getNodeId(node2); - assertThat( - resp.getPrevalidation().message(), - equalTo("cannot prevalidate removal of nodes with the following IDs: [" + node2Id + "]") - ); - assertThat(resp.getPrevalidation().nodes().size(), equalTo(1)); - NodesRemovalPrevalidation.NodeResult nodeResult = resp.getPrevalidation().nodes().get(0); - assertThat(nodeResult.name(), equalTo(node2)); - assertFalse(nodeResult.result().isSafe()); - assertThat(nodeResult.result().message(), startsWith("failed contacting the node")); - assertThat(nodeResult.result().reason(), equalTo(NodesRemovalPrevalidation.Reason.UNABLE_TO_VERIFY)); + try { + PrevalidateNodeRemovalRequest req = PrevalidateNodeRemovalRequest.builder() + .setNames(node2) + .build(TEST_REQUEST_TIMEOUT) + .masterNodeTimeout(TimeValue.timeValueSeconds(1)) + .timeout(TimeValue.timeValueSeconds(1)); + PrevalidateNodeRemovalResponse resp = client().execute(PrevalidateNodeRemovalAction.INSTANCE, req).get(); + assertFalse("prevalidation result should return false", resp.getPrevalidation().isSafe()); + String node2Id = getNodeId(node2); + assertThat( + resp.getPrevalidation().message(), + equalTo("cannot prevalidate removal of nodes with the following IDs: [" + node2Id + "]") + ); + assertThat(resp.getPrevalidation().nodes().size(), equalTo(1)); + NodesRemovalPrevalidation.NodeResult nodeResult = resp.getPrevalidation().nodes().get(0); + assertThat(nodeResult.name(), equalTo(node2)); + assertFalse(nodeResult.result().isSafe()); + assertThat(nodeResult.result().message(), startsWith("failed contacting the node")); + assertThat(nodeResult.result().reason(), equalTo(NodesRemovalPrevalidation.Reason.UNABLE_TO_VERIFY)); + } finally { + stallPrevalidateShardPathActionLatch.countDown(); + } } private void ensureRed(String indexName) throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java index a130a5b869adc..3338675160268 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -220,7 +220,7 @@ public void testUpdatePriority() { public void testIndexDirIsDeletedWhenShardRemoved() throws Exception { Environment env = 
getInstanceFromNode(Environment.class); - Path idxPath = env.sharedDataFile().resolve(randomAlphaOfLength(10)); + Path idxPath = env.sharedDataDir().resolve(randomAlphaOfLength(10)); logger.info("--> idxPath: [{}]", idxPath); Settings idxSettings = Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, idxPath).build(); createIndex("test", idxSettings); @@ -254,7 +254,7 @@ public void testExpectedShardSizeIsPresent() throws InterruptedException { public void testIndexCanChangeCustomDataPath() throws Exception { final String index = "test-custom-data-path"; - final Path sharedDataPath = getInstanceFromNode(Environment.class).sharedDataFile().resolve(randomAsciiLettersOfLength(10)); + final Path sharedDataPath = getInstanceFromNode(Environment.class).sharedDataDir().resolve(randomAsciiLettersOfLength(10)); final Path indexDataPath = sharedDataPath.resolve("start-" + randomAsciiLettersOfLength(10)); logger.info("--> creating index [{}] with data_path [{}]", index, indexDataPath); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java index 4e9e4b4d641d2..ac22be9f41f4d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -562,7 +562,7 @@ public void testResolvePath() throws Exception { command.findAndProcessShardPath( options, environmentByNodeName.get(nodeName), - environmentByNodeName.get(nodeName).dataFiles(), + environmentByNodeName.get(nodeName).dataDirs(), state, shardPath -> assertThat(shardPath.resolveIndex(), equalTo(indexPath)) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java index f63f09764621b..8100e911f77cb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java @@ -9,36 +9,70 @@ package org.elasticsearch.search; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.search.Scorable; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.Weight; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import 
org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.MockScriptPlugin; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptType; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.rescore.RescoreContext; +import org.elasticsearch.search.rescore.Rescorer; +import org.elasticsearch.search.rescore.RescorerBuilder; +import org.elasticsearch.search.suggest.SortBy; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.Suggester; +import org.elasticsearch.search.suggest.SuggestionSearchContext; +import org.elasticsearch.search.suggest.term.TermSuggestion; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import org.elasticsearch.xcontent.XContentBuilder; +import java.io.IOException; import java.util.Collection; import java.util.Collections; -import java.util.Map; +import java.util.List; import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; -import static org.elasticsearch.search.SearchTimeoutIT.ScriptedTimeoutPlugin.SCRIPT_NAME; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) +@ESIntegTestCase.SuiteScopeTestCase public class SearchTimeoutIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(ScriptedTimeoutPlugin.class); + return Collections.singleton(SearchTimeoutPlugin.class); } @Override @@ -46,75 +80,475 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).build(); } - private void indexDocs() { - for (int i = 0; i < 32; i++) { - prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").get(); - } - refresh("test"); - } - - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98369") - public void testTopHitsTimeout() { - indexDocs(); - SearchResponse searchResponse = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS)) - .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap()))) - .get(); - assertThat(searchResponse.isTimedOut(), equalTo(true)); - assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(0, searchResponse.getFailedShards()); - assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); - assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); - assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); - } - - @AwaitsFix(bugUrl = 
"https://github.com/elastic/elasticsearch/issues/98053") - public void testAggsTimeout() { - indexDocs(); - SearchResponse searchResponse = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS)) - .setSize(0) - .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap()))) - .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")) - .get(); - assertThat(searchResponse.isTimedOut(), equalTo(true)); - assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(0, searchResponse.getFailedShards()); - assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); - assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); - assertEquals(searchResponse.getHits().getHits().length, 0); - StringTerms terms = searchResponse.getAggregations().get("terms"); - assertEquals(1, terms.getBuckets().size()); - StringTerms.Bucket bucket = terms.getBuckets().get(0); - assertEquals("value", bucket.getKeyAsString()); - assertThat(bucket.getDocCount(), greaterThan(0L)); - } - - public void testPartialResultsIntolerantTimeout() throws Exception { - prepareIndex("test").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + @Override + protected void setupSuiteScopeCluster() throws Exception { + super.setupSuiteScopeCluster(); + indexRandom(true, "test", randomIntBetween(20, 50)); + } + + /** + * Test the scenario where the query times out before starting to collect documents, verify that partial hits are not returned + */ + public void testTopHitsTimeoutBeforeCollecting() { + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(false)); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + // timeout happened before we could collect any doc, total hits is 0 and no hits are returned + assertEquals(0, searchResponse.getHits().getTotalHits().value()); + assertEquals(0, searchResponse.getHits().getHits().length); + }); + } + + /** + * Test the scenario where the query times out while collecting documents, verify that partial hits results are returned + */ + public void testTopHitsTimeoutWhileCollecting() { + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(true)); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + 
assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); + assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); + }); + } + + /** + * Test the scenario where the query times out before starting to collect documents, verify that partial aggs results are not returned + */ + public void testAggsTimeoutBeforeCollecting() { + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setSize(0) + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + .setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(false)) + .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertEquals(0, searchResponse.getHits().getTotalHits().value()); + assertEquals(0, searchResponse.getHits().getHits().length); + StringTerms terms = searchResponse.getAggregations().get("terms"); + // timeout happened before we could collect any doc, total hits is 0 and no buckets are returned + assertEquals(0, terms.getBuckets().size()); + }); + } + + /** + * Test the scenario where the query times out while collecting documents, verify that partial aggs results are returned + */ + public void testAggsTimeoutWhileCollecting() { + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setSize(0) + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + .setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(true)) + .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); + assertEquals(0, searchResponse.getHits().getHits().length); + StringTerms terms = searchResponse.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + StringTerms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("value", bucket.getKeyAsString()); + assertThat(bucket.getDocCount(), greaterThan(0L)); + }); + } + + /** + * Test the scenario where the suggest phase (part of the query phase) times out, yet there are results + * available coming from executing the query and aggs on each shard. 
+ */ + public void testSuggestTimeoutWithPartialResults() { + SuggestBuilder suggestBuilder = new SuggestBuilder(); + suggestBuilder.setGlobalText("text"); + TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder(); + suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder); + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").suggest(suggestBuilder) + .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); + assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); + StringTerms terms = searchResponse.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + StringTerms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("value", bucket.getKeyAsString()); + assertThat(bucket.getDocCount(), greaterThan(0L)); + }); + } + + /** + * Test the scenario where the rescore phase (part of the query phase) times out, yet there are results + * available coming from executing the query and aggs on each shard. + */ + public void testRescoreTimeoutWithPartialResults() { + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setRescorer(new TimeoutRescorerBuilder()) + .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); + assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); + StringTerms terms = searchResponse.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + StringTerms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("value", bucket.getKeyAsString()); + assertThat(bucket.getDocCount(), greaterThan(0L)); + }); + } + + public void testPartialResultsIntolerantTimeoutBeforeCollecting() { + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + prepareSearch("test") + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + .setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(false)) + .setAllowPartialSearchResults(false) // this line causes timeouts to report failures + ); + assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); + } + public void testPartialResultsIntolerantTimeoutWhileCollecting() { ElasticsearchException ex = expectThrows( ElasticsearchException.class, - prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS)) - .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap()))) + prepareSearch("test") + // 
setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + .setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(true)) .setAllowPartialSearchResults(false) // this line causes timeouts to report failures ); assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); } - public static class ScriptedTimeoutPlugin extends MockScriptPlugin { - static final String SCRIPT_NAME = "search_timeout"; + public void testPartialResultsIntolerantTimeoutWhileSuggestingOnly() { + SuggestBuilder suggestBuilder = new SuggestBuilder(); + suggestBuilder.setGlobalText("text"); + TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder(); + suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder); + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + prepareSearch("test").suggest(suggestBuilder).setAllowPartialSearchResults(false) // this line causes timeouts to report + // failures + ); + assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); + } + + public void testPartialResultsIntolerantTimeoutWhileSuggesting() { + SuggestBuilder suggestBuilder = new SuggestBuilder(); + suggestBuilder.setGlobalText("text"); + TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder(); + suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder); + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + prepareSearch("test").setQuery(new TermQueryBuilder("field", "value")) + .suggest(suggestBuilder) + .setAllowPartialSearchResults(false) // this line causes timeouts to report failures + ); + assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); + } + + public void testPartialResultsIntolerantTimeoutWhileRescoring() { + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + prepareSearch("test").setQuery(new TermQueryBuilder("field", "value")) + .setRescorer(new TimeoutRescorerBuilder()) + .setAllowPartialSearchResults(false) // this line causes timeouts to report failures + ); + assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); + } + + public static final class SearchTimeoutPlugin extends Plugin implements SearchPlugin { + @Override + public List> getQueries() { + return Collections.singletonList(new QuerySpec("timeout", BulkScorerTimeoutQuery::new, parser -> { + throw new UnsupportedOperationException(); + })); + } @Override - public Map, Object>> pluginScripts() { - return Collections.singletonMap(SCRIPT_NAME, params -> { - try { - Thread.sleep(500); - } catch (InterruptedException e) { - throw new RuntimeException(e); + public List> getSuggesters() { + return Collections.singletonList(new SuggesterSpec<>("timeout", TimeoutSuggestionBuilder::new, parser -> { + throw new UnsupportedOperationException(); + }, TermSuggestion::new)); + } + + @Override + public List> getRescorers() { + return Collections.singletonList(new RescorerSpec<>("timeout", TimeoutRescorerBuilder::new, parser -> { + throw new UnsupportedOperationException(); + })); + } + } + + /** + * Query builder that produces a Lucene Query which throws a + * {@link org.elasticsearch.search.internal.ContextIndexSearcher.TimeExceededException} before or while scoring documents. 
+ * This helps make this test not time dependent, otherwise it would be unpredictable when exactly the timeout happens, which is + * rather important if we want to test that we are able to return partial results on timeout. + */ + public static final class BulkScorerTimeoutQuery extends AbstractQueryBuilder { + + private final boolean partialResults; + + BulkScorerTimeoutQuery(boolean partialResults) { + this.partialResults = partialResults; + } + + BulkScorerTimeoutQuery(StreamInput in) throws IOException { + super(in); + this.partialResults = in.readBoolean(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeBoolean(partialResults); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) {} + + @Override + protected Query doToQuery(SearchExecutionContext context) { + return new Query() { + @Override + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { + return new ConstantScoreWeight(this, boost) { + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return false; + } + + @Override + public ScorerSupplier scorerSupplier(LeafReaderContext context) { + return new ScorerSupplier() { + @Override + public BulkScorer bulkScorer() { + if (partialResults == false) { + ((ContextIndexSearcher) searcher).throwTimeExceededException(); + } + final int maxDoc = context.reader().maxDoc(); + return new BulkScorer() { + @Override + public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException { + max = Math.min(max, maxDoc); + collector.setScorer(new Scorable() { + @Override + public float score() { + return 1f; + } + }); + for (int doc = min; doc < max; ++doc) { + if (acceptDocs == null || acceptDocs.get(doc)) { + collector.collect(doc); + // collect one doc per segment, only then throw a timeout: this ensures partial + // results are returned + ((ContextIndexSearcher) searcher).throwTimeExceededException(); + } + } + // there is a slight chance that no docs are scored for a specific segment. + // other shards / slices will throw the timeout anyway, one is enough. + return max == maxDoc ? 
DocIdSetIterator.NO_MORE_DOCS : max; + } + + @Override + public long cost() { + return maxDoc; + } + }; + } + + @Override + public Scorer get(long leadCost) { + assert false; + return new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + } + + @Override + public long cost() { + assert false; + return context.reader().maxDoc(); + } + }; + } + }; + } + + @Override + public String toString(String field) { + return "timeout query"; + } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj); + } + + @Override + public int hashCode() { + return classHash(); + } + }; + } + + @Override + protected boolean doEquals(BulkScorerTimeoutQuery other) { + return false; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + public String getWriteableName() { + return "timeout"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return null; + } + } + + /** + * Suggestion builder that triggers a timeout as part of its execution + */ + private static final class TimeoutSuggestionBuilder extends TermSuggestionBuilder { + TimeoutSuggestionBuilder() { + super("field"); + } + + TimeoutSuggestionBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getWriteableName() { + return "timeout"; + } + + @Override + public SuggestionSearchContext.SuggestionContext build(SearchExecutionContext context) { + return new TimeoutSuggestionContext(new TimeoutSuggester((ContextIndexSearcher) context.searcher()), context); + } + } + + private static final class TimeoutSuggester extends Suggester { + private final ContextIndexSearcher contextIndexSearcher; + + TimeoutSuggester(ContextIndexSearcher contextIndexSearcher) { + this.contextIndexSearcher = contextIndexSearcher; + } + + @Override + protected TermSuggestion innerExecute( + String name, + TimeoutSuggestionContext suggestion, + IndexSearcher searcher, + CharsRefBuilder spare + ) { + contextIndexSearcher.throwTimeExceededException(); + throw new AssertionError("should have thrown TimeExceededException"); + } + + @Override + protected TermSuggestion emptySuggestion(String name, TimeoutSuggestionContext suggestion, CharsRefBuilder spare) { + return new TermSuggestion(name, suggestion.getSize(), SortBy.SCORE); + } + } + + private static final class TimeoutSuggestionContext extends SuggestionSearchContext.SuggestionContext { + TimeoutSuggestionContext(Suggester suggester, SearchExecutionContext searchExecutionContext) { + super(suggester, searchExecutionContext); + } + } + + private static final class TimeoutRescorerBuilder extends RescorerBuilder { + TimeoutRescorerBuilder() { + super(); + } + + TimeoutRescorerBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + protected void doWriteTo(StreamOutput out) {} + + @Override + protected void doXContent(XContentBuilder builder, Params params) {} + + @Override + protected RescoreContext innerBuildContext(int windowSize, SearchExecutionContext context) throws IOException { + return new RescoreContext(10, new Rescorer() { + @Override + public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext rescoreContext) { + ((ContextIndexSearcher) context.searcher()).throwTimeExceededException(); + assert false; + return null; + } + + @Override + public Explanation explain( + int topLevelDocId, + IndexSearcher searcher, + RescoreContext rescoreContext, + 
Explanation sourceExplanation + ) { + throw new UnsupportedOperationException(); } - return true; }); } + + @Override + public String getWriteableName() { + return "timeout"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return null; + } + + @Override + public RescorerBuilder rewrite(QueryRewriteContext ctx) { + return this; + } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index fe49ce57d0400..cbd22856f09a2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.client.Cancellable; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; @@ -73,6 +74,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; @@ -591,21 +593,31 @@ public void testNoActiveCopy() throws Exception { private void moveOrCloseShardsOnNodes(String nodeName) throws Exception { final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeName); + final ClusterState clusterState = clusterService().state(); for (IndexService indexService : indicesService) { for (IndexShard indexShard : indexService) { if (randomBoolean()) { closeShardNoCheck(indexShard, randomBoolean()); } else if (randomBoolean()) { final ShardId shardId = indexShard.shardId(); - + final var assignedNodes = new HashSet<>(); + clusterState.routingTable().shardRoutingTable(shardId).allShards().forEach(shr -> { + if (shr.currentNodeId() != null) { + assignedNodes.add(shr.currentNodeId()); + } + if (shr.relocatingNodeId() != null) { + assignedNodes.add(shr.relocatingNodeId()); + } + }); final var targetNodes = new ArrayList(); for (final var targetIndicesService : internalCluster().getInstances(IndicesService.class)) { final var targetNode = targetIndicesService.clusterService().localNode(); - if (targetNode.canContainData() && targetIndicesService.getShardOrNull(shardId) == null) { + if (targetNode.canContainData() + && targetIndicesService.getShardOrNull(shardId) == null + && assignedNodes.contains(targetNode.getId()) == false) { targetNodes.add(targetNode.getId()); } } - if (targetNodes.isEmpty()) { continue; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index fbdcfe26d28ee..0ba4c13c352c1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; -import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -994,22 +993,6 @@ public void testRescoreAfterCollapseRandom() throws Exception { }); } - public void testRescoreWithTimeout() throws Exception { - // no dummy docs since merges can change scores while we run queries. - int numDocs = indexRandomNumbers("whitespace", -1, false); - - String intToEnglish = English.intToEnglish(between(0, numDocs - 1)); - String query = intToEnglish.split(" ")[0]; - assertResponse( - prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) - .setSize(10) - .addRescorer(new QueryRescorerBuilder(functionScoreQuery(new TestTimedScoreFunctionBuilder())).windowSize(100)) - .setTimeout(TimeValue.timeValueMillis(10)), - r -> assertTrue(r.isTimedOut()) - ); - } - @Override protected Collection> nodePlugins() { return List.of(TestTimedQueryPlugin.class); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java index 77c7b4b762e6a..c1549c1f3d384 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java @@ -195,7 +195,7 @@ public void testConcurrentWipeAndRecreateFromOtherCluster() throws IOException { ); assertAcked(clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName)); - IOUtils.rm(internalCluster().getCurrentMasterNodeInstance(Environment.class).resolveRepoFile(repoPath.toString())); + IOUtils.rm(internalCluster().getCurrentMasterNodeInstance(Environment.class).resolveRepoDir(repoPath.toString())); createRepository(repoName, "fs", repoPath); createFullSnapshot(repoName, "snap-1"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java index 6922b21be37f7..2f4014bf4d350 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java @@ -316,7 +316,6 @@ public void testGetSnapshotsNoRepos() { .get(); assertTrue(getSnapshotsResponse.getSnapshots().isEmpty()); - assertTrue(getSnapshotsResponse.getFailures().isEmpty()); } public void testGetSnapshotsMultipleRepos() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java index b9e47740e2945..b86cae1c2fb60 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java @@ -17,10 +17,12 @@ import org.elasticsearch.cluster.SnapshotDeletionsInProgress; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.mockstore.MockRepository; import org.elasticsearch.test.ClusterServiceUtils; import 
org.elasticsearch.test.MockLog; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.List; import java.util.concurrent.TimeUnit; @@ -223,4 +225,30 @@ public void testRerouteWhenShardSnapshotsCompleted() throws Exception { safeAwait(shardMovedListener); ensureGreen(indexName); } + + @TestLogging(reason = "testing task description, logged at DEBUG", value = "org.elasticsearch.cluster.service.MasterService:DEBUG") + public void testCreateSnapshotTaskDescription() { + createIndexWithRandomDocs(randomIdentifier(), randomIntBetween(1, 5)); + final var repositoryName = randomIdentifier(); + createRepository(repositoryName, "mock"); + + final var snapshotName = randomIdentifier(); + MockLog.assertThatLogger( + () -> createFullSnapshot(repositoryName, snapshotName), + MasterService.class, + new MockLog.SeenEventExpectation( + "executing cluster state update debug message", + MasterService.class.getCanonicalName(), + Level.DEBUG, + "executing cluster state update for [create_snapshot [" + + snapshotName + + "][CreateSnapshotTask{repository=" + + repositoryName + + ", snapshot=*" + + snapshotName + + "*}]]" + ) + ); + } + } diff --git a/server/src/main/java/org/elasticsearch/ReleaseVersions.java b/server/src/main/java/org/elasticsearch/ReleaseVersions.java index 22cd18c7b4ac3..5e6986a5bf924 100644 --- a/server/src/main/java/org/elasticsearch/ReleaseVersions.java +++ b/server/src/main/java/org/elasticsearch/ReleaseVersions.java @@ -78,10 +78,10 @@ public static IntFunction generateVersionsLookup(Class versionContain // replace all version lists with the smallest & greatest versions versions.replaceAll((k, v) -> { if (v.size() == 1) { - return List.of(v.get(0)); + return List.of(v.getFirst()); } else { v.sort(Comparator.naturalOrder()); - return List.of(v.get(0), v.get(v.size() - 1)); + return List.of(v.getFirst(), v.getLast()); } }); @@ -100,14 +100,14 @@ private static IntFunction lookupFunction(NavigableMap lookupFunction(NavigableMap lookupFunction(NavigableMap T lastItem(List list) { - return list.get(list.size() - 1); - } - private static Version nextVersion(Version version) { return new Version(version.id + 100); // +1 to revision } diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index 64d1c0535a561..47cbe605b98e3 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -118,6 +118,14 @@ public static List getAllVersions() { return VersionsHolder.ALL_VERSIONS; } + /** + * @return whether this is a known {@link TransportVersion}, i.e. one declared in {@link TransportVersions}. Other versions may exist + * in the wild (they're sent over the wire by numeric ID) but we don't know how to communicate using such versions. + */ + public boolean isKnown() { + return VersionsHolder.ALL_VERSIONS_MAP.containsKey(id); + } + public static TransportVersion fromString(String str) { return TransportVersion.fromId(Integer.parseInt(str)); } @@ -130,20 +138,20 @@ public static TransportVersion fromString(String str) { * When a patch version of an existing transport version is created, {@code transportVersion.isPatchFrom(patchVersion)} * will match any transport version at or above {@code patchVersion} that is also of the same base version. *
-     * For example, {@code version.isPatchFrom(8_800_00_4)} will return the following for the given {@code version}:
+     * For example, {@code version.isPatchFrom(8_800_0_04)} will return the following for the given {@code version}:
      * <ul>
-     *     <li>{@code 8_799_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_799_00_9.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_3.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_4.isPatchFrom(8_800_00_4)}: {@code true}</li>
-     *     <li>{@code 8_800_00_9.isPatchFrom(8_800_00_4)}: {@code true}</li>
-     *     <li>{@code 8_800_01_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_801_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
+     *     <li>{@code 8_799_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_799_0_09.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_03.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_04.isPatchFrom(8_800_0_04)}: {@code true}</li>
+     *     <li>{@code 8_800_0_49.isPatchFrom(8_800_0_04)}: {@code true}</li>
+     *     <li>{@code 8_800_1_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_801_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
      * </ul>
*/ public boolean isPatchFrom(TransportVersion version) { - return onOrAfter(version) && id < version.id + 10 - (version.id % 10); + return onOrAfter(version) && id < version.id + 100 - (version.id % 100); } /** diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 8f747a59ae5e0..24ca2f6ee589f 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -90,87 +90,96 @@ static TransportVersion def(int id) { */ public static final TransportVersion V_8_9_X = def(8_500_020); public static final TransportVersion V_8_10_X = def(8_500_061); - public static final TransportVersion V_8_11_X = def(8_512_00_1); - public static final TransportVersion V_8_12_0 = def(8_560_00_0); - public static final TransportVersion V_8_12_1 = def(8_560_00_1); - public static final TransportVersion V_8_13_0 = def(8_595_00_0); - public static final TransportVersion V_8_13_4 = def(8_595_00_1); - public static final TransportVersion V_8_14_0 = def(8_636_00_1); - public static final TransportVersion V_8_15_0 = def(8_702_00_2); - public static final TransportVersion V_8_15_2 = def(8_702_00_3); - public static final TransportVersion V_8_16_0 = def(8_772_00_1); - public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_00_2); - public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_00_3); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_00_4); - public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); - public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); - public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); - public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); - public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0); - public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_00_0); - public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_00_0); - public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_00_0); - public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_00_0); - public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_00_0); - public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_00_0); - public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_00_0); - public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_00_0); - public static final TransportVersion KQL_QUERY_ADDED = def(8_786_00_0); - public static final TransportVersion ROLE_MONITOR_STATS = def(8_787_00_0); - public static final TransportVersion DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK = def(8_788_00_0); - public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO = def(8_789_00_0); - public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_00_0); - public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_00_0); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_00_0); - public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); - public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = 
def(8_794_00_0);
-    public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0);
-    public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0);
-    public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0);
-    public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_00_1);
-    public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_00_2);
-    public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0);
-    public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0);
-    public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_00_0);
-    public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_00_0);
-    public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_00_0);
-    public static final TransportVersion NEW_REFRESH_CLUSTER_BLOCK = def(8_803_00_0);
-    public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS = def(8_804_00_0);
-    public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_00_0);
-    public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_00_0);
-    public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_00_0);
-    public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_00_0);
-    public static final TransportVersion EQL_ALLOW_PARTIAL_SEARCH_RESULTS = def(8_809_00_0);
-    public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_00_0);
-    public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_00_0);
-    public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_00_0);
-    public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_00_0);
-    public static final TransportVersion TRANSFORMS_UPGRADE_MODE = def(8_814_00_0);
-    public static final TransportVersion NODE_SHUTDOWN_EPHEMERAL_ID_ADDED = def(8_815_00_0);
-    public static final TransportVersion ESQL_CCS_TELEMETRY_STATS = def(8_816_00_0);
-    public static final TransportVersion TEXT_EMBEDDING_QUERY_VECTOR_BUILDER_INFER_MODEL_ID = def(8_817_00_0);
-    public static final TransportVersion ESQL_ENABLE_NODE_LEVEL_REDUCTION = def(8_818_00_0);
-    public static final TransportVersion JINA_AI_INTEGRATION_ADDED = def(8_819_00_0);
-    public static final TransportVersion TRACK_INDEX_FAILED_DUE_TO_VERSION_CONFLICT_METRIC = def(8_820_00_0);
-    public static final TransportVersion REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX = def(8_821_00_0);
-    public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_00_0);
-    public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_00_0);
-    public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_00_0);
-    public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_825_00_0);
-    public static final TransportVersion REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_826_00_0);
-    public static final TransportVersion ESQL_SKIP_ES_INDEX_SERIALIZATION = def(8_827_00_0);
-    public static final TransportVersion ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_00_0);
-    public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_00_0);
-    public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_00_0);
-    public static final TransportVersion ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_00_0);
-    public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_00_0);
-    public static final TransportVersion RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_00_0);
-    public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_00_0);
-    public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_00_0);
-    public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_00_0);
-    public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_00_0);
-    public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_00_0);
-    public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_00_0);
+    public static final TransportVersion V_8_11_X = def(8_512_0_01);
+    public static final TransportVersion V_8_12_0 = def(8_560_0_00);
+    public static final TransportVersion V_8_12_1 = def(8_560_0_01);
+    public static final TransportVersion V_8_13_0 = def(8_595_0_00);
+    public static final TransportVersion V_8_13_4 = def(8_595_0_01);
+    public static final TransportVersion V_8_14_0 = def(8_636_0_01);
+    public static final TransportVersion V_8_15_0 = def(8_702_0_02);
+    public static final TransportVersion V_8_15_2 = def(8_702_0_03);
+    public static final TransportVersion V_8_16_0 = def(8_772_0_01);
+    public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_0_02);
+    public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_0_03);
+    public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_0_04);
+    public static final TransportVersion INITIAL_ELASTICSEARCH_8_16_5 = def(8_772_0_05);
+    public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_0_00);
+    public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_0_00);
+    public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_0_00);
+    public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_0_00);
+    public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_0_00);
+    public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_0_00);
+    public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_0_00);
+    public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_0_00);
+    public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_0_00);
+    public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_0_00);
+    public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_0_00);
+    public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_0_00);
+    public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_0_00);
+    public static final TransportVersion KQL_QUERY_ADDED = def(8_786_0_00);
+    public static final TransportVersion ROLE_MONITOR_STATS = def(8_787_0_00);
+    public static final TransportVersion DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK = def(8_788_0_00);
+    public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO = def(8_789_0_00);
+    public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_0_00);
+    public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_0_00);
+    public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_0_00);
+    public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_0_00);
+    public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_0_00);
+    public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_0_00);
+    public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_0_00);
+    public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_0_00);
+    public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_0_01);
+    public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_0_02);
+    public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_3 = def(8_797_0_03);
+    public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_0_00);
+    public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_0_00);
+    public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_0_00);
+    public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_0_00);
+    public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_0_00);
+    public static final TransportVersion NEW_REFRESH_CLUSTER_BLOCK = def(8_803_0_00);
+    public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS = def(8_804_0_00);
+    public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_0_00);
+    public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_0_00);
+    public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_0_00);
+    public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_0_00);
+    public static final TransportVersion EQL_ALLOW_PARTIAL_SEARCH_RESULTS = def(8_809_0_00);
+    public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_0_00);
+    public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_0_00);
+    public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_0_00);
+    public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_0_00);
+    public static final TransportVersion TRANSFORMS_UPGRADE_MODE = def(8_814_0_00);
+    public static final TransportVersion NODE_SHUTDOWN_EPHEMERAL_ID_ADDED = def(8_815_0_00);
+    public static final TransportVersion ESQL_CCS_TELEMETRY_STATS = def(8_816_0_00);
+    public static final TransportVersion TEXT_EMBEDDING_QUERY_VECTOR_BUILDER_INFER_MODEL_ID = def(8_817_0_00);
+    public static final TransportVersion ESQL_ENABLE_NODE_LEVEL_REDUCTION = def(8_818_0_00);
+    public static final TransportVersion JINA_AI_INTEGRATION_ADDED = def(8_819_0_00);
+    public static final TransportVersion TRACK_INDEX_FAILED_DUE_TO_VERSION_CONFLICT_METRIC = def(8_820_0_00);
+    public static final TransportVersion REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX = def(8_821_0_00);
+    public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_0_00);
+    public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_0_00);
+    public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_0_00);
+    public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_825_0_00);
+    public static final TransportVersion REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_826_0_00);
+    public static final TransportVersion ESQL_SKIP_ES_INDEX_SERIALIZATION = def(8_827_0_00);
+    public static final TransportVersion ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_0_00);
+    public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_0_00);
+    public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_0_00);
+    public static final TransportVersion ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_0_00);
+    public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_0_00);
+    public static final TransportVersion RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_0_00);
+    public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_0_00);
+    public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_0_00);
+    public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_0_00);
+    public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_0_00);
+    public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_0_00);
+    public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_0_00);
+    public static final TransportVersion ML_INFERENCE_IBM_WATSONX_RERANK_ADDED = def(8_840_0_00);
+    public static final TransportVersion INITIAL_ELASTICSEARCH_8_19 = def(8_841_0_00);
+    public static final TransportVersion INITIAL_ELASTICSEARCH_9_0 = def(9_000_0_00);
+    public static final TransportVersion REMOVE_SNAPSHOT_FAILURES_90 = def(9_000_0_01);
+    public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED_90 = def(9_000_0_02);
+    public static final TransportVersion REMOVE_DESIRED_NODE_VERSION_90 = def(9_000_0_03);
+    public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION_90 = def(9_000_0_04);

     /*
      * STOP! READ THIS FIRST! No, really,
@@ -187,17 +196,17 @@ static TransportVersion def(int id) {
      * To add a new transport version, add a new constant at the bottom of the list, above this comment. Don't add other lines,
      * comments, etc. The version id has the following layout:
      *
-     * M_NNN_SS_P
+     * M_NNN_S_PP
      *
      * M - The major version of Elasticsearch
      * NNN - The server version part
-     * SS - The serverless version part. It should always be 00 here, it is used by serverless only.
-     * P - The patch version part
+     * S - The subsidiary version part. It should always be 0 here, it is only used in subsidiary repositories.
+     * PP - The patch version part
      *
      * To determine the id of the next TransportVersion constant, do the following:
      * - Use the same major version, unless bumping majors
      * - Bump the server version part by 1, unless creating a patch version
-     * - Leave the serverless part as 00
+     * - Leave the subsidiary part as 0
      * - Bump the patch part if creating a patch version
      *
      * If a patch version is created, it should be placed sorted among the other existing constants.
@@ -229,15 +238,13 @@ static TransportVersion def(int id) {
      * Reference to the earliest compatible transport version to this version of the codebase.
      * This should be the transport version used by the highest minor version of the previous major.
      */
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    // This needs to be bumped to the 8.last
-    public static final TransportVersion MINIMUM_COMPATIBLE = V_7_17_0;
+    public static final TransportVersion MINIMUM_COMPATIBLE = BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1;

     /**
      * Reference to the minimum transport version that can be used with CCS.
      * This should be the transport version used by the previous minor release.
      */
-    public static final TransportVersion MINIMUM_CCS_VERSION = V_8_15_0;
+    public static final TransportVersion MINIMUM_CCS_VERSION = BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1;

     /**
      * Sorted list of all versions defined in this class
diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java
index 1249f36745835..cafeabac81348 100644
--- a/server/src/main/java/org/elasticsearch/Version.java
+++ b/server/src/main/java/org/elasticsearch/Version.java
@@ -193,9 +193,11 @@ public class Version implements VersionId<Version>, ToXContentFragment {
     public static final Version V_8_16_2 = new Version(8_16_02_99);
     public static final Version V_8_16_3 = new Version(8_16_03_99);
     public static final Version V_8_16_4 = new Version(8_16_04_99);
+    public static final Version V_8_16_5 = new Version(8_16_05_99);
     public static final Version V_8_17_0 = new Version(8_17_00_99);
     public static final Version V_8_17_1 = new Version(8_17_01_99);
     public static final Version V_8_17_2 = new Version(8_17_02_99);
+    public static final Version V_8_17_3 = new Version(8_17_03_99);
     public static final Version V_8_18_0 = new Version(8_18_00_99);
     public static final Version V_9_0_0 = new Version(9_00_00_99);
     public static final Version CURRENT = V_9_0_0;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java
index c84df0ddfe644..3b773ae686845 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java
@@ -108,7 +108,7 @@ protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation(
         Task task
     ) {
         // We default to using an empty string as the keystore password so that we mimic pre 7.3 API behavior
-        try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configFile())) {
+        try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configDir())) {
             // reread keystore from config file
             if (keystore == null) {
                 return new NodesReloadSecureSettingsResponse.NodeResponse(
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
index dc261177567cc..087e6425d40c8 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
@@ -9,7 +9,7 @@

 package org.elasticsearch.action.admin.cluster.snapshots.get;

-import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Iterators;
@@ -17,12 +17,10 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.Nullable;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.snapshots.SnapshotInfo;
 import org.elasticsearch.xcontent.ToXContent;

 import java.io.IOException;
-import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -35,9 +33,6 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXContentObject {

     private final List<SnapshotInfo> snapshots;

-    @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // always empty, can be dropped
-    private final Map<String, ElasticsearchException> failures;
-
     @Nullable
     private final String next;

@@ -45,15 +40,8 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXContentObject {

     private final int remaining;

-    public GetSnapshotsResponse(
-        List<SnapshotInfo> snapshots,
-        Map<String, ElasticsearchException> failures,
-        @Nullable String next,
-        final int total,
-        final int remaining
-    ) {
+    public GetSnapshotsResponse(List<SnapshotInfo> snapshots, @Nullable String next, final int total, final int remaining) {
         this.snapshots = List.copyOf(snapshots);
-        this.failures = failures == null ? Map.of() : Map.copyOf(failures);
         this.next = next;
         this.total = total;
         this.remaining = remaining;
@@ -61,7 +49,10 @@ public GetSnapshotsResponse(

     public GetSnapshotsResponse(StreamInput in) throws IOException {
         this.snapshots = in.readCollectionAsImmutableList(SnapshotInfo::readFrom);
-        this.failures = Collections.unmodifiableMap(in.readMap(StreamInput::readException));
+        if (in.getTransportVersion().before(TransportVersions.REMOVE_SNAPSHOT_FAILURES_90)) {
+            // Deprecated `failures` field
+            in.readMap(StreamInput::readException);
+        }
         this.next = in.readOptionalString();
         this.total = in.readVInt();
         this.remaining = in.readVInt();
@@ -76,25 +67,11 @@ public List<SnapshotInfo> getSnapshots() {
         return snapshots;
     }

-    /**
-     * Returns a map of repository name to {@link ElasticsearchException} for each unsuccessful response.
-     */
-    public Map<String, ElasticsearchException> getFailures() {
-        return failures;
-    }
-
     @Nullable
     public String next() {
         return next;
     }

-    /**
-     * Returns true if there is at least one failed response.
-     */
-    public boolean isFailed() {
-        return failures.isEmpty() == false;
-    }
-
     public int totalCount() {
         return total;
     }
@@ -106,7 +83,10 @@ public int remaining() {

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeCollection(snapshots);
-        out.writeMap(failures, StreamOutput::writeException);
+        if (out.getTransportVersion().before(TransportVersions.REMOVE_SNAPSHOT_FAILURES_90)) {
+            // Deprecated `failures` field
+            out.writeMap(Map.of(), StreamOutput::writeException);
+        }
         out.writeOptionalString(next);
         out.writeVInt(total);
         out.writeVInt(remaining);
@@ -120,18 +100,6 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
             return b;
         }), Iterators.map(getSnapshots().iterator(), snapshotInfo -> snapshotInfo::toXContentExternal), Iterators.single((b, p) -> {
             b.endArray();
-            if (failures.isEmpty() == false) {
-                b.startObject("failures");
-                for (Map.Entry<String, ElasticsearchException> error : failures.entrySet()) {
-                    b.field(error.getKey(), (bb, pa) -> {
-                        bb.startObject();
-                        error.getValue().toXContent(bb, pa);
-                        bb.endObject();
-                        return bb;
-                    });
-                }
-                b.endObject();
-            }
             if (next != null) {
                 b.field("next", next);
             }
@@ -151,12 +119,12 @@ public boolean equals(Object o) {
         if (this == o) return true;
         if (o == null || getClass() != o.getClass()) return false;
         GetSnapshotsResponse that = (GetSnapshotsResponse) o;
-        return Objects.equals(snapshots, that.snapshots) && Objects.equals(failures, that.failures) && Objects.equals(next, that.next);
+        return Objects.equals(snapshots, that.snapshots) && Objects.equals(next, that.next);
     }

     @Override
     public int hashCode() {
-        return Objects.hash(snapshots, failures, next);
+        return Objects.hash(snapshots, next);
     }

     @Override
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
index 896b336d54d7b..ec4a578ef25cd 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
@@ -543,7 +543,6 @@ private GetSnapshotsResponse buildResponse() {
             }
             return new GetSnapshotsResponse(
                 snapshotInfos,
-                null,
                 remaining > 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null,
                 totalCount.get(),
                 remaining
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java
index 69ab9f57d2be7..071e9b42752c0 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java
@@ -77,6 +77,17 @@ public boolean hasErrors() {
         return errors;
     }

+    /**
+     * Get a list of all errors from the response. If there are no errors, an empty list is returned.
+     */
+    public List<ElasticsearchException> getErrors() {
+        if (errors == false) {
+            return List.of();
+        } else {
+            return actionResults.stream().filter(a -> a.getError() != null).map(AliasActionResult::getError).toList();
+        }
+    }
+
     /**
      * Build a response from a list of action results. Sets the errors boolean based
      * on whether an of the individual results contain an error.
@@ -165,6 +176,13 @@ public static AliasActionResult buildSuccess(List<String> indices, AliasActions action) {
         return new AliasActionResult(indices, action, null);
     }

+    /**
+     * The error result if the action failed, null if the action succeeded.
+     */
+    public ElasticsearchException getError() {
+        return error;
+    }
+
     private int getStatus() {
         return error == null ? 200 : error.status().getStatus();
     }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java
new file mode 100644
index 0000000000000..0574e05001f12
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.action.admin.indices.analyze;
+
+import java.util.Set;
+
+public final class AnalyzeCapabilities {
+    private AnalyzeCapabilities() {}
+
+    private static final String WRONG_CUSTOM_ANALYZER_RETURNS_400_CAPABILITY = "wrong_custom_analyzer_returns_400";
+
+    public static final Set<String> CAPABILITIES = Set.of(WRONG_CUSTOM_ANALYZER_RETURNS_400_CAPABILITY);
+}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java
index fb672b49c2f5a..5f5d27bda2708 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java
@@ -144,6 +144,8 @@ public static AnalyzeAction.Response analyze(
             if (analyzer != null) {
                 return analyze(request, analyzer, maxTokenCount);
             }
+        } catch (IllegalStateException e) {
+            throw new IllegalArgumentException("Can not build a custom analyzer", e);
         }

         // Otherwise we use a built-in analyzer, which should not be closed
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
index 2870a6538f8bb..1c99d84900866 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
@@ -36,7 +36,6 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;

-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -126,7 +125,7 @@ protected void masterOperation(
             performMappingUpdate(concreteIndices, request, listener, metadataMappingService, false);
         } catch (IndexNotFoundException ex) {
-            logger.debug(() -> "failed to put mappings on indices [" + Arrays.asList(request.indices() + "]"), ex);
+            logger.debug(() -> "failed to put mappings on indices " + Arrays.toString(request.indices()), ex);
             throw ex;
         }
     }
@@ -162,25 +161,21 @@ static void performMappingUpdate(
         MetadataMappingService metadataMappingService,
         boolean autoUpdate
     ) {
-        final ActionListener<AcknowledgedResponse> wrappedListener = listener.delegateResponse((l, e) -> {
-            logger.debug(() -> "failed to put mappings on indices [" + Arrays.asList(concreteIndices) + "]", e);
+        ActionListener.run(listener.delegateResponse((l, e) -> {
+            logger.debug(() -> "failed to put mappings on indices " + Arrays.toString(concreteIndices), e);
             l.onFailure(e);
-        });
-        final PutMappingClusterStateUpdateRequest updateRequest;
-        try {
-            updateRequest = new PutMappingClusterStateUpdateRequest(
-                request.masterNodeTimeout(),
-                request.ackTimeout(),
-                request.source(),
-                autoUpdate,
-                concreteIndices
-            );
-        } catch (IOException e) {
-            wrappedListener.onFailure(e);
-            return;
-        }
-
-        metadataMappingService.putMapping(updateRequest, wrappedListener);
+        }),
+            wrappedListener -> metadataMappingService.putMapping(
+                new PutMappingClusterStateUpdateRequest(
+                    request.masterNodeTimeout(),
+                    request.ackTimeout(),
+                    request.source(),
+                    autoUpdate,
+                    concreteIndices
+                ),
+                wrappedListener
+            )
+        );
     }

     static String checkForFailureStoreViolations(ClusterState clusterState, Index[] concreteIndices, PutMappingRequest request) {
diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java
index 7890a0f9f9738..17c272d75d014 100644
--- a/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java
@@ -136,7 +136,7 @@ public CanMatchNodeRequest(
     ) {
         this.source = getCanMatchSource(searchRequest);
         this.indicesOptions = indicesOptions;
-        this.shards = new ArrayList<>(shards);
+        this.shards = shards;
         this.searchType = searchRequest.searchType();
         this.requestCache = searchRequest.requestCache();
         // If allowPartialSearchResults is unset (ie null), the cluster-level default should have been substituted
diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java
index d45a8a6f01cd1..ac701085d8d09 100644
--- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java
+++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java
@@ -41,7 +41,6 @@
 import java.util.concurrent.Executor;
 import java.util.concurrent.atomic.AtomicReferenceArray;
 import java.util.function.BiFunction;
-import java.util.stream.Collectors;

 import static org.elasticsearch.core.Strings.format;
 import static org.elasticsearch.core.Types.forciblyCast;
@@ -350,10 +349,7 @@ private record SendingTarget(@Nullable String clusterAlias, @Nullable String nodeId) {

     private CanMatchNodeRequest createCanMatchRequest(Map.Entry<SendingTarget, List<SearchShardIterator>> entry) {
         final SearchShardIterator first = entry.getValue().get(0);
-        final List<CanMatchNodeRequest.Shard> shardLevelRequests = entry.getValue()
-            .stream()
-            .map(this::buildShardLevelRequest)
-            .collect(Collectors.toCollection(ArrayList::new));
+        final List<CanMatchNodeRequest.Shard> shardLevelRequests = entry.getValue().stream().map(this::buildShardLevelRequest).toList();
         assert entry.getValue().stream().allMatch(Objects::nonNull);
         assert entry.getValue()
             .stream()
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java
index 8f1537d917c15..f4363ce1948b9 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java
@@ -24,7 +24,7 @@ private BootstrapUtil() {}

     public static SecureSettings loadSecureSettings(Environment initialEnv, SecureString keystorePassword) throws BootstrapException {
         try {
-            return KeyStoreWrapper.bootstrap(initialEnv.configFile(), () -> keystorePassword);
+            return KeyStoreWrapper.bootstrap(initialEnv.configDir(), () -> keystorePassword);
         } catch (Exception e) {
             throw new BootstrapException(e);
         }
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java b/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java
index 7b85b369b5dd5..54244f320840a 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java
@@ -52,7 +52,7 @@ static Supplier<Console> buildConsoleLoader(ClassLoader classLoader) {
     }

     private static ClassLoader buildClassLoader(Environment env) {
-        final Path libDir = env.libFile().resolve("tools").resolve("ansi-console");
+        final Path libDir = env.libDir().resolve("tools").resolve("ansi-console");

         try (var libDirFilesStream = Files.list(libDir)) {
             final URL[] urls = libDirFilesStream.filter(each -> each.getFileName().toString().endsWith(".jar"))
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
index 311df05f9b07b..6e07c7012cc06 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
@@ -32,9 +32,9 @@
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap;
-import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement;
 import org.elasticsearch.entitlement.runtime.policy.Policy;
 import org.elasticsearch.entitlement.runtime.policy.PolicyParserUtils;
+import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.jdk.JarHell;
@@ -187,7 +187,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException {
         nodeEnv.validateNativesConfig(); // temporary directories are important for JNA
         initializeNatives(
-            nodeEnv.tmpFile(),
+            nodeEnv.tmpDir(),
             BootstrapSettings.MEMORY_LOCK_SETTING.get(args.nodeSettings()),
             true, // always install system call filters, not user-configurable since 8.0.0
             BootstrapSettings.CTRLHANDLER_SETTING.get(args.nodeSettings())
@@ -223,8 +223,8 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException {
         );

         // load the plugin Java modules and layers now for use in entitlements
-        var modulesBundles = PluginsLoader.loadModulesBundles(nodeEnv.modulesFile());
-        var pluginsBundles = PluginsLoader.loadPluginsBundles(nodeEnv.pluginsFile());
+        var modulesBundles = PluginsLoader.loadModulesBundles(nodeEnv.modulesDir());
+        var pluginsBundles = PluginsLoader.loadPluginsBundles(nodeEnv.pluginsDir());

         final PluginsLoader pluginsLoader;

@@ -242,8 +242,16 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException {
             pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, findPluginsWithNativeAccess(pluginPolicies));

             var pluginsResolver = PluginsResolver.create(pluginsLoader);
-            EntitlementBootstrap.bootstrap(pluginPolicies, pluginsResolver::resolveClassToPluginName);
-        } else if (RuntimeVersionFeature.isSecurityManagerAvailable()) {
+            EntitlementBootstrap.bootstrap(
+                pluginPolicies,
+                pluginsResolver::resolveClassToPluginName,
+                nodeEnv.dataDirs(),
+                nodeEnv.configDir(),
+                nodeEnv.tmpDir(),
+                nodeEnv.logsDir()
+            );
+        } else {
+            assert RuntimeVersionFeature.isSecurityManagerAvailable();
             // no need to explicitly enable native access for legacy code
             pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, Map.of());
             // install SM after natives, shutdown hooks, etc.
@@ -253,10 +261,6 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException {
                 SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(args.nodeSettings()),
                 args.pidFile()
             );
-        } else {
-            // TODO: should we throw/interrupt startup in this case?
-            pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, Map.of());
-            LogManager.getLogger(Elasticsearch.class).warn("Bootstrapping without any protection");
         }

         bootstrap.setPluginsLoader(pluginsLoader);
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Security.java b/server/src/main/java/org/elasticsearch/bootstrap/Security.java
index dc6de9a6b2c91..a352112b67afb 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/Security.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/Security.java
@@ -178,11 +178,11 @@ static Map<String, Policy> getPluginAndModulePermissions(Environment environment) throws IOException {
             }
         };

-        for (Path plugin : PluginsUtils.findPluginDirs(environment.pluginsFile())) {
-            addPolicy.accept(PolicyUtil.getPluginPolicyInfo(plugin, environment.tmpFile()));
+        for (Path plugin : PluginsUtils.findPluginDirs(environment.pluginsDir())) {
+            addPolicy.accept(PolicyUtil.getPluginPolicyInfo(plugin, environment.tmpDir()));
         }

-        for (Path plugin : PluginsUtils.findPluginDirs(environment.modulesFile())) {
-            addPolicy.accept(PolicyUtil.getModulePolicyInfo(plugin, environment.tmpFile()));
+        for (Path plugin : PluginsUtils.findPluginDirs(environment.modulesDir())) {
+            addPolicy.accept(PolicyUtil.getModulePolicyInfo(plugin, environment.tmpDir()));
         }

         return Collections.unmodifiableMap(map);
@@ -199,7 +199,7 @@ static Permissions createPermissions(Environment environment, Path pidFile) throws IOException {

     private static List<FilePermission> createRecursiveDataPathPermission(Environment environment) throws IOException {
         Permissions policy = new Permissions();
-        for (Path path : environment.dataFiles()) {
+        for (Path path : environment.dataDirs()) {
             addDirectoryPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete", true);
         }
         return toFilePermissions(policy);
@@ -215,13 +215,13 @@ private static Map<String, Set<URL>> readSecuredConfigFiles(
         Map<String, Set<URL>> securedSettingKeys = new HashMap<>();

         for (URL url : mainCodebases) {
-            for (Permission p : PolicyUtil.getPolicyPermissions(url, template, environment.tmpFile())) {
+            for (Permission p : PolicyUtil.getPolicyPermissions(url, template, environment.tmpDir())) {
                 readSecuredConfigFilePermissions(environment, url, p, securedConfigFiles, securedSettingKeys);
             }
         }

         for (var pp : pluginPolicies.entrySet()) {
-            for (Permission p : PolicyUtil.getPolicyPermissions(pp.getKey(), pp.getValue(), environment.tmpFile())) {
+            for (Permission p : PolicyUtil.getPolicyPermissions(pp.getKey(), pp.getValue(), environment.tmpDir())) {
                 readSecuredConfigFilePermissions(environment, pp.getKey(), p, securedConfigFiles, securedSettingKeys);
             }
         }
@@ -242,8 +242,8 @@ private static Map<String, Set<URL>> readSecuredConfigFiles(
                 // If the setting shouldn't be an HTTPS URL, that'll be caught by that setting's validation later in the process.
                 // HTTP (no S) URLs are not supported.
                 if (settingValue.toLowerCase(Locale.ROOT).startsWith("https://") == false) {
-                    Path file = environment.configFile().resolve(settingValue);
-                    if (file.startsWith(environment.configFile()) == false) {
+                    Path file = environment.configDir().resolve(settingValue);
+                    if (file.startsWith(environment.configDir()) == false) {
                         throw new IllegalStateException(
                             ps.getValue() + " tried to grant access to file outside config directory " + file
                         );
@@ -263,9 +263,9 @@ private static Map<String, Set<URL>> readSecuredConfigFiles(
         // always add some config files as exclusive files that no one can access
         // there's no reason for anyone to read these once the security manager is initialized
         // so if something has tried to grant itself access, crash out with an error
-        addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("elasticsearch.yml").toString());
-        addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("jvm.options").toString());
-        addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("jvm.options.d/-").toString());
+        addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("elasticsearch.yml").toString());
+        addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("jvm.options").toString());
+        addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("jvm.options.d/-").toString());

         return Collections.unmodifiableMap(securedConfigFiles);
     }
@@ -279,8 +279,8 @@ private static void readSecuredConfigFilePermissions(
     ) {
         String securedFileName = extractSecuredName(p, SecuredConfigFileAccessPermission.class);
         if (securedFileName != null) {
-            Path securedFile = environment.configFile().resolve(securedFileName);
-            if (securedFile.startsWith(environment.configFile()) == false) {
+            Path securedFile = environment.configDir().resolve(securedFileName);
+            if (securedFile.startsWith(environment.configDir()) == false) {
                 throw new IllegalStateException("[" + url + "] tried to grant access to file outside config directory " + securedFile);
             }
             logger.debug("Jar {} securing access to config file {}", url, securedFile);
@@ -336,26 +336,26 @@ static void addClasspathPermissions(Permissions policy) throws IOException {
      */
     static void addFilePermissions(Permissions policy, Environment environment, Path pidFile) throws IOException {
         // read-only dirs
-        addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binFile(), "read,readlink", false);
-        addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libFile(), "read,readlink", false);
-        addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesFile(), "read,readlink", false);
-        addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.pluginsFile(), "read,readlink", false);
-        addDirectoryPath(policy, "path.conf", environment.configFile(), "read,readlink", false);
+        addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binDir(), "read,readlink", false);
+        addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libDir(), "read,readlink", false);
+        addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesDir(), "read,readlink", false);
+        addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.pluginsDir(), "read,readlink", false);
+        addDirectoryPath(policy, "path.conf", environment.configDir(), "read,readlink", false);

         // read-write dirs
-        addDirectoryPath(policy, "java.io.tmpdir", environment.tmpFile(), "read,readlink,write,delete", false);
-        addDirectoryPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsFile(), "read,readlink,write,delete", false);
-        if (environment.sharedDataFile() != null) {
+        addDirectoryPath(policy, "java.io.tmpdir", environment.tmpDir(), "read,readlink,write,delete", false);
+        addDirectoryPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsDir(), "read,readlink,write,delete", false);
+        if (environment.sharedDataDir() != null) {
             addDirectoryPath(
                 policy,
                 Environment.PATH_SHARED_DATA_SETTING.getKey(),
-                environment.sharedDataFile(),
+                environment.sharedDataDir(),
                 "read,readlink,write,delete",
                 false
             );
         }
         final Set<Path> dataFilesPaths = new HashSet<>();
-        for (Path path : environment.dataFiles()) {
+        for (Path path : environment.dataDirs()) {
             addDirectoryPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete", false);
             /*
              * We have to do this after adding the path because a side effect of that is that the directory is created; the Path#toRealPath
@@ -371,7 +371,7 @@ static void addFilePermissions(Permissions policy, Environment environment, Path pidFile) throws IOException {
                 throw new IllegalStateException("unable to access [" + path + "]", e);
             }
         }
-        for (Path path : environment.repoFiles()) {
+        for (Path path : environment.repoDirs()) {
             addDirectoryPath(policy, Environment.PATH_REPO_SETTING.getKey(), path, "read,readlink,write,delete", false);
         }

@@ -380,7 +380,7 @@ static void addFilePermissions(Permissions policy, Environment environment, Path pidFile) throws IOException {
             addSingleFilePath(policy, pidFile, "delete");
         }
         // we need to touch the operator/settings.json file when restoring from snapshots, on some OSs it needs file write permission
-        addSingleFilePath(policy, environment.configFile().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME), "read,readlink,write");
+        addSingleFilePath(policy, environment.configDir().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME), "read,readlink,write");
     }

     /**
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java
index 6a4296d9b0478..35284cebf22ad 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java
@@ -69,14 +69,14 @@ void spawnNativeControllers(final Environment environment) throws IOException {
         if (spawned.compareAndSet(false, true) == false) {
             throw new IllegalStateException("native controllers already spawned");
         }
-        if (Files.exists(environment.modulesFile()) == false) {
-            throw new IllegalStateException("modules directory [" + environment.modulesFile() + "] not found");
+        if (Files.exists(environment.modulesDir()) == false) {
+            throw new IllegalStateException("modules directory [" + environment.modulesDir() + "] not found");
         }
         /*
          * For each module, attempt to spawn the controller daemon. Silently ignore any module that doesn't include a controller for the
         * correct platform.
         */
-        List<Path> paths = PluginsUtils.findPluginDirs(environment.modulesFile());
+        List<Path> paths = PluginsUtils.findPluginDirs(environment.modulesDir());
        for (final Path modules : paths) {
             final PluginDescriptor info = PluginDescriptor.readFromProperties(modules);
             final Path spawnPath = Platforms.nativeControllerPath(modules);
@@ -91,7 +91,7 @@ void spawnNativeControllers(final Environment environment) throws IOException {
                 );
                 throw new IllegalArgumentException(message);
             }
-            final Process process = spawnNativeController(spawnPath, environment.tmpFile());
+            final Process process = spawnNativeController(spawnPath, environment.tmpDir());
             // The process _shouldn't_ write any output via its stdout or stderr, but if it does then
             // it will block if nothing is reading that output. To avoid this we can pipe the
             // outputs and create pump threads to write any messages there to the ES log.
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java
index 8366083b1907e..31c327ae5da64 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java
@@ -11,10 +11,8 @@

 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
-import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodeRole;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -22,7 +20,6 @@
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.Processors;
 import org.elasticsearch.core.Nullable;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ObjectParser;
 import org.elasticsearch.xcontent.ParseField;
@@ -36,7 +33,6 @@
 import java.util.Objects;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.regex.Pattern;

 import static java.lang.String.format;
 import static org.elasticsearch.node.Node.NODE_EXTERNAL_ID_SETTING;
@@ -52,8 +48,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparable<DesiredNode> {
     private static final ParseField PROCESSORS_RANGE_FIELD = new ParseField("processors_range");
     private static final ParseField MEMORY_FIELD = new ParseField("memory");
     private static final ParseField STORAGE_FIELD = new ParseField("storage");
-    @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated field
-    private static final ParseField VERSION_FIELD = new ParseField("node_version");

     public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
         "desired_node",
@@ -63,8 +57,7 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparable<DesiredNode> {
             (Processors) args[1],
             (ProcessorsRange) args[2],
             (ByteSizeValue) args[3],
-            (ByteSizeValue) args[4],
-            (String) args[5]
+            (ByteSizeValue) args[4]
         )
     );

@@ -98,12 +91,6 @@ static void configureParser(ConstructingObjectParser parser) {
             STORAGE_FIELD,
             ObjectParser.ValueType.STRING
         );
-        parser.declareField(
-            ConstructingObjectParser.optionalConstructorArg(),
-            (p, c) -> p.text(),
-            VERSION_FIELD,
-            ObjectParser.ValueType.STRING
-        );
     }

     private final Settings settings;
@@ -112,21 +99,9 @@ static void configureParser(ConstructingObjectParser parser) {
     private final ByteSizeValue memory;
     private final ByteSizeValue storage;
-    @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated version field
-    private final String version;
     private final String externalId;
     private final Set<DiscoveryNodeRole> roles;

-    @Deprecated
-    public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage, String version) {
-        this(settings, null, processorsRange, memory, storage, version);
-    }
-
-    @Deprecated
-    public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage, String version) {
-        this(settings, Processors.of(processors), null, memory, storage, version);
-    }
-
     public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) {
         this(settings, null, processorsRange, memory, storage);
     }
@@ -136,17 +111,6 @@ public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage) {
     }

     DesiredNode(Settings settings, Processors processors, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) {
-        this(settings, processors, processorsRange, memory, storage, null);
-    }
-
-    DesiredNode(
-        Settings settings,
-        Processors processors,
-        ProcessorsRange processorsRange,
-        ByteSizeValue memory,
-        ByteSizeValue storage,
-        @Deprecated String version
-    ) {
         assert settings != null;
         assert memory != null;
         assert storage != null;
@@ -180,7 +144,6 @@ public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage) {
         this.processorsRange = processorsRange;
         this.memory = memory;
         this.storage = storage;
-        this.version = version;
         this.externalId = NODE_EXTERNAL_ID_SETTING.get(settings);
         this.roles = Collections.unmodifiableSortedSet(new TreeSet<>(DiscoveryNode.getRolesFromSettings(settings)));
     }
@@ -198,25 +161,10 @@ public static DesiredNode readFrom(StreamInput in) throws IOException {
         }
         final var memory = ByteSizeValue.readFrom(in);
         final var storage = ByteSizeValue.readFrom(in);
-        final String version;
-        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
-            version = in.readOptionalString();
-        } else {
-            version = Version.readVersion(in).toString();
+        if (in.getTransportVersion().before(TransportVersions.REMOVE_DESIRED_NODE_VERSION_90)) {
+            in.readOptionalString();
         }
-        return new DesiredNode(settings, processors, processorsRange, memory, storage, version);
-    }
-
-    private static final Pattern SEMANTIC_VERSION_PATTERN = Pattern.compile("^(\\d+\\.\\d+\\.\\d+)\\D?.*");
-
-    private static Version parseLegacyVersion(String version) {
-        if (version != null) {
-            var semanticVersionMatcher = SEMANTIC_VERSION_PATTERN.matcher(version);
-            if (semanticVersionMatcher.matches()) {
-                return Version.fromString(semanticVersionMatcher.group(1));
-            }
-        }
-        return null;
+        return new DesiredNode(settings, processors, processorsRange, memory, storage);
     }

     @Override
@@ -232,16 +180,8 @@ public void writeTo(StreamOutput out) throws IOException {
         }
         memory.writeTo(out);
         storage.writeTo(out);
-        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
-            out.writeOptionalString(version);
-        } else {
-            Version parsedVersion = parseLegacyVersion(version);
-            if (version == null) {
-                // Some node is from before we made the version field not required. If so, fill in with the current node version.
-                Version.writeVersion(Version.CURRENT, out);
-            } else {
-                Version.writeVersion(parsedVersion, out);
-            }
+        if (out.getTransportVersion().before(TransportVersions.REMOVE_DESIRED_NODE_VERSION_90)) {
+            out.writeOptionalString(null);
         }
     }

@@ -269,14 +209,6 @@ public void toInnerXContent(XContentBuilder builder, Params params) throws IOException {
         }
         builder.field(MEMORY_FIELD.getPreferredName(), memory);
         builder.field(STORAGE_FIELD.getPreferredName(), storage);
-        addDeprecatedVersionField(builder);
-    }
-
-    @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated field from response
-    private void addDeprecatedVersionField(XContentBuilder builder) throws IOException {
-        if (version != null) {
-            builder.field(VERSION_FIELD.getPreferredName(), version);
-        }
     }

     public boolean hasMasterRole() {
@@ -356,7 +288,6 @@ private boolean equalsWithoutProcessorsSpecification(DesiredNode that) {
         return Objects.equals(settings, that.settings)
             && Objects.equals(memory, that.memory)
             && Objects.equals(storage, that.storage)
-            && Objects.equals(version, that.version)
             && Objects.equals(externalId, that.externalId)
             && Objects.equals(roles, that.roles);
     }
@@ -369,7 +300,7 @@ public boolean equalsWithProcessorsCloseTo(DesiredNode that) {

     @Override
     public int hashCode() {
-        return Objects.hash(settings, processors, processorsRange, memory, storage, version, externalId, roles);
+        return Objects.hash(settings, processors, processorsRange, memory, storage, externalId, roles);
     }

     @Override
@@ -398,10 +329,6 @@ public String toString() {
             + '}';
     }

-    public boolean hasVersion() {
-        return Strings.isNullOrBlank(version) == false;
-    }
-
     public record ProcessorsRange(Processors min, @Nullable Processors max) implements Writeable, ToXContentObject {

         private static final ParseField MIN_FIELD = new ParseField("min");
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java
index 7b89406be9aa0..606309adf205c 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java
@@ -44,13 +44,12 @@ public record DesiredNodeWithStatus(DesiredNode desiredNode, Status status)
             (Processors) args[1],
             (DesiredNode.ProcessorsRange) args[2],
             (ByteSizeValue) args[3],
-            (ByteSizeValue) args[4],
-            (String) args[5]
+            (ByteSizeValue) args[4]
         ),
         // An unknown status is expected during upgrades to versions >= STATUS_TRACKING_SUPPORT_VERSION
         // the desired node status would be populated when a node in the newer version is elected as
         // master, the desired nodes status update happens in NodeJoinExecutor.
-        args[6] == null ? Status.PENDING : (Status) args[6]
+        args[5] == null ? Status.PENDING : (Status) args[5]
     )
 );
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java
index 015c90ebe450e..d83bde9542d9e 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java
@@ -168,6 +168,13 @@ public static boolean isIndexVisible(
         final boolean isHidden = indexAbstraction.isHidden();
         boolean isVisible = isHidden == false || indicesOptions.expandWildcardsHidden() || isVisibleDueToImplicitHidden(expression, index);
         if (indexAbstraction.getType() == IndexAbstraction.Type.ALIAS) {
+            if (indexAbstraction.isSystem()) {
+                // check if it is net new
+                if (resolver.getNetNewSystemIndexPredicate().test(indexAbstraction.getName())) {
+                    return isSystemIndexVisible(resolver, indexAbstraction);
+                }
+            }
+
             // it's an alias, ignore expandWildcardsOpen and expandWildcardsClosed.
             // complicated to support those options with aliases pointing to multiple indices...
             isVisible = isVisible && indicesOptions.ignoreAliases() == false;
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java
index 04f9448a936be..9cfc29c5217ad 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java
@@ -2077,6 +2077,12 @@ public Builder putRolloverInfo(RolloverInfo rolloverInfo) {
             return this;
         }

+        public Builder putRolloverInfos(Map<String, RolloverInfo> rolloverInfos) {
+            this.rolloverInfos.clear();
+            this.rolloverInfos.putAllFromMap(rolloverInfos);
+            return this;
+        }
+
         public long version() {
             return this.version;
         }
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
index 97b83b1dc1562..1657c5c0c2d32 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
@@ -1457,7 +1457,7 @@ public void validateIndexSettings(String indexName, final Settings settings, final boolean forbidPrivateIndexSettings) {
     }

     List<String> getIndexSettingsValidationErrors(final Settings settings, final boolean forbidPrivateIndexSettings) {
-        List<String> validationErrors = validateIndexCustomPath(settings, env.sharedDataFile());
+        List<String> validationErrors = validateIndexCustomPath(settings, env.sharedDataDir());
         if (forbidPrivateIndexSettings) {
             validationErrors.addAll(validatePrivateSettingsNotExplicitlySet(settings, indexScopedSettings));
         }
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
index 5d45bf1ce127e..8b8c3f12cdf9f 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
@@ -56,7 +56,7 @@ public class MetadataMappingService {
     public MetadataMappingService(ClusterService clusterService, IndicesService indicesService) {
         this.clusterService = clusterService;
         this.indicesService = indicesService;
-        taskQueue = clusterService.createTaskQueue("put-mapping", Priority.HIGH, new PutMappingExecutor());
+        this.taskQueue = clusterService.createTaskQueue("put-mapping", Priority.HIGH, new PutMappingExecutor());
     }

     record PutMappingClusterStateUpdateTask(PutMappingClusterStateUpdateRequest request, ActionListener<AcknowledgedResponse> listener)
diff --git a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
index f6d6c7fd68738..134f7746ba627 100644
--- a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
+++ b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
@@ -127,7 +127,7 @@ public static void configure(final Environment environment, boolean useConsole)
             StatusLogger.getLogger().removeListener(ERROR_LISTENER);
         }
         configureESLogging();
-        configure(environment.settings(), environment.configFile(), environment.logsFile(), useConsole);
+        configure(environment.settings(), environment.configDir(), environment.logsDir(), useConsole);
         initializeStatics();
         // creates a permanent status logger that can watch for StatusLogger events and forward to a real logger
         configureStatusLoggerForwarder();
diff --git a/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java b/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java
index 6b92f87a9be23..e8354be5ea225 100644
--- a/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java
+++ b/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java
@@ -22,6 +22,12 @@ public Logger getLogger(String name) {

     @Override
     public Logger getLogger(Class<?> clazz) {
-        return new LoggerImpl(LogManager.getLogger(clazz));
+        // Elasticsearch configures logging at the root level, it does not support
+        // programmatic configuration at the logger level. Log4j's method for
+        // getting a logger by Class doesn't just use the class name, but also
+        // scans the classloader hierarchy for programmatic configuration. Here we
+        // just delegate to use the String class name so that regardless of which
+        // classloader a class comes from, we will use the root logging config.
+        return getLogger(clazz.getName());
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
index 073000979918e..2aa87d808fc93 100644
--- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
+++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
@@ -20,6 +20,8 @@
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.FilterCodecReader;
 import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.FilterLeafReader;
@@ -190,7 +192,18 @@ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Directory directory) throws IOException {
                 throw new IllegalStateException("no commit found in the directory");
             }
         }
+        // Need to figure out what the parent field is that, so that validation in IndexWriter doesn't fail
+        // if no parent field is configured, but FieldInfo says there is a parent field.
+        String parentField = null;
         final IndexCommit cp = getIndexCommit(si, directory);
+        try (var reader = DirectoryReader.open(cp)) {
+            var topLevelFieldInfos = FieldInfos.getMergedFieldInfos(reader);
+            for (FieldInfo fieldInfo : topLevelFieldInfos) {
+                if (fieldInfo.isParentField()) {
+                    parentField = fieldInfo.getName();
+                }
+            }
+        }
         try (
             IndexWriter writer = new IndexWriter(
                 directory,
@@ -198,6 +211,7 @@ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Directory directory) throws IOException {
                     .setIndexCommit(cp)
                     .setCommitOnClose(false)
                     .setOpenMode(IndexWriterConfig.OpenMode.APPEND)
+                    .setParentField(parentField)
             )
         ) {
             // do nothing and close this will kick off IndexFileDeleter which will remove all pending files
diff --git a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java
index 4a2e1cd92d4da..168dec0665f51 100644
--- a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java
+++ b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java
@@ -142,7 +142,7 @@ public LocallyMountedSecrets(Environment environment) {
      * @return Secrets directory within an Elasticsearch environment
      */
     public static Path resolveSecretsDir(Environment environment) {
-        return environment.configFile().toAbsolutePath().resolve(SECRETS_DIRECTORY);
+        return environment.configDir().toAbsolutePath().resolve(SECRETS_DIRECTORY);
     }

     /**
diff --git a/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java b/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java
index 2df7e6537c609..3aa7c67a14c65 100644
--- a/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java
+++ b/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java
@@ -30,18 +30,29 @@ public SizeLimitingStringWriter(int sizeLimit) {
         this.sizeLimit = sizeLimit;
     }

-    private void checkSizeLimit(int additionalChars) {
-        int bufLen = getBuffer().length();
-        if (bufLen + additionalChars > sizeLimit) {
-            throw new SizeLimitExceededException(
-                Strings.format("String [%s...] has exceeded the size limit [%s]", getBuffer().substring(0, Math.min(bufLen, 20)), sizeLimit)
-            );
+    private int limitSize(int additionalChars) {
+        int neededSize = getBuffer().length() + additionalChars;
+        if (neededSize > sizeLimit) {
+            return additionalChars - (neededSize - sizeLimit);
         }
+        return additionalChars;
+    }
+
+    private void throwSizeLimitExceeded(int limitedChars, int requestedChars) {
+        assert limitedChars < requestedChars;
+        int bufLen = getBuffer().length();
+        int foundSize = bufLen - limitedChars + requestedChars; // reconstitute original
+        String selection = getBuffer().substring(0, Math.min(bufLen, 20));
+        throw new SizeLimitExceededException(
+            Strings.format("String [%s...] has size [%d] which exceeds the size limit [%d]", selection, foundSize, sizeLimit)
+        );
     }

     @Override
     public void write(int c) {
-        checkSizeLimit(1);
+        if (limitSize(1) != 1) {
+            throwSizeLimitExceeded(0, 1);
+        }
         super.write(c);
     }

@@ -49,20 +60,29 @@ public void write(int c) {

     @Override
     public void write(char[] cbuf, int off, int len) {
-        checkSizeLimit(len);
-        super.write(cbuf, off, len);
+        int limitedLen = limitSize(len);
+        if (limitedLen > 0) {
+            super.write(cbuf, off, limitedLen);
+        }
+        if (limitedLen != len) {
+            throwSizeLimitExceeded(limitedLen, len);
+        }
     }

     @Override
     public void write(String str) {
-        checkSizeLimit(str.length());
-        super.write(str);
+        this.write(str, 0, str.length());
     }

     @Override
     public void write(String str, int off, int len) {
-        checkSizeLimit(len);
-        super.write(str, off, len);
+        int limitedLen = limitSize(len);
+        if (limitedLen > 0) {
+            super.write(str, off, limitedLen);
+        }
+        if (limitedLen != len) {
+            throwSizeLimitExceeded(limitedLen, len);
+        }
     }

     // append(...) delegates to write(...) methods
diff --git a/server/src/main/java/org/elasticsearch/env/Environment.java b/server/src/main/java/org/elasticsearch/env/Environment.java
index f9594655719f7..813da761159c5 100644
--- a/server/src/main/java/org/elasticsearch/env/Environment.java
+++ b/server/src/main/java/org/elasticsearch/env/Environment.java
@@ -46,28 +46,28 @@ public class Environment {

     private final Settings settings;

-    private final Path[] dataFiles;
+    private final Path[] dataDirs;

-    private final Path[] repoFiles;
+    private final Path[] repoDirs;

-    private final Path configFile;
+    private final Path configDir;

-    private final Path pluginsFile;
+    private final Path pluginsDir;

-    private final Path modulesFile;
+    private final Path modulesDir;

-    private final Path sharedDataFile;
+    private final Path sharedDataDir;

     /** location of bin/, used by plugin manager */
-    private final Path binFile;
+    private final Path binDir;

     /** location of lib/, */
-    private final Path libFile;
+    private final Path libDir;

-    private final Path logsFile;
+    private final Path logsDir;

     /** Path to the temporary file directory used by the JDK */
-    private final Path tmpFile;
+    private final Path tmpDir;

     public Environment(final Settings settings, final Path configPath) {
         this(settings, configPath, PathUtils.get(System.getProperty("java.io.tmpdir")));
@@ -83,67 +83,67 @@ public Environment(final Settings settings, final Path configPath) {
         }

         if (configPath != null) {
-            configFile = configPath.toAbsolutePath().normalize();
+            configDir = configPath.toAbsolutePath().normalize();
         } else {
-            configFile = homeFile.resolve("config");
+            configDir = homeFile.resolve("config");
         }

-        tmpFile = Objects.requireNonNull(tmpPath);
+        tmpDir = Objects.requireNonNull(tmpPath);

-        pluginsFile = homeFile.resolve("plugins");
+        pluginsDir = homeFile.resolve("plugins");

         List<String> dataPaths = PATH_DATA_SETTING.get(settings);
         if (dataPaths.isEmpty() == false) {
-            dataFiles = new Path[dataPaths.size()];
+            dataDirs = new Path[dataPaths.size()];
             for (int i = 0; i < dataPaths.size(); i++) {
-                dataFiles[i] = PathUtils.get(dataPaths.get(i)).toAbsolutePath().normalize();
+                dataDirs[i] = PathUtils.get(dataPaths.get(i)).toAbsolutePath().normalize();
             }
         } else {
-            dataFiles = new Path[] { homeFile.resolve("data") };
+            dataDirs = new Path[] { homeFile.resolve("data") };
         }
         if (PATH_SHARED_DATA_SETTING.exists(settings)) {
-            sharedDataFile = PathUtils.get(PATH_SHARED_DATA_SETTING.get(settings)).toAbsolutePath().normalize();
+            sharedDataDir = PathUtils.get(PATH_SHARED_DATA_SETTING.get(settings)).toAbsolutePath().normalize();
         } else {
-            sharedDataFile = null;
+            sharedDataDir = null;
         }
         List<String> repoPaths = PATH_REPO_SETTING.get(settings);
         if (repoPaths.isEmpty()) {
-            repoFiles = EMPTY_PATH_ARRAY;
+            repoDirs = EMPTY_PATH_ARRAY;
         } else {
-            repoFiles = new Path[repoPaths.size()];
+            repoDirs = new Path[repoPaths.size()];
             for (int i = 0; i < repoPaths.size(); i++) {
-                repoFiles[i] = PathUtils.get(repoPaths.get(i)).toAbsolutePath().normalize();
+                repoDirs[i] = PathUtils.get(repoPaths.get(i)).toAbsolutePath().normalize();
             }
         }

         // this is trappy, Setting#get(Settings) will get a fallback setting yet return false for Settings#exists(Settings)
         if (PATH_LOGS_SETTING.exists(settings)) {
-            logsFile = PathUtils.get(PATH_LOGS_SETTING.get(settings)).toAbsolutePath().normalize();
+            logsDir = PathUtils.get(PATH_LOGS_SETTING.get(settings)).toAbsolutePath().normalize();
         } else {
-            logsFile = homeFile.resolve("logs");
+            logsDir = homeFile.resolve("logs");
         }

-        binFile = homeFile.resolve("bin");
-        libFile = homeFile.resolve("lib");
-        modulesFile = homeFile.resolve("modules");
+        binDir = homeFile.resolve("bin");
+        libDir = homeFile.resolve("lib");
+        modulesDir = homeFile.resolve("modules");

         final Settings.Builder finalSettings = Settings.builder().put(settings);
         if (PATH_DATA_SETTING.exists(settings)) {
             if (dataPathUsesList(settings)) {
-                finalSettings.putList(PATH_DATA_SETTING.getKey(), Arrays.stream(dataFiles).map(Path::toString).toList());
+                finalSettings.putList(PATH_DATA_SETTING.getKey(), Arrays.stream(dataDirs).map(Path::toString).toList());
             } else {
-                assert dataFiles.length == 1;
-                finalSettings.put(PATH_DATA_SETTING.getKey(), dataFiles[0]);
+                assert dataDirs.length == 1;
+                finalSettings.put(PATH_DATA_SETTING.getKey(), dataDirs[0]);
             }
         }
         finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile);
-        finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString());
+        finalSettings.put(PATH_LOGS_SETTING.getKey(), logsDir.toString());
         if (PATH_REPO_SETTING.exists(settings)) {
-            finalSettings.putList(Environment.PATH_REPO_SETTING.getKey(), Arrays.stream(repoFiles).map(Path::toString).toList());
+            finalSettings.putList(Environment.PATH_REPO_SETTING.getKey(), Arrays.stream(repoDirs).map(Path::toString).toList());
         }
         if (PATH_SHARED_DATA_SETTING.exists(settings)) {
-            assert sharedDataFile != null;
-            finalSettings.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataFile.toString());
+            assert sharedDataDir != null;
+            finalSettings.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataDir.toString());
         }

         this.settings = finalSettings.build();
@@ -159,22 +159,22 @@ public Settings settings() {
     /**
      * The data location.
      */
-    public Path[] dataFiles() {
-        return dataFiles;
+    public Path[] dataDirs() {
+        return dataDirs;
     }

     /**
      * The shared data location
     */
-    public Path sharedDataFile() {
-        return sharedDataFile;
+    public Path sharedDataDir() {
+        return sharedDataDir;
     }

     /**
      * The shared filesystem repo locations.
      */
-    public Path[] repoFiles() {
-        return repoFiles;
+    public Path[] repoDirs() {
+        return repoDirs;
     }

     /**
@@ -182,8 +182,8 @@ public Path[] repoFiles() {
      *
      * If the specified location doesn't match any of the roots, returns null.
*/ - public Path resolveRepoFile(String location) { - return PathUtils.get(repoFiles, location); + public Path resolveRepoDir(String location) { + return PathUtils.get(repoDirs, location); } /** @@ -197,7 +197,7 @@ public URL resolveRepoURL(URL url) { if ("file".equalsIgnoreCase(url.getProtocol())) { if (url.getHost() == null || "".equals(url.getHost())) { // only local file urls are supported - Path path = PathUtils.get(repoFiles, url.toURI()); + Path path = PathUtils.get(repoDirs, url.toURI()); if (path == null) { // Couldn't resolve against known repo locations return null; @@ -232,49 +232,48 @@ public URL resolveRepoURL(URL url) { } } - // TODO: rename all these "file" methods to "dir" /** * The config directory. */ - public Path configFile() { - return configFile; + public Path configDir() { + return configDir; } - public Path pluginsFile() { - return pluginsFile; + public Path pluginsDir() { + return pluginsDir; } - public Path binFile() { - return binFile; + public Path binDir() { + return binDir; } - public Path libFile() { - return libFile; + public Path libDir() { + return libDir; } - public Path modulesFile() { - return modulesFile; + public Path modulesDir() { + return modulesDir; } - public Path logsFile() { - return logsFile; + public Path logsDir() { + return logsDir; } /** Path to the default temp directory used by the JDK */ - public Path tmpFile() { - return tmpFile; + public Path tmpDir() { + return tmpDir; } /** Ensure the configured temp directory is a valid directory */ - public void validateTmpFile() throws IOException { - validateTemporaryDirectory("Temporary directory", tmpFile); + public void validateTmpDir() throws IOException { + validateTemporaryDirectory("Temporary directory", tmpDir); } /** * Ensure the temp directories needed for JNA are set up correctly. 
*/ public void validateNativesConfig() throws IOException { - validateTmpFile(); + validateTmpDir(); if (Constants.LINUX) { validateTemporaryDirectory(LIBFFI_TMPDIR_ENVIRONMENT_VARIABLE + " environment variable", getLibffiTemporaryDirectory()); } @@ -335,15 +334,15 @@ public static long getUsableSpace(Path path) throws IOException { * object which may contain different setting) */ public static void assertEquivalent(Environment actual, Environment expected) { - assertEquals(actual.dataFiles(), expected.dataFiles(), "dataFiles"); - assertEquals(actual.repoFiles(), expected.repoFiles(), "repoFiles"); - assertEquals(actual.configFile(), expected.configFile(), "configFile"); - assertEquals(actual.pluginsFile(), expected.pluginsFile(), "pluginsFile"); - assertEquals(actual.binFile(), expected.binFile(), "binFile"); - assertEquals(actual.libFile(), expected.libFile(), "libFile"); - assertEquals(actual.modulesFile(), expected.modulesFile(), "modulesFile"); - assertEquals(actual.logsFile(), expected.logsFile(), "logsFile"); - assertEquals(actual.tmpFile(), expected.tmpFile(), "tmpFile"); + assertEquals(actual.dataDirs(), expected.dataDirs(), "dataDirs"); + assertEquals(actual.repoDirs(), expected.repoDirs(), "repoDirs"); + assertEquals(actual.configDir(), expected.configDir(), "configDir"); + assertEquals(actual.pluginsDir(), expected.pluginsDir(), "pluginsDir"); + assertEquals(actual.binDir(), expected.binDir(), "binDir"); + assertEquals(actual.libDir(), expected.libDir(), "libDir"); + assertEquals(actual.modulesDir(), expected.modulesDir(), "modulesDir"); + assertEquals(actual.logsDir(), expected.logsDir(), "logsDir"); + assertEquals(actual.tmpDir(), expected.tmpDir(), "tmpDir"); } private static void assertEquals(Object actual, Object expected, String name) { diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 90e2ae5c62703..febde6b6a69ac 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -215,10 +215,10 @@ public NodeLock( final CheckedFunction pathFunction, final Function subPathMapping ) throws IOException { - dataPaths = new DataPath[environment.dataFiles().length]; + dataPaths = new DataPath[environment.dataDirs().length]; locks = new Lock[dataPaths.length]; try { - final Path[] dataPaths = environment.dataFiles(); + final Path[] dataPaths = environment.dataDirs(); for (int dirIndex = 0; dirIndex < dataPaths.length; dirIndex++) { Path dataDir = dataPaths[dirIndex]; Path dir = subPathMapping.apply(dataDir); @@ -267,9 +267,9 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce boolean success = false; try { - sharedDataPath = environment.sharedDataFile(); + sharedDataPath = environment.sharedDataDir(); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { if (Files.exists(path)) { // Call to toRealPath required to resolve symlinks. 
// We let it fall through to create directories to ensure the symlink @@ -287,7 +287,7 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce Locale.ROOT, "failed to obtain node locks, tried %s;" + " maybe these locations are not writable or multiple nodes were started on the same data path?", - Arrays.toString(environment.dataFiles()) + Arrays.toString(environment.dataDirs()) ); throw new IllegalStateException(message, e); } @@ -310,7 +310,7 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce } // versions 7.x and earlier put their data under ${path.data}/nodes/; leave a file at that location to prevent downgrades - for (Path dataPath : environment.dataFiles()) { + for (Path dataPath : environment.dataDirs()) { final Path legacyNodesPath = dataPath.resolve("nodes"); if (Files.isRegularFile(legacyNodesPath) == false) { final String content = "written by Elasticsearch " @@ -349,7 +349,7 @@ private static boolean upgradeLegacyNodeFolders(Logger logger, Settings settings boolean upgradeNeeded = false; // check if we can do an auto-upgrade - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { final Path nodesFolderPath = path.resolve("nodes"); if (Files.isDirectory(nodesFolderPath)) { final List nodeLockIds = new ArrayList<>(); @@ -392,7 +392,7 @@ private static boolean upgradeLegacyNodeFolders(Logger logger, Settings settings return false; } - logger.info("upgrading legacy data folders: {}", Arrays.toString(environment.dataFiles())); + logger.info("upgrading legacy data folders: {}", Arrays.toString(environment.dataDirs())); // acquire locks on legacy path for duration of upgrade (to ensure there is no older ES version running on this path) final NodeLock legacyNodeLock; @@ -403,7 +403,7 @@ private static boolean upgradeLegacyNodeFolders(Logger logger, Settings settings Locale.ROOT, "failed to obtain legacy node locks, tried %s;" + " maybe these locations are not writable or multiple nodes were started on the same data path?", - Arrays.toString(environment.dataFiles()) + Arrays.toString(environment.dataDirs()) ); throw new IllegalStateException(message, e); } @@ -494,7 +494,7 @@ private static boolean upgradeLegacyNodeFolders(Logger logger, Settings settings } // upgrade successfully completed, remove legacy nodes folders - IOUtils.rm(Stream.of(environment.dataFiles()).map(path -> path.resolve("nodes")).toArray(Path[]::new)); + IOUtils.rm(Stream.of(environment.dataDirs()).map(path -> path.resolve("nodes")).toArray(Path[]::new)); return true; } diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 5512dffdda53e..baba9e94db7a7 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -232,7 +232,8 @@ public IndexService( mapperMetrics ); this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService); - if (indexSettings.getIndexSortConfig().hasIndexSort()) { + boolean sourceOnly = Boolean.parseBoolean(indexSettings.getSettings().get("index.source_only")); + if (indexSettings.getIndexSortConfig().hasIndexSort() && sourceOnly == false) { // we delay the actual creation of the sort order for this index because the mapping has not been merged yet. // The sort order is validated right after the merge of the mapping later in the process. 
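The new guard reads the setting as a raw string on purpose: Boolean.parseBoolean is implemented as "true".equalsIgnoreCase(s), so an index that never set index.source_only yields null and safely parses to false, leaving the index sort enabled. A minimal standalone sketch of that null tolerance (class name hypothetical, not part of this change):

    public class SourceOnlyGuardSketch {
        public static void main(String[] args) {
            String absent = null;                              // setting not present on the index
            System.out.println(Boolean.parseBoolean(absent));  // false -> index sort stays enabled
            System.out.println(Boolean.parseBoolean("true"));  // true  -> skip building the sort
            System.out.println(Boolean.parseBoolean("TRUE"));  // true, comparison is case-insensitive
        }
    }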
this.indexSortSupplier = () -> indexSettings.getIndexSortConfig() diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 2470bfb7e5c56..3b173ace0ac7b 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -24,7 +24,6 @@ import java.util.TreeMap; import java.util.TreeSet; import java.util.function.IntFunction; -import java.util.stream.Collectors; @SuppressWarnings("deprecation") public class IndexVersions { @@ -108,43 +107,43 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_LUCENE_9_9_1 = def(8_500_008, Version.LUCENE_9_9_1); public static final IndexVersion ES_VERSION_8_12_1 = def(8_500_009, Version.LUCENE_9_9_1); public static final IndexVersion UPGRADE_8_12_1_LUCENE_9_9_2 = def(8_500_010, Version.LUCENE_9_9_2); - public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_00_0, Version.LUCENE_9_9_1); - public static final IndexVersion UPGRADE_LUCENE_9_9_2 = def(8_502_00_0, Version.LUCENE_9_9_2); - public static final IndexVersion TIME_SERIES_ID_HASHING = def(8_502_00_1, Version.LUCENE_9_9_2); - public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_00_1, Version.LUCENE_9_10_0); - public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion SEMANTIC_TEXT_FIELD_TYPE = def(8_507_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_11_1 = def(8_511_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion INDEX_SORTING_ON_NESTED = def(8_512_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion LENIENT_UPDATEABLE_SYNONYMS = def(8_513_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion ENABLE_IGNORE_MALFORMED_LOGSDB = def(8_514_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion ADD_ROLE_MAPPING_CLEANUP_MIGRATION = def(8_518_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT_BACKPORT = def(8_519_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID_BACKPORT = def(8_520_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion V8_DEPRECATE_SOURCE_MODE_MAPPER = def(8_521_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT = def(8_522_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion 
UPGRADE_TO_LUCENE_9_12_1 = def(8_523_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion INFERENCE_METADATA_FIELDS_BACKPORT = def(8_524_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME_BACKPORT = def(8_525_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion HOSTNAME_DOC_VALUES_SPARSE_INDEX = def(9_008_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_00_0, Version.LUCENE_10_1_0); + public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_0_00, Version.LUCENE_9_9_1); + public static final IndexVersion UPGRADE_LUCENE_9_9_2 = def(8_502_0_00, Version.LUCENE_9_9_2); + public static final IndexVersion TIME_SERIES_ID_HASHING = def(8_502_0_01, Version.LUCENE_9_9_2); + public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_0_01, Version.LUCENE_9_10_0); + public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion SEMANTIC_TEXT_FIELD_TYPE = def(8_507_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_11_1 = def(8_511_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion INDEX_SORTING_ON_NESTED = def(8_512_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion LENIENT_UPDATEABLE_SYNONYMS = def(8_513_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion ENABLE_IGNORE_MALFORMED_LOGSDB = def(8_514_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion ADD_ROLE_MAPPING_CLEANUP_MIGRATION = def(8_518_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion 
LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT_BACKPORT = def(8_519_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID_BACKPORT = def(8_520_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion V8_DEPRECATE_SOURCE_MODE_MAPPER = def(8_521_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT = def(8_522_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_12_1 = def(8_523_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion INFERENCE_METADATA_FIELDS_BACKPORT = def(8_524_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME_BACKPORT = def(8_525_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion HOSTNAME_DOC_VALUES_SPARSE_INDEX = def(9_008_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_0_00, Version.LUCENE_10_1_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ @@ -160,17 +159,17 @@ private static Version parseUnchecked(String version) { * To add a new index version, add a new constant at the bottom of the list, above this comment. Don't add other lines, * comments, etc. The version id has the following layout: * - * M_NNN_SS_P + * M_NNN_S_PP * * M - The major version of Elasticsearch * NNN - The server version part - * SS - The serverless version part. It should always be 00 here, it is used by serverless only. - * P - The patch version part + * S - The subsidiary version part. It should always be 0 here, it is only used in subsidiary repositories. + * PP - The patch version part * * To determine the id of the next IndexVersion constant, do the following: * - Use the same major version, unless bumping majors * - Bump the server version part by 1, unless creating a patch version - * - Leave the serverless part as 00 + * - Leave the subsidiary part as 0 * - Bump the patch part if creating a patch version * * If a patch version is created, it should be placed sorted among the other existing constants. 
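Since the constants above were renumbered into the M_NNN_S_PP layout, the digit groups can be recovered with plain integer arithmetic; a small illustrative sketch (helper name and the printed samples are not part of this change):

    public class IndexVersionIdLayoutSketch {
        // Decode an index version id into its M_NNN_S_PP digit groups.
        static String decode(int id) {
            int patch = id % 100;               // PP  - patch part
            int subsidiary = (id / 100) % 10;   // S   - subsidiary part, always 0 in this repository
            int server = (id / 1_000) % 1_000;  // NNN - server part
            int major = id / 1_000_000;         // M   - major version
            return String.format("%d_%03d_%d_%02d", major, server, subsidiary, patch);
        }

        public static void main(String[] args) {
            System.out.println(decode(8_512_0_00)); // 8_512_0_00 (INDEX_SORTING_ON_NESTED)
            System.out.println(decode(9_009_0_00)); // 9_009_0_00 (UPGRADE_TO_LUCENE_10_1_0)
        }
    }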
@@ -250,10 +249,6 @@ static NavigableMap getAllVersionIds(Class cls) { return Collections.unmodifiableNavigableMap(builder); } - static Collection getAllWriteVersions() { - return VERSION_IDS.values().stream().filter(v -> v.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE)).collect(Collectors.toSet()); - } - static Collection getAllVersions() { return VERSION_IDS.values(); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java index 505e39a9590ef..a93a480463564 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -233,7 +233,7 @@ public static List getWordList( } } - final Path path = env.configFile().resolve(wordListPath); + final Path path = env.configDir().resolve(wordListPath); try { return loadWordList(path, removeComments); @@ -337,7 +337,7 @@ public static Reader getReaderFromFile(Environment env, String filePath, String if (filePath == null) { return null; } - final Path path = env.configFile().resolve(filePath); + final Path path = env.configDir().resolve(filePath); try { return Files.newBufferedReader(path, StandardCharsets.UTF_8); } catch (CharacterCodingException ex) { diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 7f6fe40dbaaf0..f463dce2ec70e 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -67,6 +67,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; @@ -1020,24 +1021,17 @@ private VersionValue resolveDocVersion(final Operation op, boolean loadSeqNo) th VersionValue versionValue = getVersionFromMap(op.uid()); if (versionValue == null) { assert incrementIndexVersionLookup(); // used for asserting in tests - final VersionsAndSeqNoResolver.DocIdAndVersion docIdAndVersion; - try (Searcher searcher = acquireSearcher("load_version", SearcherScope.INTERNAL)) { - if (engineConfig.getIndexSettings().getMode() == IndexMode.TIME_SERIES) { - assert engineConfig.getLeafSorter() == DataStream.TIMESERIES_LEAF_READERS_SORTER; - docIdAndVersion = VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion( - searcher.getIndexReader(), - op.uid(), - op.id(), - loadSeqNo - ); - } else { - docIdAndVersion = VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion( - searcher.getIndexReader(), - op.uid(), - loadSeqNo - ); + final VersionsAndSeqNoResolver.DocIdAndVersion docIdAndVersion = performActionWithDirectoryReader( + SearcherScope.INTERNAL, + directoryReader -> { + if (engineConfig.getIndexSettings().getMode() == IndexMode.TIME_SERIES) { + assert engineConfig.getLeafSorter() == DataStream.TIMESERIES_LEAF_READERS_SORTER; + return VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, op.uid(), op.id(), loadSeqNo); + } else { + return VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, op.uid(), loadSeqNo); + } } - } + ); if (docIdAndVersion != null) { versionValue = new IndexVersionValue(null, docIdAndVersion.version, docIdAndVersion.seqNo, docIdAndVersion.primaryTerm); } @@ 
-3470,4 +3464,26 @@ public LiveVersionMap getLiveVersionMap() { protected long getPreCommitSegmentGeneration() { return preCommitSegmentGeneration.get(); } + + <T> T performActionWithDirectoryReader(SearcherScope scope, CheckedFunction<DirectoryReader, T, IOException> action) + throws EngineException { + assert scope == SearcherScope.INTERNAL : "performActionWithDirectoryReader(...) isn't prepared for external usage"; + assert store.hasReferences(); + try { + ReferenceManager<ElasticsearchDirectoryReader> referenceManager = getReferenceManager(scope); + ElasticsearchDirectoryReader acquire = referenceManager.acquire(); + try { + return action.apply(acquire); + } finally { + referenceManager.release(acquire); + } + } catch (AlreadyClosedException ex) { + throw ex; + } catch (Exception ex) { + maybeFailEngine("perform_action_directory_reader", ex); + ensureOpen(ex); // throw EngineCloseException here if we are already closed + logger.error("failed to perform action with directory reader", ex); + throw new EngineException(shardId, "failed to perform action with directory reader", ex); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java index 20154c20b3634..73a92869e31ba 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java @@ -9,6 +9,8 @@ package org.elasticsearch.index.engine; +import com.carrotsearch.hppc.IntArrayList; + import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; @@ -191,8 +193,28 @@ private Translog.Operation[] loadDocuments(List documentRecords) t maxDoc = leafReaderContext.reader().maxDoc(); } while (docRecord.docID() >= docBase + maxDoc); - leafFieldLoader = storedFieldLoader.getLoader(leafReaderContext, null); - leafSourceLoader = sourceLoader.leaf(leafReaderContext.reader(), null); + // TODO: instead of building an array, consider just checking whether doc ids are dense. + // Note, field loaders would then lose the ability to optionally load values eagerly. + IntArrayList nextDocIds = new IntArrayList(); + for (int j = i; j < documentRecords.size(); j++) { + var record = documentRecords.get(j); + if (record.isTombstone()) { + continue; + } + int docID = record.docID(); + if (docID >= docBase + maxDoc) { + break; + } + int segmentDocID = docID - docBase; + nextDocIds.add(segmentDocID); + } + + // This computed doc ids array is used by the stored field loader as a heuristic to determine whether to use a sequential + // stored field reader (which bulk loads stored fields and avoids decompressing the same blocks multiple times). For the + // source loader, it is also used as a heuristic for bulk reading doc values (e.g. SingletonDocValuesLoader).
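The loop above is the standard Lucene translation from global doc ids to segment-local ids via docBase; the TODO suggests the array could eventually be replaced by a density check, which might look roughly like this (method name and placement are hypothetical):

    // Ids are "dense" when they form one contiguous ascending run, the case a
    // sequential stored-field reader exploits. Assumes sorted, distinct ids.
    static boolean isDense(int[] segmentDocIds) {
        if (segmentDocIds.length == 0) {
            return false;
        }
        return segmentDocIds[segmentDocIds.length - 1] - segmentDocIds[0] == segmentDocIds.length - 1;
    }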
+ int[] nextDocIdArray = nextDocIds.toArray(); + leafFieldLoader = storedFieldLoader.getLoader(leafReaderContext, nextDocIdArray); + leafSourceLoader = sourceLoader.leaf(leafReaderContext.reader(), nextDocIdArray); setNextSourceMetadataReader(leafReaderContext); } int segmentDocID = docRecord.docID() - docBase; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java index 19a1cce746172..7b3ecf365e44b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; /** * Loads values from {@code _source}. This whole process is very slow and cast-tastic, @@ -230,7 +231,7 @@ private static class BytesRefs extends BlockSourceReader { @Override protected void append(BlockLoader.Builder builder, Object v) { - ((BlockLoader.BytesRefBuilder) builder).appendBytesRef(toBytesRef(scratch, (String) v)); + ((BlockLoader.BytesRefBuilder) builder).appendBytesRef(toBytesRef(scratch, Objects.toString(v))); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index ba9e902fee5d9..127ec05b25e63 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -50,6 +50,13 @@ private Wrapper(ObjectMapper parent, DocumentParserContext in) { this.in = in; } + // Used to create a copy_to context. + // It is important to reset `dynamic` here since it is possible that we copy into a completely different object. + private Wrapper(RootObjectMapper root, DocumentParserContext in) { + super(root, ObjectMapper.Dynamic.getRootDynamic(in.mappingLookup()), in); + this.in = in; + } + @Override public Iterable nonRootDocuments() { return in.nonRootDocuments(); @@ -711,6 +718,7 @@ in synthetic _source (to be consistent with stored _source). 
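A hypothetical mapping makes the new Wrapper constructor's comment concrete: the source field sits under a dynamic: strict object, yet its copy_to target does not exist and can only be created under the root's dynamic rules, so the copy_to context must not inherit dynamic from whatever object was being parsed:

    // Illustrative mapping only, not taken from this change: copying out of a
    // strict subtree into a root-level field that root-level dynamic governs.
    String mapping = """
        {
          "dynamic": true,
          "properties": {
            "locked": {
              "type": "object",
              "dynamic": "strict",
              "properties": {
                "name": { "type": "keyword", "copy_to": "everything" }
              }
            }
          }
        }
        """;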
ContentPath path = new ContentPath(); XContentParser parser = DotExpandingXContentParser.expandDots(new CopyToParser(copyToField, parser()), path); + return new Wrapper(root(), this) { @Override public ContentPath path() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java index be4237fec3303..4039bccf1fd18 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.SearchExecutionContext; @@ -41,6 +42,10 @@ public abstract class InferenceMetadataFieldsMapper extends MetadataFieldMapper Setting.Property.InternalIndex ); + // Check index version SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP because that index version was added in the same serverless promotion + // where the new format was enabled by default + public static final IndexVersion USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT = IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP; + public static final String NAME = "_inference_fields"; public static final String CONTENT_TYPE = "_inference_fields"; @@ -86,10 +91,12 @@ public abstract ValueFetcher valueFetcher( */ public static boolean isEnabled(Settings settings) { var version = IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings); - if (version.before(IndexVersions.INFERENCE_METADATA_FIELDS) - && version.between(IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) == false) { + if ((version.before(IndexVersions.INFERENCE_METADATA_FIELDS) + && version.between(IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) == false) + || (version.before(USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT) && USE_LEGACY_SEMANTIC_TEXT_FORMAT.exists(settings) == false)) { return false; } + return USE_LEGACY_SEMANTIC_TEXT_FORMAT.get(settings) == false; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 0935e219fb5c0..7567fae7d73e6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -30,6 +30,7 @@ public class MapperFeatures implements FeatureSpecification { public static final NodeFeature META_FETCH_FIELDS_ERROR_CODE_CHANGED = new NodeFeature("meta_fetch_fields_error_code_changed"); public static final NodeFeature SPARSE_VECTOR_STORE_SUPPORT = new NodeFeature("mapper.sparse_vector.store_support"); public static final NodeFeature SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX = new NodeFeature("mapper.nested.sorting_fields_check_fix"); + public static final NodeFeature DYNAMIC_HANDLING_IN_COPY_TO = new NodeFeature("mapper.copy_to.dynamic_handling"); @Override public Set getTestFeatures() { @@ -45,8 +46,9 @@ public Set getTestFeatures() { CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX, META_FETCH_FIELDS_ERROR_CODE_CHANGED, SPARSE_VECTOR_STORE_SUPPORT, - SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX, COUNTED_KEYWORD_SYNTHETIC_SOURCE_NATIVE_SUPPORT, + 
SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX, + DYNAMIC_HANDLING_IN_COPY_TO, SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE, ObjectMapper.SUBOBJECTS_FALSE_MAPPING_UPDATE_FIX ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b181225d80ddf..7958fd8e51525 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -801,7 +801,8 @@ public static boolean isMetadataFieldStatic(String fieldName) { * this method considers all mapper plugins */ public boolean isMetadataField(String field) { - return mapperRegistry.getMetadataMapperParsers(indexVersionCreated).containsKey(field); + var mapper = mappingLookup().getMapper(field); + return mapper instanceof MetadataFieldMapper; } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index abca8e057f3b8..4c9214015fba6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -1127,7 +1127,7 @@ public boolean setIgnoredValues(Map { + if (iterator.docID() > docId) { + return hasValue = false; + } + if (iterator.docID() == docId) { + return hasValue = true; + } hasValue = docId == iterator.advance(docId); hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); ord = iterator.index(); @@ -2414,6 +2420,12 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf if (byteVectorValues != null) { KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator(); return docId -> { + if (iterator.docID() > docId) { + return hasValue = false; + } + if (iterator.docID() == docId) { + return hasValue = true; + } hasValue = docId == iterator.advance(docId); ord = iterator.index(); return hasValue; @@ -2476,6 +2488,12 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf return null; } return docId -> { + if (values.docID() > docId) { + return hasValue = false; + } + if (values.docID() == docId) { + return hasValue = true; + } hasValue = docId == values.advance(docId); return hasValue; }; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index f9d027e0c9c1c..c3e429af08a4e 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryBuilder; @@ -355,10 +354,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws ); PARSER.declareInt(ReindexRequest::setMaxDocsValidateIdentical, new ParseField("max_docs")); - - // avoid silently accepting an ignored size. 
- PARSER.declareInt((r, s) -> failOnSizeSpecified(), new ParseField("size")); - PARSER.declareField((p, v, c) -> v.setScript(Script.parse(p)), new ParseField("script"), ObjectParser.ValueType.OBJECT); PARSER.declareString(ReindexRequest::setConflicts, new ParseField("conflicts")); } @@ -498,10 +493,4 @@ static void setMaxDocsValidateIdentical(AbstractBulkByScrollRequest request, request.setMaxDocs(maxDocs); } } - - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_INDEXING) - // do we still need this ref to [max_docs] or can we remove the field entirely so it's rejected with the default message? - private static void failOnSizeSpecified() { - throw new IllegalArgumentException("invalid parameter [size], use [max_docs] instead"); - } } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 8cf631b660b1e..36b6709661017 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.engine.TranslogOperationAsserter; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.lookup.Source; import java.io.Closeable; import java.io.IOException; @@ -298,8 +299,10 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc + "], with different data. " + "prvOp [" + prvOp + + (prvOp instanceof Translog.Index index ? " source: " + Source.fromBytes(index.source()).source() : "") + "], newOp [" + newOp + + (newOp instanceof Translog.Index index ? " source: " + Source.fromBytes(index.source()).source() : "") + "]", previous.v2() ); diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 886c5e4bf6d3a..bfe1cd9b28de1 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -122,7 +122,7 @@ public Dictionary getDictionary(String locale) { } private static Path resolveHunspellDirectory(Environment env) { - return env.configFile().resolve("hunspell"); + return env.configDir().resolve("hunspell"); } /** @@ -193,7 +193,7 @@ private Dictionary loadDictionary(String locale, Settings nodeSettings, Environm affixStream = Files.newInputStream(affixFiles[0]); - try (Directory tmp = new NIOFSDirectory(env.tmpFile())) { + try (Directory tmp = new NIOFSDirectory(env.tmpDir())) { return new Dictionary(tmp, "hunspell", affixStream, dicStreams, ignoreCase); } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index b819a1686d23c..6ef5da8ee1fb7 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -1164,20 +1164,35 @@ static String getProcessorName(Processor processor) { if (processor instanceof ConditionalProcessor conditionalProcessor) { processor = conditionalProcessor.getInnerProcessor(); } - StringBuilder sb = new StringBuilder(5); - sb.append(processor.getType()); + String tag = processor.getTag(); + if (tag != null && tag.isEmpty()) { + tag = null; // it simplifies the rest of the logic slightly to coalesce to null + } + + String 
pipelineName = null; if (processor instanceof PipelineProcessor pipelineProcessor) { - String pipelineName = pipelineProcessor.getPipelineTemplate().newInstance(Map.of()).execute(); - sb.append(":"); - sb.append(pipelineName); + pipelineName = pipelineProcessor.getPipelineTemplate().newInstance(Map.of()).execute(); } - String tag = processor.getTag(); - if (tag != null && tag.isEmpty() == false) { - sb.append(":"); - sb.append(tag); + + // if there's a tag, OR if it's a pipeline processor, then the processor name is a compound thing, + // BUT if neither of those apply, then it's just the type -- so we can return the type itself without + // allocating a new String object + if (tag == null && pipelineName == null) { + return processor.getType(); + } else { + StringBuilder sb = new StringBuilder(5); + sb.append(processor.getType()); + if (pipelineName != null) { + sb.append(":"); + sb.append(pipelineName); + } + if (tag != null) { + sb.append(":"); + sb.append(tag); + } + return sb.toString(); } - return sb.toString(); } /** diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java index c75cd3a022cb8..9f403ca9300dd 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -31,6 +31,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.Function; public record IngestStats(Stats totalStats, List pipelineStats, Map> processorStats) implements @@ -57,14 +58,22 @@ public record IngestStats(Stats totalStats, List pipelineStats, Ma * Read from a stream. */ public static IngestStats read(StreamInput in) throws IOException { - var stats = new Stats(in); + // while reading the processors, we're going to encounter identical name and type strings *repeatedly* + // it's advantageous to discard the endless copies of the same strings and canonical-ize them to keep our + // heap usage under control. note: this map is key to key, because of the limitations of the set interface. + final Map namesAndTypesCache = new HashMap<>(); + + var stats = readStats(in); var size = in.readVInt(); + if (stats == Stats.IDENTITY && size == 0) { + return IDENTITY; + } var pipelineStats = new ArrayList(size); var processorStats = Maps.>newMapWithExpectedSize(size); for (var i = 0; i < size; i++) { var pipelineId = in.readString(); - var pipelineStat = new Stats(in); + var pipelineStat = readStats(in); var byteStat = in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? 
new ByteStats(in) : new ByteStats(0, 0); pipelineStats.add(new PipelineStat(pipelineId, pipelineStat, byteStat)); int processorsSize = in.readVInt(); @@ -72,7 +81,10 @@ public static IngestStats read(StreamInput in) throws IOException { for (var j = 0; j < processorsSize; j++) { var processorName = in.readString(); var processorType = in.readString(); - var processorStat = new Stats(in); + var processorStat = readStats(in); + // pass these name and type through the local names and types cache to canonical-ize them + processorName = namesAndTypesCache.computeIfAbsent(processorName, Function.identity()); + processorType = namesAndTypesCache.computeIfAbsent(processorType, Function.identity()); processorStatsPerPipeline.add(new ProcessorStat(processorName, processorType, processorStat)); } processorStats.put(pipelineId, Collections.unmodifiableList(processorStatsPerPipeline)); @@ -167,6 +179,21 @@ static Map> merge(Map> f return totalsPerPipelineProcessor; } + /** + * Read {@link Stats} from a stream. + */ + private static Stats readStats(StreamInput in) throws IOException { + long ingestCount = in.readVLong(); + long ingestTimeInMillis = in.readVLong(); + long ingestCurrent = in.readVLong(); + long ingestFailedCount = in.readVLong(); + if (ingestCount == 0 && ingestTimeInMillis == 0 && ingestCurrent == 0 && ingestFailedCount == 0) { + return Stats.IDENTITY; + } else { + return new Stats(ingestCount, ingestTimeInMillis, ingestCurrent, ingestFailedCount); + } + } + public record Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount) implements Writeable, @@ -174,13 +201,6 @@ public record Stats(long ingestCount, long ingestTimeInMillis, long ingestCurren public static final Stats IDENTITY = new Stats(0, 0, 0, 0); - /** - * Read from a stream. - */ - public Stats(StreamInput in) throws IOException { - this(in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong()); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(ingestCount); diff --git a/server/src/main/java/org/elasticsearch/internal/VersionExtension.java b/server/src/main/java/org/elasticsearch/internal/VersionExtension.java index 5a6c7c1f3671d..fc947738c9e33 100644 --- a/server/src/main/java/org/elasticsearch/internal/VersionExtension.java +++ b/server/src/main/java/org/elasticsearch/internal/VersionExtension.java @@ -12,16 +12,16 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.index.IndexVersion; -import java.util.List; +import java.util.Collection; /** * Allows plugging in current version elements. */ public interface VersionExtension { /** - * Returns list of {@link TransportVersion} defined by extension + * Returns additional {@link TransportVersion} defined by extension */ - List getTransportVersions(); + Collection getTransportVersions(); /** * Returns the {@link IndexVersion} that Elasticsearch should use. 
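The key-to-key map above acts as a canonical-izing cache: computeIfAbsent with Function.identity() hands back the first instance seen for each distinct string, so later duplicates can be dropped in favor of one shared object. A self-contained sketch (class name hypothetical):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    public class CanonicalizeSketch {
        public static void main(String[] args) {
            Map<String, String> cache = new HashMap<>();
            String a = new String("set_security_user"); // two distinct instances
            String b = new String("set_security_user"); // holding equal contents
            String ca = cache.computeIfAbsent(a, Function.identity());
            String cb = cache.computeIfAbsent(b, Function.identity());
            System.out.println(a == b);   // false: duplicate copies on the heap
            System.out.println(ca == cb); // true: both resolve to one canonical instance
        }
    }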
diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 80c9aafaa84b4..f4b390c9863f2 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -652,7 +652,7 @@ protected void validateNodeBeforeAcceptingRequests( * Writes a file to the logs dir containing the ports for the given transport type */ private void writePortsFile(String type, BoundTransportAddress boundAddress) { - Path tmpPortsFile = environment.logsFile().resolve(type + ".ports.tmp"); + Path tmpPortsFile = environment.logsDir().resolve(type + ".ports.tmp"); try (BufferedWriter writer = Files.newBufferedWriter(tmpPortsFile, Charset.forName("UTF-8"))) { for (TransportAddress address : boundAddress.boundAddresses()) { InetAddress inetAddress = InetAddress.getByName(address.getAddress()); @@ -661,7 +661,7 @@ private void writePortsFile(String type, BoundTransportAddress boundAddress) { } catch (IOException e) { throw new RuntimeException("Failed to write ports file", e); } - Path portsFile = environment.logsFile().resolve(type + ".ports"); + Path portsFile = environment.logsDir().resolve(type + ".ports"); try { Files.move(tmpPortsFile, portsFile, StandardCopyOption.ATOMIC_MOVE); } catch (IOException e) { diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 61ac8bbbfc69a..9ba21a7d77342 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -446,7 +446,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr ); } - if (initialEnvironment.dataFiles().length > 1) { + if (initialEnvironment.dataDirs().length > 1) { // NOTE: we use initialEnvironment here, but assertEquivalent below ensures the data paths do not change deprecationLogger.warn( DeprecationCategory.SETTINGS, @@ -467,10 +467,10 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr if (logger.isDebugEnabled()) { logger.debug( "using config [{}], data [{}], logs [{}], plugins [{}]", - initialEnvironment.configFile(), - Arrays.toString(initialEnvironment.dataFiles()), - initialEnvironment.logsFile(), - initialEnvironment.pluginsFile() + initialEnvironment.configDir(), + Arrays.toString(initialEnvironment.dataDirs()), + initialEnvironment.logsDir(), + initialEnvironment.pluginsDir() ); } @@ -487,7 +487,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr * Create the environment based on the finalized view of the settings. This is to ensure that components get the same setting * values, no matter they ask for them from. 
*/ - environment = new Environment(settings, initialEnvironment.configFile()); + environment = new Environment(settings, initialEnvironment.configDir()); Environment.assertEquivalent(initialEnvironment, environment); modules.bindToInstance(Environment.class, environment); @@ -1622,7 +1622,7 @@ private DiscoveryModule createDiscoveryModule( pluginsService.filterPlugins(DiscoveryPlugin.class).toList(), pluginsService.filterPlugins(ClusterCoordinationPlugin.class).toList(), allocationService, - environment.configFile(), + environment.configDir(), gatewayMetaState, rerouteService, fsHealthService, diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index 4b7524a7ac011..b9e58863cad6c 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -53,7 +53,7 @@ class NodeServiceProvider { PluginsService newPluginService(Environment initialEnvironment, PluginsLoader pluginsLoader) { // this creates a PluginsService with an empty list of classpath plugins - return new PluginsService(initialEnvironment.settings(), initialEnvironment.configFile(), pluginsLoader); + return new PluginsService(initialEnvironment.settings(), initialEnvironment.configDir(), pluginsLoader); } ScriptService newScriptService( diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryConflictException.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryConflictException.java index 15a6b0d3791d8..03baad7aa67dd 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryConflictException.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryConflictException.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -29,14 +28,12 @@ public RestStatus status() { return RestStatus.CONFLICT; } - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // drop unneeded string from wire format public RepositoryConflictException(StreamInput in) throws IOException { super(in); in.readString(); } @Override - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // drop unneeded string from wire format protected void writeTo(StreamOutput out, Writer nestedExceptionsWriter) throws IOException { super.writeTo(out, nestedExceptionsWriter); out.writeString(""); diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 2ebbf24d65670..2c429954f5f49 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -605,6 +605,11 @@ public int hashCode() { return Objects.hash(snapshotIds, snapshotsDetails, indices, indexSnapshots, shardGenerations, indexMetaDataGenerations); } + @Override + public String toString() { + return Strings.format("RepositoryData[uuid=%s,gen=%s]", uuid, genId); + } + /** * Resolve the index name to the index id specific to the repository, * throwing an exception if the index could not be resolved. 
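Strings.format follows String.format placeholder semantics here, so the new toString renders a compact identity that is convenient in log lines; a stand-in demo with made-up values:

    public class RepositoryDataToStringSketch {
        public static void main(String[] args) {
            String uuid = "_na_"; // sample uuid, assumed for an unloaded repository
            long genId = -1;      // sample generation, assumed
            System.out.println(String.format("RepositoryData[uuid=%s,gen=%s]", uuid, genId));
            // prints: RepositoryData[uuid=_na_,gen=-1]
        }
    }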
diff --git a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index b88524586abe0..f284faf8f304a 100644 --- a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -92,13 +92,13 @@ public FsRepository( ); throw new RepositoryException(metadata.name(), "missing location"); } - Path locationFile = environment.resolveRepoFile(location); + Path locationFile = environment.resolveRepoDir(location); if (locationFile == null) { - if (environment.repoFiles().length > 0) { + if (environment.repoDirs().length > 0) { logger.warn( "The specified location [{}] doesn't start with any " + "repository paths specified by the path.repo setting: [{}] ", location, - environment.repoFiles() + environment.repoDirs() ); throw new RepositoryException( metadata.name(), @@ -127,7 +127,7 @@ public FsRepository( @Override protected BlobStore createBlobStore() throws Exception { final String location = REPOSITORIES_LOCATION_SETTING.get(getMetadata().settings()); - final Path locationFile = environment.resolveRepoFile(location); + final Path locationFile = environment.resolveRepoDir(location); return new FsBlobStore(bufferSize, locationFile, isReadOnly()); } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index e36604f9a58c8..8817ebb10cea6 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -80,7 +80,7 @@ public FileSettingsService( Environment environment, FileSettingsHealthIndicatorService healthIndicatorService ) { - super(clusterService, environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); + super(clusterService, environment.configDir().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); this.stateService = stateService; this.healthIndicatorService = healthIndicatorService; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java index da7a7d3379ee0..9ab7f275252ae 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java @@ -12,8 +12,6 @@ import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.metadata.DesiredNode; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -27,10 +25,6 @@ public class RestUpdateDesiredNodesAction extends BaseRestHandler { - private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestUpdateDesiredNodesAction.class); - private static final String VERSION_DEPRECATION_MESSAGE = - "[version removal] Specifying node_version in desired nodes requests is 
deprecated."; - @Override public String getName() { return "update_desired_nodes"; @@ -59,10 +53,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli ); } - if (updateDesiredNodesRequest.getNodes().stream().anyMatch(DesiredNode::hasVersion)) { - deprecationLogger.compatibleCritical("desired_nodes_version", VERSION_DEPRECATION_MESSAGE); - } - return restChannel -> client.execute( UpdateDesiredNodesAction.INSTANCE, updateDesiredNodesRequest, diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java index 9083c781ae167..334e68648d853 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java @@ -26,5 +26,11 @@ public class CreateIndexCapabilities { */ private static final String LOOKUP_INDEX_MODE_CAPABILITY = "lookup_index_mode"; - public static final Set CAPABILITIES = Set.of(LOGSDB_INDEX_MODE_CAPABILITY, LOOKUP_INDEX_MODE_CAPABILITY); + private static final String NESTED_DENSE_VECTOR_SYNTHETIC_TEST = "nested_dense_vector_synthetic_test"; + + public static final Set CAPABILITIES = Set.of( + LOGSDB_INDEX_MODE_CAPABILITY, + LOOKUP_INDEX_MODE_CAPABILITY, + NESTED_DENSE_VECTOR_SYNTHETIC_TEST + ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index 06e9b02a92934..7659e096c115f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeCapabilities; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -19,6 +20,7 @@ import java.io.IOException; import java.util.List; +import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -49,4 +51,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } } + @Override + public Set supportedCapabilities() { + return AnalyzeCapabilities.CAPABILITIES; + } + } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java index 94fa60762800d..f8dc26e9c468a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java @@ -9,11 +9,9 @@ package org.elasticsearch.rest.action.cat; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.core.TimeValue; @@ -99,24 +97,6 @@ 
protected Table getTableWithHeader(RestRequest request) { private Table buildTable(RestRequest req, GetSnapshotsResponse getSnapshotsResponse) { Table table = getTableWithHeader(req); - if (getSnapshotsResponse.isFailed()) { - ElasticsearchException causes = null; - - for (ElasticsearchException e : getSnapshotsResponse.getFailures().values()) { - if (causes == null) { - causes = e; - } else { - causes.addSuppressed(e); - } - } - throw new ElasticsearchException( - "Repositories [" - + Strings.collectionToCommaDelimitedString(getSnapshotsResponse.getFailures().keySet()) - + "] failed to retrieve snapshots", - causes - ); - } - for (SnapshotInfo snapshotStatus : getSnapshotsResponse.getSnapshots()) { table.startRow(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index ef21e4103fd88..f763ac8f795ff 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -211,9 +211,9 @@ private List reducePipelineBuckets(AggregationReduceContext reduceContext, Pi List reducedBuckets = new ArrayList<>(); for (B bucket : getBuckets()) { List aggs = new ArrayList<>(); - for (Aggregation agg : bucket.getAggregations()) { + for (InternalAggregation agg : bucket.getAggregations()) { PipelineTree subTree = pipelineTree.subTree(agg.getName()); - aggs.add(((InternalAggregation) agg).reducePipelines((InternalAggregation) agg, reduceContext, subTree)); + aggs.add(agg.reducePipelines(agg, reduceContext, subTree)); } reducedBuckets.add(createBucket(InternalAggregations.from(aggs), bucket)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java b/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java index 739dc58743332..34fcf58e43bd3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java @@ -20,10 +20,6 @@ public InvalidAggregationPathException(String msg) { super(msg); } - public InvalidAggregationPathException(String msg, Throwable cause) { - super(msg, cause); - } - public InvalidAggregationPathException(StreamInput in) throws IOException { super(in); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 592f7b4887598..e85d01930807c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; import java.util.AbstractList; @@ -163,6 +164,10 @@ protected void prepareSubAggs(LongArray ordsToCollect) throws IOException {} * array of ordinals */ protected final IntFunction buildSubAggsForBuckets(LongArray bucketOrdsToCollect) throws IOException { + if (context.isCancelled()) { + throw new 
TaskCancelledException("not building sub-aggregations due to task cancellation"); + } + prepareSubAggs(bucketOrdsToCollect); InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][]; for (int i = 0; i < subAggregators.length; i++) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 441b30f872a35..4d946bfb3bb99 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -461,7 +461,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt // Visit documents sorted by the leading source of the composite definition and terminates // when the leading source value is guaranteed to be greater than the lowest composite bucket // in the queue. - DocIdSet docIdSet = sortedDocsProducer.processLeaf(topLevelQuery(), queue, aggCtx.getLeafReaderContext(), fillDocIdSet); + DocIdSet docIdSet = sortedDocsProducer.processLeaf(queue, aggCtx.getLeafReaderContext(), fillDocIdSet); if (fillDocIdSet) { entries.add(new Entry(aggCtx, docIdSet)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index 7c920abfe2451..e88c9724edba1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.Query; import org.apache.lucene.util.DocIdSetBuilder; import java.io.IOException; @@ -36,8 +35,7 @@ class PointsSortedDocsProducer extends SortedDocsProducer { } @Override - DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException { + DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException { final PointValues values = context.reader().getPointValues(field); if (values == null) { // no value for the field diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java index 4503758c55b04..2d1b628482d45 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Query; import org.apache.lucene.util.Bits; import org.apache.lucene.util.DocIdSetBuilder; import org.elasticsearch.core.Nullable; @@ -99,6 +98,5 @@ public void collect(int doc, long bucket) throws IOException { * Returns the {@link DocIdSet} of the documents that contain a top composite bucket in this 
leaf or * {@link DocIdSet#EMPTY} if fillDocIdSet is false. */ - abstract DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException; + abstract DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java index e2aaba06a19ec..3b62cb8f57d8b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java @@ -14,7 +14,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -29,8 +28,7 @@ class TermsSortedDocsProducer extends SortedDocsProducer { } @Override - DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException { + DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException { final Terms terms = context.reader().terms(field); if (terms == null) { // no value for the field diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java index 7291a099dd7f7..9994a2bca08bf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java @@ -25,15 +25,7 @@ public class InternalDateRange extends InternalRange aggregations, - boolean keyed, - DocValueFormat formatter - ) { + public Bucket(String key, double from, double to, long docCount, List aggregations, DocValueFormat formatter) { super(key, from, to, docCount, InternalAggregations.from(aggregations), formatter); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java index 17982043e8e20..b65b0e1ec010a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java @@ -75,24 +75,6 @@ public interface BucketOrdsEnum { * Read the current value. */ void readValue(BytesRef dest); - - /** - * An {@linkplain BucketOrdsEnum} that is empty. 
- */ - BucketOrdsEnum EMPTY = new BucketOrdsEnum() { - @Override - public boolean next() { - return false; - } - - @Override - public long ord() { - return 0; - } - - @Override - public void readValue(BytesRef dest) {} - }; } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 5108793b8a809..9db9a41016621 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -72,7 +72,7 @@ protected Bucket(long subsetDf, long supersetDf, InternalAggregations aggregatio /** * Read from a stream. */ - protected Bucket(StreamInput in, DocValueFormat format) { + protected Bucket(DocValueFormat format) { this.format = format; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java index 17ea290b7aaaf..807514b30ab5b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java @@ -37,7 +37,7 @@ public Bucket(long subsetDf, long supersetDf, long term, InternalAggregations ag } Bucket(StreamInput in, DocValueFormat format) throws IOException { - super(in, format); + super(format); subsetDf = in.readVLong(); supersetDf = in.readVLong(); term = in.readLong(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java index b255f17d2843b..d55228304666f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java @@ -48,7 +48,7 @@ public Bucket( * Read from a stream. */ public Bucket(StreamInput in, DocValueFormat format) throws IOException { - super(in, format); + super(format); termBytes = in.readBytesRef(); subsetDf = in.readVLong(); supersetDf = in.readVLong(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java index 080cac9cbfb85..c3ed7a6dd5dfb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java @@ -422,6 +422,10 @@ public static ExecutionMode fromString(String value, final DeprecationLogger dep if ("global_ordinals".equals(value)) { return GLOBAL_ORDINALS; } else if ("global_ordinals_hash".equals(value)) { + /* + * We have no plans to remove this so we don't break anyone, no matter + * how few people still use this or how long it's been deprecated. 
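+ * (Illustration, not part of this change: a terms aggregation can still opt in with "execution_hint":
+ * "global_ordinals_hash" in the request body; the parser responds with the deprecation warning below rather than an
+ * error.)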
+ */ deprecationLogger.warn( DeprecationCategory.AGGREGATIONS, "global_ordinals_hash", diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java index 72e1db245338e..8b0617e116cc1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java @@ -116,8 +116,7 @@ protected DoubleHistogram getState(long bucketOrd) { if (bucketOrd >= states.size()) { return null; } - final DoubleHistogram state = states.get(bucketOrd); - return state; + return states.get(bucketOrd); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java index 00d7890e4710f..2605fc1c09361 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java @@ -44,9 +44,6 @@ public AbstractHyperLogLogPlusPlus(int precision) { /** Get HyperLogLog algorithm */ protected abstract AbstractHyperLogLog.RunLenIterator getHyperLogLog(long bucketOrd); - /** Get the number of data structures */ - public abstract long maxOrd(); - /** Collect a value in the given bucket */ public abstract void collect(long bucketOrd, long hash); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java index 1f1cbd0b34a67..908fb4bb0a2e3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java @@ -39,11 +39,6 @@ public AbstractLinearCounting(int precision) { */ protected abstract int size(long bucketOrd); - /** - * return the current values in the counter. - */ - protected abstract HashesIterator values(long bucketOrd); - public int collect(long bucketOrd, long hash) { final int k = encodeHash(hash, p); return addEncoded(bucketOrd, k); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java index 5edcf745c418d..08e9de383691b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java @@ -68,11 +68,6 @@ public interface ExtendedStats extends Stats { */ String getStdDeviationSamplingAsString(); - /** - * The upper or lower bounds of stdDev of the collected values as a String. - */ - String getStdDeviationBoundAsString(Bounds bound); - /** * The sum of the squares of the collected values as a String. 
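 * (That is, the sum Σ xᵢ² over the collected values, rendered through the aggregation's configured value format.)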
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java index 5af6a50a8c4a6..16dfbdada4b0a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java @@ -87,7 +87,6 @@ public HyperLogLogPlusPlus(int precision, BigArrays bigArrays, long initialBucke this.algorithm = algorithm; } - @Override public long maxOrd() { return hll.maxOrd(); } @@ -322,8 +321,7 @@ protected int size(long bucketOrd) { return size; } - @Override - protected HashesIterator values(long bucketOrd) { + private HashesIterator values(long bucketOrd) { // Make a fresh BytesRef for reading scratch work because this method can be called on many threads return new LinearCountingIterator(this, new BytesRef(), bucketOrd); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java index 1736b5ea7656d..8b1dcfb8a2f85 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java @@ -36,11 +36,6 @@ final class HyperLogLogPlusPlusSparse extends AbstractHyperLogLogPlusPlus implem this.lc = new LinearCounting(precision, bigArrays, initialBuckets); } - @Override - public long maxOrd() { - return lc.sizes.size(); - } - /** Needs to be called before adding elements into a bucket */ protected void ensureCapacity(long bucketOrd, long size) { lc.ensureCapacity(bucketOrd, size); @@ -135,8 +130,7 @@ protected int size(long bucketOrd) { return size; } - @Override - protected HashesIterator values(long bucketOrd) { + private HashesIterator values(long bucketOrd) { return new LinearCountingIterator(values.get(bucketOrd), size(bucketOrd)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java index c3a106bd9af41..8a128b77a7300 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java @@ -73,9 +73,8 @@ public Object getProperty(List path) { }; } else if (path.size() == 2) { BoundingBox bbox = resolveBoundingBox(); - T cornerPoint = null; String cornerString = path.get(0); - cornerPoint = switch (cornerString) { + T cornerPoint = switch (cornerString) { case "top_left" -> bbox.topLeft(); case "bottom_right" -> bbox.bottomRight(); default -> throw new IllegalArgumentException("Found unknown path element [" + cornerString + "] in [" + getName() + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java index 7965211e24683..c6f4adc735c0c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java @@ -245,8 +245,7 @@ public String getStdDeviationSamplingAsString() { return 
valueAsString(Metrics.std_deviation_sampling.name()); } - @Override - public String getStdDeviationBoundAsString(Bounds bound) { + private String getStdDeviationBoundAsString(Bounds bound) { return switch (bound) { case UPPER -> valueAsString(Metrics.std_upper.name()); case LOWER -> valueAsString(Metrics.std_lower.name()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java index 48adad3cee618..e537c7348da6f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java @@ -90,15 +90,6 @@ protected MultiValue(StreamInput in) throws IOException { super(in); } - /** - * Read from a stream. - * - * @param readFormat whether to read the "format" field - */ - protected MultiValue(StreamInput in, boolean readFormat) throws IOException { - super(in, readFormat); - } - public abstract double value(String name); public String valueAsString(String name) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index ac37b287736aa..2ec30b411928a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -662,7 +662,7 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa factory.fetchSource(FetchSourceContext.fromXContent(parser)); } else if (SearchSourceBuilder.SCRIPT_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { List scriptFields = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { String scriptFieldName = parser.currentName(); token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { @@ -740,12 +740,12 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa parser ); } else if (SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { FieldAndFormat ff = FieldAndFormat.fromXContent(parser); factory.docValueField(ff.field, ff.format); } } else if (SearchSourceBuilder.FETCH_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { FieldAndFormat ff = FieldAndFormat.fromXContent(parser); factory.fetchField(ff); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java index 32d0ae6596c85..3f7da293dfa14 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java @@ -68,8 
+68,7 @@ public void writeTo(StreamOutput out) throws IOException { */ @Override public final PipelineAggregator create() { - PipelineAggregator aggregator = createInternal(this.metadata); - return aggregator; + return createInternal(this.metadata); } @SuppressWarnings("unchecked") diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java index e372e328ec88e..8765657e8a4d3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java @@ -56,7 +56,7 @@ public final BucketMetricsPipelineAggregationBuilder parse(String pipelineAgg } else if (token == XContentParser.Token.START_ARRAY) { if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) { List paths = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { String path = parser.text(); paths.add(path); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java index 802aef5be68f3..1213b1a71761d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java @@ -28,8 +28,8 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation public static final String NAME = "bucket_metric_value"; static final ParseField KEYS_FIELD = new ParseField("keys"); - private double value; - private String[] keys; + private final double value; + private final String[] keys; public InternalBucketMetricValue(String name, String[] keys, double value, DocValueFormat formatter, Map metadata) { super(name, formatter, metadata); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java index 667e34d85b791..beb125608cbe4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java @@ -26,9 +26,9 @@ import java.util.Objects; public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket { - private double[] percentiles; - private double[] percents; - private boolean keyed = true; + private final double[] percentiles; + private final double[] percents; + private final boolean keyed; private final transient Map percentileLookups = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java index 8337d644c9a9b..86807e9772a2b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java @@ -21,7 +21,7 @@ public class 
PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator { private final double[] percents; - private boolean keyed = true; + private final boolean keyed; private List data; PercentilesBucketPipelineAggregator( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java index 2537d79a40bf5..03b4867f6036b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java @@ -12,6 +12,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -169,11 +170,11 @@ public static SerialDiffPipelineAggregationBuilder parse(String reducerName, XCo } else if (token == XContentParser.Token.START_ARRAY) { if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) { List paths = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { String path = parser.text(); paths.add(path); } - bucketsPaths = paths.toArray(new String[paths.size()]); + bucketsPaths = paths.toArray(Strings.EMPTY_ARRAY); } else { throw new ParsingException( parser.getTokenLocation(), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java index c720f3d9465a3..02f300df48385 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java @@ -30,7 +30,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.DocCountFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.Rewriteable; @@ -309,14 +308,6 @@ public final AggregationUsageService getUsageService() { public abstract Set sourcePath(String fullName); - /** - * Returns the MappingLookup for the index, if one is initialized. - */ - @Nullable - public MappingLookup getMappingLookup() { - return null; - } - /** * Does this index have a {@code _doc_count} field in any segment? 
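 * (Background, hedged: a {@code _doc_count} field lets one indexed document stand in for many pre-aggregated documents,
 * e.g. a document carrying {@code "_doc_count": 10} contributes 10 to the doc count of every bucket it lands in, so
 * aggregators must sum the field rather than count each hit as one.)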
*/ @@ -621,11 +612,6 @@ public Set sourcePath(String fullName) { return context.sourcePath(fullName); } - @Override - public MappingLookup getMappingLookup() { - return context.getMappingLookup(); - } - @Override public void close() { /* diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java index 31adf423d74c9..23ccf1d940849 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java @@ -19,8 +19,6 @@ public class AggregationUsageService implements ReportingService { private static final String ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT = "es.search.query.aggregations.total"; - private final String AGGREGATION_NAME_KEY = "aggregation_name"; - private final String VALUES_SOURCE_KEY = "values_source"; private final LongCounter aggregationsUsageCounter; private final Map> aggs; private final AggregationInfo info; @@ -83,6 +81,8 @@ public void incAggregationUsage(String aggregationName, String valuesSourceType) } assert valuesSourceMap != null : "Unknown aggregation [" + aggregationName + "][" + valuesSourceType + "]"; // tests will have a no-op implementation here + String VALUES_SOURCE_KEY = "values_source"; + String AGGREGATION_NAME_KEY = "aggregation_name"; aggregationsUsageCounter.incrementBy(1, Map.of(AGGREGATION_NAME_KEY, aggregationName, VALUES_SOURCE_KEY, valuesSourceType)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java index ffdbfffbce9e9..3e74d163b0d9f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java @@ -57,10 +57,9 @@ public static void declareCommon( * @param timezoneAware - allows specifying timezone * @param filterable - allows specifying filters on the values * @param heterogeneous - allows specifying value-source specific format and user value type hint - * @param - values source type * @param - parser context */ - public static void declareField( + public static void declareField( String fieldName, AbstractObjectParser, T> objectParser, boolean scriptable, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index 472619da78622..313f8e43014d0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -263,11 +263,7 @@ private boolean isInvalidDoc(int docId) throws IOException { // true if the TSID ord has changed since the last time we checked boolean shouldPop() throws IOException { - if (tsidOrd != tsids.ordValue()) { - return true; - } else { - return false; - } + return tsidOrd != tsids.ordValue(); } } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 3036a295d459a..5fcfb2b9766cd 100644 --- 
a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -144,7 +144,6 @@ static void executeQuery(SearchContext searchContext) throws QueryPhaseExecution RescorePhase.execute(searchContext); SuggestPhase.execute(searchContext); - if (searchContext.getProfilers() != null) { searchContext.queryResult().profileResults(searchContext.getProfilers().buildQueryPhaseResults()); } diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index f8b348b383f01..fdd5efceaae3c 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -72,7 +72,7 @@ public static void execute(SearchContext context) { assert topDocsSortedByScore(topDocs) : "topdocs should be sorted after rescore"; ctx.setCancellationChecker(null); } - /** + /* * Since rescorers are building top docs with score only, we must reconstruct the {@link TopFieldGroups} * or {@link TopFieldDocs} using their original version before rescoring. */ @@ -86,12 +86,13 @@ public static void execute(SearchContext context) { .topDocs(new TopDocsAndMaxScore(topDocs, topDocs.scoreDocs[0].score), context.queryResult().sortValueFormats()); } catch (IOException e) { throw new ElasticsearchException("Rescore Phase Failed", e); - } catch (ContextIndexSearcher.TimeExceededException e) { + } catch (ContextIndexSearcher.TimeExceededException timeExceededException) { SearchTimeoutException.handleTimeout( context.request().allowPartialSearchResults(), context.shardTarget(), context.queryResult() ); + // if the rescore phase times out and partial results are allowed, the returned top docs from this shard won't be rescored } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java b/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java index d63e0717ca7ac..17ff07f167ff8 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java @@ -10,7 +10,9 @@ import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.query.SearchTimeoutException; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; @@ -40,12 +42,17 @@ public static void execute(SearchContext context) { for (Map.Entry entry : suggest.suggestions().entrySet()) { SuggestionSearchContext.SuggestionContext suggestion = entry.getValue(); Suggester suggester = suggestion.getSuggester(); - Suggestion> result = suggester.execute( - entry.getKey(), - suggestion, - context.searcher(), - spare - ); + Suggestion> result; + try { + result = suggester.execute(entry.getKey(), suggestion, context.searcher(), spare); + } catch (ContextIndexSearcher.TimeExceededException timeExceededException) { + SearchTimeoutException.handleTimeout( + context.request().allowPartialSearchResults(), + context.shardTarget(), + context.queryResult() + ); + result = suggester.emptySuggestion(entry.getKey(), suggestion, spare); + } if (result != null) { assert 
entry.getKey().equals(result.name); suggestions.add(result); @@ -56,5 +63,4 @@ public static void execute(SearchContext context) { throw new ElasticsearchException("I/O exception during suggest phase", e); } } - } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/RescoreKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/RescoreKnnVectorQuery.java index 79ede6873ad1f..31d9767e9a857 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/RescoreKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/RescoreKnnVectorQuery.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Comparator; import java.util.Objects; /** @@ -60,6 +61,7 @@ public Query rewrite(IndexSearcher searcher) throws IOException { TopDocs topDocs = searcher.search(query, k); vectorOperations = topDocs.totalHits.value(); ScoreDoc[] scoreDocs = topDocs.scoreDocs; + Arrays.sort(scoreDocs, Comparator.comparingInt(scoreDoc -> scoreDoc.doc)); int[] docIds = new int[scoreDocs.length]; float[] scores = new float[scoreDocs.length]; for (int i = 0; i < scoreDocs.length; i++) { diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 008c75ed13473..5d8ef51af8d51 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -3885,6 +3885,11 @@ public void onFailure(Exception e) { logSnapshotFailure("create", snapshot, e); listener.onFailure(e); } + + @Override + public String toString() { + return "CreateSnapshotTask{repository=" + repository.getMetadata().name() + ", snapshot=" + snapshot + '}'; + } } private static void logSnapshotFailure(String operation, Snapshot snapshot, Exception e) { diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index 1a9043d093feb..eb4e0394bb5a2 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java @@ -11,7 +11,6 @@ import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; @@ -19,8 +18,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.threadpool.ThreadPool; import java.io.EOFException; @@ -44,49 +46,17 @@ final class TransportHandshaker { * ignores the body of the request. After the handshake, the OutboundHandler uses the min(local,remote) protocol version for all later * messages. * - * This version supports three handshake protocols, v6080099, v7170099 and v8800000, which respectively have the same message structure - * as the transport protocols of v6.8.0, v7.17.0, and v8.18.0. 
This node only sends v7170099 requests, but it can send a valid response - * to any v6080099 or v8800000 requests that it receives. + * This version supports two handshake protocols, v7170099 and v8800000, which respectively have the same message structure as the + * transport protocols of v7.17.0, and v8.18.0. This node only sends v8800000 requests, but it can send a valid response to any v7170099 + * requests that it receives. * * Note that these are not really TransportVersion constants as used elsewhere in ES, they're independent things that just happen to be - * stored in the same location in the message header and which roughly match the same ID numbering scheme. Older versions of ES did - * rely on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer + * stored in the same location in the message header and which roughly match the same ID numbering scheme. Older versions of ES did rely + * on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer * true. * * Here are some example messages, broken down to show their structure. See TransportHandshakerRawMessageTests for supporting tests. * - * ## v6080099 Request: - * - * 45 53 -- 'ES' marker - * 00 00 00 34 -- total message length - * 00 00 00 00 00 00 00 01 -- request ID - * 08 -- status flags (0b1000 == handshake request) - * 00 5c c6 63 -- handshake protocol version (0x5cc663 == 6080099) - * 00 -- no request headers [1] - * 00 -- no response headers [1] - * 01 -- one feature [2] - * 06 -- feature name length - * 78 2d 70 61 63 6b -- feature name 'x-pack' - * 16 -- action string size - * 69 6e 74 65 72 6e 61 6c } - * 3a 74 63 70 2f 68 61 6e }- ASCII representation of HANDSHAKE_ACTION_NAME - * 64 73 68 61 6b 65 } - * 00 -- no parent task ID [3] - * 04 -- payload length - * 8b d5 b5 03 -- max acceptable protocol version (vInt: 00000011 10110101 11010101 10001011 == 7170699) - * - * ## v6080099 Response: - * - * 45 53 -- 'ES' marker - * 00 00 00 13 -- total message length - * 00 00 00 00 00 00 00 01 -- request ID (copied from request) - * 09 -- status flags (0b1001 == handshake response) - * 00 5c c6 63 -- handshake protocol version (0x5cc663 == 6080099, copied from request) - * 00 -- no request headers [1] - * 00 -- no response headers [1] - * c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099) - * - * * ## v7170099 Requests: * * 45 53 -- 'ES' marker @@ -158,14 +128,11 @@ final class TransportHandshaker { * [3] Parent task ID should be empty; see org.elasticsearch.tasks.TaskId.writeTo for its structure. 
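 * A sketch of the negotiation itself (illustrative, not part of this change): once a handshake response arrives, both
 * ends settle on the lower of the two advertised transport versions for all later traffic on the channel, roughly
 *
 *     TransportVersion negotiated = TransportVersion.min(localVersion, response.getTransportVersion());
 *
 * so a newer node speaking to an older one uses the older wire format, provided that format is known to it.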
*/ - static final TransportVersion V7_HANDSHAKE_VERSION = TransportVersion.fromId(6_08_00_99); + private static final Logger logger = LogManager.getLogger(TransportHandshaker.class); + static final TransportVersion V8_HANDSHAKE_VERSION = TransportVersion.fromId(7_17_00_99); static final TransportVersion V9_HANDSHAKE_VERSION = TransportVersion.fromId(8_800_00_0); - static final Set<TransportVersion> ALLOWED_HANDSHAKE_VERSIONS = Set.of( - V7_HANDSHAKE_VERSION, - V8_HANDSHAKE_VERSION, - V9_HANDSHAKE_VERSION - ); + static final Set<TransportVersion> ALLOWED_HANDSHAKE_VERSIONS = Set.of(V8_HANDSHAKE_VERSION, V9_HANDSHAKE_VERSION); static final String HANDSHAKE_ACTION_NAME = "internal:tcp/handshake"; private final ConcurrentMap<Long, HandshakeResponseHandler> pendingHandshakes = new ConcurrentHashMap<>(); @@ -196,14 +163,14 @@ void sendHandshake( ActionListener<TransportVersion> listener ) { numHandshakes.inc(); - final HandshakeResponseHandler handler = new HandshakeResponseHandler(requestId, listener); + final HandshakeResponseHandler handler = new HandshakeResponseHandler(requestId, channel, listener); pendingHandshakes.put(requestId, handler); channel.addCloseListener( ActionListener.running(() -> handler.handleLocalException(new TransportException("handshake failed because connection reset"))) ); boolean success = false; try { - handshakeRequestSender.sendRequest(node, channel, requestId, V8_HANDSHAKE_VERSION); + handshakeRequestSender.sendRequest(node, channel, requestId, V9_HANDSHAKE_VERSION); threadPool.schedule( () -> handler.handleLocalException(new ConnectTransportException(node, "handshake_timeout[" + timeout + "]")), @@ -222,9 +189,9 @@ void sendHandshake( } void handleHandshake(TransportChannel channel, long requestId, StreamInput stream) throws IOException { + final HandshakeRequest handshakeRequest; try { - // Must read the handshake request to exhaust the stream - new HandshakeRequest(stream); + handshakeRequest = new HandshakeRequest(stream); } catch (Exception e) { assert ignoreDeserializationErrors : e; throw e; } @@ -243,9 +210,44 @@ void handleHandshake(TransportChannel channel, long requestId, StreamInput strea assert ignoreDeserializationErrors : exception; throw exception; } + ensureCompatibleVersion(version, handshakeRequest.transportVersion, handshakeRequest.releaseVersion, channel); channel.sendResponse(new HandshakeResponse(this.version, Build.current().version())); } + static void ensureCompatibleVersion( + TransportVersion localTransportVersion, + TransportVersion remoteTransportVersion, + String releaseVersion, + Object channel + ) { + if (TransportVersion.isCompatible(remoteTransportVersion)) { + if (remoteTransportVersion.onOrAfter(localTransportVersion)) { + // Remote is newer than us, so we will be using our transport protocol and it's up to the other end to decide whether it + // knows how to do that. + return; + } + if (remoteTransportVersion.isKnown()) { + // Remote is older than us, so we will be using its transport protocol, which we can do if and only if its protocol + // version is known to us.
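+ // In summary (descriptive comment only): remote >= local means we use our protocol and the remote decides whether it
+ // can follow; remote < local works only when the remote's protocol version is one we know how to emit; anything else
+ // falls through to the rejection below.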
+ return; + } + } + + final var message = Strings.format( + """ + Rejecting unreadable transport handshake from remote node with version [%s/%s] received on [%s] since this node has \ + version [%s/%s] which has an incompatible wire format.""", + releaseVersion, + remoteTransportVersion, + channel, + Build.current().version(), + localTransportVersion + ); + logger.warn(message); + throw new IllegalStateException(message); + + } + TransportResponseHandler removeHandlerForHandshake(long requestId) { return pendingHandshakes.remove(requestId); } @@ -261,11 +263,13 @@ long getNumHandshakes() { private class HandshakeResponseHandler implements TransportResponseHandler { private final long requestId; + private final TcpChannel channel; private final ActionListener listener; private final AtomicBoolean isDone = new AtomicBoolean(false); - private HandshakeResponseHandler(long requestId, ActionListener listener) { + private HandshakeResponseHandler(long requestId, TcpChannel channel, ActionListener listener) { this.requestId = requestId; + this.channel = channel; this.listener = listener; } @@ -282,20 +286,13 @@ public Executor executor() { @Override public void handleResponse(HandshakeResponse response) { if (isDone.compareAndSet(false, true)) { - TransportVersion responseVersion = response.transportVersion; - if (TransportVersion.isCompatible(responseVersion) == false) { - listener.onFailure( - new IllegalStateException( - "Received message from unsupported version: [" - + responseVersion - + "] minimal compatible version is: [" - + TransportVersions.MINIMUM_COMPATIBLE - + "]" - ) - ); - } else { - listener.onResponse(TransportVersion.min(TransportHandshaker.this.version, response.getTransportVersion())); - } + ActionListener.completeWith(listener, () -> { + ensureCompatibleVersion(version, response.getTransportVersion(), response.getReleaseVersion(), channel); + final var resultVersion = TransportVersion.min(TransportHandshaker.this.version, response.getTransportVersion()); + assert TransportVersion.current().before(version) // simulating a newer-version transport service for test purposes + || resultVersion.isKnown() : "negotiated unknown version " + resultVersion; + return resultVersion; + }); } } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportStats.java b/server/src/main/java/org/elasticsearch/transport/TransportStats.java index 46b161b01e9f3..87a86661b3546 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportStats.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportStats.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -70,18 +69,16 @@ public TransportStats(StreamInput in) throws IOException { rxSize = in.readVLong(); txCount = in.readVLong(); txSize = in.readVLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) && in.readBoolean()) { - inboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT]; - for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) { - inboundHandlingTimeBucketFrequencies[i] = in.readVLong(); - } - outboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT]; - for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) { - 
outboundHandlingTimeBucketFrequencies[i] = in.readVLong(); - } - } else { - inboundHandlingTimeBucketFrequencies = new long[0]; - outboundHandlingTimeBucketFrequencies = new long[0]; + if (in.getTransportVersion().before(TransportVersions.TRANSPORT_STATS_HANDLING_TIME_REQUIRED_90)) { + in.readBoolean(); + } + inboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT]; + for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) { + inboundHandlingTimeBucketFrequencies[i] = in.readVLong(); + } + outboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT]; + for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) { + outboundHandlingTimeBucketFrequencies[i] = in.readVLong(); } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { transportActionStats = Collections.unmodifiableMap(in.readOrderedMap(StreamInput::readString, TransportActionStats::new)); @@ -99,15 +96,16 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(rxSize); out.writeVLong(txCount); out.writeVLong(txSize); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { - assert (inboundHandlingTimeBucketFrequencies.length > 0) == (outboundHandlingTimeBucketFrequencies.length > 0); - out.writeBoolean(inboundHandlingTimeBucketFrequencies.length > 0); - for (long handlingTimeBucketFrequency : inboundHandlingTimeBucketFrequencies) { - out.writeVLong(handlingTimeBucketFrequency); - } - for (long handlingTimeBucketFrequency : outboundHandlingTimeBucketFrequencies) { - out.writeVLong(handlingTimeBucketFrequency); - } + assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT; + assert outboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT; + if (out.getTransportVersion().before(TransportVersions.TRANSPORT_STATS_HANDLING_TIME_REQUIRED_90)) { + out.writeBoolean(true); + } + for (long handlingTimeBucketFrequency : inboundHandlingTimeBucketFrequencies) { + out.writeVLong(handlingTimeBucketFrequency); + } + for (long handlingTimeBucketFrequency : outboundHandlingTimeBucketFrequencies) { + out.writeVLong(handlingTimeBucketFrequency); } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeMap(transportActionStats, StreamOutput::writeWriteable); @@ -166,24 +164,13 @@ public Map getTransportActionStats() { return transportActionStats; } - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - // Review and simplify the if-else blocks containing this symbol once v9 is released - private static final boolean IMPOSSIBLE_IN_V9 = true; - private boolean assertHistogramsConsistent() { assert inboundHandlingTimeBucketFrequencies.length == outboundHandlingTimeBucketFrequencies.length; - if (inboundHandlingTimeBucketFrequencies.length == 0) { - // Stats came from before v8.1 - assert IMPOSSIBLE_IN_V9; - } else { - assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT; - } + assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT; return true; } @Override - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - // review the "if" blocks checking for non-empty once we have public Iterator toXContentChunked(ToXContent.Params outerParams) { return Iterators.concat(Iterators.single((builder, params) -> { builder.startObject(Fields.TRANSPORT); @@ -193,19 +180,10 @@ public Iterator toXContentChunked(ToXContent.Params outerP 
builder.humanReadableField(Fields.RX_SIZE_IN_BYTES, Fields.RX_SIZE, ByteSizeValue.ofBytes(rxSize)); builder.field(Fields.TX_COUNT, txCount); builder.humanReadableField(Fields.TX_SIZE_IN_BYTES, Fields.TX_SIZE, ByteSizeValue.ofBytes(txSize)); - if (inboundHandlingTimeBucketFrequencies.length > 0) { - histogramToXContent(builder, inboundHandlingTimeBucketFrequencies, Fields.INBOUND_HANDLING_TIME_HISTOGRAM); - histogramToXContent(builder, outboundHandlingTimeBucketFrequencies, Fields.OUTBOUND_HANDLING_TIME_HISTOGRAM); - } else { - // Stats came from before v8.1 - assert IMPOSSIBLE_IN_V9; - } - if (transportActionStats.isEmpty() == false) { - builder.startObject(Fields.ACTIONS); - } else { - // Stats came from before v8.8 - assert IMPOSSIBLE_IN_V9; - } + assert inboundHandlingTimeBucketFrequencies.length > 0; + histogramToXContent(builder, inboundHandlingTimeBucketFrequencies, Fields.INBOUND_HANDLING_TIME_HISTOGRAM); + histogramToXContent(builder, outboundHandlingTimeBucketFrequencies, Fields.OUTBOUND_HANDLING_TIME_HISTOGRAM); + builder.startObject(Fields.ACTIONS); return builder; }), @@ -215,12 +193,7 @@ public Iterator toXContentChunked(ToXContent.Params outerP return builder; }), - Iterators.single((builder, params) -> { - if (transportActionStats.isEmpty() == false) { - builder.endObject(); - } - return builder.endObject(); - }) + Iterators.single((builder, params) -> { return builder.endObject().endObject(); }) ); } diff --git a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java index 186618f3662fb..cdd466c567e8b 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java @@ -15,7 +15,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; +import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -32,7 +34,6 @@ import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetadataUpdateSettingsService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -59,6 +60,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_INDEX_VERSION; +import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; import static org.elasticsearch.cluster.metadata.IndexMetadata.State.CLOSE; import static org.elasticsearch.core.Strings.format; @@ -448,12 +450,33 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer { + if (aliasesResponse.hasErrors()) { + var e = new ElasticsearchException("Aliases request had errors"); + for (var error : 
aliasesResponse.getErrors()) { + e.addSuppressed(error); + } + throw e; + } + logger.info( + "Successfully migrated old index [{}] to new index [{}] from feature [{}]", + oldIndexName, + migrationInfo.getNextIndexName(), + migrationInfo.getFeatureName() + ); + delegate2.onResponse(bulkByScrollResponse); + }, e -> { + logger.error( + () -> format( + "An error occurred while changing aliases and removing the old index [%s] from feature [%s]", + oldIndexName, + migrationInfo.getFeatureName() + ), + e + ); + removeReadOnlyBlockOnReindexFailure(oldIndex, delegate2, e); + })); } }, e -> { logger.error( @@ -511,10 +534,7 @@ private void createIndex(SystemIndexMigrationInfo migrationInfo, ActionListener< ); } - private CheckedBiConsumer, AcknowledgedResponse, Exception> setAliasAndRemoveOldIndex( - SystemIndexMigrationInfo migrationInfo, - BulkByScrollResponse bulkByScrollResponse - ) { + private void setAliasAndRemoveOldIndex(SystemIndexMigrationInfo migrationInfo, ActionListener listener) { final IndicesAliasesRequestBuilder aliasesRequest = migrationInfo.createClient(baseClient).admin().indices().prepareAliases(); aliasesRequest.removeIndex(migrationInfo.getCurrentIndexName()); aliasesRequest.addAlias(migrationInfo.getNextIndexName(), migrationInfo.getCurrentIndexName()); @@ -533,30 +553,42 @@ private CheckedBiConsumer, AcknowledgedResp ); }); - // Technically this callback might have a different cluster state, but it shouldn't matter - these indices shouldn't be changing - // while we're trying to migrate them. - return (listener, unsetReadOnlyResponse) -> aliasesRequest.execute( - listener.delegateFailureAndWrap((l, deleteIndexResponse) -> l.onResponse(bulkByScrollResponse)) - ); + aliasesRequest.execute(listener); } /** - * Makes the index readonly if it's not set as a readonly yet + * Sets the write block on the index to the given value. */ private void setWriteBlock(Index index, boolean readOnlyValue, ActionListener listener) { - final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), readOnlyValue).build(); - - metadataUpdateSettingsService.updateSettings( - new UpdateSettingsClusterStateUpdateRequest( - MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT, - TimeValue.ZERO, - readOnlySettings, - UpdateSettingsClusterStateUpdateRequest.OnExisting.OVERWRITE, - UpdateSettingsClusterStateUpdateRequest.OnStaticSetting.REJECT, - index - ), - listener - ); + if (readOnlyValue) { + // Setting the Block with an AddIndexBlockRequest ensures all shards have accounted for the block and all + // in-flight writes are completed before returning. + baseClient.admin() + .indices() + .addBlock( + new AddIndexBlockRequest(WRITE, index.getName()).masterNodeTimeout(MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT), + listener.delegateFailureAndWrap((l, response) -> { + if (response.isAcknowledged() == false) { + throw new ElasticsearchException("Failed to acknowledge read-only block index request"); + } + l.onResponse(response); + }) + ); + } else { + // The only way to remove a Block is via a settings update. 
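+ // A sketch of what that update amounts to (the key behind INDEX_BLOCKS_WRITE_SETTING is "index.blocks.write"):
+ //   Settings.builder().put("index.blocks.write", false).build()
+ // i.e. we flip the setting back to false rather than calling a block API, since there is no remove-block counterpart
+ // to the add-block request used above.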
+ final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), false).build(); + metadataUpdateSettingsService.updateSettings( + new UpdateSettingsClusterStateUpdateRequest( + MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT, + TimeValue.ZERO, + readOnlySettings, + UpdateSettingsClusterStateUpdateRequest.OnExisting.OVERWRITE, + UpdateSettingsClusterStateUpdateRequest.OnStaticSetting.REJECT, + index + ), + listener + ); + } } private void reindex(SystemIndexMigrationInfo migrationInfo, ActionListener listener) { diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 81d9bf5cb30a4..97b6dd76bf021 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -137,5 +137,7 @@ 8.16.1,8772004 8.16.2,8772004 8.16.3,8772004 +8.16.4,8772004 8.17.0,8797002 8.17.1,8797002 +8.17.2,8797002 diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 77aae99907dfc..ada61c118ec3c 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -23,6 +23,13 @@ grant codeBase "${codebase.mockito-core}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; permission java.lang.RuntimePermission "getClassLoader"; + // needed when calling bytebuddy + permission java.lang.RuntimePermission "createClassLoader"; + permission java.lang.RuntimePermission "net.bytebuddy.createJavaDispatcher"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.utility"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.dynamic.loading"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.description.type"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.description.method"; }; grant codeBase "${codebase.byte-buddy}" { diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 874d9fe3d0150..164516a562b70 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -137,5 +137,7 @@ 8.16.1,8518000 8.16.2,8518000 8.16.3,8518000 +8.16.4,8518000 8.17.0,8521000 8.17.1,8521000 +8.17.2,8521000 diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index 00429035f97d3..3866aaa5ed674 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -13,16 +13,13 @@ import org.elasticsearch.test.TransportVersionUtils; import java.lang.reflect.Modifier; -import java.util.Collections; -import java.util.List; import java.util.Set; -import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; @@ -30,6 +27,15 @@ public class TransportVersionTests extends ESTestCase { + /** + * This test is specific to V9, to ensure that transport versions are backported correctly. Do not forward-port or backport it, + * and do not adjust the TransportVersion it checks (INITIAL_ELASTICSEARCH_9_0). + * If the test fails, there is something wrong with your backport PR. + */ + public void testMaximumAllowedTransportVersion() { + assertThat(TransportVersions.LATEST_DEFINED.isPatchFrom(TransportVersions.INITIAL_ELASTICSEARCH_9_0), is(true)); + } + public void testVersionComparison() { TransportVersion V_8_2_0 = TransportVersions.V_8_2_0; TransportVersion V_8_16_0 = TransportVersions.V_8_16_0; @@ -70,13 +76,11 @@ public static class DuplicatedIdFakeVersion { public void testStaticTransportVersionChecks() { assertThat( TransportVersions.collectAllVersionIdsDefinedInClass(CorrectFakeVersion.class), - equalTo( - List.of( - CorrectFakeVersion.V_0_000_002, - CorrectFakeVersion.V_0_000_003, - CorrectFakeVersion.V_0_000_004, - CorrectFakeVersion.V_0_00_01 - ) + contains( + CorrectFakeVersion.V_0_000_002, + CorrectFakeVersion.V_0_000_003, + CorrectFakeVersion.V_0_000_004, + CorrectFakeVersion.V_0_00_01 ) ); AssertionError e = expectThrows( @@ -162,15 +166,15 @@ public void testMax() { } public void testIsPatchFrom() { - TransportVersion patchVersion = TransportVersion.fromId(8_800_00_4); - assertThat(TransportVersion.fromId(8_799_00_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_799_00_9).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_3).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_4).isPatchFrom(patchVersion), is(true)); - assertThat(TransportVersion.fromId(8_800_00_9).isPatchFrom(patchVersion), is(true)); - assertThat(TransportVersion.fromId(8_800_01_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_801_00_0).isPatchFrom(patchVersion), is(false)); + TransportVersion patchVersion = TransportVersion.fromId(8_800_0_04); + assertThat(TransportVersion.fromId(8_799_0_00).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_799_0_09).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_00).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_03).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_04).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_0_49).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_1_00).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_801_0_00).isPatchFrom(patchVersion), is(false)); } public void testVersionConstantPresent() { @@ -185,7 +189,20 @@ public void testVersionConstantPresent() { } public void testCURRENTIsLatest() { - assertThat(Collections.max(TransportVersion.getAllVersions()), is(TransportVersion.current())); + assertThat(TransportVersion.getAllVersions().getLast(), is(TransportVersion.current())); + } + + public void testPatchVersionsStillAvailable() { + for (TransportVersion tv : TransportVersion.getAllVersions()) { + if (tv.onOrAfter(TransportVersions.V_8_9_X) && (tv.id() % 100) > 90) { + fail( + "Transport version " + tv +
" is nearing the limit of available patch numbers." + + " Please inform the Core/Infra team that isPatchFrom may need to be modified" + ); + } + } } public void testToReleaseVersion() { @@ -199,40 +216,4 @@ public void testToString() { assertEquals("2000099", TransportVersion.fromId(2_00_00_99).toString()); assertEquals("5000099", TransportVersion.fromId(5_00_00_99).toString()); } - - /** - * Until 9.0 bumps its transport version to 9_000_00_0, all transport changes must be backported to 8.x. - * This test ensures transport versions are dense, so that we have confidence backports have not been missed. - * Note that it does not ensure patches are not missed, but it should catch the majority of misordered - * or missing transport versions. - */ - public void testDenseTransportVersions() { - Set missingVersions = new TreeSet<>(); - TransportVersion previous = null; - for (var tv : TransportVersion.getAllVersions()) { - if (tv.before(TransportVersions.V_8_16_0)) { - continue; - } - if (previous == null) { - previous = tv; - continue; - } - - if (previous.id() + 1000 < tv.id()) { - int nextId = previous.id(); - do { - nextId = (nextId + 1000) / 1000 * 1000; - missingVersions.add(nextId); - } while (nextId + 1000 < tv.id()); - } - previous = tv; - } - if (missingVersions.isEmpty() == false) { - StringBuilder msg = new StringBuilder("Missing transport versions:\n"); - for (Integer id : missingVersions) { - msg.append(" " + id + "\n"); - } - fail(msg.toString()); - } - } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java index 13479d1885360..d3e5c1c7268fa 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -31,14 +30,10 @@ import java.util.Arrays; import java.util.Base64; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; -import static org.hamcrest.CoreMatchers.containsString; - public class GetSnapshotsResponseTests extends ESTestCase { // We can not subclass AbstractSerializingTestCase because it // can only be used for instances with equals and hashCode @@ -60,12 +55,6 @@ private GetSnapshotsResponse copyInstance(GetSnapshotsResponse instance) throws private void assertEqualInstances(GetSnapshotsResponse expectedInstance, GetSnapshotsResponse newInstance) { assertEquals(expectedInstance.getSnapshots(), newInstance.getSnapshots()); assertEquals(expectedInstance.next(), newInstance.next()); - assertEquals(expectedInstance.getFailures().keySet(), newInstance.getFailures().keySet()); - for (Map.Entry expectedEntry : expectedInstance.getFailures().entrySet()) { - ElasticsearchException expectedException = expectedEntry.getValue(); - ElasticsearchException newException = newInstance.getFailures().get(expectedEntry.getKey()); - assertThat(newException.getMessage(), containsString(expectedException.getMessage())); - } } private List createSnapshotInfos(String repoName) { @@ -99,7 
+88,6 @@ private List createSnapshotInfos(String repoName) { private GetSnapshotsResponse createTestInstance() { Set repositories = new HashSet<>(); - Map failures = new HashMap<>(); List responses = new ArrayList<>(); for (int i = 0; i < randomIntBetween(0, 5); i++) { @@ -111,12 +99,10 @@ private GetSnapshotsResponse createTestInstance() { for (int i = 0; i < randomIntBetween(0, 5); i++) { String repository = randomValueOtherThanMany(repositories::contains, () -> randomAlphaOfLength(10)); repositories.add(repository); - failures.put(repository, new ElasticsearchException(randomAlphaOfLength(10))); } return new GetSnapshotsResponse( responses, - failures, randomBoolean() ? Base64.getUrlEncoder() .encodeToString( diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java index c99c671c69148..fa57431cc582a 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.TransportVersionUtils; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -37,7 +36,6 @@ import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomMappingHashToIndices; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.nullValue; public class FieldCapabilitiesNodeResponseTests extends AbstractWireSerializingTestCase { @@ -145,48 +143,6 @@ public void testSerializeNodeResponseBetweenNewNodes() throws Exception { } } - public void testSerializeNodeResponseBetweenOldNodes() throws IOException { - final TransportVersion minCompactVersion = TransportVersions.MINIMUM_COMPATIBLE; - assertTrue("Remove this test once minCompactVersion >= 8.2.0", minCompactVersion.before(TransportVersions.V_8_2_0)); - List indexResponses = CollectionUtils.concatLists( - randomIndexResponsesWithMappingHash(randomMappingHashToIndices()), - randomIndexResponsesWithoutMappingHash() - ); - Randomness.shuffle(indexResponses); - FieldCapabilitiesNodeResponse inResponse = randomNodeResponse(indexResponses); - TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - minCompactVersion, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_2_0) - ); - final FieldCapabilitiesNodeResponse outResponse = copyInstance(inResponse, version); - assertThat(outResponse.getFailures().keySet(), equalTo(inResponse.getFailures().keySet())); - assertThat(outResponse.getUnmatchedShardIds(), equalTo(inResponse.getUnmatchedShardIds())); - final List inList = inResponse.getIndexResponses(); - final List outList = outResponse.getIndexResponses(); - assertThat(outList, hasSize(inList.size())); - for (int i = 0; i < inList.size(); i++) { - assertThat("Responses between old nodes don't have mapping hash", outList.get(i).getIndexMappingHash(), nullValue()); - assertThat(outList.get(i).getIndexName(), equalTo(inList.get(i).getIndexName())); - assertThat(outList.get(i).canMatch(), equalTo(inList.get(i).canMatch())); - Map outCap = outList.get(i).get(); - Map inCap = inList.get(i).get(); - if (version.onOrAfter(TransportVersions.V_8_0_0)) { - assertThat(outCap, equalTo(inCap)); - 
} else { - // Exclude metric types which was introduced in 8.0 - assertThat(outCap.keySet(), equalTo(inCap.keySet())); - for (String field : outCap.keySet()) { - assertThat(outCap.get(field).name(), equalTo(inCap.get(field).name())); - assertThat(outCap.get(field).type(), equalTo(inCap.get(field).type())); - assertThat(outCap.get(field).isSearchable(), equalTo(inCap.get(field).isSearchable())); - assertThat(outCap.get(field).isAggregatable(), equalTo(inCap.get(field).isAggregatable())); - assertThat(outCap.get(field).meta(), equalTo(inCap.get(field).meta())); - } - } - } - } - private static FieldCapabilitiesNodeResponse randomNodeResponse(List indexResponses) { int numUnmatched = randomIntBetween(0, 3); final Set unmatchedShardIds = new HashSet<>(); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java index 6ea4a1d3dc46b..ceb84e4b2a0d9 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -40,7 +40,6 @@ import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomMappingHashToIndices; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.nullValue; public class FieldCapabilitiesResponseTests extends AbstractWireSerializingTestCase { @@ -198,48 +197,4 @@ public void testSerializeCCSResponseBetweenNewClusters() throws Exception { } } } - - public void testSerializeCCSResponseBetweenOldClusters() throws IOException { - TransportVersion minCompactVersion = TransportVersions.MINIMUM_COMPATIBLE; - assertTrue("Remove this test once minCompactVersion >= 8.2.0", minCompactVersion.before(TransportVersions.V_8_2_0)); - List indexResponses = CollectionUtils.concatLists( - randomIndexResponsesWithMappingHash(randomMappingHashToIndices()), - randomIndexResponsesWithoutMappingHash() - ); - Randomness.shuffle(indexResponses); - FieldCapabilitiesResponse inResponse = randomCCSResponse(indexResponses); - TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - minCompactVersion, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_2_0) - ); - final FieldCapabilitiesResponse outResponse = copyInstance(inResponse, version); - assertThat( - outResponse.getFailures().stream().flatMap(f -> Arrays.stream(f.getIndices())).toList(), - equalTo(inResponse.getFailures().stream().flatMap(f -> Arrays.stream(f.getIndices())).toList()) - ); - final List inList = inResponse.getIndexResponses(); - final List outList = outResponse.getIndexResponses(); - assertThat(outList, hasSize(inList.size())); - for (int i = 0; i < inList.size(); i++) { - assertThat("Responses between old clusters don't have mapping hash", outList.get(i).getIndexMappingHash(), nullValue()); - assertThat(outList.get(i).getIndexName(), equalTo(inList.get(i).getIndexName())); - assertThat(outList.get(i).canMatch(), equalTo(inList.get(i).canMatch())); - Map outCap = outList.get(i).get(); - Map inCap = inList.get(i).get(); - if (version.onOrAfter(TransportVersions.V_8_0_0)) { - assertThat(outCap, equalTo(inCap)); - } else { - // Exclude metric types which was introduced in 8.0 - assertThat(outCap.keySet(), equalTo(inCap.keySet())); - for (String field : outCap.keySet()) { - assertThat(outCap.get(field).name(), 
equalTo(inCap.get(field).name())); - assertThat(outCap.get(field).type(), equalTo(inCap.get(field).type())); - assertThat(outCap.get(field).isSearchable(), equalTo(inCap.get(field).isSearchable())); - assertThat(outCap.get(field).isAggregatable(), equalTo(inCap.get(field).isAggregatable())); - assertThat(outCap.get(field).meta(), equalTo(inCap.get(field).meta())); - } - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java index d84ee0267251a..8eaff521068e8 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java @@ -9,11 +9,11 @@ package org.elasticsearch.bootstrap; -import org.elasticsearch.entitlement.runtime.policy.InboundNetworkEntitlement; -import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; -import org.elasticsearch.entitlement.runtime.policy.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.Scope; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; import org.elasticsearch.test.ESTestCase; import java.util.List; diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index 3c680d891ff13..75cc99e4c280e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -612,11 +612,7 @@ public void testStartedShardEntrySerializationWithOlderTransportVersion() throws final String allocationId = randomRealisticUnicodeOfCodepointLengthBetween(10, 100); final long primaryTerm = randomIntBetween(0, 100); final String message = randomRealisticUnicodeOfCodepointLengthBetween(10, 100); - final TransportVersion version = randomFrom( - getFirstVersion(), - getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE), - getPreviousVersion(TransportVersions.V_8_15_0) - ); + final TransportVersion version = randomFrom(getFirstVersion(), getPreviousVersion(TransportVersions.V_8_15_0)); final ShardLongFieldRange timestampRange = ShardLongFieldRangeWireTests.randomRange(); final ShardLongFieldRange eventIngestedRange = ShardLongFieldRangeWireTests.randomRange(); var startedShardEntry = new StartedShardEntry(shardId, allocationId, primaryTerm, message, timestampRange, eventIngestedRange); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolverTests.java index 5ab5ed1c23e4f..286e1d3afaeef 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolverTests.java @@ -13,15 +13,24 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.IndexVersion; import 
org.elasticsearch.indices.EmptySystemIndices; import org.elasticsearch.indices.InvalidIndexNameException; +import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; +import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; +import static org.elasticsearch.indices.SystemIndices.SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY; +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.either; @@ -220,13 +229,78 @@ private boolean isIndexVisible(String index, String selector) { "*", selector, index, - IndicesOptions.strictExpandOpen(), + IndicesOptions.strictExpandHidden(), metadata, indexNameExpressionResolver, true ); } + public void testIsNetNewSystemIndexVisible() { + final Settings settings = Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", 1) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .build(); + + final Settings hiddenSettings = Settings.builder().put(settings).put("index.hidden", true).build(); + + final IndexMetadata foo = IndexMetadata.builder(".foo").settings(hiddenSettings).system(true).build(); + final IndexMetadata barReindexed = IndexMetadata.builder(".bar-reindexed") + .settings(hiddenSettings) + .system(true) + .putAlias(AliasMetadata.builder(".bar").isHidden(true).build()) + .build(); + final IndexMetadata other = IndexMetadata.builder("other").settings(settings).build(); + + final SystemIndexDescriptor fooDescriptor = SystemIndexDescriptor.builder() + .setDescription("foo indices") + .setOrigin("foo origin") + .setPrimaryIndex(".foo") + .setIndexPattern(".foo*") + .setSettings(settings) + .setMappings(mappings()) + .setNetNew() + .build(); + final SystemIndexDescriptor barDescriptor = SystemIndexDescriptor.builder() + .setDescription("bar indices") + .setOrigin("bar origin") + .setPrimaryIndex(".bar") + .setIndexPattern(".bar*") + .setSettings(settings) + .setMappings(mappings()) + .setNetNew() + .build(); + final SystemIndices systemIndices = new SystemIndices( + List.of(new SystemIndices.Feature("name", "description", List.of(fooDescriptor, barDescriptor))) + ); + + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "false"); + indexNameExpressionResolver = new IndexNameExpressionResolver(threadContext, systemIndices); + indexAbstractionResolver = new IndexAbstractionResolver(indexNameExpressionResolver); + + metadata = Metadata.builder().put(foo, true).put(barReindexed, true).put(other, true).build(); + + assertThat(isIndexVisible("other", "*"), is(true)); + assertThat(isIndexVisible(".foo", "*"), is(false)); + assertThat(isIndexVisible(".bar", "*"), is(false)); + } + + private static XContentBuilder mappings() { + try (XContentBuilder builder = jsonBuilder()) { + return builder.startObject() + .startObject(SINGLE_MAPPING_NAME) + .startObject("_meta") + .field(SystemIndexDescriptor.VERSION_META_KEY, 0) + .endObject() + .endObject() + .endObject(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + private List 
resolveAbstractionsSelectorNotAllowed(List expressions) { return resolveAbstractions(expressions, IndicesOptions.strictExpandHiddenNoSelectors(), defaultMask); } diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java index a91cef576df33..744a12d5ab6e0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java @@ -31,8 +31,6 @@ import static java.util.Collections.emptySet; import static org.elasticsearch.test.NodeRoles.nonRemoteClusterClientNode; import static org.elasticsearch.test.NodeRoles.remoteClusterClientNode; -import static org.elasticsearch.test.TransportVersionUtils.getPreviousVersion; -import static org.elasticsearch.test.TransportVersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -274,39 +272,5 @@ public void testDiscoveryNodeMinReadOnlyVersionSerialization() throws Exception } } } - - { - var oldVersion = randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - getPreviousVersion(TransportVersions.NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION) - ); - try (var out = new BytesStreamOutput()) { - out.setTransportVersion(oldVersion); - node.writeTo(out); - - try (var in = StreamInput.wrap(out.bytes().array())) { - in.setTransportVersion(oldVersion); - - var deserialized = new DiscoveryNode(in); - assertThat(deserialized.getId(), equalTo(node.getId())); - assertThat(deserialized.getAddress(), equalTo(node.getAddress())); - assertThat(deserialized.getMinIndexVersion(), equalTo(node.getMinIndexVersion())); - assertThat(deserialized.getMaxIndexVersion(), equalTo(node.getMaxIndexVersion())); - assertThat(deserialized.getMinReadOnlyIndexVersion(), equalTo(node.getMinIndexVersion())); - assertThat( - deserialized.getVersionInformation(), - equalTo( - new VersionInformation( - node.getBuildVersion(), - node.getMinIndexVersion(), - node.getMinIndexVersion(), - node.getMaxIndexVersion() - ) - ) - ); - } - } - } } } diff --git a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java index ebf1064c2ae3f..77ae472065b08 100644 --- a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java @@ -104,7 +104,7 @@ public void setUp() throws Exception { env = newEnvironment(Settings.EMPTY); - Files.createDirectories(env.configFile()); + Files.createDirectories(env.configDir()); fileWatchingService = new TestFileWatchingService(getWatchedFilePath(env)); } @@ -203,7 +203,7 @@ private void writeTestFile(Path path, String contents) throws IOException { } private static Path getWatchedFilePath(Environment env) { - return env.configFile().toAbsolutePath().resolve("test").resolve("test.json"); + return env.configDir().toAbsolutePath().resolve("test").resolve("test.json"); } } diff --git a/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java b/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java index 0dd1db64c144f..0c2dc68a01464 100644 --- 
a/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java @@ -97,7 +97,7 @@ public void testCreate() { } public void testProcessSettingsFile() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getVersion(), equalTo(1L)); @@ -109,7 +109,7 @@ public void testProcessSettingsFile() throws Exception { } public void testProcessDeprecatedSettingsFile() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSONDepricated); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSONDepricated); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getVersion(), equalTo(1L)); @@ -119,7 +119,7 @@ public void testProcessDeprecatedSettingsFile() throws Exception { } public void testDuplicateSettingKeys() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSONDuplicateKeys); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSONDuplicateKeys); Exception e = expectThrows(Exception.class, () -> new LocallyMountedSecrets(env)); assertThat(e, instanceOf(XContentParseException.class)); assertThat(e.getMessage(), containsString("failed to parse field")); @@ -134,7 +134,7 @@ public void testDuplicateSettingKeys() throws Exception { } public void testSettingsGetFile() throws IOException, GeneralSecurityException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getSettingNames(), containsInAnyOrder("aaa", "ccc", "eee")); @@ -165,7 +165,7 @@ public void testSettingsGetFile() throws IOException, GeneralSecurityException { } public void testSettingsSHADigest() throws IOException, GeneralSecurityException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getSettingNames(), containsInAnyOrder("aaa", "ccc", "eee")); @@ -178,7 +178,7 @@ public void testSettingsSHADigest() throws IOException, GeneralSecurityException } public void testProcessBadSettingsFile() throws IOException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), noMetadataJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), noMetadataJSON); assertThat( expectThrows(IllegalArgumentException.class, () -> new LocallyMountedSecrets(env)).getMessage(), containsString("Required [metadata]") @@ -186,7 +186,7 @@ public void testProcessBadSettingsFile() throws IOException { } public void testSerializationWithSecrets() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new 
LocallyMountedSecrets(env); final BytesStreamOutput out = new BytesStreamOutput(); @@ -213,7 +213,7 @@ public void testSerializationNewlyCreated() throws Exception { } public void testClose() throws IOException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertEquals("bbb", secrets.getString("aaa").toString()); assertEquals("ddd", secrets.getString("ccc").toString()); diff --git a/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java b/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java index 32a8de20df9aa..0874a106e59e7 100644 --- a/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java +++ b/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java @@ -11,6 +11,8 @@ import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.equalTo; + public class SizeLimitingStringWriterTests extends ESTestCase { public void testSizeIsLimited() { SizeLimitingStringWriter writer = new SizeLimitingStringWriter(10); @@ -26,4 +28,11 @@ public void testSizeIsLimited() { expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.append("a")); expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.append("a", 0, 1)); } + + public void testLimitMessage() { + SizeLimitingStringWriter writer = new SizeLimitingStringWriter(3); + + var e = expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.write("abcdefgh")); + assertThat(e.getMessage(), equalTo("String [abc...] 
has size [8] which exceeds the size limit [3]")); + } } diff --git a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 25ac11b516dc0..834f53dc410da 100644 --- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -34,20 +34,20 @@ public class EnvironmentTests extends ESTestCase { public void testRepositoryResolution() throws IOException { Environment environment = newEnvironment(); - assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue()); - assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("test/repos/repo1"), nullValue()); environment = newEnvironment( Settings.builder() .putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other") .build() ); - assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("/test/repos/../repo1"), nullValue()); - assertThat(environment.resolveRepoFile("/test/repos/../repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("/somethingeles/repos/repo1"), nullValue()); - assertThat(environment.resolveRepoFile("/test/other/repo"), notNullValue()); + assertThat(environment.resolveRepoDir("/test/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("test/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/another/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/test/repos/../repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/repos/../repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/somethingeles/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/other/repo"), notNullValue()); assertThat(environment.resolveRepoURL(new URL("file:///test/repos/repo1")), notNullValue()); assertThat(environment.resolveRepoURL(new URL("file:/test/repos/repo1")), notNullValue()); @@ -66,7 +66,7 @@ public void testPathDataWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.dataFiles(), equalTo(new Path[] { pathHome.resolve("data") })); + assertThat(environment.dataDirs(), equalTo(new Path[] { pathHome.resolve("data") })); } public void testPathDataNotSetInEnvironmentIfNotSet() { @@ -82,41 +82,41 @@ public void testPathDataLegacyCommaList() { .put("path.data", createTempDir().toAbsolutePath() + "," + createTempDir().toAbsolutePath()) .build(); final Environment environment = new Environment(settings, null); - assertThat(environment.dataFiles(), arrayWithSize(2)); + assertThat(environment.dataDirs(), arrayWithSize(2)); } public void testPathLogsWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.logsFile(), equalTo(pathHome.resolve("logs"))); + assertThat(environment.logsDir(), 
equalTo(pathHome.resolve("logs"))); } public void testDefaultConfigPath() { final Path path = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", path).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.configFile(), equalTo(path.resolve("config"))); + assertThat(environment.configDir(), equalTo(path.resolve("config"))); } public void testConfigPath() { final Path configPath = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", createTempDir().toAbsolutePath()).build(); final Environment environment = new Environment(settings, configPath); - assertThat(environment.configFile(), equalTo(configPath)); + assertThat(environment.configDir(), equalTo(configPath)); } public void testConfigPathWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.configFile(), equalTo(pathHome.resolve("config"))); + assertThat(environment.configDir(), equalTo(pathHome.resolve("config"))); } public void testNonExistentTempPathValidation() { Settings build = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(build, null, createTempDir().resolve("this_does_not_exist")); - FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpFile); + FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpDir); assertThat(e.getMessage(), startsWith("Temporary directory [")); assertThat(e.getMessage(), endsWith("this_does_not_exist] does not exist or is not accessible")); } @@ -124,7 +124,7 @@ public void testNonExistentTempPathValidation() { public void testTempPathValidationWhenRegularFile() throws IOException { Settings build = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(build, null, createTempFile("something", ".test")); - IOException e = expectThrows(IOException.class, environment::validateTmpFile); + IOException e = expectThrows(IOException.class, environment::validateTmpDir); assertThat(e.getMessage(), startsWith("Temporary directory [")); assertThat(e.getMessage(), endsWith(".test] is not a directory")); } diff --git a/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java b/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java index ab0ccb129fe57..0cfa9716c5fe7 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java @@ -131,7 +131,7 @@ public void testCleanupAll() throws Exception { boolean hasClusterState = randomBoolean(); createIndexDataFiles(dataMasterSettings, shardCount, hasClusterState); - String messageText = NodeRepurposeCommand.noMasterMessage(1, environment.dataFiles().length * shardCount, 0); + String messageText = NodeRepurposeCommand.noMasterMessage(1, environment.dataDirs().length * shardCount, 0); Matcher outputMatcher = allOf( containsString(messageText), @@ -157,7 +157,7 @@ public void testCleanupShardData() throws Exception { createIndexDataFiles(dataMasterSettings, shardCount, hasClusterState); Matcher matcher = allOf( - containsString(NodeRepurposeCommand.shardMessage(environment.dataFiles().length * 
shardCount, 1)), + containsString(NodeRepurposeCommand.shardMessage(environment.dataDirs().length * shardCount, 1)), conditionalNot(containsString("testUUID"), verbose == false), conditionalNot(containsString("testIndex"), verbose == false || hasClusterState == false), conditionalNot(containsString("no name for uuid: testUUID"), verbose == false || hasClusterState) @@ -271,7 +271,7 @@ private void verifyUnchangedDataFiles(CheckedRunnable runna private long digestPaths() { // use a commutative digest to avoid dependency on file system order. - return Arrays.stream(environment.dataFiles()).mapToLong(this::digestPath).sum(); + return Arrays.stream(environment.dataDirs()).mapToLong(this::digestPath).sum(); } private long digestPath(Path path) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index d12bf5dc2e34c..628b64de19bd1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -2420,6 +2420,34 @@ public void testStoredArrayWithFlatFields() throws IOException { {"outer":{"inner":[{"a.b":"a.b","a.c":"a.c"}]}}""", syntheticSource); } + public void testSingleDeepIgnoredField() throws IOException { + DocumentMapper documentMapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("top"); + b.startObject("properties"); + { + b.startObject("level1").startObject("properties"); + { + b.startObject("level2").startObject("properties"); + { + b.startObject("n") + .field("type", "integer") + .field("doc_values", "false") + .field("synthetic_source_keep", "all") + .endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startObject("top").startObject("level1").startObject("level2").field("n", 25).endObject().endObject().endObject(); + }); + assertEquals("{\"top\":{\"level1\":{\"level2\":{\"n\":25}}}}", syntheticSource); + } + protected void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader) throws IOException { // We exclude ignored source field since in some cases it contains an exact copy of a part of document source. 
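An aside on the IgnoredSourceFieldMapperTests addition above: testSingleDeepIgnoredField covers the case where a value survives only in the _ignored_source stored field (doc values disabled, synthetic_source_keep set to "all"), so synthetic source has to rebuild the full object path around it. Since the XContentBuilder chain in the test is hard to read, here is the same mapping rendered as the JSON a user would put in an index mapping; the field names and settings come straight from the test, and the rendering itself is purely illustrative, not part of the change:

// Illustrative JSON form of the mapping that testSingleDeepIgnoredField
// builds programmatically via XContentBuilder (not part of the diff).
String mappingSketch = """
    {
      "properties": {
        "top": {
          "properties": {
            "level1": {
              "properties": {
                "level2": {
                  "properties": {
                    "n": {
                      "type": "integer",
                      "doc_values": false,
                      "synthetic_source_keep": "all"
                    }
                  }
                }
              }
            }
          }
        }
      }
    }""";

Indexing {"top":{"level1":{"level2":{"n":25}}}} against this mapping and reading back the synthetic source must reproduce exactly that nesting, which is what the assertEquals at the end of the test verifies.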
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 507314b31d00d..c89753214b6a9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -15,6 +15,9 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -26,6 +29,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -304,16 +308,56 @@ public void testMappingRecoverySkipFieldNameLengthLimit() throws Throwable { public void testIsMetadataField() throws IOException { IndexVersion version = IndexVersionUtils.randomCompatibleVersion(random()); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - MapperService mapperService = createMapperService(settings, mapping(b -> {})); - assertFalse(mapperService.isMetadataField(randomAlphaOfLengthBetween(10, 15))); + CheckedFunction<IndexMode, MapperService, IOException> initMapperService = (indexMode) -> { + Settings.Builder settingsBuilder = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, version) + .put(IndexSettings.MODE.getKey(), indexMode); - for (String builtIn : IndicesModule.getBuiltInMetadataFields()) { - if (NestedPathFieldMapper.NAME.equals(builtIn) && version.before(IndexVersions.V_8_0_0)) { - continue; // Nested field does not exist in the 7x line + if (indexMode == IndexMode.TIME_SERIES) { + settingsBuilder.put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "foo"); } - assertTrue("Expected " + builtIn + " to be a metadata field for version " + version, mapperService.isMetadataField(builtIn)); + + return createMapperService(settingsBuilder.build(), mapping(b -> {})); + }; + + Consumer<MapperService> assertMapperService = (mapperService) -> { + assertFalse(mapperService.isMetadataField(randomAlphaOfLengthBetween(10, 15))); + + for (String builtIn : IndicesModule.getBuiltInMetadataFields()) { + if (NestedPathFieldMapper.NAME.equals(builtIn) && version.before(IndexVersions.V_8_0_0)) { + continue; // Nested field does not exist in the 7x line + } + boolean isTimeSeriesField = builtIn.equals("_tsid") || builtIn.equals("_ts_routing_hash"); + boolean isTimeSeriesMode = mapperService.getIndexSettings().getMode().equals(IndexMode.TIME_SERIES); + + if (isTimeSeriesField && isTimeSeriesMode == false) { + assertFalse( + "Expected " + + builtIn + + " to not be a metadata field for version " + + version + + " and index mode " + + mapperService.getIndexSettings().getMode(), + mapperService.isMetadataField(builtIn) + ); + } else { + assertTrue( + "Expected " + + builtIn + + " to be a metadata field for version " + + version + + " and index mode " + + mapperService.getIndexSettings().getMode(), + mapperService.isMetadataField(builtIn) + ); + } + } + }; + + for (IndexMode indexMode : IndexMode.values()) { + MapperService mapperService = initMapperService.apply(indexMode); +
assertMapperService.accept(mapperService); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java index b87ab09c530d6..4b674cf1985b2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.hamcrest.CoreMatchers; @@ -327,11 +326,7 @@ public void testBlankFieldNameBefore8_6_0() throws Exception { IndexVersions.MINIMUM_READONLY_COMPATIBLE, IndexVersions.V_8_5_0 ); - TransportVersion transportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersions.V_8_5_0 - ); + TransportVersion transportVersion = TransportVersions.V_8_5_0; { XContentBuilder builder = mapping(b -> b.startObject(" ").field("type", randomFieldType()).endObject()); MappingParser mappingParser = createMappingParser(Settings.EMPTY, version, transportVersion); diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index abaab1ac8983b..dd6baee601146 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -453,7 +453,7 @@ public void testRegisterHunspellDictionary() throws Exception { InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff"); InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic"); Dictionary dictionary; - try (Directory tmp = newFSDirectory(environment.tmpFile())) { + try (Directory tmp = newFSDirectory(environment.tmpDir())) { dictionary = new Dictionary(tmp, "hunspell", aff, dic); } AnalysisModule module = new AnalysisModule(environment, singletonList(new AnalysisPlugin() { diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 78baa1699df00..62ef2fd3afc69 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -2096,7 +2096,7 @@ public void testStatName() { Processor processor = mock(Processor.class); String name = randomAlphaOfLength(10); when(processor.getType()).thenReturn(name); - assertThat(IngestService.getProcessorName(processor), equalTo(name)); + assertThat(IngestService.getProcessorName(processor), sameInstance(name)); String tag = randomAlphaOfLength(10); when(processor.getTag()).thenReturn(tag); assertThat(IngestService.getProcessorName(processor), equalTo(name + ":" + tag)); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java index dc3fb2a473f43..8babb8bb9d395 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java @@ -19,6 +19,8 
@@ import java.util.Map; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; public class IngestStatsTests extends ESTestCase { @@ -31,6 +33,38 @@ public void testSerialization() throws IOException { assertIngestStats(ingestStats, serializedStats); } + public void testIdentitySerialization() throws IOException { + IngestStats serializedStats = serialize(IngestStats.IDENTITY); + assertThat(serializedStats, sameInstance(IngestStats.IDENTITY)); + } + + public void testProcessorNameAndTypeIdentitySerialization() throws IOException { + IngestStats.Builder builder = new IngestStats.Builder(); + builder.addPipelineMetrics("pipeline_id", new IngestPipelineMetric()); + builder.addProcessorMetrics("pipeline_id", "set", "set", new IngestMetric()); + builder.addProcessorMetrics("pipeline_id", "set:foo", "set", new IngestMetric()); + builder.addProcessorMetrics("pipeline_id", "set:bar", "set", new IngestMetric()); + builder.addTotalMetrics(new IngestMetric()); + + IngestStats serializedStats = serialize(builder.build()); + List processorStats = serializedStats.processorStats().get("pipeline_id"); + + // these are just table stakes + assertThat(processorStats.get(0).name(), is("set")); + assertThat(processorStats.get(0).type(), is("set")); + assertThat(processorStats.get(1).name(), is("set:foo")); + assertThat(processorStats.get(1).type(), is("set")); + assertThat(processorStats.get(2).name(), is("set:bar")); + assertThat(processorStats.get(2).type(), is("set")); + + // this is actually interesting, though -- we're canonical-izing these strings to keep our heap usage under control + final String set = processorStats.get(0).name(); + assertThat(processorStats.get(0).name(), sameInstance(set)); + assertThat(processorStats.get(0).type(), sameInstance(set)); + assertThat(processorStats.get(1).type(), sameInstance(set)); + assertThat(processorStats.get(2).type(), sameInstance(set)); + } + public void testStatsMerge() { var first = randomStats(); var second = randomStats(); diff --git a/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java b/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java index 32edcc0ad82aa..c0e1c1143ef42 100644 --- a/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java +++ b/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java @@ -57,7 +57,7 @@ public void testEmptySettings() { assertEquals(defaultNodeName, settings.get("node.name")); assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings); - String configDir = env.configFile().toString(); + String configDir = env.configDir().toString(); assertTrue(configDir, configDir.startsWith(home)); assertEquals("elasticsearch", settings.get("cluster.name")); } diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java index 97158e27b8528..8129f67947cf9 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -52,7 +52,7 @@ public class PluginsLoaderTests extends ESTestCase { static PluginsLoader newPluginsLoader(Settings settings) { return PluginsLoader.createPluginsLoader( Set.of(), - 
PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(), false ); @@ -121,7 +121,7 @@ public void testStablePluginWithNativeAccess() throws Exception { var pluginsLoader = PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(STABLE_PLUGIN_NAME, Set.of(STABLE_PLUGIN_MODULE_NAME)), false ); @@ -182,7 +182,7 @@ public void testModularPluginLoadingWithNativeAccess() throws Exception { var pluginsLoader = PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(MODULAR_PLUGIN_NAME, Set.of(MODULAR_PLUGIN_MODULE_NAME)), false ); diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index e76994f69c01e..57158df344a3f 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -70,7 +70,7 @@ static PluginsService newPluginsService(Settings settings) { null, PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(), false ) diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index f5ebacde08820..250d10855b23f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -40,9 +40,12 @@ import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; import static org.elasticsearch.repositories.RepositoryData.MISSING_UUID; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.not; /** * Tests for the {@link RepositoryData} class. 
@@ -430,6 +433,19 @@ public void testFailsIfMinVersionNotSatisfied() throws IOException { } } + public void testToString() { + final var repositoryData = generateRandomRepoData(); + assertThat( + repositoryData.toString(), + allOf( + containsString("RepositoryData"), + containsString(repositoryData.getUuid()), + containsString(Long.toString(repositoryData.getGenId())), + not(containsString("@")) // not the default Object#toString which does a very expensive hashcode computation + ) + ); + } + public static RepositoryData generateRandomRepoData() { final int numIndices = randomIntBetween(1, 30); final List indices = new ArrayList<>(numIndices); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 7cb12c1b316e8..d39b54a3f09f7 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -127,7 +127,7 @@ public void setUp() throws Exception { clusterService.getMasterService().setClusterStateSupplier(() -> clusterState); env = newEnvironment(Settings.EMPTY); - Files.createDirectories(env.configFile()); + Files.createDirectories(env.configDir()); ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); @@ -176,7 +176,7 @@ public void testStartStop() { public void testOperatorDirName() { Path operatorPath = fileSettingsService.watchedFileDir(); - assertTrue(operatorPath.startsWith(env.configFile())); + assertTrue(operatorPath.startsWith(env.configDir())); assertTrue(operatorPath.endsWith("operator")); Path operatorSettingsFile = fileSettingsService.watchedFile(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 8a72f8af7035c..06600441b0a44 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -343,10 +343,7 @@ private void testRandomCase(boolean forceMerge, boolean missingBucket, int index final SortedDocsProducer docsProducer = sources[0].createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()); for (LeafReaderContext leafReaderContext : reader.leaves()) { if (docsProducer != null && withProducer) { - assertEquals( - DocIdSet.EMPTY, - docsProducer.processLeaf(new MatchAllDocsQuery(), queue, leafReaderContext, false) - ); + assertEquals(DocIdSet.EMPTY, docsProducer.processLeaf(queue, leafReaderContext, false)); } else { final LeafBucketCollector leafCollector = new LeafBucketCollector() { @Override diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java index b417f7adbc8b7..aa9ca26974bad 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; +import 
org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.Scorable; @@ -38,14 +39,29 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CharsRefBuilder; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchShardTask; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.text.Text; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.Suggester; +import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.test.TestSearchContext; +import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -275,6 +291,119 @@ private TestSearchContext createSearchContext(Query query, int size) throws IOEx return context; } + public void testSuggestOnlyWithTimeout() throws Exception { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().suggest(new SuggestBuilder()); + try (SearchContext context = createSearchContextWithSuggestTimeout(searchSourceBuilder)) { + assertTrue(context.hasOnlySuggest()); + QueryPhase.execute(context); + assertTrue(context.queryResult().searchTimedOut()); + assertEquals(1, context.queryResult().suggest().size()); + assertEquals(0, context.queryResult().suggest().getSuggestion("suggestion").getEntries().size()); + assertNotNull(context.queryResult().topDocs()); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + } + } + + public void testSuggestAndQueryWithSuggestTimeout() throws Exception { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().suggest(new SuggestBuilder()).query(new MatchAllQueryBuilder()); + try (SearchContext context = createSearchContextWithSuggestTimeout(searchSourceBuilder)) { + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); + assertFalse(context.hasOnlySuggest()); + QueryPhase.execute(context); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), Matchers.greaterThan(0L)); + assertTrue(context.queryResult().searchTimedOut()); + assertEquals(1, context.queryResult().suggest().size()); + assertEquals(0, context.queryResult().suggest().getSuggestion("suggestion").getEntries().size()); + } + } + + private TestSearchContext createSearchContextWithSuggestTimeout(SearchSourceBuilder searchSourceBuilder) throws IOException { + ContextIndexSearcher contextIndexSearcher = newContextSearcher(reader); + SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext(); + suggestionSearchContext.addSuggestion("suggestion", new 
TestSuggestionContext(new TestSuggester(contextIndexSearcher), null)); + TestSearchContext context = new TestSearchContext(null, indexShard, contextIndexSearcher) { + @Override + public SuggestionSearchContext suggest() { + return suggestionSearchContext; + } + + @Override + public ShardSearchRequest request() { + SearchRequest searchRequest = new SearchRequest(); + searchRequest.allowPartialSearchResults(true); + searchRequest.source(searchSourceBuilder); + return new ShardSearchRequest( + OriginalIndices.NONE, + searchRequest, + indexShard.shardId(), + 0, + 1, + AliasFilter.EMPTY, + 1F, + 0, + null + ); + } + }; + context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap())); + return context; + } + + private static final class TestSuggester extends Suggester { + private final ContextIndexSearcher contextIndexSearcher; + + TestSuggester(ContextIndexSearcher contextIndexSearcher) { + this.contextIndexSearcher = contextIndexSearcher; + } + + @Override + protected TestSuggestion innerExecute( + String name, + TestSuggestionContext suggestion, + IndexSearcher searcher, + CharsRefBuilder spare + ) { + contextIndexSearcher.throwTimeExceededException(); + throw new AssertionError("should have thrown TimeExceededException"); + } + + @Override + protected TestSuggestion emptySuggestion(String name, TestSuggestionContext suggestion, CharsRefBuilder spare) { + return new TestSuggestion(); + } + } + + private static final class TestSuggestionContext extends SuggestionSearchContext.SuggestionContext { + TestSuggestionContext(Suggester suggester, SearchExecutionContext searchExecutionContext) { + super(suggester, searchExecutionContext); + } + } + + private static final class TestSuggestion extends Suggest.Suggestion< + Suggest.Suggestion.Entry> { + TestSuggestion() { + super("suggestion", 10); + } + + @Override + protected Entry newEntry(StreamInput in) { + return new TestSuggestionEntry(); + } + + @Override + public String getWriteableName() { + return "suggestion"; + } + } + + private static final class TestSuggestionEntry extends Suggest.Suggestion.Entry { + @Override + protected Option newOption(StreamInput in) { + return new Option(new Text("text"), 1f) { + }; + } + } + private static class Score extends Scorable { float score; diff --git a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java index 9b56cd3bde53c..cfb3cc68e035f 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.transport.InboundDecoder.ChannelType; @@ -126,105 +125,6 @@ public void testDecode() throws IOException { } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // can delete test in v9 - public void testDecodePreHeaderSizeVariableInt() throws IOException { - Compression.Scheme compressionScheme = randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.DEFLATE, null); - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final TransportVersion preHeaderVariableInt = TransportHandshaker.V7_HANDSHAKE_VERSION; - final 
String contentValue = randomAlphaOfLength(100); - // 8.0 is only compatible with handshakes on a pre-variable int version - final OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(contentValue), - preHeaderVariableInt, - action, - requestId, - true, - compressionScheme - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference totalBytes = message.serialize(os); - int partialHeaderSize = TcpHeader.headerSize(preHeaderVariableInt); - - InboundDecoder decoder = new InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(totalBytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(partialHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(preHeaderVariableInt, header.getVersion()); - if (compressionScheme == null) { - assertFalse(header.isCompressed()); - } else { - assertTrue(header.isCompressed()); - } - assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - - final BytesReference bytes2 = totalBytes.slice(bytesConsumed, totalBytes.length() - bytesConsumed); - final ReleasableBytesReference releasable2 = wrapAsReleasable(bytes2); - int bytesConsumed2 = decoder.decode(releasable2, fragments::add); - if (compressionScheme == null) { - assertEquals(2, fragments.size()); - } else { - assertEquals(3, fragments.size()); - final Object body = fragments.get(1); - assertThat(body, instanceOf(ReleasableBytesReference.class)); - ((ReleasableBytesReference) body).close(); - } - assertEquals(InboundDecoder.END_CONTENT, fragments.get(fragments.size() - 1)); - assertEquals(totalBytes.length() - bytesConsumed, bytesConsumed2); - } - } - - public void testDecodeHandshakeV7Compatibility() throws IOException { - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final String headerKey = randomAlphaOfLength(10); - final String headerValue = randomAlphaOfLength(20); - threadContext.putHeader(headerKey, headerValue); - TransportVersion handshakeCompat = TransportHandshaker.V7_HANDSHAKE_VERSION; - OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(randomAlphaOfLength(100)), - handshakeCompat, - action, - requestId, - true, - null - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference bytes = message.serialize(os); - int totalHeaderSize = TcpHeader.headerSize(handshakeCompat); - - InboundDecoder decoder = new InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(bytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(totalHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(handshakeCompat, header.getVersion()); - assertFalse(header.isCompressed()); - assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - // TODO: On 9.0 this will be true because all compatible versions with contain the variable header int - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - } - - } - public void 
testDecodeHandshakeV8Compatibility() throws IOException { doHandshakeCompatibilityTest(TransportHandshaker.V8_HANDSHAKE_VERSION, null); doHandshakeCompatibilityTest(TransportHandshaker.V8_HANDSHAKE_VERSION, Compression.Scheme.DEFLATE); @@ -453,46 +353,6 @@ public void testCompressedDecode() throws IOException { } - public void testCompressedDecodeHandshakeCompatibility() throws IOException { - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final String headerKey = randomAlphaOfLength(10); - final String headerValue = randomAlphaOfLength(20); - threadContext.putHeader(headerKey, headerValue); - TransportVersion handshakeCompat = TransportHandshaker.V7_HANDSHAKE_VERSION; - OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(randomAlphaOfLength(100)), - handshakeCompat, - action, - requestId, - true, - Compression.Scheme.DEFLATE - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference bytes = message.serialize(os); - int totalHeaderSize = TcpHeader.headerSize(handshakeCompat); - - InboundDecoder decoder = new InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(bytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(totalHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(handshakeCompat, header.getVersion()); - assertTrue(header.isCompressed()); - assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - // TODO: On 9.0 this will be true because all compatible versions with contain the variable header int - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - } - } - public void testVersionIncompatibilityDecodeException() throws IOException { String action = "test-request"; long requestId = randomNonNegativeLong(); diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index cb266c58d70d5..3ec248e0d8d9a 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -290,7 +290,12 @@ public void testLogsSlowInboundProcessing() throws Exception { ); BytesStreamOutput byteData = new BytesStreamOutput(); TaskId.EMPTY_TASK_ID.writeTo(byteData); - TransportVersion.writeVersion(remoteVersion, byteData); + // simulate bytes of a transport handshake: vInt transport version then release version string + try (var payloadByteData = new BytesStreamOutput()) { + TransportVersion.writeVersion(remoteVersion, payloadByteData); + payloadByteData.writeString(randomIdentifier()); + byteData.writeBytesReference(payloadByteData.bytes()); + } final InboundMessage requestMessage = new InboundMessage( requestHeader, ReleasableBytesReference.wrap(byteData.bytes()), diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java index de44ca70f2005..2bac41199ab83 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java @@ -20,7 +20,6 @@ 
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.core.UpdateForV10; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -38,56 +37,6 @@ public class TransportHandshakerRawMessageTests extends ESSingleNodeTestCase { - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remove support for v7 handshakes in v9 - public void testV7Handshake() throws Exception { - final BytesRef handshakeRequestBytes; - final var requestId = randomNonNegativeLong(); - try (var outputStream = new BytesStreamOutput()) { - outputStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - outputStream.writeLong(requestId); - outputStream.writeByte(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0))); - outputStream.writeInt(TransportHandshaker.V7_HANDSHAKE_VERSION.id()); - outputStream.writeByte((byte) 0); // no request headers; - outputStream.writeByte((byte) 0); // no response headers; - outputStream.writeStringArray(new String[] { "x-pack" }); // one feature - outputStream.writeString("internal:tcp/handshake"); - outputStream.writeByte((byte) 0); // no parent task ID; - - final var requestNodeTransportVersionId = TransportVersionUtils.randomCompatibleVersion(random()).id(); - assertThat(requestNodeTransportVersionId, allOf(greaterThanOrEqualTo(1 << 22), lessThan(1 << 28))); // 4-byte vInt - outputStream.writeByte((byte) 4); // payload length - outputStream.writeVInt(requestNodeTransportVersionId); - - handshakeRequestBytes = outputStream.bytes().toBytesRef(); - } - - final BytesRef handshakeResponseBytes; - try (var socket = openTransportConnection()) { - var streamOutput = new OutputStreamStreamOutput(socket.getOutputStream()); - streamOutput.write("ES".getBytes(StandardCharsets.US_ASCII)); - streamOutput.writeInt(handshakeRequestBytes.length); - streamOutput.writeBytes(handshakeRequestBytes.bytes, handshakeRequestBytes.offset, handshakeRequestBytes.length); - streamOutput.flush(); - - var streamInput = new InputStreamStreamInput(socket.getInputStream()); - assertEquals((byte) 'E', streamInput.readByte()); - assertEquals((byte) 'S', streamInput.readByte()); - var responseLength = streamInput.readInt(); - handshakeResponseBytes = streamInput.readBytesRef(responseLength); - } - - try (var inputStream = new BytesArray(handshakeResponseBytes).streamInput()) { - assertEquals(requestId, inputStream.readLong()); - assertEquals(TransportStatus.setResponse(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); - assertEquals(TransportHandshaker.V7_HANDSHAKE_VERSION.id(), inputStream.readInt()); - assertEquals((byte) 0, inputStream.readByte()); // no request headers - assertEquals((byte) 0, inputStream.readByte()); // no response headers - inputStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - assertEquals(TransportVersion.current().id(), inputStream.readVInt()); - assertEquals(-1, inputStream.read()); - } - } - @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove support for v8 handshakes in v10 public void testV8Handshake() throws Exception { final BytesRef handshakeRequestBytes; @@ -223,11 +172,10 @@ public void testOutboundHandshake() throws Exception { try (var inputStream = new BytesArray(handshakeRequestBytes).streamInput()) { assertThat(inputStream.readLong(), greaterThan(0L)); 
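// The long just read is the request ID; the assertions below walk the rest of the
// frame in wire order: status byte, handshake protocol version, the length of the
// variable-length header, the request/response header counts, the action name, the
// parent task marker, and finally the length-prefixed payload.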
assertEquals(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); - assertEquals(TransportHandshaker.V8_HANDSHAKE_VERSION.id(), inputStream.readInt()); - assertEquals(0x1a, inputStream.readInt()); // length of variable-length header, always 0x1a + assertEquals(TransportHandshaker.V9_HANDSHAKE_VERSION.id(), inputStream.readInt()); + assertEquals(0x19, inputStream.readInt()); // length of variable-length header, always 0x19 assertEquals((byte) 0, inputStream.readByte()); // no request headers assertEquals((byte) 0, inputStream.readByte()); // no response headers - assertEquals((byte) 0, inputStream.readByte()); // no features assertEquals("internal:tcp/handshake", inputStream.readString()); assertEquals((byte) 0, inputStream.readByte()); // no parent task inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); @@ -236,8 +184,9 @@ public void testOutboundHandshake() throws Exception { } try (var inputStream = new BytesArray(payloadBytes).streamInput()) { - inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + inputStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); assertEquals(TransportVersion.current().id(), inputStream.readVInt()); + assertEquals(Build.current().version(), inputStream.readString()); assertEquals(-1, inputStream.read()); } } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java index d260d66157651..32f088976b273 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java @@ -8,24 +8,31 @@ */ package org.elasticsearch.transport; +import org.apache.logging.log4j.Level; +import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.Version; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import java.io.IOException; import java.util.Collections; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -39,8 +46,8 @@ public class TransportHandshakerTests extends ESTestCase { private TestThreadPool threadPool; private TransportHandshaker.HandshakeRequestSender requestSender; - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - private static final TransportVersion HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V8_HANDSHAKE_VERSION; + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // new handshake version required in v10 + private static final TransportVersion 
HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V9_HANDSHAKE_VERSION; @Override public void setUp() throws Exception { @@ -93,6 +100,40 @@ public void testHandshakeRequestAndResponse() throws IOException { assertEquals(TransportVersion.current(), versionFuture.actionGet()); } + @TestLogging(reason = "testing WARN logging", value = "org.elasticsearch.transport.TransportHandshaker:WARN") + public void testIncompatibleHandshakeRequest() throws IOException { + TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( + getRandomIncompatibleTransportVersion(), + randomIdentifier() + ); + BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + bytesStreamOutput.setTransportVersion(HANDSHAKE_REQUEST_VERSION); + handshakeRequest.writeTo(bytesStreamOutput); + StreamInput input = bytesStreamOutput.bytes().streamInput(); + input.setTransportVersion(HANDSHAKE_REQUEST_VERSION); + final TestTransportChannel channel = new TestTransportChannel(ActionListener.running(() -> fail("should not complete"))); + + MockLog.assertThatLogger( + () -> assertThat( + expectThrows(IllegalStateException.class, () -> handshaker.handleHandshake(channel, randomNonNegativeLong(), input)) + .getMessage(), + allOf( + containsString("Rejecting unreadable transport handshake"), + containsString("[" + handshakeRequest.releaseVersion + "/" + handshakeRequest.transportVersion + "]"), + containsString("[" + Build.current().version() + "/" + TransportVersion.current() + "]"), + containsString("which has an incompatible wire format") + ) + ), + TransportHandshaker.class, + new MockLog.SeenEventExpectation( + "warning", + TransportHandshaker.class.getCanonicalName(), + Level.WARN, + "Rejecting unreadable transport handshake * incompatible wire format." + ) + ); + } + public void testHandshakeResponseFromOlderNode() throws Exception { final PlainActionFuture versionFuture = new PlainActionFuture<>(); final long reqId = randomNonNegativeLong(); @@ -108,6 +149,54 @@ public void testHandshakeResponseFromOlderNode() throws Exception { assertEquals(remoteVersion, versionFuture.result()); } + @TestLogging(reason = "testing WARN logging", value = "org.elasticsearch.transport.TransportHandshaker:WARN") + public void testHandshakeResponseFromOlderNodeWithPatchedProtocol() { + final PlainActionFuture versionFuture = new PlainActionFuture<>(); + final long reqId = randomNonNegativeLong(); + handshaker.sendHandshake(reqId, node, channel, SAFE_AWAIT_TIMEOUT, versionFuture); + TransportResponseHandler handler = handshaker.removeHandlerForHandshake(reqId); + + assertFalse(versionFuture.isDone()); + + final var handshakeResponse = new TransportHandshaker.HandshakeResponse( + getRandomIncompatibleTransportVersion(), + randomIdentifier() + ); + + MockLog.assertThatLogger( + () -> handler.handleResponse(handshakeResponse), + TransportHandshaker.class, + new MockLog.SeenEventExpectation( + "warning", + TransportHandshaker.class.getCanonicalName(), + Level.WARN, + "Rejecting unreadable transport handshake * incompatible wire format." 
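+ // MockLog expectations treat '*' in the expected message as a wildcard, so this
+ // matches whatever version details appear between the two fixed phrases.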
+ ) + ); + + assertTrue(versionFuture.isDone()); + assertThat( + expectThrows(ExecutionException.class, IllegalStateException.class, versionFuture::result).getMessage(), + allOf( + containsString("Rejecting unreadable transport handshake"), + containsString("[" + handshakeResponse.getReleaseVersion() + "/" + handshakeResponse.getTransportVersion() + "]"), + containsString("[" + Build.current().version() + "/" + TransportVersion.current() + "]"), + containsString("which has an incompatible wire format") + ) + ); + } + + private static TransportVersion getRandomIncompatibleTransportVersion() { + return randomBoolean() + // either older than MINIMUM_COMPATIBLE + ? new TransportVersion(between(1, TransportVersions.MINIMUM_COMPATIBLE.id() - 1)) + // or between MINIMUM_COMPATIBLE and current but not known + : randomValueOtherThanMany( + TransportVersion::isKnown, + () -> new TransportVersion(between(TransportVersions.MINIMUM_COMPATIBLE.id(), TransportVersion.current().id())) + ); + } + public void testHandshakeResponseFromNewerNode() throws Exception { final PlainActionFuture versionFuture = new PlainActionFuture<>(); final long reqId = randomNonNegativeLong(); @@ -133,10 +222,8 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); - TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( - TransportVersion.current(), - randomIdentifier() - ); + final var buildVersion = randomIdentifier(); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(TransportVersion.current(), buildVersion); BytesStreamOutput currentHandshakeBytes = new BytesStreamOutput(); currentHandshakeBytes.setTransportVersion(HANDSHAKE_REQUEST_VERSION); handshakeRequest.writeTo(currentHandshakeBytes); @@ -145,17 +232,27 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException BytesStreamOutput futureHandshake = new BytesStreamOutput(); TaskId.EMPTY_TASK_ID.writeTo(lengthCheckingHandshake); TaskId.EMPTY_TASK_ID.writeTo(futureHandshake); + final var extraDataSize = between(0, 1024); try (BytesStreamOutput internalMessage = new BytesStreamOutput()) { - Version.writeVersion(Version.CURRENT, internalMessage); + internalMessage.writeVInt(TransportVersion.current().id() + between(0, 100)); + internalMessage.writeString(buildVersion); lengthCheckingHandshake.writeBytesReference(internalMessage.bytes()); - internalMessage.write(new byte[1024]); + internalMessage.write(new byte[extraDataSize]); futureHandshake.writeBytesReference(internalMessage.bytes()); } StreamInput futureHandshakeStream = futureHandshake.bytes().streamInput(); // We check that the handshake we serialize for this test equals the actual request. // Otherwise, we need to update the test. assertEquals(currentHandshakeBytes.bytes().length(), lengthCheckingHandshake.bytes().length()); - assertEquals(1031, futureHandshakeStream.available()); + final var expectedInternalMessageSize = 4 /* transport version id */ + + (1 + buildVersion.length()) /* length prefixed release version string */ + + extraDataSize; + assertEquals( + 1 /* EMPTY_TASK_ID */ + + (expectedInternalMessageSize < 0x80 ? 
1 : 2) /* internalMessage size vInt */ + + expectedInternalMessageSize /* internalMessage */, + futureHandshakeStream.available() + ); final PlainActionFuture responseFuture = new PlainActionFuture<>(); final TestTransportChannel channel = new TestTransportChannel(responseFuture); handshaker.handleHandshake(channel, reqId, futureHandshakeStream); @@ -166,43 +263,6 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException assertEquals(TransportVersion.current(), response.getTransportVersion()); } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9 - public void testReadV7HandshakeRequest() throws IOException { - final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); - - final var requestPayloadStreamOutput = new BytesStreamOutput(); - requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - requestPayloadStreamOutput.writeVInt(transportVersion.id()); - - final var requestBytesStreamOutput = new BytesStreamOutput(); - requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput); - requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes()); - - final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput(); - requestBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream); - - assertEquals(transportVersion, handshakeRequest.transportVersion); - assertEquals(transportVersion.toReleaseVersion(), handshakeRequest.releaseVersion); - } - - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9 - public void testReadV7HandshakeResponse() throws IOException { - final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); - - final var responseBytesStreamOutput = new BytesStreamOutput(); - responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - responseBytesStreamOutput.writeVInt(transportVersion.id()); - - final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput(); - responseBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream); - - assertEquals(transportVersion, handshakeResponse.getTransportVersion()); - assertEquals(transportVersion.toReleaseVersion(), handshakeResponse.getReleaseVersion()); - } - public void testReadV8HandshakeRequest() throws IOException { final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); diff --git a/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java b/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java index c3965547abb5d..1c9cb4c9afc0f 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java @@ -20,50 +20,8 @@ public class TransportStatsTests extends ESTestCase { public void testToXContent() { - assertEquals( - Strings.toString( - new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), new long[0], new long[0], Map.of()), - false, - true - ), - """ - {"transport":{"server_open":1,"total_outbound_connections":2,\ - 
"rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\ - "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456\ - }}""" - ); - final var histogram = new long[HandlingTimeTracker.BUCKET_COUNT]; - assertEquals( - Strings.toString( - new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), histogram, histogram, Map.of()), - false, - true - ), - """ - {"transport":{"server_open":1,"total_outbound_connections":2,\ - "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\ - "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\ - "inbound_handling_time_histogram":[],\ - "outbound_handling_time_histogram":[]\ - }}""" - ); - histogram[4] = 10; - assertEquals( - Strings.toString( - new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), histogram, histogram, Map.of()), - false, - true - ), - """ - {"transport":{"server_open":1,"total_outbound_connections":2,\ - "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\ - "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\ - "inbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\ - "outbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}]\ - }}""" - ); final var requestSizeHistogram = new long[29]; requestSizeHistogram[2] = 9; @@ -84,8 +42,8 @@ public void testToXContent() { ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), - new long[0], - new long[0], + histogram, + histogram, Map.of("internal:test/action", exampleActionStats) ), false, @@ -95,6 +53,8 @@ public void testToXContent() { {"transport":{"server_open":1,"total_outbound_connections":2,\ "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\ "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\ + "inbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\ + "outbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\ "actions":{"internal:test/action":%s}}}""", Strings.toString(exampleActionStats, false, true)) ); } diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index f732f7cbbf00d..70f1ba529ec5e 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -89,8 +89,7 @@ public void skipOnAborted() { */ public void testSortByManyLongsSuccess() throws IOException { initManyLongs(); - Response response = sortByManyLongs(500); - Map map = responseAsMap(response); + Map response = sortByManyLongs(500); ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long")) .item(matchesMap().entry("name", "b").entry("type", "long")); ListMatcher values = matchesList(); @@ -99,7 +98,7 @@ public void testSortByManyLongsSuccess() throws IOException { values = values.item(List.of(0, b)); } } - assertResultMap(map, columns, values); + assertResultMap(response, columns, values); } /** @@ -107,7 +106,8 @@ public void testSortByManyLongsSuccess() throws IOException { */ public void testSortByManyLongsTooMuchMemory() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> sortByManyLongs(5000)); + // 
5000 is plenty to break on most nodes + assertCircuitBreaks(attempt -> sortByManyLongs(attempt * 5000)); } /** @@ -191,26 +191,42 @@ public void testSortByManyLongsTooMuchMemoryAsync() throws IOException { ); } - private void assertCircuitBreaks(ThrowingRunnable r) throws IOException { - ResponseException e = expectThrows(ResponseException.class, r); - Map map = responseAsMap(e.getResponse()); - logger.info("expected circuit breaker {}", map); - assertMap( - map, + private static final int MAX_ATTEMPTS = 5; + + interface TryCircuitBreaking { + Map attempt(int attempt) throws IOException; + } + + private void assertCircuitBreaks(TryCircuitBreaking tryBreaking) throws IOException { + assertCircuitBreaks( + tryBreaking, matchesMap().entry("status", 429).entry("error", matchesMap().extraOk().entry("type", "circuit_breaking_exception")) ); } - private void assertFoldCircuitBreaks(ThrowingRunnable r) throws IOException { - ResponseException e = expectThrows(ResponseException.class, r); - Map map = responseAsMap(e.getResponse()); - logger.info("expected fold circuit breaking {}", map); - assertMap( - map, + private void assertFoldCircuitBreaks(TryCircuitBreaking tryBreaking) throws IOException { + assertCircuitBreaks( + tryBreaking, matchesMap().entry("status", 400).entry("error", matchesMap().extraOk().entry("type", "fold_too_much_memory_exception")) ); } + private void assertCircuitBreaks(TryCircuitBreaking tryBreaking, MapMatcher responseMatcher) throws IOException { + int attempt = 1; + while (attempt <= MAX_ATTEMPTS) { + try { + Map response = tryBreaking.attempt(attempt); + logger.warn("{}: should have circuit broken but got {}", attempt, response); + attempt++; + } catch (ResponseException e) { + Map map = responseAsMap(e.getResponse()); + assertMap(map, responseMatcher); + return; + } + } + fail("giving up circuit breaking after " + MAX_ATTEMPTS + " attempts"); + } + private void assertParseFailure(ThrowingRunnable r) throws IOException { ResponseException e = expectThrows(ResponseException.class, r); Map map = responseAsMap(e.getResponse()); @@ -218,9 +234,9 @@ private void assertParseFailure(ThrowingRunnable r) throws IOException { assertMap(map, matchesMap().entry("status", 400).entry("error", matchesMap().extraOk().entry("type", "parsing_exception"))); } - private Response sortByManyLongs(int count) throws IOException { + private Map sortByManyLongs(int count) throws IOException { logger.info("sorting by {} longs", count); - return query(makeSortByManyLongs(count).toString(), null); + return responseAsMap(query(makeSortByManyLongs(count).toString(), null)); } private StringBuilder makeSortByManyLongs(int count) { @@ -318,8 +334,7 @@ private Response concat(int evals) throws IOException { public void testManyConcat() throws IOException { int strings = 300; initManyLongs(); - Response resp = manyConcat("FROM manylongs", strings); - assertManyStrings(resp, strings); + assertManyStrings(manyConcat("FROM manylongs", strings), strings); } /** @@ -327,7 +342,8 @@ public void testHugeManyConcat() throws IOException { */ public void testHugeManyConcat() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> manyConcat("FROM manylongs", 2000)); + // 2000 is plenty to break on most nodes + assertCircuitBreaks(attempt -> manyConcat("FROM manylongs", attempt * 2000)); } /** @@ -335,18 +351,18 @@ public void testManyConcatFromRow() throws IOException { */ public void testManyConcatFromRow() throws IOException { int strings = 2000; - Response resp = manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999",
strings); - assertManyStrings(resp, strings); + assertManyStrings(manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999", strings), strings); } /** * Hits a circuit breaker by building many moderately long strings. */ public void testHugeManyConcatFromRow() throws IOException { + // 5000 is plenty to break on most nodes assertFoldCircuitBreaks( - () -> manyConcat( + attempt -> manyConcat( "ROW a=9999999999999, b=99999999999999999, c=99999999999999999, d=99999999999999999, e=99999999999999999", - 5000 + attempt * 5000 ) ); } @@ -361,7 +377,7 @@ public void testHugeHugeManyConcatFromRow() throws IOException { /** * Tests that generate many moderately long strings. */ - private Response manyConcat(String init, int strings) throws IOException { + private Map manyConcat(String init, int strings) throws IOException { StringBuilder query = startQuery(); query.append(init).append(" | EVAL str = CONCAT("); query.append( @@ -388,7 +404,7 @@ private Response manyConcat(String init, int strings) throws IOException { query.append("str").append(s); } query.append("\"}"); - return query(query.toString(), "columns"); + return responseAsMap(query(query.toString(), "columns")); } /** @@ -397,8 +413,7 @@ private Response manyConcat(String init, int strings) throws IOException { public void testManyRepeat() throws IOException { int strings = 30; initManyLongs(); - Response resp = manyRepeat("FROM manylongs", strings); - assertManyStrings(resp, 30); + assertManyStrings(manyRepeat("FROM manylongs", strings), 30); } /** @@ -406,7 +421,8 @@ public void testManyRepeat() throws IOException { */ public void testHugeManyRepeat() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> manyRepeat("FROM manylongs", 75)); + // 75 is plenty to break on most nodes + assertCircuitBreaks(attempt -> manyRepeat("FROM manylongs", attempt * 75)); } /** @@ -414,15 +430,15 @@ public void testHugeManyRepeat() throws IOException { */ public void testManyRepeatFromRow() throws IOException { int strings = 300; - Response resp = manyRepeat("ROW a = 99", strings); - assertManyStrings(resp, strings); + assertManyStrings(manyRepeat("ROW a = 99", strings), strings); } /** * Hits a circuit breaker by building many moderately long strings. */ public void testHugeManyRepeatFromRow() throws IOException { - assertFoldCircuitBreaks(() -> manyRepeat("ROW a = 99", 400)); + // 400 is enough to break on most nodes + assertFoldCircuitBreaks(attempt -> manyRepeat("ROW a = 99", attempt * 400)); } /** @@ -435,7 +451,7 @@ public void testHugeHugeManyRepeatFromRow() throws IOException { /** * Tests that generate many moderately long strings. 
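* Returns the parsed response map so the caller can assert on it directly.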
*/ - private Response manyRepeat(String init, int strings) throws IOException { + private Map manyRepeat(String init, int strings) throws IOException { StringBuilder query = startQuery(); query.append(init).append(" | EVAL str = TO_STRING(a)"); for (int s = 0; s < strings; s++) { @@ -449,23 +465,21 @@ private Response manyRepeat(String init, int strings) throws IOException { query.append("str").append(s); } query.append("\"}"); - return query(query.toString(), "columns"); + return responseAsMap(query(query.toString(), "columns")); } - private void assertManyStrings(Response resp, int strings) throws IOException { - Map map = responseAsMap(resp); + private void assertManyStrings(Map resp, int strings) throws IOException { ListMatcher columns = matchesList(); for (int s = 0; s < strings; s++) { columns = columns.item(matchesMap().entry("name", "str" + s).entry("type", "keyword")); } MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns)); + assertMap(resp, mapMatcher.entry("columns", columns)); } public void testManyEval() throws IOException { initManyLongs(); - Response resp = manyEval(1); - Map map = responseAsMap(resp); + Map response = manyEval(1); ListMatcher columns = matchesList(); columns = columns.item(matchesMap().entry("name", "a").entry("type", "long")); columns = columns.item(matchesMap().entry("name", "b").entry("type", "long")); @@ -475,15 +489,16 @@ public void testManyEval() throws IOException { for (int i = 0; i < 20; i++) { columns = columns.item(matchesMap().entry("name", "i0" + i).entry("type", "long")); } - assertResultMap(map, columns, hasSize(10_000)); + assertResultMap(response, columns, hasSize(10_000)); } public void testTooManyEval() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> manyEval(490)); + // 490 is plenty to fail on most nodes + assertCircuitBreaks(attempt -> manyEval(attempt * 490)); } - private Response manyEval(int evalLines) throws IOException { + private Map manyEval(int evalLines) throws IOException { StringBuilder query = startQuery(); query.append("FROM manylongs"); for (int e = 0; e < evalLines; e++) { @@ -496,7 +511,7 @@ private Response manyEval(int evalLines) throws IOException { } } query.append("\n| LIMIT 10000\"}"); - return query(query.toString(), null); + return responseAsMap(query(query.toString(), null)); } private Response query(String query, String filterPath) throws IOException { @@ -554,99 +569,161 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE public void testFetchManyBigFields() throws IOException { initManyBigFieldsIndex(100); - fetchManyBigFields(100); + Map response = fetchManyBigFields(100); + ListMatcher columns = matchesList(); + for (int f = 0; f < 1000; f++) { + columns = columns.item(matchesMap().entry("name", "f" + String.format(Locale.ROOT, "%03d", f)).entry("type", "keyword")); + } + assertMap(response, matchesMap().entry("columns", columns)); } public void testFetchTooManyBigFields() throws IOException { initManyBigFieldsIndex(500); - assertCircuitBreaks(() -> fetchManyBigFields(500)); + // 500 docs is plenty to circuit break on most nodes + assertCircuitBreaks(attempt -> fetchManyBigFields(attempt * 500)); } /** * Fetches documents containing 1000 fields which are {@code 1kb} each. 
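* Returns the parsed response map; the caller asserts on the returned columns.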
*/ - private void fetchManyBigFields(int docs) throws IOException { + private Map fetchManyBigFields(int docs) throws IOException { StringBuilder query = startQuery(); query.append("FROM manybigfields | SORT f000 | LIMIT " + docs + "\"}"); - Response response = query(query.toString(), "columns"); - Map map = responseAsMap(response); - ListMatcher columns = matchesList(); - for (int f = 0; f < 1000; f++) { - columns = columns.item(matchesMap().entry("name", "f" + String.format(Locale.ROOT, "%03d", f)).entry("type", "keyword")); - } - assertMap(map, matchesMap().entry("columns", columns)); + return responseAsMap(query(query.toString(), "columns")); } public void testAggMvLongs() throws IOException { int fieldValues = 100; initMvLongsIndex(1, 3, fieldValues); - Response response = aggMvLongs(3); - Map map = responseAsMap(response); + Map response = aggMvLongs(3); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(f00)").entry("type", "long")) .item(matchesMap().entry("name", "f00").entry("type", "long")) .item(matchesMap().entry("name", "f01").entry("type", "long")) .item(matchesMap().entry("name", "f02").entry("type", "long")); - assertMap(map, matchesMap().entry("columns", columns)); + assertMap(response, matchesMap().entry("columns", columns)); } public void testAggTooManyMvLongs() throws IOException { initMvLongsIndex(1, 3, 1000); - assertCircuitBreaks(() -> aggMvLongs(3)); + // 3 fields is plenty on most nodes + assertCircuitBreaks(attempt -> aggMvLongs(attempt * 3)); } - private Response aggMvLongs(int fields) throws IOException { + private Map aggMvLongs(int fields) throws IOException { StringBuilder query = startQuery(); query.append("FROM mv_longs | STATS MAX(f00) BY f00"); for (int f = 1; f < fields; f++) { query.append(", f").append(String.format(Locale.ROOT, "%02d", f)); } - return query(query.append("\"}").toString(), "columns"); + return responseAsMap(query(query.append("\"}").toString(), "columns")); } public void testFetchMvLongs() throws IOException { int fields = 100; initMvLongsIndex(100, fields, 1000); - Response response = fetchMvLongs(); - Map map = responseAsMap(response); + Map response = fetchMvLongs(); ListMatcher columns = matchesList(); for (int f = 0; f < fields; f++) { columns = columns.item(matchesMap().entry("name", String.format(Locale.ROOT, "f%02d", f)).entry("type", "long")); } - assertMap(map, matchesMap().entry("columns", columns)); + assertMap(response, matchesMap().entry("columns", columns)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106683") public void testFetchTooManyMvLongs() throws IOException { initMvLongsIndex(500, 100, 1000); - assertCircuitBreaks(() -> fetchMvLongs()); + assertCircuitBreaks(attempt -> fetchMvLongs()); } - private Response fetchMvLongs() throws IOException { + private Map fetchMvLongs() throws IOException { StringBuilder query = startQuery(); query.append("FROM mv_longs\"}"); - return query(query.toString(), "columns"); + return responseAsMap(query(query.toString(), "columns")); } public void testLookupExplosion() throws IOException { - int sensorDataCount = 7500; + int sensorDataCount = 500; int lookupEntries = 10000; Map map = lookupExplosion(sensorDataCount, lookupEntries); assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); } public void testLookupExplosionManyMatches() throws IOException { - assertCircuitBreaks(() -> { - Map result = lookupExplosion(8500, 10000); - logger.error("should have failed but got {}", result); 
- }); + // 1500, 10000 is enough locally, but some CI machines need more. + assertCircuitBreaks(attempt -> lookupExplosion(attempt * 1500, 10000)); + } + + public void testLookupExplosionNoFetch() throws IOException { + int sensorDataCount = 7500; + int lookupEntries = 10000; + Map map = lookupExplosionNoFetch(sensorDataCount, lookupEntries); + assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); + } + + public void testLookupExplosionNoFetchManyMatches() throws IOException { + // 8500 is plenty on most nodes + assertCircuitBreaks(attempt -> lookupExplosionNoFetch(attempt * 8500, 10000)); + } + + public void testLookupExplosionBigString() throws IOException { + int sensorDataCount = 150; + int lookupEntries = 1; + Map map = lookupExplosionBigString(sensorDataCount, lookupEntries); + assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); + } + + public void testLookupExplosionBigStringManyMatches() throws IOException { + // 500, 1 is enough to make it fail locally but some CI needs more + assertCircuitBreaks(attempt -> lookupExplosionBigString(attempt * 500, 1)); + } + + private Map lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException { + try { + lookupExplosionData(sensorDataCount, lookupEntries); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(location)\"}"); + return responseAsMap(query(query.toString(), null)); + } finally { + deleteIndex("sensor_data"); + deleteIndex("sensor_lookup"); + } + } + + private Map lookupExplosionNoFetch(int sensorDataCount, int lookupEntries) throws IOException { + try { + lookupExplosionData(sensorDataCount, lookupEntries); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}"); + return responseAsMap(query(query.toString(), null)); + } finally { + deleteIndex("sensor_data"); + deleteIndex("sensor_lookup"); + } } - private Map lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException { + private void lookupExplosionData(int sensorDataCount, int lookupEntries) throws IOException { initSensorData(sensorDataCount, 1); initSensorLookup(lookupEntries, 1, i -> "73.9857 40.7484"); - StringBuilder query = startQuery(); - query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}"); - return responseAsMap(query(query.toString(), null)); + } + + private Map lookupExplosionBigString(int sensorDataCount, int lookupEntries) throws IOException { + try { + initSensorData(sensorDataCount, 1); + initSensorLookupString(lookupEntries, 1, i -> { + int target = Math.toIntExact(ByteSizeValue.ofMb(1).getBytes()); + StringBuilder str = new StringBuilder(Math.toIntExact(ByteSizeValue.ofMb(2).getBytes())); + while (str.length() < target) { + str.append("Lorem ipsum dolor sit amet, consectetur adipiscing elit."); + } + logger.info("big string is {} characters", str.length()); + return str.toString(); + }); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(string)\"}"); + return responseAsMap(query(query.toString(), null)); + } finally { + deleteIndex("sensor_data"); + deleteIndex("sensor_lookup"); + } } public void testEnrichExplosion() throws IOException { @@ -657,22 +734,25 @@ public void testEnrichExplosion() throws IOException { } public void testEnrichExplosionManyMatches() throws IOException { - 
assertCircuitBreaks(() -> { - Map result = enrichExplosion(3000, 10000); - logger.error("should have failed but got {}", result); - }); + // 1000, 10000 is enough on most nodes + assertCircuitBreaks(attempt -> enrichExplosion(1000, attempt * 5000)); } - private Map enrichExplosion(int sensorDataCount, int lookupEntries) throws IOException { - initSensorData(sensorDataCount, 1); - initSensorEnrich(lookupEntries, 1, i -> "73.9857 40.7484"); + private Map enrichExplosion(int sensorDataCount, int lookupEntries) throws IOException { try { - StringBuilder query = startQuery(); - query.append("FROM sensor_data | ENRICH sensor ON id | STATS COUNT(*)\"}"); - return responseAsMap(query(query.toString(), null)); + initSensorData(sensorDataCount, 1); + initSensorEnrich(lookupEntries, 1, i -> "73.9857 40.7484"); + try { + StringBuilder query = startQuery(); + query.append("FROM sensor_data | ENRICH sensor ON id | STATS COUNT(*)\"}"); + return responseAsMap(query(query.toString(), null)); + } finally { + Request delete = new Request("DELETE", "/_enrich/policy/sensor"); + assertMap(responseAsMap(client().performRequest(delete)), matchesMap().entry("acknowledged", true)); + } } finally { - Request delete = new Request("DELETE", "/_enrich/policy/sensor"); - assertMap(responseAsMap(client().performRequest(delete)), matchesMap().entry("acknowledged", true)); + deleteIndex("sensor_data"); + deleteIndex("sensor_lookup"); } } @@ -834,6 +914,31 @@ private void initSensorLookup(int lookupEntries, int sensorCount, IntFunction string) throws IOException { + logger.info("loading sensor lookup with huge strings"); + createIndex("sensor_lookup", Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(), """ + { + "properties": { + "id": { "type": "long" }, + "string": { "type": "text" } + } + }"""); + int docsPerBulk = 10; + StringBuilder data = new StringBuilder(); + for (int i = 0; i < lookupEntries; i++) { + int sensor = i % sensorCount; + data.append(String.format(Locale.ROOT, """ + {"create":{}} + {"id": %d, "string": "%s"} + """, sensor, string.apply(sensor))); + if (i % docsPerBulk == docsPerBulk - 1) { + bulk("sensor_lookup", data.toString()); + data.setLength(0); + } + } + initIndex("sensor_lookup", data.toString()); + } + private void initSensorEnrich(int lookupEntries, int sensorCount, IntFunction location) throws IOException { initSensorLookup(lookupEntries, sensorCount, location); logger.info("loading sensor enrich"); diff --git a/test/framework/build.gradle b/test/framework/build.gradle index c7e08eb3cdfa9..24de8c403b2cf 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -25,9 +25,9 @@ dependencies { api "commons-codec:commons-codec:${versions.commonscodec}" // mockito - api 'org.mockito:mockito-core:5.11.0' - api 'org.mockito:mockito-subclass:5.11.0' - api 'net.bytebuddy:byte-buddy:1.14.12' + api 'org.mockito:mockito-core:5.15.2' + api 'org.mockito:mockito-subclass:5.15.2' + api 'net.bytebuddy:byte-buddy:1.15.11' api 'org.objenesis:objenesis:3.3' api "org.elasticsearch:mocksocket:${versions.mocksocket}" @@ -64,6 +64,7 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.log4j.Priority', // mockito + 'net.bytebuddy.agent.Installer', 'net.bytebuddy.agent.ByteBuddyAgent', 'org.mockito.internal.creation.bytebuddy.inject.MockMethodDispatcher', 'org.opentest4j.AssertionFailedError', diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java index b49d10ba9c402..c3384ede3a1a7 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java @@ -94,7 +94,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { } public TestFileStore getTestFileStore(String nodeName) { - return fileSystemProvider.getTestFileStore(internalCluster().getInstance(Environment.class, nodeName).dataFiles()[0]); + return fileSystemProvider.getTestFileStore(internalCluster().getInstance(Environment.class, nodeName).dataDirs()[0]); } protected static class TestFileStore extends FilterFileStore { diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index c3ce32d4ce333..845536792343d 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -662,7 +662,7 @@ public static MetadataRolloverService getMetadataRolloverService( ).build(MapperBuilderContext.root(false, true)); ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool); Environment env = mock(Environment.class); - when(env.sharedDataFile()).thenReturn(null); + when(env.sharedDataDir()).thenReturn(null); AllocationService allocationService = mock(AllocationService.class); when(allocationService.reroute(any(ClusterState.class), any(String.class), any())).then(i -> i.getArguments()[0]); when(allocationService.getShardRoutingRoleStrategy()).thenReturn(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY); diff --git a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java index 4f559a5f3eaef..8aea7a5713cf1 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java @@ -9,7 +9,9 @@ package org.elasticsearch.index; -import java.util.List; +import java.util.Collections; +import java.util.NavigableSet; +import java.util.TreeSet; /** * Provides access to all known index versions @@ -18,10 +20,12 @@ public class KnownIndexVersions { /** * A sorted list of all known index versions */ - public static final List ALL_VERSIONS = List.copyOf(IndexVersions.getAllVersions()); + public static final NavigableSet ALL_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(IndexVersions.getAllVersions()) + ); /** * A sorted list of all known index versions that can be written to */ - public static final List ALL_WRITE_VERSIONS = List.copyOf(IndexVersions.getAllWriteVersions()); + public static final NavigableSet ALL_WRITE_VERSIONS = ALL_VERSIONS.tailSet(IndexVersions.MINIMUM_COMPATIBLE, true); } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index b639108ea6ad2..04cb9467270d4 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ 
b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -11,7 +11,6 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.ObjectMapper; -import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -50,11 +49,7 @@ private Supplier> keywordMapping( // We only add copy_to to keywords because we get into trouble with numeric fields that are copied to dynamic fields. // If first copied value is numeric, dynamic field is created with numeric field type and then copy of text values fail. // Actual value being copied does not influence the core logic of copy_to anyway. - // - // TODO - // We don't use copy_to on fields that are inside an object with dynamic: strict - // because we'll hit https://github.com/elastic/elasticsearch/issues/113049. - if (request.dynamicMapping() != DynamicMapping.FORBIDDEN && ESTestCase.randomDouble() <= 0.05) { + if (ESTestCase.randomDouble() <= 0.05) { var options = request.eligibleCopyToFields() .stream() .filter(f -> f.equals(request.fieldName()) == false) diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index 0a4c99eb8b52a..74db1147f23b8 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -42,16 +42,12 @@ public class MockPluginsService extends PluginsService { * @param classpathPlugins Plugins that exist in the classpath which should be loaded */ public MockPluginsService(Settings settings, Environment environment, Collection> classpathPlugins) { - super( - settings, - environment.configFile(), - new PluginsLoader(Collections.emptySet(), Collections.emptySet(), Collections.emptyMap()) - ); + super(settings, environment.configDir(), new PluginsLoader(Collections.emptySet(), Collections.emptySet(), Collections.emptyMap())); List pluginsLoaded = new ArrayList<>(); for (Class pluginClass : classpathPlugins) { - Plugin plugin = loadPlugin(pluginClass, settings, environment.configFile()); + Plugin plugin = loadPlugin(pluginClass, settings, environment.configDir()); PluginDescriptor pluginInfo = new PluginDescriptor( pluginClass.getName(), "classpath plugin", diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 9e2dee4d94212..d034e6e6679c1 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -129,6 +129,7 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MetricsAggregator; import org.elasticsearch.search.aggregations.metrics.MultiValueAggregation; @@ -149,6 +150,7 @@ import org.elasticsearch.search.internal.SearchContext; import 
org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.internal.SubSearchContext; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -251,29 +253,12 @@ protected List getSearchPlugins() { return List.of(); } - /** - * Deprecated - this will be made private in a future update - */ - @Deprecated - protected A createAggregator( - AggregationBuilder aggregationBuilder, - IndexReader indexReader, - MappedFieldType... fieldTypes - ) throws IOException { - return createAggregator(aggregationBuilder, createAggregationContext(indexReader, new MatchAllDocsQuery(), fieldTypes)); - } - protected A createAggregator(AggregationBuilder aggregationBuilder, AggregationContext context) throws IOException { return createAggregator(new AggregatorFactories.Builder().addAggregator(aggregationBuilder), context); } - /** - * Deprecated - this will be made private in a future update - */ - @Deprecated - protected A createAggregator(AggregatorFactories.Builder builder, AggregationContext context) - throws IOException { + private A createAggregator(AggregatorFactories.Builder builder, AggregationContext context) throws IOException { Aggregator[] aggregators = builder.build(context, null).createTopLevelAggregators(); assertThat(aggregators.length, equalTo(1)); @SuppressWarnings("unchecked") @@ -310,10 +295,7 @@ protected AggregationContext createAggregationContext(IndexReader indexReader, Q * While {@linkplain AggregationContext} is {@link Releasable} the caller is * not responsible for releasing it. Instead, it is released automatically in * in {@link #cleanupReleasables()}. - * - * Deprecated - this will be made private in a future update */ - @Deprecated protected AggregationContext createAggregationContext( IndexReader indexReader, IndexSettings indexSettings, @@ -346,6 +328,56 @@ private AggregationContext createAggregationContext( int maxBucket, boolean isInSortOrderExecutionRequired, MappedFieldType... fieldTypes + ) { + return createAggregationContext( + searcher, + indexSettings, + query, + breakerService, + bytesToPreallocate, + maxBucket, + isInSortOrderExecutionRequired, + () -> false, + fieldTypes + ); + } + + /** + * Creates an aggregation context that will randomly report that the query has been cancelled + */ + private AggregationContext createCancellingAggregationContext( + IndexSearcher searcher, + IndexSettings indexSettings, + Query query, + CircuitBreakerService breakerService, + long bytesToPreallocate, + int maxBucket, + boolean isInSortOrderExecutionRequired, + MappedFieldType... fieldTypes + ) { + return createAggregationContext( + searcher, + indexSettings, + query, + breakerService, + bytesToPreallocate, + maxBucket, + isInSortOrderExecutionRequired, + () -> ESTestCase.random().nextInt(20) == 0, + fieldTypes + ); + } + + private AggregationContext createAggregationContext( + IndexSearcher searcher, + IndexSettings indexSettings, + Query query, + CircuitBreakerService breakerService, + long bytesToPreallocate, + int maxBucket, + boolean isInSortOrderExecutionRequired, + Supplier isCancelled, + MappedFieldType... 
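The cancelling variant above passes () -> ESTestCase.random().nextInt(20) == 0 where the production code polls for task cancellation, so roughly one poll in twenty reports true. A self-contained sketch of that polling pattern (a plain RuntimeException standing in for TaskCancelledException):

    import java.util.Random;
    import java.util.function.Supplier;

    class CancellationPollDemo {
        public static void main(String[] args) {
            Random random = new Random();
            Supplier<Boolean> isCancelled = () -> random.nextInt(20) == 0;
            try {
                for (int doc = 0; doc < 10_000; doc++) {
                    if (isCancelled.get()) {
                        throw new RuntimeException("task cancelled");
                    }
                    // ... collect doc ...
                }
                System.out.println("ran to completion: the randomizer never fired");
            } catch (RuntimeException e) {
                System.out.println("cancelled mid-collection, which is equally valid");
            }
        }
    }

Because either outcome is legitimate, a test built on this cannot use expectThrows; it has to tolerate the exception, exactly as runWithCancellingConfig below does.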
fieldTypes ) { MappingLookup mappingLookup = MappingLookup.fromMappers( Mapping.EMPTY, @@ -409,7 +441,7 @@ public Iterable dimensionFields() { bitsetFilterCache, randomInt(), () -> 0L, - () -> false, + isCancelled, q -> q, true, isInSortOrderExecutionRequired @@ -536,9 +568,11 @@ protected A searchAndReduce(IndexReader reader, IndexSettings indexSettings = createIndexSettings(); // First run it to find circuit breaker leaks on the aggregator runWithCrankyCircuitBreaker(indexSettings, searcher, aggTestConfig); - // Second run it to the end CircuitBreakerService breakerService = new NoneCircuitBreakerService(); - return searchAndReduce(indexSettings, searcher, breakerService, aggTestConfig); + // Next, try with random cancellations, again looking for leaks + runWithCancellingConfig(indexSettings, searcher, breakerService, aggTestConfig); + // Finally, run it to the end + return searchAndReduce(indexSettings, searcher, breakerService, aggTestConfig, this::createAggregationContext); } /** @@ -552,7 +586,7 @@ private void runWithCrankyCircuitBreaker(IndexSettings indexSettings, IndexSearc CircuitBreakerService crankyService = new CrankyCircuitBreakerService(); for (int i = 0; i < 5; i++) { try { - searchAndReduce(indexSettings, searcher, crankyService, aggTestConfig); + searchAndReduce(indexSettings, searcher, crankyService, aggTestConfig, this::createAggregationContext); } catch (CircuitBreakingException e) { // Circuit breaks from the cranky breaker are expected - it randomly fails, after all assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); @@ -560,12 +594,43 @@ private void runWithCrankyCircuitBreaker(IndexSettings indexSettings, IndexSearc } } + private void runWithCancellingConfig( + IndexSettings indexSettings, + IndexSearcher searcher, + CircuitBreakerService breakerService, + AggTestConfig aggTestConfig + ) throws IOException { + for (int i = 0; i < 5; i++) { + try { + searchAndReduce(indexSettings, searcher, breakerService, aggTestConfig, this::createCancellingAggregationContext); + } catch (TaskCancelledException e) { + // we don't want to expectThrows this because the randomizer might just never report cancellation, + // but it's also normal that it should throw here. + } + } + } + + @FunctionalInterface + public interface AggregationContextSupplier { + AggregationContext get( + IndexSearcher searcher, + IndexSettings indexSettings, + Query query, + CircuitBreakerService breakerService, + long bytesToPreallocate, + int maxBucket, + boolean isInSortOrderExecutionRequired, + MappedFieldType...
fieldTypes + ); + } + @SuppressWarnings("unchecked") private A searchAndReduce( IndexSettings indexSettings, IndexSearcher searcher, CircuitBreakerService breakerService, - AggTestConfig aggTestConfig + AggTestConfig aggTestConfig, + AggregationContextSupplier contextSupplier ) throws IOException { Query query = aggTestConfig.query(); AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(aggTestConfig.builder()); @@ -591,7 +656,7 @@ private A searchAndReduce( subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX); } for (ShardSearcher subSearcher : subSearchers) { - AggregationContext context = createAggregationContext( + AggregationContext context = contextSupplier.get( subSearcher, indexSettings, query, @@ -620,7 +685,7 @@ private A searchAndReduce( } } } else { - AggregationContext context = createAggregationContext( + AggregationContext context = contextSupplier.get( searcher, indexSettings, query, @@ -688,8 +753,41 @@ private A searchAndReduce( assertRoundTrip(internalAggregation.copyResults()); } } + /* Verify that cancellation during final reduce correctly throws. + * We check reduce time cancellation only when consuming buckets. + */ + if (aggTestConfig.testReductionCancellation()) { + try { + // I can't remember if we mutate the InternalAggregations list, so make a defensive copy + List internalAggsCopy = new ArrayList<>(internalAggs); + A internalAgg = doFinalReduce(maxBucket, bigArraysForReduction, builder, internalAggsCopy, true); + if (internalAgg instanceof MultiBucketsAggregation mb) { + // Empty multi-bucket aggs are expected to return before even getting to the cancellation check + assertEquals("Got non-empty result for a cancelled reduction", 0, mb.getBuckets().size()); + } // other cases? + } catch (TaskCancelledException e) { + /* We may not always honor cancellation in reduce, for example if we are returning no results, so we can't + * just expectThrows here. + */ + } + } // now do the final reduce A internalAgg = doFinalReduce(maxBucket, bigArraysForReduction, builder, internalAggs, false); assertRoundTrip(internalAgg); + if (aggTestConfig.builder instanceof ValuesSourceAggregationBuilder.MetricsAggregationBuilder) { + verifyMetricNames((ValuesSourceAggregationBuilder.MetricsAggregationBuilder) aggTestConfig.builder, internalAgg); + } + return internalAgg; + } + + private A doFinalReduce( + int maxBucket, + BigArrays bigArraysForReduction, + Builder builder, + List internalAggs, + boolean cancelled + ) throws IOException { MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer( maxBucket, new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST) @@ -697,7 +795,7 @@ private A searchAndReduce( AggregationReduceContext reduceContext = new AggregationReduceContext.ForFinal( bigArraysForReduction, getMockScriptService(), - () -> false, + () -> cancelled, builder, reduceBucketConsumer ); @@ -707,10 +805,6 @@ private A searchAndReduce( assertRoundTrip(internalAgg); doAssertReducedMultiBucketConsumer(internalAgg, reduceBucketConsumer); - assertRoundTrip(internalAgg); - if (aggTestConfig.builder instanceof ValuesSourceAggregationBuilder.MetricsAggregationBuilder) { - verifyMetricNames((ValuesSourceAggregationBuilder.MetricsAggregationBuilder) aggTestConfig.builder, internalAgg); - } return internalAgg; } @@ -1601,11 +1695,12 @@ public record AggTestConfig( boolean incrementalReduce, boolean useLogDocMergePolicy, + boolean testReductionCancellation, MappedFieldType...
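The doFinalReduce extraction above turns the cancelled flag into a constant isCancelled supplier, so a single reduce routine serves both the cancellation check and the real final reduce. A compact sketch of that parameterization (integer partials are hypothetical stand-ins for InternalAggregations):

    import java.util.List;
    import java.util.function.Supplier;

    class ReduceCancellationDemo {
        static int reduce(List<Integer> partials, Supplier<Boolean> isCancelled) {
            // like the real reduce, an empty input may return before the check fires
            if (isCancelled.get() && partials.isEmpty() == false) {
                throw new RuntimeException("task cancelled"); // stand-in for TaskCancelledException
            }
            return partials.stream().mapToInt(Integer::intValue).sum();
        }

        public static void main(String[] args) {
            List<Integer> partials = List.of(1, 2, 3);
            try {
                reduce(partials, () -> true); // first pass: verify a cancelled reduce throws
            } catch (RuntimeException e) {
                System.out.println("cancelled reduce threw: " + e.getMessage());
            }
            System.out.println("final reduce: " + reduce(partials, () -> false)); // 6
        }
    }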
fieldTypes ) { public AggTestConfig(AggregationBuilder builder, MappedFieldType... fieldTypes) { - this(new MatchAllDocsQuery(), builder, DEFAULT_MAX_BUCKETS, randomBoolean(), true, randomBoolean(), false, fieldTypes); + this(new MatchAllDocsQuery(), builder, DEFAULT_MAX_BUCKETS, randomBoolean(), true, randomBoolean(), false, true, fieldTypes); } public AggTestConfig withQuery(Query query) { @@ -1617,6 +1712,7 @@ public AggTestConfig withQuery(Query query) { shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1630,6 +1726,7 @@ public AggTestConfig withSplitLeavesIntoSeperateAggregators(boolean splitLeavesI shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1643,6 +1740,7 @@ public AggTestConfig withShouldBeCached(boolean shouldBeCached) { shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1656,6 +1754,7 @@ public AggTestConfig withMaxBuckets(int maxBuckets) { shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1669,6 +1768,7 @@ public AggTestConfig withIncrementalReduce(boolean incrementalReduce) { shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1682,6 +1782,21 @@ public AggTestConfig withLogDocMergePolicy() { shouldBeCached, incrementalReduce, true, + testReductionCancellation, + fieldTypes + ); + } + + public AggTestConfig noReductionCancellation() { + return new AggTestConfig( + query, + builder, + maxBuckets, + splitLeavesIntoSeparateAggregators, + shouldBeCached, + incrementalReduce, + useLogDocMergePolicy, + false, fieldTypes ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java index d931340365cd6..22044e079018b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java @@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.List; +import java.util.Collection; import static org.elasticsearch.test.BWCVersions.DEFAULT_BWC_VERSIONS; @@ -28,7 +28,7 @@ public abstract class AbstractBWCSerializationTestCase bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index cc35f63d289eb..39b0f2b60662e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -145,8 +145,21 @@ private XContentTester( public void test() throws IOException { for (int runs = 0; runs < numberOfTestRuns; runs++) { XContentType xContentType = randomFrom(XContentType.values()).canonical(); - T testInstance = instanceSupplier.apply(xContentType); + T testInstance = null; try { + if (xContentType.equals(XContentType.YAML)) { + testInstance = randomValueOtherThanMany(instance -> { + // unicode character U+0085 (NEXT LINE (NEL)) doesn't survive YAML round trip tests (see #97716) + // get a new random instance if we detect this character in the xContent output + try { + return 
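Because AggTestConfig is a record, it has no built-in withers: every with* method rebuilds the record from scratch, so the new testReductionCancellation component has to be threaded through each of them, which is why the hunks above are so repetitive. A toy version of the pattern:

    record Config(int maxBuckets, boolean testReductionCancellation) {
        Config withMaxBuckets(int newMaxBuckets) {
            // each wither re-lists every component, including the new one
            return new Config(newMaxBuckets, testReductionCancellation);
        }

        Config noReductionCancellation() {
            return new Config(maxBuckets, false);
        }

        public static void main(String[] args) {
            Config c = new Config(100, true).withMaxBuckets(10).noReductionCancellation();
            System.out.println(c); // Config[maxBuckets=10, testReductionCancellation=false]
        }
    }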
toXContent.apply(instance, xContentType).utf8ToString().contains("\u0085"); + } catch (IOException e) { + throw new AssertionError(e); + } + }, () -> instanceSupplier.apply(xContentType)); + } else { + testInstance = instanceSupplier.apply(xContentType); + } BytesReference originalXContent = toXContent.apply(testInstance, xContentType); BytesReference shuffledContent = insertRandomFieldsAndShuffle( originalXContent, @@ -173,7 +186,9 @@ public void test() throws IOException { dispose.accept(parsed); } } finally { - dispose.accept(testInstance); + if (testInstance != null) { + dispose.accept(testInstance); + } } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java b/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java index 49859071b03cf..1cd0d0ddc4cd2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java @@ -12,17 +12,14 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; public final class BWCVersions { private BWCVersions() {} - public static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + public static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - public static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + public static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 6dbcc798c3ae6..bb88f9d094514 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -585,10 +585,10 @@ private void afterInternal(boolean afterClass) throws Exception { ensureClusterInfoServiceRunning(); beforeIndexDeletion(); cluster().wipe(excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete + cluster().assertAfterTest(); if (afterClass || currentClusterScope == Scope.TEST) { cluster().close(); } - cluster().assertAfterTest(); } } finally { if (currentClusterScope == Scope.TEST) { @@ -2263,7 +2263,7 @@ public static Path randomRepoPath() { */ public static Path randomRepoPath(Settings settings) { Environment environment = TestEnvironment.newEnvironment(settings); - Path[] repoFiles = environment.repoFiles(); + Path[] repoFiles = environment.repoDirs(); assert repoFiles.length > 0; Path path; do { diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index ba3d801bf9d13..ae79636c6b14c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1808,7 +1808,7 @@ private void rebuildUnicastHostFiles(List newNodes) { .distinct() .collect(Collectors.toList()); Set configPaths = Stream.concat(currentNodes.stream(), newNodes.stream()) - .map(nac -> nac.node.getEnvironment().configFile()) 
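The YAML fix above leans on randomValueOtherThanMany, which is rejection sampling: keep drawing random instances until one serializes without U+0085, the NEL character that YAML cannot round-trip. A stripped-down sketch of the idea, with a generator that alternates between a problematic and a safe string much like the sanitization test further below:

    import java.util.Random;
    import java.util.function.Predicate;
    import java.util.function.Supplier;

    class RejectionSamplingDemo {
        static <T> T randomValueOtherThanMany(Predicate<T> reject, Supplier<T> generator) {
            T value;
            do {
                value = generator.get(); // redraw until the predicate accepts
            } while (reject.test(value));
            return value;
        }

        public static void main(String[] args) {
            Random random = new Random();
            Supplier<String> generator = () -> random.nextBoolean() ? "a\u0085b" : "a b";
            String safe = randomValueOtherThanMany(s -> s.contains("\u0085"), generator);
            System.out.println(safe); // always "a b"
        }
    }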
+ .map(nac -> nac.node.getEnvironment().configDir()) .collect(Collectors.toSet()); logger.debug("configuring discovery with {} at {}", discoveryFileContents, configPaths); for (final Path configPath : configPaths) { @@ -1822,7 +1822,7 @@ private void rebuildUnicastHostFiles(List newNodes) { } public Collection configPaths() { - return nodes.values().stream().map(nac -> nac.node.getEnvironment().configFile()).toList(); + return nodes.values().stream().map(nac -> nac.node.getEnvironment().configDir()).toList(); } private void stopNodesAndClient(NodeAndClient nodeAndClient) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java index dd2e8e4ec5506..a329b5fbaebb2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java +++ b/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java @@ -23,58 +23,63 @@ public class LambdaMatchers { - private static class TransformMatcher extends BaseMatcher { + private static class TransformMatcher extends TypeSafeMatcher { + private final String transformDescription; private final Matcher matcher; private final Function transform; - private TransformMatcher(Matcher matcher, Function transform) { + private TransformMatcher(String transformDescription, Matcher matcher, Function transform) { + this.transformDescription = transformDescription; this.matcher = matcher; this.transform = transform; } @Override - @SuppressWarnings("unchecked") - public boolean matches(Object actual) { + protected boolean matchesSafely(T item) { U u; try { - u = transform.apply((T) actual); + u = transform.apply(item); } catch (ClassCastException e) { throw new AssertionError(e); } - return matcher.matches(u); } @Override - @SuppressWarnings("unchecked") - public void describeMismatch(Object item, Description description) { + protected void describeMismatchSafely(T item, Description description) { U u; try { - u = transform.apply((T) item); + u = transform.apply(item); } catch (ClassCastException e) { description.appendValue(item).appendText(" is not of the correct type (").appendText(e.getMessage()).appendText(")"); return; } - description.appendText("transformed value "); + description.appendText(transformDescription).appendText(" "); matcher.describeMismatch(u, description); } @Override public void describeTo(Description description) { - description.appendText("transformed to match ").appendDescriptionOf(matcher); + description.appendText(transformDescription).appendText(" matches ").appendDescriptionOf(matcher); } } public static Matcher transformedMatch(Function function, Matcher matcher) { - return new TransformMatcher<>(matcher, function); + return new TransformMatcher<>("transformed value", matcher, function); + } + + public static Matcher transformedMatch(String description, Function function, Matcher matcher) { + return new TransformMatcher<>(description, matcher, function); } private static class ListTransformMatcher extends TypeSafeMatcher> { + private final String transformDescription; private final Matcher> matcher; private final Function transform; - private ListTransformMatcher(Matcher> matcher, Function transform) { + private ListTransformMatcher(String transformDescription, Matcher> matcher, Function transform) { + this.transformDescription = transformDescription; this.matcher = matcher; this.transform = transform; } @@ -110,25 +115,35 @@ protected void describeMismatchSafely(Iterable item, Description 
description) } } - description.appendText("transformed item "); + description.appendText(transformDescription).appendText(" "); matcher.describeMismatch(us, description); } @Override public void describeTo(Description description) { - description.appendText("iterable with transformed items to match ").appendDescriptionOf(matcher); + description.appendText("iterable with ").appendText(transformDescription).appendText(" matching ").appendDescriptionOf(matcher); } } public static Matcher> transformedItemsMatch(Function function, Matcher> matcher) { - return new ListTransformMatcher<>(matcher, function); + return new ListTransformMatcher<>("transformed items", matcher, function); + } + + public static Matcher> transformedItemsMatch( + String transformDescription, + Function function, + Matcher> matcher + ) { + return new ListTransformMatcher<>(transformDescription, matcher, function); } private static class ArrayTransformMatcher extends TypeSafeMatcher { + private final String transformDescription; private final Matcher matcher; private final Function transform; - private ArrayTransformMatcher(Matcher matcher, Function transform) { + private ArrayTransformMatcher(String transformDescription, Matcher matcher, Function transform) { + this.transformDescription = transformDescription; this.matcher = matcher; this.transform = transform; } @@ -177,18 +192,26 @@ protected void describeMismatchSafely(T[] item, Description description) { us[i] = u; } - description.appendText("transformed item "); + description.appendText(transformDescription).appendText(" "); matcher.describeMismatch(us, description); } @Override public void describeTo(Description description) { - description.appendText("array with transformed items to match ").appendDescriptionOf(matcher); + description.appendText("array with ").appendText(transformDescription).appendText(" matching ").appendDescriptionOf(matcher); } } public static Matcher transformedArrayItemsMatch(Function function, Matcher matcher) { - return new ArrayTransformMatcher<>(matcher, function); + return new ArrayTransformMatcher<>("transformed items", matcher, function); + } + + public static Matcher transformedArrayItemsMatch( + String transformDescription, + Function function, + Matcher matcher + ) { + return new ArrayTransformMatcher<>(transformDescription, matcher, function); } private static class PredicateMatcher extends BaseMatcher> { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java index 0c7274a36b49a..9c7114425b8db 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java @@ -14,15 +14,23 @@ import org.elasticsearch.core.Nullable; import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import java.util.Random; import java.util.Set; +import java.util.TreeSet; import java.util.stream.Collectors; +import static org.apache.lucene.tests.util.LuceneTestCase.random; + public class TransportVersionUtils { + + private static final NavigableSet RELEASED_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(TransportVersion.getAllVersions()) + ); + /** Returns all released versions */ - public static List allReleasedVersions() { - return TransportVersion.getAllVersions(); + public static NavigableSet allReleasedVersions() { + return RELEASED_VERSIONS; } /** Returns the oldest known {@link 
TransportVersion} */ @@ -32,7 +40,7 @@ public static TransportVersion getFirstVersion() { /** Returns a random {@link TransportVersion} from all available versions. */ public static TransportVersion randomVersion() { - return ESTestCase.randomFrom(allReleasedVersions()); + return VersionUtils.randomFrom(random(), allReleasedVersions(), TransportVersion::fromId); } /** Returns a random {@link TransportVersion} from all available versions without the ignore set */ @@ -42,7 +50,7 @@ public static TransportVersion randomVersion(Set ignore) { /** Returns a random {@link TransportVersion} from all available versions. */ public static TransportVersion randomVersion(Random random) { - return allReleasedVersions().get(random.nextInt(allReleasedVersions().size())); + return VersionUtils.randomFrom(random, allReleasedVersions(), TransportVersion::fromId); } /** Returns a random {@link TransportVersion} between minVersion and maxVersion (inclusive). */ @@ -55,24 +63,21 @@ public static TransportVersion randomVersionBetween( throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); } - int minVersionIndex = 0; - List allReleasedVersions = allReleasedVersions(); + NavigableSet versions = allReleasedVersions(); if (minVersion != null) { - minVersionIndex = Collections.binarySearch(allReleasedVersions, minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = allReleasedVersions.size() - 1; if (maxVersion != null) { - maxVersionIndex = Collections.binarySearch(allReleasedVersions, maxVersion); - } - if (minVersionIndex < 0) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex < 0) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return allReleasedVersions.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } + + return VersionUtils.randomFrom(random, versions, TransportVersion::fromId); } public static TransportVersion getPreviousVersion() { @@ -82,16 +87,11 @@ public static TransportVersion getPreviousVersion() { } public static TransportVersion getPreviousVersion(TransportVersion version) { - int place = Collections.binarySearch(allReleasedVersions(), version); - if (place < 0) { - // version does not exist - need the item before the index this version should be inserted - place = -(place + 1); - } - - if (place < 1) { + TransportVersion lower = allReleasedVersions().lower(version); + if (lower == null) { throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); } - return allReleasedVersions().get(place - 1); + return lower; } public static TransportVersion getNextVersion(TransportVersion version) { @@ -99,17 +99,8 @@ public static TransportVersion getNextVersion(TransportVersion version) { } public static TransportVersion getNextVersion(TransportVersion version, boolean createIfNecessary) { - List allReleasedVersions = allReleasedVersions(); - int place = Collections.binarySearch(allReleasedVersions, version); - 
if (place < 0) { - // version does not exist - need the item at the index this version should be inserted - place = -(place + 1); - } else { - // need the *next* version - place++; - } - - if (place < 0 || place >= allReleasedVersions.size()) { + TransportVersion higher = allReleasedVersions().higher(version); + if (higher == null) { if (createIfNecessary) { // create a new transport version one greater than specified return new TransportVersion(version.id() + 1); @@ -117,7 +108,7 @@ public static TransportVersion getNextVersion(TransportVersion version, boolean throw new IllegalArgumentException("couldn't find any released versions after [" + version + "]"); } } - return allReleasedVersions.get(place); + return higher; } /** Returns a random {@code TransportVersion} that is compatible with {@link TransportVersion#current()} */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java index 8b7ab620774b9..311f032088f74 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java @@ -9,23 +9,31 @@ package org.elasticsearch.test; +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + import org.elasticsearch.Build; import org.elasticsearch.Version; +import org.elasticsearch.common.VersionId; import org.elasticsearch.core.Nullable; +import java.util.Collections; import java.util.List; -import java.util.Optional; +import java.util.NavigableSet; import java.util.Random; +import java.util.TreeSet; +import java.util.function.IntFunction; /** Utilities for selecting versions in tests */ public class VersionUtils { - private static final List ALL_VERSIONS = Version.getDeclaredVersions(Version.class); + private static final NavigableSet ALL_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(Version.getDeclaredVersions(Version.class)) + ); /** * Returns an immutable, sorted list containing all versions, both released and unreleased. */ - public static List allVersions() { + public static NavigableSet allVersions() { return ALL_VERSIONS; } @@ -33,13 +41,11 @@ public static List allVersions() { * Get the version before {@code version}. */ public static Version getPreviousVersion(Version version) { - for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) { - Version v = ALL_VERSIONS.get(i); - if (v.before(version)) { - return v; - } + var versions = ALL_VERSIONS.headSet(version, false); + if (versions.isEmpty()) { + throw new IllegalArgumentException("couldn't find any versions before [" + version + "]"); } - throw new IllegalArgumentException("couldn't find any versions before [" + version + "]"); + return versions.getLast(); } /** @@ -56,8 +62,7 @@ public static Version getPreviousVersion() { * where the minor version is less than the currents minor version. */ public static Version getPreviousMinorVersion() { - for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) { - Version v = ALL_VERSIONS.get(i); + for (Version v : ALL_VERSIONS.descendingSet()) { if (v.minor < Version.CURRENT.minor || v.major < Version.CURRENT.major) { return v; } @@ -67,12 +72,12 @@ public static Version getPreviousMinorVersion() { /** Returns the oldest {@link Version} */ public static Version getFirstVersion() { - return ALL_VERSIONS.get(0); + return ALL_VERSIONS.getFirst(); } /** Returns a random {@link Version} from all available versions. 
*/ public static Version randomVersion(Random random) { - return ALL_VERSIONS.get(random.nextInt(ALL_VERSIONS.size())); + return randomFrom(random, ALL_VERSIONS, Version::fromId); } /** Returns a random {@link Version} from all available versions, that is compatible with the given version. */ @@ -83,38 +88,42 @@ public static Version randomCompatibleVersion(Random random, Version version) { /** Returns a random {@link Version} between minVersion and maxVersion (inclusive). */ public static Version randomVersionBetween(Random random, @Nullable Version minVersion, @Nullable Version maxVersion) { - int minVersionIndex = 0; + if (minVersion != null && maxVersion != null && maxVersion.before(minVersion)) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); + } + + NavigableSet versions = ALL_VERSIONS; if (minVersion != null) { - minVersionIndex = ALL_VERSIONS.indexOf(minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = ALL_VERSIONS.size() - 1; if (maxVersion != null) { - maxVersionIndex = ALL_VERSIONS.indexOf(maxVersion); - } - if (minVersionIndex == -1) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex == -1) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else if (minVersionIndex > maxVersionIndex) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return ALL_VERSIONS.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } - } - /** returns the first future compatible version */ - public static Version compatibleFutureVersion(Version version) { - final Optional opt = ALL_VERSIONS.stream().filter(version::before).filter(v -> v.isCompatible(version)).findAny(); - assert opt.isPresent() : "no future compatible version for " + version; - return opt.get(); + return randomFrom(random, versions, Version::fromId); } /** Returns the maximum {@link Version} that is compatible with the given version. */ public static Version maxCompatibleVersion(Version version) { - final List compatible = ALL_VERSIONS.stream().filter(version::isCompatible).filter(version::onOrBefore).toList(); - assert compatible.size() > 0; - return compatible.get(compatible.size() - 1); + return ALL_VERSIONS.tailSet(version, true).descendingSet().stream().filter(version::isCompatible).findFirst().orElseThrow(); + } + + public static > T randomFrom(Random random, NavigableSet set, IntFunction ctor) { + // get the first and last id, pick a random id in the middle, then find that id in the set in O(log n) time + // this assumes the id numbers are reasonably evenly distributed in the set + assert set.isEmpty() == false; + int lowest = set.getFirst().id(); + int highest = set.getLast().id(); + + T randomId = ctor.apply(RandomNumbers.randomIntBetween(random, lowest, highest)); + // try to find the id below, then the id above. 
We're just looking for *some* item in the set that is close to randomId + T found = set.floor(randomId); + return found != null ? found : set.ceiling(randomId); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 40aee8eed4235..b0d64a87c4d36 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -257,7 +257,7 @@ public static void assertBlocked(@Nullable final Integer expectedBlockId, Except assertThat( "Request should have been blocked by [" + expectedBlockId + "] instead of " + e.blocks(), e.blocks(), - hasItem(transformedMatch(ClusterBlock::id, equalTo(expectedBlockId))) + hasItem(transformedMatch("ClusterBlock id", ClusterBlock::id, equalTo(expectedBlockId))) ); } } @@ -764,33 +764,39 @@ public static void assertSuggestion(Suggest searchSuggest, int entry, String key * Assert that an index template is missing */ public static void assertIndexTemplateMissing(GetIndexTemplatesResponse templatesResponse, String name) { - assertThat(templatesResponse.getIndexTemplates(), not(hasItem(transformedMatch(IndexTemplateMetadata::name, equalTo(name))))); + assertThat( + templatesResponse.getIndexTemplates(), + not(hasItem(transformedMatch("IndexTemplateMetadata name", IndexTemplateMetadata::name, equalTo(name)))) + ); } /** * Assert that an index template exists */ public static void assertIndexTemplateExists(GetIndexTemplatesResponse templatesResponse, String name) { - assertThat(templatesResponse.getIndexTemplates(), hasItem(transformedMatch(IndexTemplateMetadata::name, equalTo(name)))); + assertThat( + templatesResponse.getIndexTemplates(), + hasItem(transformedMatch("IndexTemplateMetadata name", IndexTemplateMetadata::name, equalTo(name))) + ); } /* * matchers */ public static Matcher hasId(final String id) { - return transformedMatch(SearchHit::getId, equalTo(id)); + return transformedMatch("SearchHit id", SearchHit::getId, equalTo(id)); } public static Matcher hasIndex(final String index) { - return transformedMatch(SearchHit::getIndex, equalTo(index)); + return transformedMatch("SearchHit index", SearchHit::getIndex, equalTo(index)); } public static Matcher hasScore(final float score) { - return transformedMatch(SearchHit::getScore, equalTo(score)); + return transformedMatch("SearchHit score", SearchHit::getScore, equalTo(score)); } public static Matcher hasRank(final int rank) { - return transformedMatch(SearchHit::getRank, equalTo(rank)); + return transformedMatch("SearchHit rank", SearchHit::getRank, equalTo(rank)); } public static T assertBooleanSubQuery(Query query, Class subqueryType, int i) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java index 667149e4bdd3e..5bf20b18abc72 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java @@ -14,41 +14,43 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.KnownIndexVersions; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import java.util.Random; 
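The randomFrom helper above avoids copying the set: it draws a random id between the first and last members, then snaps to the nearest member with floor, falling back to ceiling. A runnable sketch of that snap (integer ids standing in for version ids):

    import java.util.List;
    import java.util.NavigableSet;
    import java.util.Random;
    import java.util.TreeSet;

    class FloorCeilingDemo {
        public static void main(String[] args) {
            NavigableSet<Integer> ids = new TreeSet<>(List.of(1000, 2500, 7000, 9000));
            Random random = new Random();
            int probe = ids.first() + random.nextInt(ids.last() - ids.first() + 1);
            Integer found = ids.floor(probe); // nearest member at or below the probe
            System.out.println(found != null ? found : ids.ceiling(probe));
        }
    }

As the in-code comment notes, the draw is only roughly uniform: a member that sits after a large id gap absorbs every probe landing in that gap, so it is picked more often than its neighbors.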
import java.util.Set; import java.util.stream.Collectors; +import static org.apache.lucene.tests.util.LuceneTestCase.random; + public class IndexVersionUtils { - private static final List ALL_VERSIONS = KnownIndexVersions.ALL_VERSIONS; - private static final List ALL_WRITE_VERSIONS = KnownIndexVersions.ALL_WRITE_VERSIONS; + private static final NavigableSet ALL_VERSIONS = KnownIndexVersions.ALL_VERSIONS; + private static final NavigableSet ALL_WRITE_VERSIONS = KnownIndexVersions.ALL_WRITE_VERSIONS; /** Returns all released versions */ - public static List allReleasedVersions() { + public static NavigableSet allReleasedVersions() { return ALL_VERSIONS; } /** Returns the oldest known {@link IndexVersion}. This version can only be read from and not written to */ public static IndexVersion getLowestReadCompatibleVersion() { - return ALL_VERSIONS.get(0); + return ALL_VERSIONS.getFirst(); } /** Returns the oldest known {@link IndexVersion} that can be written to */ public static IndexVersion getLowestWriteCompatibleVersion() { - return ALL_WRITE_VERSIONS.get(0); + return ALL_WRITE_VERSIONS.getFirst(); } /** Returns a random {@link IndexVersion} from all available versions. */ public static IndexVersion randomVersion() { - return ESTestCase.randomFrom(ALL_VERSIONS); + return VersionUtils.randomFrom(random(), ALL_VERSIONS, IndexVersion::fromId); } /** Returns a random {@link IndexVersion} from all versions that can be written to. */ public static IndexVersion randomWriteVersion() { - return ESTestCase.randomFrom(ALL_WRITE_VERSIONS); + return VersionUtils.randomFrom(random(), ALL_WRITE_VERSIONS, IndexVersion::fromId); } /** Returns a random {@link IndexVersion} from all available versions without the ignore set */ @@ -62,23 +64,21 @@ public static IndexVersion randomVersionBetween(Random random, @Nullable IndexVe throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); } - int minVersionIndex = 0; + NavigableSet versions = allReleasedVersions(); if (minVersion != null) { - minVersionIndex = Collections.binarySearch(ALL_VERSIONS, minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = ALL_VERSIONS.size() - 1; if (maxVersion != null) { - maxVersionIndex = Collections.binarySearch(ALL_VERSIONS, maxVersion); - } - if (minVersionIndex < 0) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex < 0) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return ALL_VERSIONS.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } + + return VersionUtils.randomFrom(random, versions, IndexVersion::fromId); } public static IndexVersion getPreviousVersion() { @@ -88,16 +88,11 @@ public static IndexVersion getPreviousVersion() { } public static IndexVersion getPreviousVersion(IndexVersion version) { - int place = Collections.binarySearch(ALL_VERSIONS, version); - if (place < 0) { - // version does not exist - need the item before the index this version should be inserted 
- place = -(place + 1); - } - - if (place < 1) { + IndexVersion lower = allReleasedVersions().lower(version); + if (lower == null) { throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); } - return ALL_VERSIONS.get(place - 1); + return lower; } public static IndexVersion getPreviousMajorVersion(IndexVersion version) { @@ -105,19 +100,11 @@ public static IndexVersion getPreviousMajorVersion(IndexVersion version) { } public static IndexVersion getNextVersion(IndexVersion version) { - int place = Collections.binarySearch(ALL_VERSIONS, version); - if (place < 0) { - // version does not exist - need the item at the index this version should be inserted - place = -(place + 1); - } else { - // need the *next* version - place++; - } - - if (place < 0 || place >= ALL_VERSIONS.size()) { + IndexVersion higher = allReleasedVersions().higher(version); + if (higher == null) { throw new IllegalArgumentException("couldn't find any released versions after [" + version + "]"); } - return ALL_VERSIONS.get(place); + return higher; } /** Returns a random {@code IndexVersion} that is compatible with {@link IndexVersion#current()} */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 5738ab60f47eb..a4884d764e468 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1012,14 +1012,21 @@ private void wipeCluster() throws Exception { private void waitForClusterUpdates() throws Exception { logger.info("Waiting for all cluster updates up to this moment to be processed"); + try { assertOK(adminClient().performRequest(new Request("GET", "_cluster/health?wait_for_events=languid"))); } catch (ResponseException e) { if (e.getResponse().getStatusLine().getStatusCode() == HttpStatus.SC_REQUEST_TIMEOUT) { + StringBuilder logMessage = new StringBuilder("Timed out waiting for cluster updates to be processed."); final var pendingTasks = getPendingClusterStateTasks(); if (pendingTasks != null) { - logger.error("Timed out waiting for cluster updates to be processed, {}", pendingTasks); + logMessage.append('\n').append(pendingTasks); + } + final var hotThreads = getHotThreads(); + if (hotThreads != null) { + logMessage.append("\nHot threads: ").append(hotThreads); } + logger.error(logMessage.toString()); } throw e; } @@ -1029,8 +1036,8 @@ private static String getPendingClusterStateTasks() { try { Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks")); List tasks = (List) entityAsMap(response).get("tasks"); - if (false == tasks.isEmpty()) { - StringBuilder message = new StringBuilder("there are still running tasks:"); + if (tasks.isEmpty() == false) { + StringBuilder message = new StringBuilder("There are still running tasks:"); for (Object task : tasks) { message.append('\n').append(task.toString()); } @@ -1042,6 +1049,18 @@ private static String getPendingClusterStateTasks() { return null; } + private String getHotThreads() { + try { + Response response = adminClient().performRequest( + new Request("GET", "/_nodes/hot_threads?ignore_idle_threads=false&threads=9999") + ); + return EntityUtils.toString(response.getEntity()); + } catch (IOException e) { + logger.error("Failed to retrieve hot threads in the cluster during cleanup", e); + } + return null; + } + /** * This method checks whether ILM policies 
or templates get recreated after they have been deleted. If so, we are probably deleting * them unnecessarily, potentially causing test performance problems. This could happen for example if someone adds a new standard ILM diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 4595fbf286077..0df978fe4937e 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -2758,8 +2758,8 @@ public void handleException(TransportException exp) { TransportStats transportStats = serviceC.transport.getStats(); // we did a single round-trip to do the initial handshake assertEquals(1, transportStats.getRxCount()); assertEquals(1, transportStats.getTxCount()); - assertEquals(29, transportStats.getRxSize().getBytes()); - assertEquals(55, transportStats.getTxSize().getBytes()); + assertEquals(35, transportStats.getRxSize().getBytes()); + assertEquals(60, transportStats.getTxSize().getBytes()); }); serviceC.sendRequest( connection, @@ -2773,16 +2773,16 @@ public void handleException(TransportException exp) { TransportStats transportStats = serviceC.transport.getStats(); // request has been send assertEquals(1, transportStats.getRxCount()); assertEquals(2, transportStats.getTxCount()); - assertEquals(29, transportStats.getRxSize().getBytes()); - assertEquals(114, transportStats.getTxSize().getBytes()); + assertEquals(35, transportStats.getRxSize().getBytes()); + assertEquals(119, transportStats.getTxSize().getBytes()); }); sendResponseLatch.countDown(); responseLatch.await(); stats = serviceC.transport.getStats(); // response has been received assertEquals(2, stats.getRxCount()); assertEquals(2, stats.getTxCount()); - assertEquals(54, stats.getRxSize().getBytes()); - assertEquals(114, stats.getTxSize().getBytes()); + assertEquals(60, stats.getRxSize().getBytes()); + assertEquals(119, stats.getTxSize().getBytes()); } finally { serviceC.close(); } @@ -2873,8 +2873,8 @@ public void handleException(TransportException exp) { TransportStats transportStats = serviceC.transport.getStats(); // request has been sent assertEquals(1, transportStats.getRxCount()); assertEquals(1, transportStats.getTxCount()); - assertEquals(29, transportStats.getRxSize().getBytes()); - assertEquals(55, transportStats.getTxSize().getBytes()); + assertEquals(35, transportStats.getRxSize().getBytes()); + assertEquals(60, transportStats.getTxSize().getBytes()); }); serviceC.sendRequest( connection, @@ -2888,8 +2888,8 @@ public void handleException(TransportException exp) { TransportStats transportStats = serviceC.transport.getStats(); // request has been sent assertEquals(1, transportStats.getRxCount()); assertEquals(2, transportStats.getTxCount()); - assertEquals(29, transportStats.getRxSize().getBytes()); - assertEquals(114, transportStats.getTxSize().getBytes()); + assertEquals(35, transportStats.getRxSize().getBytes()); + assertEquals(119, transportStats.getTxSize().getBytes()); }); sendResponseLatch.countDown(); responseLatch.await(); @@ -2904,8 +2904,8 @@ public void handleException(TransportException exp) { String failedMessage = "Unexpected read bytes size. The transport exception that was received=" + exception; // 57 bytes are the non-exception message bytes that have been received. 
It should include the initial // handshake message and the header, version, etc bytes in the exception message. - assertEquals(failedMessage, 57 + streamOutput.bytes().length(), stats.getRxSize().getBytes()); - assertEquals(114, stats.getTxSize().getBytes()); + assertEquals(failedMessage, 63 + streamOutput.bytes().length(), stats.getRxSize().getBytes()); + assertEquals(119, stats.getTxSize().getBytes()); } finally { serviceC.close(); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java index b8f4dcb399ec7..e3cc3bba94a5c 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java @@ -12,11 +12,13 @@ import com.carrotsearch.randomizedtesting.RandomizedContext; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import java.io.IOException; import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -49,4 +51,42 @@ public void testInsertRandomFieldsAndShuffle() throws Exception { assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field"))); } } + + private record TestToXContent(String field, String value) implements ToXContentFragment { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.field(field, value); + } + } + + public void testYamlXContentRoundtripSanitization() throws Exception { + var test = new AbstractXContentTestCase() { + + @Override + protected TestToXContent createTestInstance() { + // we need to randomly create both a "problematic" and an okay version in order to ensure that the sanitization code + // can draw at least one okay version if polled often enough + return randomBoolean() ? 
new TestToXContent("a\u0085b", "def") : new TestToXContent("a b", "def"); + } + + @Override + protected TestToXContent doParseInstance(XContentParser parser) throws IOException { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + String name = parser.currentName(); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + String value = parser.text(); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return new TestToXContent(name, value); + }; + + @Override + protected boolean supportsUnknownFields() { + return false; + } + }; + // testFromXContent runs 20 repetitions, enough to hit a YAML xcontent version very likely + test.testFromXContent(); + } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java b/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java index f7ab2349ec1ce..c97a369a9853e 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java @@ -19,11 +19,13 @@ import static org.elasticsearch.test.LambdaMatchers.transformedItemsMatch; import static org.elasticsearch.test.LambdaMatchers.transformedMatch; import static org.elasticsearch.test.LambdaMatchers.trueWith; +import static org.hamcrest.Matchers.anything; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; public class LambdaMatchersTests extends ESTestCase { @@ -56,11 +58,13 @@ public void testTransformMatcher() { assertThat(new A("1"), transformedMatch(a -> a.str, equalTo("1"))); assertThat(new B("1"), transformedMatch((A a) -> a.str, equalTo("1"))); + assertMismatch((A) null, transformedMatch(A::toString, anything()), is("was null")); assertMismatch(new A("1"), transformedMatch(a -> a.str, emptyString()), equalTo("transformed value was \"1\"")); } public void testTransformDescription() { - assertDescribeTo(transformedMatch((A a) -> a.str, emptyString()), equalTo("transformed to match an empty string")); + assertDescribeTo(transformedMatch((A a) -> a.str, emptyString()), equalTo("transformed value matches an empty string")); + assertDescribeTo(transformedMatch("str field", (A a) -> a.str, emptyString()), equalTo("str field matches an empty string")); } public void testListTransformMatcher() { @@ -71,14 +75,23 @@ public void testListTransformMatcher() { assertMismatch( as, transformedItemsMatch(a -> a.str, containsInAnyOrder("1", "2", "4")), - equalTo("transformed item not matched: \"3\"") + equalTo("transformed items not matched: \"3\"") + ); + assertMismatch( + as, + transformedItemsMatch("str field", a -> a.str, containsInAnyOrder("1", "2", "4")), + equalTo("str field not matched: \"3\"") ); } public void testListTransformDescription() { assertDescribeTo( transformedItemsMatch((A a) -> a.str, containsInAnyOrder("1")), - equalTo("iterable with transformed items to match iterable with items [\"1\"] in any order") + equalTo("iterable with transformed items matching iterable with items [\"1\"] in any order") + ); + assertDescribeTo( + transformedItemsMatch("str field", (A a) -> a.str, containsInAnyOrder("1")), + equalTo("iterable with str 
field matching iterable with items [\"1\"] in any order") ); } @@ -89,14 +102,23 @@ public void testArrayTransformMatcher() { assertMismatch( as, transformedArrayItemsMatch(a -> a.str, arrayContainingInAnyOrder("1", "2", "4")), - equalTo("transformed item not matched: \"3\"") + equalTo("transformed items not matched: \"3\"") + ); + assertMismatch( + as, + transformedArrayItemsMatch("str field", a -> a.str, arrayContainingInAnyOrder("1", "2", "4")), + equalTo("str field not matched: \"3\"") ); } public void testArrayTransformDescription() { assertDescribeTo( transformedArrayItemsMatch((A a) -> a.str, arrayContainingInAnyOrder("1")), - equalTo("array with transformed items to match [\"1\"] in any order") + equalTo("array with transformed items matching [\"1\"] in any order") + ); + assertDescribeTo( + transformedArrayItemsMatch("str field", (A a) -> a.str, arrayContainingInAnyOrder("1")), + equalTo("array with str field matching [\"1\"] in any order") ); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java index 5ae7e5640fc91..9951878289d48 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java @@ -21,13 +21,6 @@ */ public class VersionUtilsTests extends ESTestCase { - public void testAllVersionsSorted() { - List allVersions = VersionUtils.allVersions(); - for (int i = 0, j = 1; j < allVersions.size(); ++i, ++j) { - assertTrue(allVersions.get(i).before(allVersions.get(j))); - } - } - public void testRandomVersionBetween() { // TODO: rework this test to use a dummy Version class so these don't need to change with each release // full range @@ -50,9 +43,9 @@ public void testRandomVersionBetween() { got = VersionUtils.randomVersionBetween(random(), null, fromId(7000099)); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); assertTrue(got.onOrBefore(fromId(7000099))); - got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().get(0)); + got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().getFirst()); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); - assertTrue(got.onOrBefore(VersionUtils.allVersions().get(0))); + assertTrue(got.onOrBefore(VersionUtils.allVersions().getFirst())); // unbounded upper got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), null); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java index cfdca56542eb2..c2d274bb0b3eb 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java @@ -765,7 +765,12 @@ private void installModule(String moduleName, DefaultPluginInstallSpec installSp }); - IOUtils.syncMaybeWithLinks(modulePath, destination); + // If we aren't overriding anything we can use links here, otherwise do a full copy + if (installSpec.entitlementsOverride == null && installSpec.propertiesOverride == null) { + IOUtils.syncMaybeWithLinks(modulePath, destination); + } else { + IOUtils.syncWithCopy(modulePath, destination); + } try { if (installSpec.entitlementsOverride != null) { @@ -794,7 +799,9 @@ private void 
installModule(String moduleName, DefaultPluginInstallSpec installSp if (extendedProperty != null) { String[] extendedModules = extendedProperty.split(","); for (String module : extendedModules) { - installModule(module, new DefaultPluginInstallSpec(), modulePaths); + if (spec.getModules().containsKey(module) == false) { + installModule(module, new DefaultPluginInstallSpec(), modulePaths); + } } } } catch (IOException e) { diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle index 9a21f40a4c4a9..7aaaaaf668643 100644 --- a/x-pack/plugin/analytics/build.gradle +++ b/x-pack/plugin/analytics/build.gradle @@ -7,6 +7,7 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { name = 'x-pack-analytics' @@ -18,6 +19,10 @@ base { archivesName = 'x-pack-analytics' } +tasks.named('javaRestTest') { + usesDefaultDistribution() +} + dependencies { api 'org.apache.commons:commons-math3:3.6.1' compileOnly project(path: xpackModule('core')) diff --git a/x-pack/plugin/analytics/src/javaRestTest/java/org/elasticsearch/multiterms/AggsTimeoutIT.java b/x-pack/plugin/analytics/src/javaRestTest/java/org/elasticsearch/multiterms/AggsTimeoutIT.java new file mode 100644 index 0000000000000..6ca7d38d87842 --- /dev/null +++ b/x-pack/plugin/analytics/src/javaRestTest/java/org/elasticsearch/multiterms/AggsTimeoutIT.java @@ -0,0 +1,321 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.multiterms; + +import org.apache.http.client.config.RequestConfig; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.client.Request; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.test.ListMatcher; +import org.elasticsearch.test.MapMatcher; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.net.SocketTimeoutException; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.any; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; + +/** + * Runs slow aggregations with a timeout and asserts that they timeout and + * cancel the queries. 
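+ * + * As a sketch of the request shape (not part of the test): for depth 2 the recursive agg() helper below would emit + * { "size": 0, "aggs": { "agg002": { "terms": { "field": "kwd001" }, "aggs": { "agg001": { "terms": { "field": "kwd000" } } } } } } + * while the test itself nests DEPTH = 10 levels of the same pattern.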
+ */ +public class AggsTimeoutIT extends ESRestTestCase { + private static final int DEPTH = 10; + private static final int VALUE_COUNT = 4; + private static final int TOTAL_DOCS = Math.toIntExact((long) Math.pow(VALUE_COUNT, DEPTH)); + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(1); + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.watcher.enabled", "false") + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.enabled", "false") + .setting("xpack.security.transport.ssl.enabled", "false") + .setting("xpack.security.http.ssl.enabled", "false") + .jvmArg("-Xmx1g") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testTerms() throws Exception { + Request request = new Request("POST", "/deep/_search"); + XContentBuilder body = JsonXContent.contentBuilder().prettyPrint().startObject(); + body.field("size", 0); + agg(body, "terms", 10); + request.setJsonEntity(Strings.toString(body.endObject())); + setTimeout(request); + try { + Map response = responseAsMap(client().performRequest(request)); + assertMap("not expected to finish", response, matchesMap()); + } catch (SocketTimeoutException timeout) { + logger.info("timed out"); + assertNoSearchesRunning(); + } + } + + private void agg(XContentBuilder body, String type, int depth) throws IOException { + if (depth == 0) { + return; + } + body.startObject("aggs").startObject(field("agg", depth)); + { + body.startObject(type); + body.field("field", field("kwd", depth - 1)); + body.endObject(); + } + agg(body, type, depth - 1); + body.endObject().endObject(); + } + + public void testMultiTerms() throws Exception { + Request request = new Request("POST", "/deep/_search"); + XContentBuilder body = JsonXContent.contentBuilder().prettyPrint().startObject(); + body.field("size", 0); + autoDateInMultiTerms(body, b -> { + for (int i = 0; i < DEPTH; i++) { + b.startObject().field("field", field("kwd", i)).endObject(); + } + }); + request.setJsonEntity(Strings.toString(body.endObject())); + setTimeout(request); + try { + Map response = responseAsMap(client().performRequest(request)); + ListMatcher buckets = matchesList(); + for (int i = 0; i < 10; i++) { + buckets = buckets.item( + matchesMap().entry("key_as_string", any(String.class)) + .entry("key", hasSize(10)) + .entry("doc_count", 1) + .entry("adh", matchesMap().entry("buckets", hasSize(1)).entry("interval", "1s")) + ); + } + MapMatcher agg = matchesMap().entry("buckets", buckets) + .entry("doc_count_error_upper_bound", 0) + .entry("sum_other_doc_count", greaterThan(0)); + assertMap(response, matchesMap().extraOk().entry("aggregations", matchesMap().entry("multi", agg))); + } catch (SocketTimeoutException timeout) { + logger.info("timed out"); + assertNoSearchesRunning(); + } + } + + public void testMultiTermWithTimestamp() throws Exception { + Request request = new Request("POST", "/deep/_search"); + XContentBuilder body = JsonXContent.contentBuilder().prettyPrint().startObject(); + body.field("size", 0); + autoDateInMultiTerms(body, b -> { + b.startObject().field("field", field("kwd", 0)).endObject(); + b.startObject().field("field", "@timestamp").endObject(); + }); + request.setJsonEntity(Strings.toString(body.endObject())); + setTimeout(request); + try { + Map response = responseAsMap(client().performRequest(request)); + ListMatcher buckets = matchesList(); + for (int i = 0; i < 10; i++) { + 
buckets = buckets.item( + matchesMap().entry("key_as_string", any(String.class)) + .entry("key", hasSize(10)) + .entry("doc_count", 1) + .entry("adh", matchesMap().entry("buckets", hasSize(1)).entry("interval", "1s")) + ); + } + MapMatcher agg = matchesMap().entry("buckets", buckets) + .entry("doc_count_error_upper_bound", 0) + .entry("sum_other_doc_count", greaterThan(0)); + assertMap(response, matchesMap().extraOk().entry("aggregations", matchesMap().entry("multi", agg))); + } catch (SocketTimeoutException timeout) { + logger.info("timed out"); + assertNoSearchesRunning(); + } + } + + private void autoDateInMultiTerms(XContentBuilder body, CheckedConsumer terms) throws IOException { + body.startObject("aggs").startObject("multi"); + { + body.startObject("multi_terms"); + { + body.startArray("terms"); + terms.accept(body); + body.endArray(); + body.startArray("order"); + { + body.startObject().field("_count", "desc").endObject(); + body.startObject().field("_key", "asc").endObject(); + } + body.endArray(); + } + body.endObject(); + body.startObject("aggs").startObject("adh").startObject("auto_date_histogram"); + { + body.field("field", "@timestamp"); + body.field("buckets", 1); + } + body.endObject().endObject().endObject(); + } + body.endObject().endObject(); + } + + @Before + public void createDeep() throws IOException { + if (indexExists("deep")) { + return; + } + logger.info("creating deep index"); + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("properties"); + mapping.startObject("@timestamp").field("type", "date").endObject(); + for (int f = 0; f < DEPTH; f++) { + mapping.startObject(field("kwd", f)).field("type", "keyword").endObject(); + } + CreateIndexResponse createIndexResponse = createIndex( + "deep", + Settings.builder().put("index.number_of_replicas", 0).build(), + Strings.toString(mapping.endObject().endObject()) + ); + assertThat(createIndexResponse.isAcknowledged(), equalTo(true)); + Bulk bulk = new Bulk(); + bulk.doc(new StringBuilder("{"), 0); + bulk.flush(); + + MapMatcher shardsOk = matchesMap().entry("total", 1).entry("failed", 0).entry("successful", 1); + logger.info("refreshing deep index"); + Map refresh = responseAsMap(client().performRequest(new Request("POST", "/_refresh"))); + assertMap(refresh, matchesMap().entry("_shards", shardsOk)); + + logger.info("double checking deep index count"); + Map count = responseAsMap(client().performRequest(new Request("POST", "/deep/_count"))); + assertMap(count, matchesMap().entry("_shards", shardsOk.entry("skipped", 0)).entry("count", TOTAL_DOCS)); + + logger.info("deep index ready for test"); + } + + private String field(String prefix, int field) { + return String.format(Locale.ROOT, "%s%03d", prefix, field); + } + + @Override + protected boolean preserveClusterUponCompletion() { + return true; + } + + class Bulk { + private static final int BULK_SIZE = Math.toIntExact(ByteSizeValue.ofMb(2).getBytes()); + + StringBuilder bulk = new StringBuilder(); + int current = 0; + int total = 0; + long timestamp = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2025-01-01T00:00:00Z"); + + void doc(StringBuilder doc, int field) throws IOException { + if (field != 0) { + doc.append(','); + } + int len = doc.length(); + for (int value = 0; value < VALUE_COUNT; value++) { + doc.append('"').append(field("kwd", field)).append("\":\"").append(value).append('"'); + if (field == DEPTH - 1) { + doc.append(",\"@timestamp\":").append(timestamp).append('}'); + timestamp += 
TimeValue.timeValueMinutes(1).millis(); + addToBulk(doc); + } else { + doc(doc, field + 1); + } + doc.setLength(len); + } + } + + void addToBulk(StringBuilder doc) throws IOException { + current++; + total++; + bulk.append("{\"index\":{}}\n"); + bulk.append(doc).append('\n'); + if (bulk.length() > BULK_SIZE) { + flush(); + } + } + + void flush() throws IOException { + logger.info( + "Flushing to deep {} docs/{}. Total {}% {}/{}", + current, + ByteSizeValue.ofBytes(bulk.length()), + String.format(Locale.ROOT, "%04.1f", 100.0 * total / TOTAL_DOCS), + total, + TOTAL_DOCS + ); + Request request = new Request("POST", "/deep/_bulk"); + request.setJsonEntity(bulk.toString()); + Map response = responseAsMap(client().performRequest(request)); + assertMap(response, matchesMap().extraOk().entry("errors", false)); + bulk.setLength(0); + current = 0; + } + } + + private void setTimeout(Request request) { + RequestConfig.Builder config = RequestConfig.custom(); + config.setSocketTimeout(Math.toIntExact(TIMEOUT.millis())); + request.setOptions(request.getOptions().toBuilder().setRequestConfig(config.build())); + } + + /** + * Asserts that within a minute the _search has left the _tasks api. + *

+ * It would be more convenient if, whenever the _search has returned + * to us, the _tasks API no longer contained the _search. But sometimes + * it still does. So long as the task disappears eventually, that is + * still indicative of the interrupt code working. + *
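+ * For reference, a quiesced response from the polled GET _tasks?actions=*search request is simply { "nodes": {} }; + * the assertion below only requires that the "nodes" map ends up empty.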

+ */ + private void assertNoSearchesRunning() throws Exception { + assertBusy(() -> { + Request tasks = new Request("GET", "/_tasks"); + tasks.addParameter("actions", "*search"); + tasks.addParameter("detailed", ""); + assertBusy(() -> { + Map response = responseAsMap(client().performRequest(tasks)); + // If there are running searches the map in `nodes` is non-empty. + if (response.isEmpty() == false) { + logger.warn("search still running, hot threads:\n{}", hotThreads()); + } + assertMap(response, matchesMap().entry("nodes", matchesMap())); + }); + }, 1, TimeUnit.MINUTES); + } + + private String hotThreads() throws IOException { + Request tasks = new Request("GET", "/_nodes/hot_threads"); + return EntityUtils.toString(client().performRequest(tasks).getEntity()); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index c76af6b0cfa09..731ab15001414 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -9,6 +9,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -252,7 +253,7 @@ private SegmentCommitInfo syncSegment( false, IndexOptions.NONE, DocValuesType.NONE, - fieldInfo.docValuesSkipIndexType(), + DocValuesSkipIndexType.NONE, -1, fieldInfo.attributes(), 0, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index cf5ebc8adc56d..3ade1a0eb1d47 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -411,9 +411,9 @@ public List getRestHandlers( } public static Path resolveConfigFile(Environment env, String name) { - Path config = env.configFile().resolve(name); + Path config = env.configDir().resolve(name); if (Files.exists(config) == false) { - Path legacyConfig = env.configFile().resolve("x-pack").resolve(name); + Path legacyConfig = env.configDir().resolve("x-pack").resolve(name); if (Files.exists(legacyConfig)) { deprecationLogger.warn( DeprecationCategory.OTHER, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 3b4d4aec776d1..c2d157e512a21 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -317,7 +317,7 @@ public static Setting defaultStoredSecureTokenHashAlgorithmSetting( }, Property.NodeScope); } - public static final List DEFAULT_SUPPORTED_PROTOCOLS = Arrays.asList("TLSv1.3", "TLSv1.2", "TLSv1.1"); + public static final List DEFAULT_SUPPORTED_PROTOCOLS = Arrays.asList("TLSv1.3", "TLSv1.2"); public static final SslClientAuthenticationMode CLIENT_AUTH_DEFAULT = SslClientAuthenticationMode.REQUIRED; public static final SslClientAuthenticationMode HTTP_CLIENT_AUTH_DEFAULT = 
SslClientAuthenticationMode.NONE; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java index fe970bef87145..1993545075979 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java @@ -391,6 +391,22 @@ public T getTaskAndCheckAuthentication( TaskManager taskManager, AsyncExecutionId asyncExecutionId, Class tClass + ) throws IOException { + return getTaskAndCheckAuthentication(taskManager, security, asyncExecutionId, tClass); + } + + /** + * Returns the {@link AsyncTask} if the provided asyncTaskId + * is registered in the task manager, null otherwise. + * + * This method throws a {@link ResourceNotFoundException} if the authenticated user + * is not the creator of the original task. + */ + public static T getTaskAndCheckAuthentication( + TaskManager taskManager, + AsyncSearchSecurity security, + AsyncExecutionId asyncExecutionId, + Class tClass ) throws IOException { T asyncTask = getTask(taskManager, asyncExecutionId, tClass); if (asyncTask == null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java index f5c9f48454073..26ad24d80e14d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; @@ -28,6 +29,7 @@ import java.util.Objects; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -51,6 +53,7 @@ public abstract class AbstractAuditor { private Queue backlog; private final AtomicBoolean indexAndAliasCreationInProgress; + private final ExecutorService executorService; protected AbstractAuditor( OriginSettingClient client, @@ -58,7 +61,8 @@ protected AbstractAuditor( String nodeName, AbstractAuditMessageFactory messageFactory, ClusterService clusterService, - IndexNameExpressionResolver indexNameExpressionResolver + IndexNameExpressionResolver indexNameExpressionResolver, + ExecutorService executorService ) { this.client = Objects.requireNonNull(client); this.auditIndexWriteAlias = Objects.requireNonNull(auditIndexWriteAlias); @@ -69,6 +73,7 @@ protected AbstractAuditor( this.backlog = new ConcurrentLinkedQueue<>(); this.indexAndAliasCreated = new AtomicBoolean(); this.indexAndAliasCreationInProgress = new AtomicBoolean(); + this.executorService = executorService; } public void audit(Level level, String resourceId, String message) { @@ -148,7 +153,16 @@ protected void indexDoc(ToXContent toXContent) { } private void writeDoc(ToXContent toXContent) { - 
client.index(indexRequest(toXContent), ActionListener.wrap(AbstractAuditor::onIndexResponse, AbstractAuditor::onIndexFailure)); + client.index(indexRequest(toXContent), ActionListener.wrap(AbstractAuditor::onIndexResponse, e -> { + if (e instanceof IndexNotFoundException) { + executorService.execute(() -> { + reset(); + indexDoc(toXContent); + }); + } else { + onIndexFailure(e); + } + })); } private IndexRequest indexRequest(ToXContent toXContent) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index f88909ba4208e..f2b2c563d7519 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -47,7 +47,7 @@ public class InferenceAction extends ActionType { public static final InferenceAction INSTANCE = new InferenceAction(); - public static final String NAME = "cluster:monitor/xpack/inference"; + public static final String NAME = "cluster:internal/xpack/inference"; public InferenceAction() { super(NAME); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceActionProxy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceActionProxy.java new file mode 100644 index 0000000000000..68cd39f26b456 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceActionProxy.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Objects; + +/** + * This action is used when making a REST request to the inference API. The transport handler + * will then look at the task type in the params (or retrieve it from the persisted model if it wasn't + * included in the params) to determine where this request should be routed. If the task type is chat completion + * then it will be routed to the unified chat completion handler by creating the {@link UnifiedCompletionAction}. + * If not, it will be passed along to {@link InferenceAction}. 
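+ * + * A sketch of the effective routing rule; the TaskType constant name is assumed here, while the two action strings + * are the NAME constants of the two target actions in this package: + * String action = request.getTaskType() == TaskType.CHAT_COMPLETION + * ? "cluster:internal/xpack/inference/unified" // UnifiedCompletionAction.NAME + * : "cluster:internal/xpack/inference"; // InferenceAction.NAME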
+ */ +public class InferenceActionProxy extends ActionType { + public static final InferenceActionProxy INSTANCE = new InferenceActionProxy(); + public static final String NAME = "cluster:monitor/xpack/inference/post"; + + public InferenceActionProxy() { + super(NAME); + } + + public static class Request extends ActionRequest { + + private final TaskType taskType; + private final String inferenceEntityId; + private final BytesReference content; + private final XContentType contentType; + private final TimeValue timeout; + private final boolean stream; + + public Request( + TaskType taskType, + String inferenceEntityId, + BytesReference content, + XContentType contentType, + TimeValue timeout, + boolean stream + ) { + this.taskType = taskType; + this.inferenceEntityId = inferenceEntityId; + this.content = content; + this.contentType = contentType; + this.timeout = timeout; + this.stream = stream; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.taskType = TaskType.fromStream(in); + this.inferenceEntityId = in.readString(); + this.content = in.readBytesReference(); + this.contentType = in.readEnum(XContentType.class); + this.timeout = in.readTimeValue(); + + // streaming is not supported yet for transport traffic + this.stream = false; + } + + public TaskType getTaskType() { + return taskType; + } + + public String getInferenceEntityId() { + return inferenceEntityId; + } + + public BytesReference getContent() { + return content; + } + + public XContentType getContentType() { + return contentType; + } + + public TimeValue getTimeout() { + return timeout; + } + + public boolean isStreaming() { + return stream; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(inferenceEntityId); + taskType.writeTo(out); + out.writeBytesReference(content); + XContentHelper.writeTo(out, contentType); + out.writeTimeValue(timeout); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return taskType == request.taskType + && Objects.equals(inferenceEntityId, request.inferenceEntityId) + && Objects.equals(content, request.content) + && contentType == request.contentType + && timeout == request.timeout + && stream == request.stream; + } + + @Override + public int hashCode() { + return Objects.hash(taskType, inferenceEntityId, content, contentType, timeout, stream); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java index f5c852a0450ae..43c84ad914c2a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java @@ -21,7 +21,7 @@ public class UnifiedCompletionAction extends ActionType { public static final UnifiedCompletionAction INSTANCE = new UnifiedCompletionAction(); - public static final String NAME = "cluster:monitor/xpack/inference/unified"; + public static final String NAME = "cluster:internal/xpack/inference/unified"; public UnifiedCompletionAction() { super(NAME); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java index cc59ae890467b..505d058394db8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java @@ -70,8 +70,8 @@ public Request(String inferenceEntityId, BytesReference content, XContentType co public Request(StreamInput in) throws IOException { super(in); this.inferenceEntityId = in.readString(); - this.content = in.readBytesReference(); this.taskType = TaskType.fromStream(in); + this.content = in.readBytesReference(); this.contentType = in.readEnum(XContentType.class); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java new file mode 100644 index 0000000000000..f2844e6534a94 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContent; + +import java.util.Iterator; +import java.util.Locale; +import java.util.Objects; + +import static java.util.Collections.emptyIterator; +import static org.elasticsearch.ExceptionsHelper.maybeError; +import static org.elasticsearch.common.collect.Iterators.concat; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endObject; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject; + +public class UnifiedChatCompletionException extends XContentFormattedException { + + private static final Logger log = LogManager.getLogger(UnifiedChatCompletionException.class); + private final String message; + private final String type; + @Nullable + private final String code; + @Nullable + private final String param; + + public UnifiedChatCompletionException(RestStatus status, String message, String type, @Nullable String code) { + this(status, message, type, code, null); + } + + public UnifiedChatCompletionException(RestStatus status, String message, String type, @Nullable String code, @Nullable String param) { + super(message, status); + this.message = Objects.requireNonNull(message); + this.type = Objects.requireNonNull(type); + this.code = code; + this.param = param; + } + + public UnifiedChatCompletionException( + Throwable cause, + RestStatus status, + String message, + String type, + @Nullable String code, + @Nullable String param + ) { + super(message, cause, status); + this.message = Objects.requireNonNull(message); + this.type = Objects.requireNonNull(type); + this.code = code; + this.param = 
param; + } + + @Override + public Iterator toXContentChunked(Params params) { + return concat( + startObject(), + startObject("error"), + optionalField("code", code), + field("message", message), + optionalField("param", param), + field("type", type), + endObject(), + endObject() + ); + } + + private static Iterator field(String key, String value) { + return ChunkedToXContentHelper.chunk((b, p) -> b.field(key, value)); + } + + private static Iterator optionalField(String key, String value) { + return value != null ? ChunkedToXContentHelper.chunk((b, p) -> b.field(key, value)) : emptyIterator(); + } + + public static UnifiedChatCompletionException fromThrowable(Throwable t) { + if (ExceptionsHelper.unwrapCause(t) instanceof UnifiedChatCompletionException e) { + return e; + } else { + return maybeError(t).map(error -> { + // we should never be throwing Error, but just in case we are, rethrow it on another thread so the JVM can handle it and + // return a vague error to the user so that they at least see something went wrong but don't leak JVM details to users + ExceptionsHelper.maybeDieOnAnotherThread(error); + var e = new RuntimeException("Fatal error while streaming response. Please retry the request."); + log.error(e.getMessage(), t); + return new UnifiedChatCompletionException( + RestStatus.INTERNAL_SERVER_ERROR, + e.getMessage(), + getExceptionName(e), + RestStatus.INTERNAL_SERVER_ERROR.name().toLowerCase(Locale.ROOT) + ); + }).orElseGet(() -> { + log.atDebug().withThrowable(t).log("UnifiedChatCompletionException stack trace for debugging purposes."); + var status = ExceptionsHelper.status(t); + return new UnifiedChatCompletionException( + t, + status, + t.getMessage(), + getExceptionName(t), + status.name().toLowerCase(Locale.ROOT), + null + ); + }); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/XContentFormattedException.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/XContentFormattedException.java new file mode 100644 index 0000000000000..799953d452f0d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/XContentFormattedException.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.util.Iterator; +import java.util.Objects; + +/** + * Similar to {@link org.elasticsearch.ElasticsearchWrapperException}, this will wrap an Exception to generate an xContent using + * {@link ElasticsearchException#generateFailureXContent(XContentBuilder, Params, Exception, boolean)}. + * Extends {@link ElasticsearchException} to provide REST handlers the {@link #status()} method in order to set the response header. 
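+ * + * Roughly, a body rendered through toXContentChunked below has the shape + * { "error": { ... }, "status": 429 } + * where the "error" object is whatever generateFailureXContent emits (with more detail when the + * detailedErrorsEnabled param is set) and "status" mirrors status(); a sketch, not a fixed schema.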
+ */ +public class XContentFormattedException extends ElasticsearchException implements ChunkedToXContent { + + public static final String X_CONTENT_PARAM = "detailedErrorsEnabled"; + private final RestStatus status; + private final Throwable cause; + + public XContentFormattedException(String message, RestStatus status) { + super(message); + this.status = Objects.requireNonNull(status); + this.cause = null; + } + + public XContentFormattedException(Throwable cause, RestStatus status) { + super(cause); + this.status = Objects.requireNonNull(status); + this.cause = cause; + } + + public XContentFormattedException(String message, Throwable cause, RestStatus status) { + super(message, cause); + this.status = Objects.requireNonNull(status); + this.cause = cause; + } + + @Override + public RestStatus status() { + return status; + } + + @Override + public Iterator toXContentChunked(Params params) { + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + Iterators.single( + (b, p) -> ElasticsearchException.generateFailureXContent( + b, + p, + cause instanceof Exception e ? e : this, + params.paramAsBoolean(X_CONTENT_PARAM, false) + ) + ), + Iterators.single((b, p) -> b.field("status", status.getStatus())), + ChunkedToXContentHelper.endObject() + ); + } + + @Override + public Iterator toXContentChunked(RestApiVersion restApiVersion, Params params) { + return ChunkedToXContent.super.toXContentChunked(restApiVersion, params); + } + + @Override + public Iterator toXContentChunkedV8(Params params) { + return ChunkedToXContent.super.toXContentChunkedV8(params); + } + + @Override + public boolean isFragment() { + return super.isFragment(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java index 68baab4469e48..98c12930e188e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.search.TransportSearchShardsAction; import org.elasticsearch.index.seqno.RetentionLeaseActions; import org.elasticsearch.index.seqno.RetentionLeaseBackgroundSyncAction; @@ -38,12 +39,13 @@ public final class SystemPrivilege extends Privilege { RetentionLeaseActions.ADD.name() + "*", // needed for CCR to add retention leases RetentionLeaseActions.REMOVE.name() + "*", // needed for CCR to remove retention leases RetentionLeaseActions.RENEW.name() + "*", // needed for CCR to renew retention leases - "indices:admin/settings/update", // needed for DiskThresholdMonitor.markIndicesReadOnly + "indices:admin/settings/update", // needed for: DiskThresholdMonitor.markIndicesReadOnly, SystemIndexMigrator CompletionPersistentTaskAction.INSTANCE.name(), // needed for ShardFollowTaskCleaner "indices:data/write/*", // needed for SystemIndexMigrator "indices:data/read/*", // needed for SystemIndexMigrator "indices:admin/refresh", // needed for SystemIndexMigrator "indices:admin/aliases", // needed for SystemIndexMigrator + TransportAddIndexBlockAction.TYPE.name() + "*", // needed for SystemIndexMigrator TransportSearchShardsAction.TYPE.name(), // added so this API can be 
called with the system user by other APIs ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION.name() // needed for Security plugin reload of remote cluster credentials ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index fcd1ba54a8de2..172b63decfdab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -120,6 +120,11 @@ static RoleDescriptor kibanaSystem(String name) { .indices(".ml-annotations*", ".ml-notifications*") .privileges("read", "write") .build(), + // And the reindexed indices from v7 + RoleDescriptor.IndicesPrivileges.builder() + .indices(".reindexed-v8-ml-annotations*", ".reindexed-v8-ml-notifications*") + .privileges("read", "write") + .build(), // APM agent configuration - system index defined in KibanaPlugin RoleDescriptor.IndicesPrivileges.builder() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java index 1229d62dce047..9a0b17b22369c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java @@ -207,6 +207,7 @@ public class InternalUsers { TransportDeleteIndexAction.TYPE.name(), "indices:admin/data_stream/index/reindex", "indices:admin/index/create_from_source", + "indices:admin/index/copy_lifecycle_index_metadata", TransportAddIndexBlockAction.TYPE.name(), OpenIndexAction.NAME, TransportCloseIndexAction.NAME, @@ -225,6 +226,7 @@ public class InternalUsers { .build() }, null, null, + new String[] {}, MetadataUtils.DEFAULT_RESERVED_METADATA, Map.of() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java index 6424136eb1a7c..9a35b8f13d4c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java @@ -146,7 +146,7 @@ public static SslKeyConfig createKeyConfig( boolean acceptNonSecurePasswords ) { final SslSettingsLoader settingsLoader = new SslSettingsLoader(settings, prefix, acceptNonSecurePasswords); - return settingsLoader.buildKeyConfig(environment.configFile()); + return settingsLoader.buildKeyConfig(environment.configDir()); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java index cb55de79342b8..f9b27daa8f8c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java @@ -128,7 +128,7 @@ protected SslTrustConfig buildTrustConfig( } public SslConfiguration load(Environment env) { - return load(env.configFile()); + return load(env.configDir()); } public static SslConfiguration load(Settings settings, 
String prefix, Environment env) { diff --git a/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml index 4e0266b06bbb0..a069e1b4ce4ce 100644 --- a/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,8 +1,13 @@ org.apache.httpcomponents.httpclient: - outbound_network # For SamlRealm + - manage_threads org.apache.httpcomponents.httpcore.nio: - outbound_network + - manage_threads +org.apache.httpcomponents.httpasyncclient: + - manage_threads unboundid.ldapsdk: + - manage_threads - write_system_properties: properties: - java.security.auth.login.config diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java index ab6e7356a6e02..f7432a59040da 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java @@ -143,7 +143,7 @@ public List loadExtensions(Class extensionPointType) { Environment mockEnvironment = mock(Environment.class); when(mockEnvironment.settings()).thenReturn(Settings.builder().build()); - when(mockEnvironment.configFile()).thenReturn(PathUtils.get("")); + when(mockEnvironment.configDir()).thenReturn(PathUtils.get("")); // ensure createComponents does not influence the results Plugin.PluginServices services = mock(Plugin.PluginServices.class); when(services.clusterService()).thenReturn(mock(ClusterService.class)); @@ -187,7 +187,7 @@ public List loadExtensions(Class extensionPointType) { }); Environment mockEnvironment = mock(Environment.class); when(mockEnvironment.settings()).thenReturn(Settings.builder().build()); - when(mockEnvironment.configFile()).thenReturn(PathUtils.get("")); + when(mockEnvironment.configDir()).thenReturn(PathUtils.get("")); Plugin.PluginServices services = mock(Plugin.PluginServices.class); when(services.clusterService()).thenReturn(mock(ClusterService.class)); when(services.threadPool()).thenReturn(mock(ThreadPool.class)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java index bcf777906bb7c..13d15a4ab0b07 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java @@ -34,8 +34,10 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; @@ -233,6 +235,40 @@ public void testAuditingBeforeTemplateInstalled() throws Exception { verify(client, times(1)).execute(eq(TransportIndexAction.TYPE), any(), any()); } + public void testRecreateTemplateWhenDeleted() throws Exception { + CountDownLatch 
writeSomeDocsBeforeTemplateLatch = new CountDownLatch(1); + AbstractAuditor auditor = createTestAuditorWithoutTemplate( + writeSomeDocsBeforeTemplateLatch + ); + + auditor.info("foobar", "Here is my info to queue"); + + verify(client, never()).execute(eq(TransportIndexAction.TYPE), any(), any()); + // fire the put template response + writeSomeDocsBeforeTemplateLatch.countDown(); + + assertBusy(() -> verify(client, times(1)).execute(eq(TransportPutComposableIndexTemplateAction.TYPE), any(), any())); + assertBusy(() -> verify(client, times(1)).execute(eq(TransportCreateIndexAction.TYPE), any(), any())); + + // the back log will be written some point later + assertBusy(() -> verify(client, times(1)).execute(eq(TransportBulkAction.TYPE), any(), any())); + + // "delete" the index + doAnswer(ans -> { + ActionListener listener = ans.getArgument(2); + listener.onFailure(new IndexNotFoundException("some index")); + return null; + }).when(client).execute(eq(TransportIndexAction.TYPE), any(), any()); + + // audit more data + auditor.info("foobar", "Here is another message"); + + // verify the template is recreated and the audit message is processed + assertBusy(() -> verify(client, times(2)).execute(eq(TransportPutComposableIndexTemplateAction.TYPE), any(), any())); + assertBusy(() -> verify(client, times(2)).execute(eq(TransportCreateIndexAction.TYPE), any(), any())); + assertBusy(() -> verify(client, times(2)).execute(eq(TransportBulkAction.TYPE), any(), any())); + } + public void testMaxBufferSize() throws Exception { CountDownLatch writeSomeDocsBeforeTemplateLatch = new CountDownLatch(1); AbstractAuditor auditor = createTestAuditorWithoutTemplate( @@ -358,7 +394,8 @@ public static class TestAuditor extends AbstractAuditor /** * The bwc versions to test serialization against */ - protected List bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java index 0254406a2c8ec..e6b6ef3e3a06a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import java.io.IOException; -import java.util.List; +import java.util.Collection; import static org.elasticsearch.test.BWCVersions.DEFAULT_BWC_VERSIONS; @@ -28,7 +28,7 @@ public abstract class AbstractChunkedBWCSerializationTestCase bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java index 3ab5851815474..91070d5768f63 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java @@ -120,7 +120,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); } - return new 
CoordinatedInferenceAction.Request( + var newInstance = new CoordinatedInferenceAction.Request( instance.getModelId(), instance.getInputs(), instance.getTaskSettings(), @@ -131,5 +131,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( instance.getHighPriority(), instance.getRequestModelType() ); + newInstance.setPrefixType(instance.getPrefixType()); + return newInstance; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 107953557f3ea..ab5a1bbd8c897 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -4140,7 +4140,7 @@ public void testInferenceAdminRole() { assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role role = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); - assertTrue(role.cluster().check("cluster:monitor/xpack/inference", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference/post", request, authentication)); assertTrue(role.cluster().check("cluster:monitor/xpack/inference/get", request, authentication)); assertTrue(role.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); assertTrue(role.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); @@ -4160,10 +4160,9 @@ public void testInferenceUserRole() { assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role role = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); - assertTrue(role.cluster().check("cluster:monitor/xpack/inference", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference/post", request, authentication)); assertTrue(role.cluster().check("cluster:monitor/xpack/inference/get", request, authentication)); assertFalse(role.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); - assertTrue(role.cluster().check("cluster:monitor/xpack/inference/unified", request, authentication)); assertFalse(role.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); assertTrue(role.cluster().check("cluster:monitor/xpack/ml/trained_models/deployment/infer", request, authentication)); assertFalse(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/start", request, authentication)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoaderTests.java index d03595c39877b..79ff817061a01 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoaderTests.java @@ -229,7 +229,7 @@ public void testKeystorePassword() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, PASSWORD, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, PASSWORD, KEY_MGR_ALGORITHM, 
environment.configDir())) ); } @@ -244,7 +244,7 @@ public void testKeystorePasswordBackcompat() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, PASSWORD, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, PASSWORD, KEY_MGR_ALGORITHM, environment.configDir())) ); assertSettingDeprecationsAndWarnings(new Setting[] { configurationSettings.x509KeyPair.legacyKeystorePassword }); } @@ -263,7 +263,7 @@ public void testKeystoreKeyPassword() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -279,7 +279,7 @@ public void testKeystoreKeyPasswordBackcompat() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); assertSettingDeprecationsAndWarnings( new Setting[] { @@ -298,7 +298,7 @@ public void testInferKeystoreTypeFromJksFile() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("xpack/tls/path.jks", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("xpack/tls/path.jks", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -314,7 +314,7 @@ public void testInferKeystoreTypeFromPkcs12File() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig(path, PASSWORD, "PKCS12", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig(path, PASSWORD, "PKCS12", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -328,7 +328,7 @@ public void testInferKeystoreTypeFromUnrecognised() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("xpack/tls/path.foo", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("xpack/tls/path.foo", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -347,10 +347,7 @@ public void testExplicitKeystoreType() { SslConfiguration sslConfiguration = getSslConfiguration(settings); assertThat(sslConfiguration.keyConfig(), instanceOf(StoreKeyConfig.class)); StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); - assertThat( - ksKeyInfo, - equalTo(new StoreKeyConfig(path, PASSWORD, type, null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) - ); + assertThat(ksKeyInfo, equalTo(new StoreKeyConfig(path, PASSWORD, type, null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir()))); } public void testThatEmptySettingsAreEqual() { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json index 7457dce805eca..fcb299115ffd2 100644 --- 
a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json @@ -786,6 +786,45 @@ }, "sampling": { "properties": { + "tail": { + "properties": { + "dynamic_service_groups": { + "type": "long" + }, + "events": { + "properties": { + "dropped": { + "type": "long" + }, + "failed_writes": { + "type": "long" + }, + "head_unsampled": { + "type": "long" + }, + "processed": { + "type": "long" + }, + "sampled": { + "type": "long" + }, + "stored": { + "type": "long" + } + } + }, + "storage": { + "properties": { + "lsm_size": { + "type": "long" + }, + "value_log_size": { + "type": "long" + } + } + } + } + }, "transactions_dropped": { "type": "long" } @@ -2219,6 +2258,54 @@ }, "sampling": { "properties": { + "tail": { + "properties": { + "dynamic_service_groups": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.dynamic_service_groups" + }, + "events": { + "properties": { + "dropped": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.dropped" + }, + "failed_writes": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.failed_writes" + }, + "head_unsampled": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.head_unsampled" + }, + "processed": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.processed" + }, + "sampled": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.sampled" + }, + "stored": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.stored" + } + } + }, + "storage": { + "properties": { + "lsm_size": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.storage.lsm_size" + }, + "value_log_size": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.storage.value_log_size" + } + } + } + } + }, "transactions_dropped": { "type": "alias", "path": "beat.stats.apm_server.sampling.transactions_dropped" diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json index d699317c29da3..e1a8b5cc37060 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json @@ -966,6 +966,45 @@ }, "sampling": { "properties": { + "tail": { + "properties": { + "dynamic_service_groups": { + "type": "long" + }, + "events": { + "properties": { + "dropped": { + "type": "long" + }, + "failed_writes": { + "type": "long" + }, + "head_unsampled": { + "type": "long" + }, + "processed": { + "type": "long" + }, + "sampled": { + "type": "long" + }, + "stored": { + "type": "long" + } + } + }, + "storage": { + "properties": { + "lsm_size": { + "type": "long" + }, + "value_log_size": { + "type": "long" + } + } + } + } + }, "transactions_dropped": { "type": "long" } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json new file mode 100644 index 0000000000000..e8c3352131700 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json @@ -0,0 +1,16 @@ +{ + "description": "This pipeline sanitizes documents that are being reindexed into a data stream using the reindex data stream API. 
It is an internal pipeline and should not be modified.", + "processors": [ + { + "set": { + "field": "@timestamp", + "value": 0, + "override": false + } + } + ], + "_meta": { + "managed": true + }, + "version": ${xpack.migrate.reindex.pipeline.version} +} diff --git a/x-pack/plugin/deprecation/src/main/java/module-info.java b/x-pack/plugin/deprecation/src/main/java/module-info.java index f9a86839ad6f2..4c46205df4f0c 100644 --- a/x-pack/plugin/deprecation/src/main/java/module-info.java +++ b/x-pack/plugin/deprecation/src/main/java/module-info.java @@ -13,6 +13,7 @@ requires org.apache.logging.log4j; requires org.apache.logging.log4j.core; requires log4j2.ecs.layout; + requires org.apache.lucene.core; exports org.elasticsearch.xpack.deprecation to org.elasticsearch.server; exports org.elasticsearch.xpack.deprecation.logging to org.elasticsearch.server; diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecker.java new file mode 100644 index 0000000000000..cc21f0b2cd711 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecker.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.TriConsumer; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Cluster-specific deprecation checks; these are used to populate the {@code cluster_settings} field + */ +public class ClusterDeprecationChecker { + + private static final Logger logger = LogManager.getLogger(ClusterDeprecationChecker.class); + private final List<TriConsumer<ClusterState, List<TransformConfig>, List<DeprecationIssue>>> CHECKS = List.of( + this::checkTransformSettings + ); + private final NamedXContentRegistry xContentRegistry; + + ClusterDeprecationChecker(NamedXContentRegistry xContentRegistry) { + this.xContentRegistry = xContentRegistry; + } + + public List<DeprecationIssue> check(ClusterState clusterState, List<TransformConfig> transformConfigs) { + List<DeprecationIssue> allIssues = new ArrayList<>(); + CHECKS.forEach(check -> check.apply(clusterState, transformConfigs, allIssues)); + return allIssues; + } + + private void checkTransformSettings( + ClusterState clusterState, + List<TransformConfig> transformConfigs, + List<DeprecationIssue> allIssues + ) { + for (var config : transformConfigs) { + try { + allIssues.addAll(config.checkForDeprecations(xContentRegistry)); + } catch (IOException e) { + logger.warn("failed to check transformation settings for '" + config.getId() + "'", e); + } + } + } +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java index c867092f7bc19..db8a3dc205e02 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java
+++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java @@ -18,13 +18,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.function.BiFunction; import java.util.stream.Collectors; import static java.util.Map.entry; import static java.util.Map.ofEntries; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; /** * Checks the data streams for deprecation warnings. @@ -44,10 +44,24 @@ public DataStreamDeprecationChecker(IndexNameExpressionResolver indexNameExpress /** * @param clusterState The cluster state provided for the checker + * @param request not used yet in these checks + * @param precomputedData not used yet in these checks * @return the names of the data streams that have violated the checks with their respective warnings. */ @Override - public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + public Map<String, List<DeprecationIssue>> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { + return check(clusterState); + } + + /** + * @param clusterState The cluster state provided for the checker + * @return the names of the data streams that have violated the checks with their respective warnings. + */ + public Map<String, List<DeprecationIssue>> check(ClusterState clusterState) { List<String> dataStreamNames = indexNameExpressionResolver.dataStreamNames( clusterState, IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN @@ -58,7 +72,10 @@ public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, Depr Map<String, List<DeprecationIssue>> dataStreamIssues = new HashMap<>(); for (String dataStreamName : dataStreamNames) { DataStream dataStream = clusterState.metadata().dataStreams().get(dataStreamName); - List<DeprecationIssue> issuesForSingleDataStream = filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); + List<DeprecationIssue> issuesForSingleDataStream = DATA_STREAM_CHECKS.stream() + .map(c -> c.apply(dataStream, clusterState)) + .filter(Objects::nonNull) + .toList(); if (issuesForSingleDataStream.isEmpty() == false) { dataStreamIssues.put(dataStreamName, issuesForSingleDataStream); } @@ -102,7 +119,7 @@ static DeprecationIssue ignoredOldIndicesCheck(DataStream dataStream, ClusterSta + "OK to remain read-only after upgrade", false, ofEntries( - entry("reindex_required", true), + entry("reindex_required", false), entry("total_backing_indices", backingIndices.size()), entry("ignored_indices_requiring_upgrade_count", ignoredIndices.size()), entry("ignored_indices_requiring_upgrade", ignoredIndices) diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java index 85b7c89e7cb85..2c8b95e378375 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java @@ -33,7 +33,7 @@ import java.util.function.Predicate; import java.util.function.Supplier; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.SKIP_DEPRECATIONS_SETTING; +import static org.elasticsearch.xpack.deprecation.TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING; import static org.elasticsearch.xpack.deprecation.logging.DeprecationIndexingComponent.DEPRECATION_INDEXING_FLUSH_INTERVAL; /** diff --git
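The pattern that replaces the removed filterChecks helper is the same in every checker touched by this change: apply each check to the resource and keep only the non-null results. A minimal, illustrative sketch of that idiom (runChecks is a hypothetical name, not part of this change):

import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;

// A check returns a DeprecationIssue when the resource violates it, or null when it does not.
static <T> List<DeprecationIssue> runChecks(List<Function<T, DeprecationIssue>> checks, T resource) {
    return checks.stream()
        .map(check -> check.apply(resource))
        .filter(Objects::nonNull)   // null means "no deprecation found"
        .toList();
}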
a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java deleted file mode 100644 index 039a75f51f030..0000000000000 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.deprecation; - -import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; - -import java.util.List; -import java.util.Objects; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * Class containing all the cluster, node, and index deprecation checks that will be served - * by the {@link DeprecationInfoAction}. - */ -public class DeprecationChecks { - - public static final Setting> SKIP_DEPRECATIONS_SETTING = Setting.stringListSetting( - "deprecation.skip_deprecated_settings", - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - private DeprecationChecks() {} - - static List> CLUSTER_SETTINGS_CHECKS = List.of(); - - static final List< - NodeDeprecationCheck> NODE_SETTINGS_CHECKS = List - .of( - NodeDeprecationChecks::checkMultipleDataPaths, - NodeDeprecationChecks::checkDataPathsList, - NodeDeprecationChecks::checkSharedDataPathSetting, - NodeDeprecationChecks::checkReservedPrefixedRealmNames, - NodeDeprecationChecks::checkExporterUseIngestPipelineSettings, - NodeDeprecationChecks::checkExporterPipelineMasterTimeoutSetting, - NodeDeprecationChecks::checkExporterCreateLegacyTemplateSetting, - NodeDeprecationChecks::checkMonitoringSettingHistoryDuration, - NodeDeprecationChecks::checkMonitoringSettingHistoryDuration, - NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecovery, - NodeDeprecationChecks::checkMonitoringSettingCollectIndices, - NodeDeprecationChecks::checkMonitoringSettingCollectCcrTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectEnrichStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecoveryStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectIndexStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectMlJobStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectNodeStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectClusterStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersHost, - NodeDeprecationChecks::checkMonitoringSettingExportersBulkTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersConnectionTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersConnectionReadTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersAuthUsername, - NodeDeprecationChecks::checkMonitoringSettingExportersAuthPass, - NodeDeprecationChecks::checkMonitoringSettingExportersSSL, - NodeDeprecationChecks::checkMonitoringSettingExportersProxyBase, - NodeDeprecationChecks::checkMonitoringSettingExportersSniffEnabled, - 
NodeDeprecationChecks::checkMonitoringSettingExportersHeaders, - NodeDeprecationChecks::checkMonitoringSettingExportersTemplateTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersMasterTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersEnabled, - NodeDeprecationChecks::checkMonitoringSettingExportersType, - NodeDeprecationChecks::checkMonitoringSettingExportersAlertsEnabled, - NodeDeprecationChecks::checkMonitoringSettingExportersAlertsBlacklist, - NodeDeprecationChecks::checkMonitoringSettingExportersIndexNameTimeFormat, - NodeDeprecationChecks::checkMonitoringSettingDecommissionAlerts, - NodeDeprecationChecks::checkMonitoringSettingEsCollectionEnabled, - NodeDeprecationChecks::checkMonitoringSettingCollectionEnabled, - NodeDeprecationChecks::checkMonitoringSettingCollectionInterval, - NodeDeprecationChecks::checkScriptContextCache, - NodeDeprecationChecks::checkScriptContextCompilationsRateLimitSetting, - NodeDeprecationChecks::checkScriptContextCacheSizeSetting, - NodeDeprecationChecks::checkScriptContextCacheExpirationSetting, - NodeDeprecationChecks::checkEnforceDefaultTierPreferenceSetting, - NodeDeprecationChecks::checkLifecyleStepMasterTimeoutSetting, - NodeDeprecationChecks::checkEqlEnabledSetting, - NodeDeprecationChecks::checkNodeAttrData, - NodeDeprecationChecks::checkWatcherBulkConcurrentRequestsSetting, - NodeDeprecationChecks::checkTracingApmSettings - ); - - /** - * helper utility function to reduce repeat of running a specific {@link List} of checks. - * - * @param checks The functional checks to execute using the mapper function - * @param mapper The function that executes the lambda check with the appropriate arguments - * @param The signature of the check (BiFunction, Function, including the appropriate arguments) - * @return The list of {@link DeprecationIssue} that were found in the cluster - */ - static List filterChecks(List checks, Function mapper) { - return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList()); - } - - @FunctionalInterface - public interface NodeDeprecationCheck { - R apply(A first, B second, C third, D fourth); - } -} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 62d2a231face4..1fceb917ece53 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -12,41 +12,25 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.ComponentTemplate; -import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import 
org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -59,93 +43,6 @@ private DeprecationInfoAction() { super(NAME); } - /** - * helper utility function to reduce repeat of running a specific {@link Set} of checks. - * - * @param checks The functional checks to execute using the mapper function - * @param mapper The function that executes the lambda check with the appropriate arguments - * @param The signature of the check (BiFunction, Function, including the appropriate arguments) - * @return The list of {@link DeprecationIssue} that were found in the cluster - */ - public static List filterChecks(List checks, Function mapper) { - return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList()); - } - - /** - * This method rolls up DeprecationIssues that are identical but on different nodes. It also roles up DeprecationIssues that are - * identical (and on different nodes) except that they differ in the removable settings listed in their meta object. We roll these up - * by taking the intersection of all removable settings in otherwise identical DeprecationIssues. That way we don't claim that a - * setting can be automatically removed if any node has it in its elasticsearch.yml. - * @param response - * @return - */ - private static List mergeNodeIssues(NodesDeprecationCheckResponse response) { - // A collection whose values are lists of DeprecationIssues that differ only by meta values (if that): - Collection>> issuesToMerge = getDeprecationIssuesThatDifferOnlyByMeta(response.getNodes()); - // A map of DeprecationIssues (containing only the intersection of removable settings) to the nodes they are seen on - Map> issueToListOfNodesMap = getMergedIssuesToNodesMap(issuesToMerge); - - return issueToListOfNodesMap.entrySet().stream().map(entry -> { - DeprecationIssue issue = entry.getKey(); - String details = issue.getDetails() != null ? issue.getDetails() + " " : ""; - return new DeprecationIssue( - issue.getLevel(), - issue.getMessage(), - issue.getUrl(), - details + "(nodes impacted: " + entry.getValue() + ")", - issue.isResolveDuringRollingUpgrade(), - issue.getMeta() - ); - }).collect(Collectors.toList()); - } - - /* - * This method pulls all the DeprecationIssues from the given nodeResponses, and buckets them into lists of DeprecationIssues that - * differ at most by meta values (if that). The returned tuples also contain the node name the deprecation issue was found on. If all - * nodes in the cluster were configured identically then all tuples in a list will differ only by the node name. 
- */ - private static Collection>> getDeprecationIssuesThatDifferOnlyByMeta( - List nodeResponses - ) { - Map>> issuesToMerge = new HashMap<>(); - for (NodesDeprecationCheckAction.NodeResponse resp : nodeResponses) { - for (DeprecationIssue issue : resp.getDeprecationIssues()) { - issuesToMerge.computeIfAbsent( - new DeprecationIssue( - issue.getLevel(), - issue.getMessage(), - issue.getUrl(), - issue.getDetails(), - issue.isResolveDuringRollingUpgrade(), - null // Intentionally removing meta from the key so that it's not taken into account for equality - ), - (key) -> new ArrayList<>() - ).add(new Tuple<>(issue, resp.getNode().getName())); - } - } - return issuesToMerge.values(); - } - - /* - * At this point we have one DeprecationIssue per node for a given deprecation. This method rolls them up into a single DeprecationIssue - * with a list of nodes that they appear on. If two DeprecationIssues on two different nodes differ only by the set of removable - * settings (i.e. they have different elasticsearch.yml configurations) then this method takes the intersection of those settings when - * it rolls them up. - */ - private static Map> getMergedIssuesToNodesMap( - Collection>> issuesToMerge - ) { - Map> issueToListOfNodesMap = new HashMap<>(); - for (List> similarIssues : issuesToMerge) { - DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings( - similarIssues.stream().map(Tuple::v1).toList() - ); - issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>()) - .addAll(similarIssues.stream().map(Tuple::v2).toList()); - } - return issueToListOfNodesMap; - } - public static class Response extends ActionResponse implements ToXContentObject { static final Set RESERVED_NAMES = Set.of( "cluster_settings", @@ -289,143 +186,6 @@ public int hashCode() { return Objects.hash(clusterSettingsIssues, nodeSettingsIssues, resourceDeprecationIssues, pluginSettingsIssues); } - /** - * This is the function that does the bulk of the logic of taking the appropriate ES dependencies - * like {@link NodeInfo}, {@link ClusterState}. Alongside these objects and the list of deprecation checks, - * this function will run through all the checks and build out the final list of issues that exist in the - * cluster. - * - * @param state The cluster state - * @param indexNameExpressionResolver Used to resolve indices into their concrete names - * @param request The originating request containing the index expressions to evaluate - * @param nodeDeprecationResponse The response containing the deprecation issues found on each node - * @param clusterSettingsChecks The list of cluster-level checks - * @param pluginSettingIssues this map gets modified to move transform deprecation issues into cluster_settings - * @param skipTheseDeprecatedSettings the settings that will be removed from cluster metadata and the index metadata of all the - * indexes specified by indexNames - * @param resourceDeprecationCheckers these are checkers that take as input the cluster state and return a map from resource type - * to issues grouped by the resource name. 
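For orientation, the assembly that the deleted from() method performed (and which moves elsewhere in this change) keeps a nested map shape: per-resource issues keyed by resource name, grouped under each checker's name. A sketch under that assumption, using the names from the surrounding diff:

// Sketch: each ResourceDeprecationChecker returns {resource name -> issues};
// the response nests those maps under the checker's own name,
// e.g. "ilm_policies" -> {"my-policy" -> [issue, ...]}.
Map<String, Map<String, List<DeprecationIssue>>> resourceDeprecationIssues = new HashMap<>();
for (ResourceDeprecationChecker checker : resourceDeprecationCheckers) {
    Map<String, List<DeprecationIssue>> issues = checker.check(state, request, precomputedData);
    if (issues.isEmpty() == false) {
        resourceDeprecationIssues.put(checker.getName(), issues);
    }
}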
- * @return The list of deprecation issues found in the cluster - */ - public static DeprecationInfoAction.Response from( - ClusterState state, - IndexNameExpressionResolver indexNameExpressionResolver, - Request request, - NodesDeprecationCheckResponse nodeDeprecationResponse, - List> clusterSettingsChecks, - Map> pluginSettingIssues, - List skipTheseDeprecatedSettings, - List resourceDeprecationCheckers - ) { - assert Transports.assertNotTransportThread("walking mappings in indexSettingsChecks is expensive"); - // Allow system index access here to prevent deprecation warnings when we call this API - String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, request); - ClusterState stateWithSkippedSettingsRemoved = removeSkippedSettings(state, concreteIndexNames, skipTheseDeprecatedSettings); - List clusterSettingsIssues = filterChecks( - clusterSettingsChecks, - (c) -> c.apply(stateWithSkippedSettingsRemoved) - ); - List nodeSettingsIssues = mergeNodeIssues(nodeDeprecationResponse); - - Map>> resourceDeprecationIssues = new HashMap<>(); - for (ResourceDeprecationChecker resourceDeprecationChecker : resourceDeprecationCheckers) { - Map> issues = resourceDeprecationChecker.check(stateWithSkippedSettingsRemoved, request); - if (issues.isEmpty() == false) { - resourceDeprecationIssues.put(resourceDeprecationChecker.getName(), issues); - } - } - - // WORKAROUND: move transform deprecation issues into cluster_settings - List transformDeprecations = pluginSettingIssues.remove( - TransformDeprecationChecker.TRANSFORM_DEPRECATION_KEY - ); - if (transformDeprecations != null) { - clusterSettingsIssues.addAll(transformDeprecations); - } - - return new DeprecationInfoAction.Response( - clusterSettingsIssues, - nodeSettingsIssues, - resourceDeprecationIssues, - pluginSettingIssues - ); - } - } - - /** - * Removes the skipped settings from the selected indices and the component and index templates. 
- * @param state The cluster state to modify - * @param indexNames The names of the indexes whose settings need to be filtered - * @param skipTheseDeprecatedSettings The settings that will be removed from cluster metadata and the index metadata of all the - * indexes specified by indexNames - * @return A modified cluster state with the given settings removed - */ - private static ClusterState removeSkippedSettings(ClusterState state, String[] indexNames, List skipTheseDeprecatedSettings) { - // Short-circuit, no need to reconstruct the cluster state if there are no settings to remove - if (skipTheseDeprecatedSettings == null || skipTheseDeprecatedSettings.isEmpty()) { - return state; - } - ClusterState.Builder clusterStateBuilder = new ClusterState.Builder(state); - Metadata.Builder metadataBuilder = Metadata.builder(state.metadata()); - metadataBuilder.transientSettings( - metadataBuilder.transientSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) - ); - metadataBuilder.persistentSettings( - metadataBuilder.persistentSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) - ); - Map indicesBuilder = new HashMap<>(state.getMetadata().indices()); - for (String indexName : indexNames) { - IndexMetadata indexMetadata = state.getMetadata().index(indexName); - IndexMetadata.Builder filteredIndexMetadataBuilder = new IndexMetadata.Builder(indexMetadata); - Settings filteredSettings = indexMetadata.getSettings() - .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false); - filteredIndexMetadataBuilder.settings(filteredSettings); - indicesBuilder.put(indexName, filteredIndexMetadataBuilder.build()); - } - metadataBuilder.componentTemplates(state.metadata().componentTemplates().entrySet().stream().map(entry -> { - String templateName = entry.getKey(); - ComponentTemplate componentTemplate = entry.getValue(); - Template template = componentTemplate.template(); - if (template.settings() == null || template.settings().isEmpty()) { - return Tuple.tuple(templateName, componentTemplate); - } - return Tuple.tuple( - templateName, - new ComponentTemplate( - Template.builder(template) - .settings(template.settings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)) - .build(), - componentTemplate.version(), - componentTemplate.metadata(), - componentTemplate.deprecated() - ) - ); - }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); - metadataBuilder.indexTemplates(state.metadata().templatesV2().entrySet().stream().map(entry -> { - String templateName = entry.getKey(); - ComposableIndexTemplate indexTemplate = entry.getValue(); - Template template = indexTemplate.template(); - if (template == null || template.settings() == null || template.settings().isEmpty()) { - return Tuple.tuple(templateName, indexTemplate); - } - return Tuple.tuple( - templateName, - indexTemplate.toBuilder() - .template( - Template.builder(indexTemplate.template()) - .settings( - indexTemplate.template() - .settings() - .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) - ) - ) - .build() - ); - }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); - - metadataBuilder.indices(indicesBuilder); - clusterStateBuilder.metadata(metadataBuilder); - return clusterStateBuilder.build(); } public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { diff --git 
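The removeSkippedSettings logic above reduces to one predicate applied at every settings source (cluster transient and persistent settings, index settings, component and index templates). A small worked sketch of that filter; the setting name and pattern here are invented for illustration:

// Any setting matching a deprecation.skip_deprecated_settings pattern is dropped
// before the checks run, so it can no longer trigger a deprecation warning.
Settings settings = Settings.builder()
    .put("index.number_of_replicas", 1)
    .put("script.context.field.cache_max_size", 100)   // hypothetical deprecated setting
    .build();
List<String> skip = List.of("script.context.*");       // from deprecation.skip_deprecated_settings
Settings filtered = settings.filter(s -> Regex.simpleMatch(skip, s) == false);
// filtered now contains only index.number_of_replicas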
a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java index 6d7f860f645f1..f96fae6343b9f 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java @@ -19,9 +19,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Function; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; @@ -33,17 +33,28 @@ public class IlmPolicyDeprecationChecker implements ResourceDeprecationChecker { public static final String NAME = "ilm_policies"; - private static final List<Function<LifecyclePolicy, DeprecationIssue>> CHECKS = List.of( - IlmPolicyDeprecationChecker::checkLegacyTiers, - IlmPolicyDeprecationChecker::checkFrozenAction - ); + private final List<Function<LifecyclePolicy, DeprecationIssue>> checks = List.of(this::checkLegacyTiers, this::checkFrozenAction); /** * @param clusterState The cluster state provided for the checker + * @param request not used yet in these checks + * @param precomputedData not used yet in these checks * @return the names of the ILM policies that have violated the checks with their respective warnings. */ @Override - public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + public Map<String, List<DeprecationIssue>> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { + return check(clusterState); + } + + /** + * @param clusterState The cluster state provided for the checker + * @return the names of the ILM policies that have violated the checks with their respective warnings. + */ + Map<String, List<DeprecationIssue>> check(ClusterState clusterState) { IndexLifecycleMetadata lifecycleMetadata = clusterState.metadata().custom(IndexLifecycleMetadata.TYPE); if (lifecycleMetadata == null || lifecycleMetadata.getPolicyMetadatas().isEmpty()) { return Map.of(); @@ -53,7 +64,10 @@ public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, Depr String name = entry.getKey(); LifecyclePolicyMetadata policyMetadata = entry.getValue(); - List<DeprecationIssue> issuesForSinglePolicy = filterChecks(CHECKS, c -> c.apply(policyMetadata.getPolicy())); + List<DeprecationIssue> issuesForSinglePolicy = checks.stream() + .map(c -> c.apply(policyMetadata.getPolicy())) + .filter(Objects::nonNull) + .toList(); if (issuesForSinglePolicy.isEmpty() == false) { issues.put(name, issuesForSinglePolicy); } @@ -61,7 +75,7 @@ public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, Depr return issues.isEmpty() ?
Map.of() : issues; } - static DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) { + private DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) { for (Phase phase : policy.getPhases().values()) { AllocateAction allocateAction = (AllocateAction) phase.getActions().get(AllocateAction.NAME); if (allocateAction != null) { @@ -82,7 +96,7 @@ static DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) { return null; } - static DeprecationIssue checkFrozenAction(LifecyclePolicy policy) { + private DeprecationIssue checkFrozenAction(LifecyclePolicy policy) { for (Phase phase : policy.getPhases().values()) { if (phase.getActions().containsKey(FreezeAction.NAME)) { return new DeprecationIssue( diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java index 778e4d176ca0f..6bed9143175ca 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java @@ -10,24 +10,28 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.LegacyFormatNames; +import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Function; +import java.util.stream.Collectors; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; @@ -39,20 +43,35 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker { public static final String NAME = "index_settings"; private final IndexNameExpressionResolver indexNameExpressionResolver; - private final Map<String, List<String>> indexToTransformIds; + private final List<TriFunction<IndexMetadata, ClusterState, Map<String, List<String>>, DeprecationIssue>> checks = List.of( + this::oldIndicesCheck, + this::ignoredOldIndicesCheck, + this::translogRetentionSettingCheck, + this::checkIndexDataPath, + this::storeTypeSettingCheck, + this::deprecatedCamelCasePattern, + this::legacyRoutingSettingCheck + ); - public IndexDeprecationChecker(IndexNameExpressionResolver indexNameExpressionResolver, Map<String, List<String>> indexToTransformIds) { + public IndexDeprecationChecker(IndexNameExpressionResolver indexNameExpressionResolver) { this.indexNameExpressionResolver = indexNameExpressionResolver; - this.indexToTransformIds = indexToTransformIds; } @Override - public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + public Map<String, List<DeprecationIssue>> check( + ClusterState clusterState,
+ DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { Map<String, List<DeprecationIssue>> indexSettingsIssues = new HashMap<>(); String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(clusterState, request); + Map<String, List<String>> indexToTransformIds = indexToTransformIds(precomputedData.transformConfigs()); for (String concreteIndex : concreteIndexNames) { IndexMetadata indexMetadata = clusterState.getMetadata().index(concreteIndex); - List<DeprecationIssue> singleIndexIssues = filterChecks(indexSettingsChecks(), c -> c.apply(indexMetadata, clusterState)); + List<DeprecationIssue> singleIndexIssues = checks.stream() + .map(c -> c.apply(indexMetadata, clusterState, indexToTransformIds)) + .filter(Objects::nonNull) + .toList(); if (singleIndexIssues.isEmpty() == false) { indexSettingsIssues.put(concreteIndex, singleIndexIssues); } @@ -63,72 +82,105 @@ public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, Depr return indexSettingsIssues; } - private List<BiFunction<IndexMetadata, ClusterState, DeprecationIssue>> indexSettingsChecks() { - return List.of( - this::oldIndicesCheck, - this::ignoredOldIndicesCheck, - IndexDeprecationChecker::translogRetentionSettingCheck, - IndexDeprecationChecker::checkIndexDataPath, - IndexDeprecationChecker::storeTypeSettingCheck, - IndexDeprecationChecker::deprecatedCamelCasePattern, - IndexDeprecationChecker::legacyRoutingSettingCheck - ); - } - @Override public String getName() { return NAME; } - private DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue oldIndicesCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map<String, List<String>> indexToTransformIds + ) { // TODO: this check needs to be revised. It's trivially true right now. IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); // We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, false) && isNotDataStreamIndex(indexMetadata, clusterState)) { - return new DeprecationIssue( - DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 9.0", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This index has version: " + currentCompatibilityVersion.toReleaseVersion(), - false, - meta(indexMetadata) - ); + var transforms = transformIdsForIndex(indexMetadata, indexToTransformIds); + if (transforms.isEmpty() == false) { + return new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + Strings.format( + "This index was created in version [%s] and requires action before upgrading to 9.0. The following transforms are " + + "configured to write to this index: [%s].
Refer to the migration guide to learn more about how to prepare " + + "transforms destination indices for your upgrade.", + currentCompatibilityVersion.toReleaseVersion(), + String.join(", ", transforms) + ), + false, + Map.of("reindex_required", true, "transform_ids", transforms) + ); + } else { + return new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "Old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This index has version: " + currentCompatibilityVersion.toReleaseVersion(), + false, + Map.of("reindex_required", true) + ); + } } return null; } - private Map<String, Object> meta(IndexMetadata indexMetadata) { - var transforms = indexToTransformIds.getOrDefault(indexMetadata.getIndex().getName(), List.of()); - if (transforms.isEmpty()) { - return Map.of("reindex_required", true); - } else { - return Map.of("reindex_required", true, "transform_ids", transforms); - } + private List<String> transformIdsForIndex(IndexMetadata indexMetadata, Map<String, List<String>> indexToTransformIds) { + return indexToTransformIds.getOrDefault(indexMetadata.getIndex().getName(), List.of()); } - private DeprecationIssue ignoredOldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue ignoredOldIndicesCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map<String, List<String>> indexToTransformIds + ) { IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); // We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, true) && isNotDataStreamIndex(indexMetadata, clusterState)) { - return new DeprecationIssue( - DeprecationIssue.Level.WARNING, - "Old index with a compatibility version < 9.0 Has Been Ignored", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This read-only index has version: " - + currentCompatibilityVersion.toReleaseVersion() - + " and will be supported as read-only in 9.0", - false, - meta(indexMetadata) - ); + var transforms = transformIdsForIndex(indexMetadata, indexToTransformIds); + if (transforms.isEmpty() == false) { + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + Strings.format( + "This index was created in version [%s] and will be supported as a read-only index in 9.0. The following " + + "transforms are no longer able to write to this index: [%s].
Refer to the migration guide to learn more " + + "about how to handle your transforms destination indices.", + currentCompatibilityVersion.toReleaseVersion(), + String.join(", ", transforms) + ), + false, + Map.of("reindex_required", true, "transform_ids", transforms) + ); + } else { + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Old index with a compatibility version < 9.0 has been ignored", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This read-only index has version: " + + currentCompatibilityVersion.toReleaseVersion() + + " and will be supported as read-only in 9.0", + false, + Map.of("reindex_required", true) + ); + } } return null; } - private static boolean isNotDataStreamIndex(IndexMetadata indexMetadata, ClusterState clusterState) { + private boolean isNotDataStreamIndex(IndexMetadata indexMetadata, ClusterState clusterState) { return clusterState.metadata().findDataStreams(indexMetadata.getIndex().getName()).isEmpty(); } - private static DeprecationIssue translogRetentionSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue translogRetentionSettingCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map<String, List<String>> ignored + ) { final boolean softDeletesEnabled = IndexSettings.INDEX_SOFT_DELETES_SETTING.get(indexMetadata.getSettings()); if (softDeletesEnabled) { if (IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.exists(indexMetadata.getSettings()) @@ -155,7 +207,7 @@ private static DeprecationIssue translogRetentionSettingCheck(IndexMetadata inde return null; } - private static DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterState clusterState, Map<String, List<String>> ignored) { if (IndexMetadata.INDEX_DATA_PATH_SETTING.exists(indexMetadata.getSettings())) { final String message = String.format( Locale.ROOT, @@ -170,7 +222,7 @@ private static DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, return null; } - private static DeprecationIssue storeTypeSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue storeTypeSettingCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map<String, List<String>> ignored + ) { final String storeType = IndexModule.INDEX_STORE_TYPE_SETTING.get(indexMetadata.getSettings()); if (IndexModule.Type.SIMPLEFS.match(storeType)) { return new DeprecationIssue( @@ -187,7 +243,7 @@ private static DeprecationIssue storeTypeSettingCheck(IndexMetadat return null; } - private static DeprecationIssue legacyRoutingSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue legacyRoutingSettingCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map<String, List<String>> ignored + ) { List<String> deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexMetadata.getSettings()); if (deprecatedSettings.isEmpty()) { return null; @@ -203,7 +263,7 @@ private static DeprecationIssue legacyRoutingSettingCheck(IndexMet ); } - private static void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsumer<MappingMetadata, Map<String, Object>> checker) { + private void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsumer<MappingMetadata, Map<String, Object>> checker) { if (indexMetadata.mapping() != null) { Map<String, Object> sourceAsMap = indexMetadata.mapping().sourceAsMap(); checker.accept(indexMetadata.mapping(), sourceAsMap); @@ -221,7 +281,7 @@ private static void
fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsum * @return a list of issues found in fields */ @SuppressWarnings("unchecked") - private static List<String> findInPropertiesRecursively( + private List<String> findInPropertiesRecursively( String type, Map<String, Object> parentMap, Function<Map<String, Object>, Boolean> predicate, @@ -275,7 +335,11 @@ private static List<String> findInPropertiesRecursively( return issues; } - private static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue deprecatedCamelCasePattern( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map<String, List<String>> ignored + ) { List<String> fields = new ArrayList<>(); fieldLevelMappingIssue( indexMetadata, @@ -283,8 +347,8 @@ private static DeprecationIssue deprecatedCamelCasePattern(IndexMe findInPropertiesRecursively( mappingMetadata.type(), sourceAsMap, - IndexDeprecationChecker::isDateFieldWithCamelCasePattern, - IndexDeprecationChecker::changeFormatToSnakeCase, + this::isDateFieldWithCamelCasePattern, + this::changeFormatToSnakeCase, "", "" ) @@ -305,7 +369,7 @@ private static DeprecationIssue deprecatedCamelCasePattern(IndexMe return null; } - private static boolean isDateFieldWithCamelCasePattern(Map<String, Object> property) { + private boolean isDateFieldWithCamelCasePattern(Map<String, Object> property) { if ("date".equals(property.get("type")) && property.containsKey("format")) { String[] patterns = DateFormatter.splitCombinedPatterns((String) property.get("format")); for (String pattern : patterns) { @@ -316,7 +380,7 @@ private static boolean isDateFieldWithCamelCasePattern(Map<String, Object> property) { return false; } - private static String changeFormatToSnakeCase(String type, Map.Entry<?, ?> entry) { + private String changeFormatToSnakeCase(String type, Map.Entry<?, ?> entry) { Map<?, ?> value = (Map<?, ?>) entry.getValue(); final String formatFieldValue = (String) value.get("format"); String[] patterns = DateFormatter.splitCombinedPatterns(formatFieldValue); @@ -332,4 +396,14 @@ private static String changeFormatToSnakeCase(String type, Map.Entry<?, ?> entry sb.deleteCharAt(sb.length() - 1); return sb.toString(); } + + private Map<String, List<String>> indexToTransformIds(List<TransformConfig> transformConfigs) { + return transformConfigs.stream() + .collect( + Collectors.groupingBy( + config -> config.getDestination().getIndex(), + Collectors.mapping(TransformConfig::getId, Collectors.toList()) + ) + ); + } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java deleted file mode 100644 index f7aba6491dfd2..0000000000000 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
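The indexToTransformIds helper above is a standard groupingBy/mapping collector. A small usage sketch with hypothetical inputs (getTransformConfigs, the transform ids, and the index name are invented for illustration):

// Two transforms writing to the same destination index collapse into one entry,
// e.g. {"dest-index" -> ["transform-a", "transform-b"]}, which oldIndicesCheck
// then surfaces in the issue's "transform_ids" metadata.
List<TransformConfig> configs = getTransformConfigs();   // hypothetical source of configs
Map<String, List<String>> byDestIndex = configs.stream()
    .collect(Collectors.groupingBy(
        config -> config.getDestination().getIndex(),
        Collectors.mapping(TransformConfig::getId, Collectors.toList())
    ));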
- */ - -package org.elasticsearch.xpack.deprecation; - -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; -import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.function.Function; - -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; -import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; -import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; -import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; - -/** - * Checks the legacy index templates for deprecation warnings. - */ -public class LegacyIndexTemplateDeprecationChecker implements ResourceDeprecationChecker { - - public static final String NAME = "legacy_templates"; - private static final List<Function<IndexTemplateMetadata, DeprecationIssue>> CHECKS = List.of( - LegacyIndexTemplateDeprecationChecker::checkIndexTemplates - ); - - /** - * @param clusterState The cluster state provided for the checker - * @return the names of the legacy index templates that have violated the checks with their respective warnings. - */ - @Override - public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) { - var templates = clusterState.metadata().templates().entrySet(); - if (templates.isEmpty()) { - return Map.of(); - } - Map<String, List<DeprecationIssue>> issues = new HashMap<>(); - for (Map.Entry<String, IndexTemplateMetadata> entry : templates) { - String name = entry.getKey(); - IndexTemplateMetadata template = entry.getValue(); - - List<DeprecationIssue> issuesForSingleIndexTemplate = filterChecks(CHECKS, c -> c.apply(template)); - if (issuesForSingleIndexTemplate.isEmpty() == false) { - issues.put(name, issuesForSingleIndexTemplate); - } - } - return issues.isEmpty() ? Map.of() : issues; - } - - static DeprecationIssue checkIndexTemplates(IndexTemplateMetadata indexTemplateMetadata) { - List<String> deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexTemplateMetadata.settings()); - if (deprecatedSettings.isEmpty()) { - return null; - } - return new DeprecationIssue( - DeprecationIssue.Level.WARNING, - DEPRECATION_MESSAGE, - DEPRECATION_HELP_URL, - "One or more of your legacy index templates is configured with 'index.routing.allocation.*.data' settings. " - + DEPRECATION_COMMON_DETAIL, - false, - DeprecationIssue.createMetaMapForRemovableSettings(deprecatedSettings) - ); - } - - @Override - public String getName() { - return NAME; - } -} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecker.java new file mode 100644 index 0000000000000..a2e9ed12a2298 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecker.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.deprecation; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Retrieves the individual node checks and reduces them to a list of deprecation warnings + */ +public class NodeDeprecationChecker { + + private static final Logger logger = LogManager.getLogger(NodeDeprecationChecker.class); + private final ThreadPool threadPool; + + public NodeDeprecationChecker(ThreadPool threadPool) { + this.threadPool = threadPool; + } + + public void check(Client client, ActionListener> listener) { + NodesDeprecationCheckRequest nodeDepReq = new NodesDeprecationCheckRequest("_all"); + ClientHelper.executeAsyncWithOrigin( + client, + ClientHelper.DEPRECATION_ORIGIN, + NodesDeprecationCheckAction.INSTANCE, + nodeDepReq, + new ThreadedActionListener<>(threadPool.generic(), listener.delegateFailureAndWrap((l, response) -> { + if (response.hasFailures()) { + List failedNodeIds = response.failures() + .stream() + .map(failure -> failure.nodeId() + ": " + failure.getMessage()) + .collect(Collectors.toList()); + logger.warn("nodes failed to run deprecation checks: {}", failedNodeIds); + for (FailedNodeException failure : response.failures()) { + logger.debug("node {} failed to run deprecation checks: {}", failure.nodeId(), failure); + } + } + l.onResponse(reduceToDeprecationIssues(response)); + })) + ); + } + + /** + * This method rolls up DeprecationIssues that are identical but on different nodes. It also rolls up DeprecationIssues that are + * identical (and on different nodes) except that they differ in the removable settings listed in their meta object. We roll these up + * by taking the intersection of all removable settings in otherwise identical DeprecationIssues. That way we don't claim that a + * setting can be automatically removed if any node has it in its elasticsearch.yml. + * @param response the response that contains the deprecation issues of single nodes + * @return a list of deprecation issues grouped accordingly. + */ + static List reduceToDeprecationIssues(NodesDeprecationCheckResponse response) { + // A collection whose values are lists of DeprecationIssues that differ only by meta values (if that): + Collection>> issuesToMerge = getDeprecationIssuesThatDifferOnlyByMeta(response.getNodes()); + // A map of DeprecationIssues (containing only the intersection of removable settings) to the nodes they are seen on + Map> issueToListOfNodesMap = getMergedIssuesToNodesMap(issuesToMerge); + + return issueToListOfNodesMap.entrySet().stream().map(entry -> { + DeprecationIssue issue = entry.getKey(); + String details = issue.getDetails() != null ? 
issue.getDetails() + " " : ""; + return new DeprecationIssue( + issue.getLevel(), + issue.getMessage(), + issue.getUrl(), + details + "(nodes impacted: " + entry.getValue() + ")", + issue.isResolveDuringRollingUpgrade(), + issue.getMeta() + ); + }).collect(Collectors.toList()); + } + + /* + * This method pulls all the DeprecationIssues from the given nodeResponses, and buckets them into lists of DeprecationIssues that + * differ at most by meta values (if that). The returned tuples also contain the node name the deprecation issue was found on. If all + * nodes in the cluster were configured identically then all tuples in a list will differ only by the node name. + */ + private static Collection>> getDeprecationIssuesThatDifferOnlyByMeta( + List nodeResponses + ) { + Map>> issuesToMerge = new HashMap<>(); + for (NodesDeprecationCheckAction.NodeResponse resp : nodeResponses) { + for (DeprecationIssue issue : resp.getDeprecationIssues()) { + issuesToMerge.computeIfAbsent( + new DeprecationIssue( + issue.getLevel(), + issue.getMessage(), + issue.getUrl(), + issue.getDetails(), + issue.isResolveDuringRollingUpgrade(), + null // Intentionally removing meta from the key so that it's not taken into account for equality + ), + (key) -> new ArrayList<>() + ).add(new Tuple<>(issue, resp.getNode().getName())); + } + } + return issuesToMerge.values(); + } + + /* + * At this point we have one DeprecationIssue per node for a given deprecation. This method rolls them up into a single DeprecationIssue + * with a list of nodes that they appear on. If two DeprecationIssues on two different nodes differ only by the set of removable + * settings (i.e. they have different elasticsearch.yml configurations) then this method takes the intersection of those settings when + * it rolls them up. 
+ */ + private static Map> getMergedIssuesToNodesMap( + Collection>> issuesToMerge + ) { + Map> issueToListOfNodesMap = new HashMap<>(); + for (List> similarIssues : issuesToMerge) { + DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings( + similarIssues.stream().map(Tuple::v1).toList() + ); + issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>()) + .addAll(similarIssues.stream().map(Tuple::v2).toList()); + } + return issueToListOfNodesMap; + } + +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java index b6fff5a82f0cd..2f476d111f4ba 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java @@ -41,6 +41,60 @@ public class NodeDeprecationChecks { + // Visible for testing + static final List< + NodeDeprecationCheck> SINGLE_NODE_CHECKS = List.of( + NodeDeprecationChecks::checkMultipleDataPaths, + NodeDeprecationChecks::checkDataPathsList, + NodeDeprecationChecks::checkSharedDataPathSetting, + NodeDeprecationChecks::checkReservedPrefixedRealmNames, + NodeDeprecationChecks::checkExporterUseIngestPipelineSettings, + NodeDeprecationChecks::checkExporterPipelineMasterTimeoutSetting, + NodeDeprecationChecks::checkExporterCreateLegacyTemplateSetting, + NodeDeprecationChecks::checkMonitoringSettingHistoryDuration, + NodeDeprecationChecks::checkMonitoringSettingHistoryDuration, + NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecovery, + NodeDeprecationChecks::checkMonitoringSettingCollectIndices, + NodeDeprecationChecks::checkMonitoringSettingCollectCcrTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectEnrichStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecoveryStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectIndexStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectMlJobStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectNodeStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectClusterStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersHost, + NodeDeprecationChecks::checkMonitoringSettingExportersBulkTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersConnectionTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersConnectionReadTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersAuthUsername, + NodeDeprecationChecks::checkMonitoringSettingExportersAuthPass, + NodeDeprecationChecks::checkMonitoringSettingExportersSSL, + NodeDeprecationChecks::checkMonitoringSettingExportersProxyBase, + NodeDeprecationChecks::checkMonitoringSettingExportersSniffEnabled, + NodeDeprecationChecks::checkMonitoringSettingExportersHeaders, + NodeDeprecationChecks::checkMonitoringSettingExportersTemplateTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersMasterTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersEnabled, + NodeDeprecationChecks::checkMonitoringSettingExportersType, + NodeDeprecationChecks::checkMonitoringSettingExportersAlertsEnabled, + NodeDeprecationChecks::checkMonitoringSettingExportersAlertsBlacklist, + NodeDeprecationChecks::checkMonitoringSettingExportersIndexNameTimeFormat, + 
NodeDeprecationChecks::checkMonitoringSettingDecommissionAlerts, + NodeDeprecationChecks::checkMonitoringSettingEsCollectionEnabled, + NodeDeprecationChecks::checkMonitoringSettingCollectionEnabled, + NodeDeprecationChecks::checkMonitoringSettingCollectionInterval, + NodeDeprecationChecks::checkScriptContextCache, + NodeDeprecationChecks::checkScriptContextCompilationsRateLimitSetting, + NodeDeprecationChecks::checkScriptContextCacheSizeSetting, + NodeDeprecationChecks::checkScriptContextCacheExpirationSetting, + NodeDeprecationChecks::checkEnforceDefaultTierPreferenceSetting, + NodeDeprecationChecks::checkLifecyleStepMasterTimeoutSetting, + NodeDeprecationChecks::checkEqlEnabledSetting, + NodeDeprecationChecks::checkNodeAttrData, + NodeDeprecationChecks::checkWatcherBulkConcurrentRequestsSetting, + NodeDeprecationChecks::checkTracingApmSettings + ); + static DeprecationIssue checkDeprecatedSetting( final Settings clusterSettings, final Settings nodeSettings, @@ -77,15 +131,6 @@ private static Map<String, Object> createMetaMapForRemovableSettings(boolean can return canAutoRemoveSetting ? DeprecationIssue.createMetaMapForRemovableSettings(removableSettings) : null; } - static DeprecationIssue checkRemovedSetting( - final Settings clusterSettings, - final Settings nodeSettings, - final Setting<?> removedSetting, - final String url - ) { - return checkRemovedSetting(clusterSettings, nodeSettings, removedSetting, url, null, DeprecationIssue.Level.CRITICAL); - } - static DeprecationIssue checkRemovedSetting( final Settings clusterSettings, final Settings nodeSettings, @@ -1012,4 +1057,9 @@ static DeprecationIssue checkTracingApmSettings( DeprecationIssue.Level.CRITICAL ); } + + @FunctionalInterface + public interface NodeDeprecationCheck<A, B, C, D, R> { + R apply(A first, B second, C third, D fourth); + } }
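As a standalone illustration of the pattern used by SINGLE_NODE_CHECKS, the sketch below shows a four-argument functional interface, a list of method-reference checks, and the null-filtering pass applied by the caller; all names here are hypothetical:

    import java.util.List;
    import java.util.Objects;

    // A four-argument analogue of java.util.function.Function, mirroring NodeDeprecationCheck.
    @FunctionalInterface
    interface Check4<A, B, C, D, R> {
        R apply(A a, B b, C c, D d);
    }

    class CheckRunner {
        // Runs every check against the same four inputs; a null result means "no issue found".
        static <A, B, C, D, R> List<R> run(List<Check4<A, B, C, D, R>> checks, A a, B b, C c, D d) {
            return checks.stream().map(check -> check.apply(a, b, c, d)).filter(Objects::nonNull).toList();
        }
    }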
diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java index 71b9903f69f86..daa3514e3b989 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java @@ -24,8 +24,14 @@ public interface ResourceDeprecationChecker { * This runs the checks for the current deprecation checker. * * @param clusterState The cluster state provided for the checker + * @param request The deprecation request that triggered this check + * @param precomputedData Data that has been remotely retrieved and might be useful in the checks */ - Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request); + Map<String, List<DeprecationIssue>> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ); /** * @return The name of the checker diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java index 5a451a9613797..ff0ff982d11bf 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java @@ -19,9 +19,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Function; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; @@ -32,20 +32,34 @@ public class TemplateDeprecationChecker implements ResourceDeprecationChecker { public static final String NAME = "templates"; - private static final List<Function<ComposableIndexTemplate, DeprecationIssue>> INDEX_TEMPLATE_CHECKS = List.of( - TemplateDeprecationChecker::checkLegacyTiersInIndexTemplate + private final List<Function<ComposableIndexTemplate, DeprecationIssue>> indexTemplateChecks = List.of( + this::checkLegacyTiersInIndexTemplate ); - private static final List<Function<ComponentTemplate, DeprecationIssue>> COMPONENT_TEMPLATE_CHECKS = List.of( - TemplateDeprecationChecker::checkSourceModeInComponentTemplates, - TemplateDeprecationChecker::checkLegacyTiersInComponentTemplates + private final List<Function<ComponentTemplate, DeprecationIssue>> componentTemplateChecks = List.of( + this::checkSourceModeInComponentTemplates, + this::checkLegacyTiersInComponentTemplates ); /** * @param clusterState The cluster state provided for the checker + * @param request not used yet in these checks + * @param precomputedData not used yet in these checks * @return the names of the index and component templates that have violated the checks with their respective warnings. */ @Override - public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + public Map<String, List<DeprecationIssue>> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { + return check(clusterState); + } + + /** + * @param clusterState The cluster state provided for the checker + * @return the names of the index and component templates that have violated the checks with their respective warnings.
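+ * <p>For illustration, a non-empty result might look like {@code {"my-index-template" -> [legacy tier issue]}}: the + * template name mapped to the issues detected in it (an assumed example, not output copied from the API).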
+ */ + Map> check(ClusterState clusterState) { var indexTemplates = clusterState.metadata().templatesV2().entrySet(); var componentTemplates = clusterState.metadata().componentTemplates().entrySet(); if (indexTemplates.isEmpty() && componentTemplates.isEmpty()) { @@ -56,7 +70,10 @@ public Map> check(ClusterState clusterState, Depr String name = entry.getKey(); ComposableIndexTemplate template = entry.getValue(); - List issuesForSingleIndexTemplate = filterChecks(INDEX_TEMPLATE_CHECKS, c -> c.apply(template)); + List issuesForSingleIndexTemplate = indexTemplateChecks.stream() + .map(c -> c.apply(template)) + .filter(Objects::nonNull) + .toList(); if (issuesForSingleIndexTemplate.isEmpty() == false) { issues.computeIfAbsent(name, ignored -> new ArrayList<>()).addAll(issuesForSingleIndexTemplate); } @@ -65,7 +82,10 @@ public Map> check(ClusterState clusterState, Depr String name = entry.getKey(); ComponentTemplate template = entry.getValue(); - List issuesForSingleIndexTemplate = filterChecks(COMPONENT_TEMPLATE_CHECKS, c -> c.apply(template)); + List issuesForSingleIndexTemplate = componentTemplateChecks.stream() + .map(c -> c.apply(template)) + .filter(Objects::nonNull) + .toList(); if (issuesForSingleIndexTemplate.isEmpty() == false) { issues.computeIfAbsent(name, ignored -> new ArrayList<>()).addAll(issuesForSingleIndexTemplate); } @@ -73,7 +93,7 @@ public Map> check(ClusterState clusterState, Depr return issues.isEmpty() ? Map.of() : issues; } - static DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate composableIndexTemplate) { + private DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate composableIndexTemplate) { Template template = composableIndexTemplate.template(); if (template != null) { List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(template.settings()); @@ -93,7 +113,7 @@ static DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate return null; } - static DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate template) { + private DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate template) { if (template.template().mappings() != null) { var sourceAsMap = (Map) XContentHelper.convertToMap(template.template().mappings().uncompressed(), true).v2().get("_doc"); if (sourceAsMap != null) { @@ -102,9 +122,9 @@ static DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate te if (sourceMap.containsKey("mode")) { return new DeprecationIssue( DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING_TITLE, + "https://ela.st/migrate-source-mode", SourceFieldMapper.DEPRECATION_WARNING, - "https://github.com/elastic/elasticsearch/pull/117172", - null, false, null ); @@ -115,7 +135,7 @@ static DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate te return null; } - static DeprecationIssue checkLegacyTiersInComponentTemplates(ComponentTemplate componentTemplate) { + private DeprecationIssue checkLegacyTiersInComponentTemplates(ComponentTemplate componentTemplate) { Template template = componentTemplate.template(); List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(template.settings()); if (deprecatedSettings.isEmpty()) { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java deleted file mode 100644 
index 57c4fae960854..0000000000000 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.deprecation; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; -import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; - -import java.util.ArrayList; -import java.util.List; - -class TransformDeprecationChecker implements DeprecationChecker { - - public static final String TRANSFORM_DEPRECATION_KEY = "transform_settings"; - private final List transformConfigs; - - TransformDeprecationChecker(List transformConfigs) { - this.transformConfigs = transformConfigs; - } - - @Override - public boolean enabled(Settings settings) { - // always enabled - return true; - } - - @Override - public void check(Components components, ActionListener deprecationIssueListener) { - ActionListener.completeWith(deprecationIssueListener, () -> { - List allIssues = new ArrayList<>(); - for (var config : transformConfigs) { - allIssues.addAll(config.checkForDeprecations(components.xContentRegistry())); - } - return new CheckResult(getName(), allIssues); - }); - } - - @Override - public String getName() { - return TRANSFORM_DEPRECATION_KEY; - } -} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java index 886eddf82149e..c30d8829c23f3 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -6,12 +6,11 @@ */ package org.elasticsearch.xpack.deprecation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.client.internal.OriginSettingClient; @@ -19,14 +18,22 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; 
import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.util.PageParams; @@ -35,24 +42,30 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.CLUSTER_SETTINGS_CHECKS; - public class TransportDeprecationInfoAction extends TransportMasterNodeReadAction< DeprecationInfoAction.Request, DeprecationInfoAction.Response> { - private static final DeprecationChecker ML_CHECKER = new MlDeprecationChecker(); - private static final Logger logger = LogManager.getLogger(TransportDeprecationInfoAction.class); + public static final Setting> SKIP_DEPRECATIONS_SETTING = Setting.stringListSetting( + "deprecation.skip_deprecated_settings", + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + private static final List PLUGIN_CHECKERS = List.of(new MlDeprecationChecker()); private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; private final Settings settings; private final NamedXContentRegistry xContentRegistry; private volatile List skipTheseDeprecations; + private final NodeDeprecationChecker nodeDeprecationChecker; + private final ClusterDeprecationChecker clusterDeprecationChecker; + private final List resourceDeprecationCheckers; @Inject public TransportDeprecationInfoAction( @@ -79,10 +92,17 @@ public TransportDeprecationInfoAction( this.indexNameExpressionResolver = indexNameExpressionResolver; this.settings = settings; this.xContentRegistry = xContentRegistry; - skipTheseDeprecations = DeprecationChecks.SKIP_DEPRECATIONS_SETTING.get(settings); + skipTheseDeprecations = SKIP_DEPRECATIONS_SETTING.get(settings); + nodeDeprecationChecker = new NodeDeprecationChecker(threadPool); + clusterDeprecationChecker = new ClusterDeprecationChecker(xContentRegistry); + resourceDeprecationCheckers = List.of( + new IndexDeprecationChecker(indexNameExpressionResolver), + new DataStreamDeprecationChecker(indexNameExpressionResolver), + new TemplateDeprecationChecker(), + new IlmPolicyDeprecationChecker() + ); // Safe to register this here because it happens synchronously before the cluster service is started: - clusterService.getClusterSettings() - .addSettingsUpdateConsumer(DeprecationChecks.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations); + clusterService.getClusterSettings().addSettingsUpdateConsumer(SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations); } private void setSkipDeprecations(List skipDeprecations) { @@ -102,56 +122,222 @@ protected final void masterOperation( ClusterState state, final ActionListener listener ) { - NodesDeprecationCheckRequest nodeDepReq = new NodesDeprecationCheckRequest("_all"); - ClientHelper.executeAsyncWithOrigin( - client, - ClientHelper.DEPRECATION_ORIGIN, - NodesDeprecationCheckAction.INSTANCE, - nodeDepReq, - listener.delegateFailureAndWrap((l, response) -> { - if (response.hasFailures()) { - List failedNodeIds = response.failures() - .stream() - 
.map(failure -> failure.nodeId() + ": " + failure.getMessage()) - .collect(Collectors.toList()); - logger.warn("nodes failed to run deprecation checks: {}", failedNodeIds); - for (FailedNodeException failure : response.failures()) { - logger.debug("node {} failed to run deprecation checks: {}", failure.nodeId(), failure); - } - } - transformConfigs(l.delegateFailureAndWrap((ll, transformConfigs) -> { - DeprecationChecker.Components components = new DeprecationChecker.Components( - xContentRegistry, - settings, - new OriginSettingClient(client, ClientHelper.DEPRECATION_ORIGIN) - ); - pluginSettingIssues( - List.of(ML_CHECKER, new TransformDeprecationChecker(transformConfigs)), - components, - new ThreadedActionListener<>( - client.threadPool().generic(), - ll.map( - deprecationIssues -> DeprecationInfoAction.Response.from( - state, - indexNameExpressionResolver, - request, - response, - CLUSTER_SETTINGS_CHECKS, - deprecationIssues, - skipTheseDeprecations, - List.of( - new IndexDeprecationChecker(indexNameExpressionResolver, indexToTransformIds(transformConfigs)), - new DataStreamDeprecationChecker(indexNameExpressionResolver), - new TemplateDeprecationChecker(), - new IlmPolicyDeprecationChecker() - ) - ) - ) - ) - ); - })); - }) + PrecomputedData precomputedData = new PrecomputedData(); + try (var refs = new RefCountingListener(checkAndCreateResponse(state, request, precomputedData, listener))) { + nodeDeprecationChecker.check(client, refs.acquire(precomputedData::setOnceNodeSettingsIssues)); + transformConfigs(refs.acquire(precomputedData::setOnceTransformConfigs)); + DeprecationChecker.Components components = new DeprecationChecker.Components( + xContentRegistry, + settings, + new OriginSettingClient(client, ClientHelper.DEPRECATION_ORIGIN) + ); + pluginSettingIssues(PLUGIN_CHECKERS, components, refs.acquire(precomputedData::setOncePluginIssues)); + } + }
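The try-with-resources block above fans out three remote requests and lets RefCountingListener fire the final listener only once all of them have completed. A rough plain-Java sketch of that reference-counting idea (an approximation for illustration, not Elasticsearch's implementation, which also propagates failures to the wrapped listener):

    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.function.Consumer;

    // Each acquire() bumps a reference count; the completion callback runs once every child
    // has reported back, mirroring how masterOperation defers checkAndCreateResponse until
    // all remote data has been stored in PrecomputedData.
    class RefCounting implements AutoCloseable {
        private final AtomicInteger refs = new AtomicInteger(1); // one ref held by the try block itself
        private final Runnable onAllComplete;

        RefCounting(Runnable onAllComplete) {
            this.onAllComplete = onAllComplete;
        }

        <T> Consumer<T> acquire(Consumer<T> setter) {
            refs.incrementAndGet();
            return value -> {
                setter.accept(value); // e.g. precomputedData::setOnceNodeSettingsIssues
                release();
            };
        }

        private void release() {
            if (refs.decrementAndGet() == 0) {
                onAllComplete.run();
            }
        }

        @Override
        public void close() {
            release(); // drop the initial ref; fires the callback if all children already finished
        }
    }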
+ + /** + * This is the function that does the bulk of the logic of combining the necessary dependencies together, including the cluster + * state and the precomputed information in {@code precomputedData}, with the remaining checkers such as the cluster setting checker + * and the resource checkers. This function will run a significant part of the checks and build out the final list of issues that + * exist in the cluster. Because of that, it must not run on the transport thread; that is why it is combined with + * {@link #executeInGenericThreadpool(ActionListener)}. + * + * @param state The cluster state + * @param request The originating request containing the index expressions to evaluate + * @param precomputedData Data from remote requests necessary to construct the response + * @param responseListener The listener expecting the {@link DeprecationInfoAction.Response} + * @return The listener that should be executed after all the remote requests have completed and the {@link PrecomputedData} + * is initialised. + */ + public ActionListener<Void> checkAndCreateResponse( + ClusterState state, + DeprecationInfoAction.Request request, + PrecomputedData precomputedData, + ActionListener<DeprecationInfoAction.Response> responseListener + ) { + return executeInGenericThreadpool( + ActionListener.running( + () -> responseListener.onResponse( + checkAndCreateResponse( + state, + indexNameExpressionResolver, + request, + skipTheseDeprecations, + clusterDeprecationChecker, + resourceDeprecationCheckers, + precomputedData + ) + ) + ) + ); + } + + /** + * This is the function that does the bulk of the logic of combining the necessary dependencies together, including the cluster + * state and the precomputed information in {@code precomputedData}, with the remaining checkers such as the cluster setting checker + * and the resource checkers. This function will run a significant part of the checks and build out the final list of issues that + * exist in the cluster. It must not run on the transport thread; that is why it is combined with + * {@link #checkAndCreateResponse(ClusterState, DeprecationInfoAction.Request, PrecomputedData, ActionListener)}. We keep this + * separated for testing purposes. + * + * @param state The cluster state + * @param indexNameExpressionResolver Used to resolve indices into their concrete names + * @param request The originating request containing the index expressions to evaluate + * @param skipTheseDeprecatedSettings the settings that will be removed from cluster metadata and the index metadata of all the + * indexes specified by indexNames + * @param clusterDeprecationChecker The checker that provides the cluster settings deprecation warnings + * @param resourceDeprecationCheckers these are checkers that take as input the cluster state and return a map from resource type + * to issues grouped by the resource name. + * @param precomputedData data from remote requests necessary to construct the response + * @return The list of deprecation issues found in the cluster + */ + static DeprecationInfoAction.Response checkAndCreateResponse( + ClusterState state, + IndexNameExpressionResolver indexNameExpressionResolver, + DeprecationInfoAction.Request request, + List<String> skipTheseDeprecatedSettings, + ClusterDeprecationChecker clusterDeprecationChecker, + List<ResourceDeprecationChecker> resourceDeprecationCheckers, + PrecomputedData precomputedData + ) { + assert Transports.assertNotTransportThread("walking mappings in indexSettingsChecks is expensive"); + // Allow system index access here to prevent deprecation warnings when we call this API + String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + ClusterState stateWithSkippedSettingsRemoved = removeSkippedSettings(state, concreteIndexNames, skipTheseDeprecatedSettings); + List<DeprecationIssue> clusterSettingsIssues = clusterDeprecationChecker.check( + stateWithSkippedSettingsRemoved, + precomputedData.transformConfigs() + ); + + Map<String, Map<String, List<DeprecationIssue>>> resourceDeprecationIssues = new HashMap<>(); + for (ResourceDeprecationChecker resourceDeprecationChecker : resourceDeprecationCheckers) { + Map<String, List<DeprecationIssue>> issues = resourceDeprecationChecker.check( + stateWithSkippedSettingsRemoved, + request, + precomputedData + ); + if (issues.isEmpty() == false) { + resourceDeprecationIssues.put(resourceDeprecationChecker.getName(), issues); + } + } + + return new DeprecationInfoAction.Response( + clusterSettingsIssues, + precomputedData.nodeSettingsIssues(), + resourceDeprecationIssues, + precomputedData.pluginIssues() + ); + }
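PrecomputedData below builds on Lucene's SetOnce, whose write-once contract is what makes the plain getters safe to call once the RefCountingListener has completed. A small sketch of that contract (the wrapper class and its field are illustrative, not part of this change):

    import org.apache.lucene.util.SetOnce;

    import java.util.List;

    // Each slot may be written exactly once; reads return null until the write happens.
    class WriteOnceExample {
        private final SetOnce<List<String>> values = new SetOnce<>();

        void publish(List<String> v) {
            values.set(v); // a second set(...) throws SetOnce.AlreadySetException
        }

        List<String> read() {
            return values.get(); // safe once the publisher is known to have finished
        }
    }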
+ + /** + * This class holds the results of remote requests. These can be either checks that require remote requests such as + * {@code nodeSettingsIssues} and {@code pluginIssues} or metadata needed by more than one type of check such as + * {@code transformConfigs}. + */ + public static class PrecomputedData { + private final SetOnce<List<DeprecationIssue>> nodeSettingsIssues = new SetOnce<>(); + private final SetOnce<Map<String, List<DeprecationIssue>>> pluginIssues = new SetOnce<>(); + private final SetOnce<List<TransformConfig>> transformConfigs = new SetOnce<>(); + + public void setOnceNodeSettingsIssues(List<DeprecationIssue> nodeSettingsIssues) { + this.nodeSettingsIssues.set(nodeSettingsIssues); + } + + public void setOncePluginIssues(Map<String, List<DeprecationIssue>> pluginIssues) { + this.pluginIssues.set(pluginIssues); + } + + public void setOnceTransformConfigs(List<TransformConfig> transformConfigs) { + this.transformConfigs.set(transformConfigs); + } + + public List<DeprecationIssue> nodeSettingsIssues() { + return nodeSettingsIssues.get(); + } + + public Map<String, List<DeprecationIssue>> pluginIssues() { + return pluginIssues.get(); + } + + public List<TransformConfig> transformConfigs() { + return transformConfigs.get(); + } + } + + /** + * Removes the skipped settings from the selected indices and the component and index templates. + * @param state The cluster state to modify + * @param indexNames The names of the indexes whose settings need to be filtered + * @param skipTheseDeprecatedSettings The settings that will be removed from cluster metadata and the index metadata of all the + * indexes specified by indexNames + * @return A modified cluster state with the given settings removed + */ + private static ClusterState removeSkippedSettings(ClusterState state, String[] indexNames, List<String> skipTheseDeprecatedSettings) { + // Short-circuit, no need to reconstruct the cluster state if there are no settings to remove + if (skipTheseDeprecatedSettings == null || skipTheseDeprecatedSettings.isEmpty()) { + return state; + } + ClusterState.Builder clusterStateBuilder = new ClusterState.Builder(state); + Metadata.Builder metadataBuilder = Metadata.builder(state.metadata()); + metadataBuilder.transientSettings( + metadataBuilder.transientSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) + ); + metadataBuilder.persistentSettings( + metadataBuilder.persistentSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) ); + Map<String, IndexMetadata> indicesBuilder = new HashMap<>(state.getMetadata().indices()); + for (String indexName : indexNames) { + IndexMetadata indexMetadata = state.getMetadata().index(indexName); + IndexMetadata.Builder filteredIndexMetadataBuilder = new IndexMetadata.Builder(indexMetadata); + Settings filteredSettings = indexMetadata.getSettings() + .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false); + filteredIndexMetadataBuilder.settings(filteredSettings); + indicesBuilder.put(indexName, filteredIndexMetadataBuilder.build()); + } + metadataBuilder.componentTemplates(state.metadata().componentTemplates().entrySet().stream().map(entry -> { + String templateName = entry.getKey(); + ComponentTemplate componentTemplate = entry.getValue(); + Template template = componentTemplate.template(); + if (template.settings() == null || template.settings().isEmpty()) { + return Tuple.tuple(templateName, componentTemplate); + } + return Tuple.tuple( + templateName, + new ComponentTemplate( + Template.builder(template) + .settings(template.settings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)) + .build(), + componentTemplate.version(), + componentTemplate.metadata(), + componentTemplate.deprecated() + ) + );
}).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); + metadataBuilder.indexTemplates(state.metadata().templatesV2().entrySet().stream().map(entry -> { + String templateName = entry.getKey(); + ComposableIndexTemplate indexTemplate = entry.getValue(); + Template template = indexTemplate.template(); + if (template == null || template.settings() == null || template.settings().isEmpty()) { + return Tuple.tuple(templateName, indexTemplate); + } + return Tuple.tuple( + templateName, + indexTemplate.toBuilder() + .template( + Template.builder(indexTemplate.template()) + .settings( + indexTemplate.template() + .settings() + .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) + ) + ) + .build() + ); + }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); + + metadataBuilder.indices(indicesBuilder); + clusterStateBuilder.metadata(metadataBuilder); + return clusterStateBuilder.build(); } static void pluginSettingIssues( @@ -192,34 +378,21 @@ private void transformConfigs(PageParams currentPage, ActionListener( - threadPool.generic(), - currentPageListener.delegateFailureAndWrap((delegate, getTransformConfigResponse) -> { - var currentPageOfConfigs = getTransformConfigResponse.getTransformConfigurations().stream(); - var currentPageSize = currentPage.getFrom() + currentPage.getSize(); - var totalTransformConfigCount = getTransformConfigResponse.getTransformConfigurationCount(); - if (totalTransformConfigCount >= currentPageSize) { - var nextPage = new PageParams(currentPageSize, PageParams.DEFAULT_SIZE); - transformConfigs( - nextPage, - delegate.map(nextPageOfConfigs -> Stream.concat(currentPageOfConfigs, nextPageOfConfigs)) - ); - } else { - delegate.onResponse(currentPageOfConfigs); - } - }) - ) + executeInGenericThreadpool(currentPageListener.delegateFailureAndWrap((delegate, getTransformConfigResponse) -> { + var currentPageOfConfigs = getTransformConfigResponse.getTransformConfigurations().stream(); + var currentPageSize = currentPage.getFrom() + currentPage.getSize(); + var totalTransformConfigCount = getTransformConfigResponse.getTransformConfigurationCount(); + if (totalTransformConfigCount >= currentPageSize) { + var nextPage = new PageParams(currentPageSize, PageParams.DEFAULT_SIZE); + transformConfigs(nextPage, delegate.map(nextPageOfConfigs -> Stream.concat(currentPageOfConfigs, nextPageOfConfigs))); + } else { + delegate.onResponse(currentPageOfConfigs); + } + })) ); } - private Map> indexToTransformIds(List transformConfigs) { - return transformConfigs.stream() - .collect( - Collectors.groupingBy( - config -> config.getDestination().getIndex(), - Collectors.mapping(TransformConfig::getId, Collectors.toList()) - ) - ); + private ActionListener executeInGenericThreadpool(ActionListener listener) { + return new ThreadedActionListener<>(threadPool.generic(), listener); } - } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java index 745f5e7ae8959..befe0bd6b41a4 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java @@ -36,6 +36,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Objects; import static 
org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING; @@ -75,10 +76,10 @@ public TransportNodeDeprecationCheckAction( this.pluginsService = pluginsService; this.licenseState = licenseState; this.clusterInfoService = clusterInfoService; - skipTheseDeprecations = DeprecationChecks.SKIP_DEPRECATIONS_SETTING.get(settings); + skipTheseDeprecations = TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.get(settings); // Safe to register this here because it happens synchronously before the cluster service is started: clusterService.getClusterSettings() - .addSettingsUpdateConsumer(DeprecationChecks.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations); + .addSettingsUpdateConsumer(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations); } private void setSkipDeprecations(List skipDeprecations) { @@ -106,13 +107,13 @@ protected NodesDeprecationCheckAction.NodeResponse newNodeResponse(StreamInput i @Override protected NodesDeprecationCheckAction.NodeResponse nodeOperation(NodesDeprecationCheckAction.NodeRequest request, Task task) { - return nodeOperation(request, DeprecationChecks.NODE_SETTINGS_CHECKS); + return nodeOperation(request, NodeDeprecationChecks.SINGLE_NODE_CHECKS); } NodesDeprecationCheckAction.NodeResponse nodeOperation( NodesDeprecationCheckAction.NodeRequest request, List< - DeprecationChecks.NodeDeprecationCheck< + NodeDeprecationChecks.NodeDeprecationCheck< Settings, PluginsAndModules, ClusterState, @@ -130,10 +131,10 @@ NodesDeprecationCheckAction.NodeResponse nodeOperation( .metadata(Metadata.builder(metadata).transientSettings(transientSettings).persistentSettings(persistentSettings).build()) .build(); - List issues = DeprecationInfoAction.filterChecks( - nodeSettingsChecks, - (c) -> c.apply(filteredNodeSettings, pluginsService.info(), filteredClusterState, licenseState) - ); + List issues = nodeSettingsChecks.stream() + .map(c -> c.apply(filteredNodeSettings, pluginsService.info(), filteredClusterState, licenseState)) + .filter(Objects::nonNull) + .toList(); DeprecationIssue watermarkIssue = checkDiskLowWatermark( filteredNodeSettings, filteredClusterState.metadata().settings(), diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java index 2c32fb7610a18..a8dd1d464e30c 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java @@ -69,7 +69,7 @@ public void testOldIndicesCheck() { ); // We know that the data stream checks ignore the request. 
- Map> issuesByDataStream = checker.check(clusterState, null); + Map> issuesByDataStream = checker.check(clusterState); assertThat(issuesByDataStream.size(), equalTo(1)); assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); @@ -91,7 +91,7 @@ public void testOldIndicesCheckWithOnlyNewIndices() { .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - Map> issuesByDataStream = checker.check(clusterState, null); + Map> issuesByDataStream = checker.check(clusterState); assertThat(issuesByDataStream.size(), equalTo(0)); } @@ -137,7 +137,7 @@ public void testOldIndicesCheckWithClosedAndOpenIndices() { ) ); - Map> issuesByDataStream = checker.check(clusterState, null); + Map> issuesByDataStream = checker.check(clusterState); assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } @@ -290,14 +290,14 @@ public void testOldIndicesIgnoredWarningCheck() { + "OK to remain read-only after upgrade", false, ofEntries( - entry("reindex_required", true), + entry("reindex_required", false), entry("total_backing_indices", oldIndexCount + newIndexCount), entry("ignored_indices_requiring_upgrade_count", expectedIndices.size()), entry("ignored_indices_requiring_upgrade", expectedIndices) ) ); - Map> issuesByDataStream = checker.check(clusterState, null); + Map> issuesByDataStream = checker.check(clusterState); assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java deleted file mode 100644 index 9a57450b7fad7..0000000000000 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.deprecation; - -import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; - -import java.util.ArrayList; -import java.util.List; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.equalTo; - -public class DeprecationChecksTests extends ESTestCase { - - public void testFilterChecks() { - DeprecationIssue issue = createRandomDeprecationIssue(); - int numChecksPassed = randomIntBetween(0, 5); - int numChecksFailed = 10 - numChecksPassed; - List> checks = new ArrayList<>(); - for (int i = 0; i < numChecksFailed; i++) { - checks.add(() -> issue); - } - for (int i = 0; i < numChecksPassed; i++) { - checks.add(() -> null); - } - List filteredIssues = DeprecationInfoAction.filterChecks(checks, Supplier::get); - assertThat(filteredIssues.size(), equalTo(numChecksFailed)); - } - - private static DeprecationIssue createRandomDeprecationIssue() { - String details = randomBoolean() ? 
randomAlphaOfLength(10) : null; - return new DeprecationIssue( - randomFrom(DeprecationIssue.Level.values()), - randomAlphaOfLength(10), - randomAlphaOfLength(10), - details, - randomBoolean(), - randomMap(1, 5, () -> Tuple.tuple(randomAlphaOfLength(4), randomAlphaOfLength(4))) - ); - } -} diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java index a06bb2f2718cb..537c3eb84a902 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java @@ -6,50 +6,17 @@ */ package org.elasticsearch.xpack.deprecation; -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.ComponentTemplate; -import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; -import org.elasticsearch.cluster.metadata.DataStream; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.Template; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue.Level; -import org.junit.Assert; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.Response.RESERVED_NAMES; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.core.IsEqual.equalTo; - public class DeprecationInfoActionResponseTests extends AbstractWireSerializingTestCase { @Override @@ -153,329 +120,11 @@ protected Writeable.Reader instanceReader() { return DeprecationInfoAction.Response::new; } - public void testFrom() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all"); - mapping.field("enabled", false); - mapping.endObject().endObject(); - - Metadata metadata = Metadata.builder() - .put( - IndexMetadata.builder("test") - .putMapping(Strings.toString(mapping)) - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - ) - .build(); - - DiscoveryNode 
discoveryNode = DiscoveryNodeUtils.create("test", new TransportAddress(TransportAddress.META_ADDRESS, 9300)); - ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); - boolean clusterIssueFound = randomBoolean(); - boolean nodeIssueFound = randomBoolean(); - boolean indexIssueFound = randomBoolean(); - boolean dataStreamIssueFound = randomBoolean(); - boolean indexTemplateIssueFound = randomBoolean(); - boolean componentTemplateIssueFound = randomBoolean(); - boolean ilmPolicyIssueFound = randomBoolean(); - DeprecationIssue foundIssue = createTestDeprecationIssue(); - List> clusterSettingsChecks = List.of((s) -> clusterIssueFound ? foundIssue : null); - List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { - if (indexIssueFound) { - return Map.of("test", List.of(foundIssue)); - } - return Map.of(); - }), createResourceChecker("data_streams", (cs, req) -> { - if (dataStreamIssueFound) { - return Map.of("my-ds", List.of(foundIssue)); - } - return Map.of(); - }), createResourceChecker("templates", (cs, req) -> { - Map> issues = new HashMap<>(); - if (componentTemplateIssueFound) { - issues.put("my-component-template", List.of(foundIssue)); - } - if (indexTemplateIssueFound) { - issues.put("my-index-template", List.of(foundIssue)); - } - return issues; - }), createResourceChecker("ilm_policies", (cs, req) -> { - if (ilmPolicyIssueFound) { - return Map.of("my-policy", List.of(foundIssue)); - } - return Map.of(); - })); - - NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( - new ClusterName(randomAlphaOfLength(5)), - nodeIssueFound ? List.of(new NodesDeprecationCheckAction.NodeResponse(discoveryNode, List.of(foundIssue))) : List.of(), - List.of() - ); - - DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); - DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from( - state, - resolver, - request, - nodeDeprecationIssues, - clusterSettingsChecks, - new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings - List.of(), - resourceCheckers - ); - - if (clusterIssueFound) { - assertThat(response.getClusterSettingsIssues(), equalTo(List.of(foundIssue))); - } else { - assertThat(response.getClusterSettingsIssues(), empty()); - } - - if (nodeIssueFound) { - String details = foundIssue.getDetails() != null ? 
foundIssue.getDetails() + " " : ""; - DeprecationIssue mergedFoundIssue = new DeprecationIssue( - foundIssue.getLevel(), - foundIssue.getMessage(), - foundIssue.getUrl(), - details + "(nodes impacted: [" + discoveryNode.getName() + "])", - foundIssue.isResolveDuringRollingUpgrade(), - foundIssue.getMeta() - ); - assertThat(response.getNodeSettingsIssues(), equalTo(List.of(mergedFoundIssue))); - } else { - assertTrue(response.getNodeSettingsIssues().isEmpty()); - } - - if (indexIssueFound) { - assertThat(response.getIndexSettingsIssues(), equalTo(Map.of("test", List.of(foundIssue)))); - } else { - assertTrue(response.getIndexSettingsIssues().isEmpty()); - } - if (dataStreamIssueFound) { - assertThat(response.getDataStreamDeprecationIssues(), equalTo(Map.of("my-ds", List.of(foundIssue)))); - } else { - assertTrue(response.getDataStreamDeprecationIssues().isEmpty()); - } - if (ilmPolicyIssueFound) { - assertThat(response.getIlmPolicyDeprecationIssues(), equalTo(Map.of("my-policy", List.of(foundIssue)))); - } else { - assertTrue(response.getIlmPolicyDeprecationIssues().isEmpty()); - } - if (componentTemplateIssueFound == false && indexTemplateIssueFound == false) { - assertTrue(response.getTemplateDeprecationIssues().isEmpty()); - } else { - if (componentTemplateIssueFound) { - assertThat(response.getTemplateDeprecationIssues().get("my-component-template"), equalTo(List.of(foundIssue))); - } - if (indexTemplateIssueFound) { - assertThat(response.getTemplateDeprecationIssues().get("my-index-template"), equalTo(List.of(foundIssue))); - } - - } - } - - public void testFromWithMergeableNodeIssues() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all"); - mapping.field("enabled", false); - mapping.endObject().endObject(); - - Metadata metadata = Metadata.builder() - .put( - IndexMetadata.builder("test") - .putMapping(Strings.toString(mapping)) - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - ) - .build(); - - DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1") - .name("node1") - .ephemeralId("ephemeralId1") - .address("hostName1", "hostAddress1", new TransportAddress(TransportAddress.META_ADDRESS, 9300)) - .roles(Set.of()) - .build(); - DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2") - .name("node2") - .ephemeralId("ephemeralId2") - .address("hostName2", "hostAddress2", new TransportAddress(TransportAddress.META_ADDRESS, 9500)) - .roles(Set.of()) - .build(); - ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); - Map metaMap1 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.1", "setting.2", "setting.3")); - Map metaMap2 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.2", "setting.3")); - DeprecationIssue foundIssue1 = createTestDeprecationIssue(metaMap1); - DeprecationIssue foundIssue2 = createTestDeprecationIssue(foundIssue1, metaMap2); - List> clusterSettingsChecks = List.of(); - List resourceCheckers = List.of(); - - NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( - new ClusterName(randomAlphaOfLength(5)), - Arrays.asList( - new NodesDeprecationCheckAction.NodeResponse(node1, List.of(foundIssue1)), - new NodesDeprecationCheckAction.NodeResponse(node2, List.of(foundIssue2)) - ), - List.of() - ); - - DeprecationInfoAction.Request request = new 
DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); - DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from( - state, - resolver, - request, - nodeDeprecationIssues, - clusterSettingsChecks, - new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings - List.of(), - resourceCheckers - ); - - String details = foundIssue1.getDetails() != null ? foundIssue1.getDetails() + " " : ""; - DeprecationIssue mergedFoundIssue = new DeprecationIssue( - foundIssue1.getLevel(), - foundIssue1.getMessage(), - foundIssue1.getUrl(), - details + "(nodes impacted: [" + node1.getName() + ", " + node2.getName() + "])", - foundIssue1.isResolveDuringRollingUpgrade(), - foundIssue2.getMeta() - ); - assertThat(response.getNodeSettingsIssues(), equalTo(List.of(mergedFoundIssue))); - } - - public void testRemoveSkippedSettings() { - Settings.Builder settingsBuilder = settings(IndexVersion.current()); - settingsBuilder.put("some.deprecated.property", "someValue1"); - settingsBuilder.put("some.other.bad.deprecated.property", "someValue2"); - settingsBuilder.put("some.undeprecated.property", "someValue3"); - settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); - Settings inputSettings = settingsBuilder.build(); - IndexMetadata dataStreamIndexMetadata = IndexMetadata.builder("ds-test-index-1") - .settings(inputSettings) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - ComponentTemplate componentTemplate = new ComponentTemplate(Template.builder().settings(inputSettings).build(), null, null); - ComposableIndexTemplate indexTemplate = ComposableIndexTemplate.builder() - .template(Template.builder().settings(inputSettings)) - .build(); - Metadata metadata = Metadata.builder() - .put(IndexMetadata.builder("test").settings(inputSettings).numberOfShards(1).numberOfReplicas(0)) - .put(dataStreamIndexMetadata, true) - .put(DataStream.builder("ds-test", List.of(dataStreamIndexMetadata.getIndex())).build()) - .indexTemplates( - Map.of( - "my-index-template", - indexTemplate, - "empty-template", - ComposableIndexTemplate.builder().indexPatterns(List.of("random")).build() - ) - ) - .componentTemplates(Map.of("my-component-template", componentTemplate)) - .persistentSettings(inputSettings) - .build(); - - ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); - AtomicReference visibleClusterSettings = new AtomicReference<>(); - List> clusterSettingsChecks = List.of((s) -> { - visibleClusterSettings.set(s.getMetadata().settings()); - return null; - }); - AtomicReference visibleIndexSettings = new AtomicReference<>(); - AtomicReference visibleComponentTemplateSettings = new AtomicReference<>(); - AtomicReference visibleIndexTemplateSettings = new AtomicReference<>(); - AtomicInteger backingIndicesCount = new AtomicInteger(0); - List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { - for (String indexName : resolver.concreteIndexNames(cs, req)) { - visibleIndexSettings.set(cs.metadata().index(indexName).getSettings()); - } - return Map.of(); - }), createResourceChecker("data_streams", (cs, req) -> { - cs.metadata().dataStreams().values().forEach(ds -> backingIndicesCount.set(ds.getIndices().size())); - return Map.of(); - }), createResourceChecker("templates", (cs, req) -> { - cs.metadata() - .componentTemplates() - .values() - .forEach(template -> 
visibleComponentTemplateSettings.set(template.template().settings())); - cs.metadata().templatesV2().values().forEach(template -> { - if (template.template() != null && template.template().settings() != null) { - visibleIndexTemplateSettings.set(template.template().settings()); - } - }); - return Map.of(); - })); - - NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( - new ClusterName(randomAlphaOfLength(5)), - List.of(), - List.of() - ); - - DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); - DeprecationInfoAction.Response.from( - state, - resolver, - request, - nodeDeprecationIssues, - clusterSettingsChecks, - new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings - List.of("some.deprecated.property", "some.other.*.deprecated.property"), - resourceCheckers - ); - - settingsBuilder = settings(IndexVersion.current()); - settingsBuilder.put("some.undeprecated.property", "someValue3"); - settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); - - Settings expectedSettings = settingsBuilder.build(); - Settings resultClusterSettings = visibleClusterSettings.get(); - Assert.assertNotNull(resultClusterSettings); - Assert.assertEquals(expectedSettings, visibleClusterSettings.get()); - - Settings resultIndexSettings = visibleIndexSettings.get(); - Assert.assertNotNull(resultIndexSettings); - Assert.assertEquals("someValue3", resultIndexSettings.get("some.undeprecated.property")); - Assert.assertEquals(resultIndexSettings.getAsList("some.undeprecated.list.property"), List.of("someValue4", "someValue5")); - Assert.assertFalse(resultIndexSettings.hasValue("some.deprecated.property")); - Assert.assertFalse(resultIndexSettings.hasValue("some.other.bad.deprecated.property")); - - assertThat(backingIndicesCount.get(), equalTo(1)); - - Assert.assertNotNull(visibleComponentTemplateSettings.get()); - Assert.assertEquals(expectedSettings, visibleComponentTemplateSettings.get()); - Assert.assertNotNull(visibleIndexTemplateSettings.get()); - Assert.assertEquals(expectedSettings, visibleIndexTemplateSettings.get()); - } - - public void testCtorFailure() { - Map> indexNames = Stream.generate(() -> randomAlphaOfLength(10)) - .limit(10) - .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); - Map> dataStreamNames = Stream.generate(() -> randomAlphaOfLength(10)) - .limit(10) - .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); - Set shouldCauseFailure = new HashSet<>(RESERVED_NAMES); - for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) { - Map> pluginSettingsIssues = randomSubsetOf(3, shouldCauseFailure).stream() - .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); - expectThrows( - ElasticsearchStatusException.class, - () -> new DeprecationInfoAction.Response( - List.of(), - List.of(), - Map.of("data_streams", dataStreamNames, "index_settings", indexNames), - pluginSettingsIssues - ) - ); - } - } - - private static DeprecationIssue createTestDeprecationIssue() { + static DeprecationIssue createTestDeprecationIssue() { return createTestDeprecationIssue(randomMap(1, 5, () -> Tuple.tuple(randomAlphaOfLength(4), randomAlphaOfLength(4)))); } - private static DeprecationIssue createTestDeprecationIssue(Map metaMap) { + static DeprecationIssue createTestDeprecationIssue(Map metaMap) { String details = randomBoolean() ? 
randomAlphaOfLength(10) : null; return new DeprecationIssue( randomFrom(Level.values()), @@ -487,7 +136,7 @@ private static DeprecationIssue createTestDeprecationIssue(Map m ); } - private static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seedIssue, Map metaMap) { + static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seedIssue, Map metaMap) { return new DeprecationIssue( seedIssue.getLevel(), seedIssue.getMessage(), @@ -498,27 +147,9 @@ private static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seed ); } - private static List randomDeprecationIssues() { + static List randomDeprecationIssues() { return Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue) .limit(randomIntBetween(0, 10)) .collect(Collectors.toList()); } - - private static ResourceDeprecationChecker createResourceChecker( - String name, - BiFunction>> check - ) { - return new ResourceDeprecationChecker() { - - @Override - public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { - return check.apply(clusterState, request); - } - - @Override - public String getName() { - return name; - } - }; - } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java index 2032a6faedc92..475cd3e6a24bc 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java @@ -90,7 +90,7 @@ public void testLegacyTierSettings() { ) .build(); - Map> issuesByComponentTemplate = checker.check(clusterState, null); + Map> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, "Configuring tiers via filtered allocation is not recommended.", @@ -136,7 +136,7 @@ public void testFrozenAction() { ) .build(); - Map> issuesByComponentTemplate = checker.check(clusterState, null); + Map> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, "ILM policy [deprecated-action] contains the action 'freeze' that is deprecated and will be removed in a future version.", diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java index ea30c93bfb5b8..44a7d4bf57bdc 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java @@ -29,7 +29,11 @@ import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.transform.transforms.DestConfig; +import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -42,10 +46,18 @@ public class IndexDeprecationCheckerTests extends ESTestCase { + 
private static final IndexVersion OLD_VERSION = IndexVersion.fromId(7170099); + private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); + private final IndexDeprecationChecker checker = new IndexDeprecationChecker(indexNameExpressionResolver); + private final TransportDeprecationInfoAction.PrecomputedData emptyPrecomputedData = + new TransportDeprecationInfoAction.PrecomputedData(); private final IndexMetadata.State indexMetdataState; public IndexDeprecationCheckerTests(@Name("indexMetadataState") IndexMetadata.State indexMetdataState) { this.indexMetdataState = indexMetdataState; + emptyPrecomputedData.setOnceNodeSettingsIssues(List.of()); + emptyPrecomputedData.setOncePluginIssues(Map.of()); + emptyPrecomputedData.setOnceTransformConfigs(List.of()); } @ParametersFactory @@ -53,11 +65,6 @@ public static List createParameters() { return List.of(new Object[] { IndexMetadata.State.OPEN }, new Object[] { IndexMetadata.State.CLOSE }); } - private static final IndexVersion OLD_VERSION = IndexVersion.fromId(7170099); - - private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); - private final IndexDeprecationChecker checker = new IndexDeprecationChecker(indexNameExpressionResolver, Map.of()); - public void testOldIndicesCheck() { IndexMetadata indexMetadata = IndexMetadata.builder("test") .settings(settings(OLD_VERSION)) @@ -79,14 +86,15 @@ public void testOldIndicesCheck() { ); Map> issuesByIndex = checker.check( clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); List issues = issuesByIndex.get("test"); assertEquals(singletonList(expected), issues); } public void testOldTransformIndicesCheck() { - var checker = new IndexDeprecationChecker(indexNameExpressionResolver, Map.of("test", List.of("test-transform"))); + var checker = new IndexDeprecationChecker(indexNameExpressionResolver); var indexMetadata = indexMetadata("test", OLD_VERSION); var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) @@ -94,21 +102,26 @@ public void testOldTransformIndicesCheck() { .build(); var expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 9.0", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This index has version: " + OLD_VERSION.toReleaseVersion(), + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and requires action before upgrading to 9.0. " + + "The following transforms are configured to write to this index: [test-transform]. 
Refer to the " + + "migration guide to learn more about how to prepare transforms destination indices for your upgrade.", false, Map.of("reindex_required", true, "transform_ids", List.of("test-transform")) ); - var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test", List.of("test-transform"))) + ); assertEquals(singletonList(expected), issuesByIndex.get("test")); } public void testOldIndicesCheckWithMultipleTransforms() { - var checker = new IndexDeprecationChecker( - indexNameExpressionResolver, - Map.of("test", List.of("test-transform1", "test-transform2")) - ); var indexMetadata = indexMetadata("test", OLD_VERSION); var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) @@ -116,21 +129,26 @@ public void testOldIndicesCheckWithMultipleTransforms() { .build(); var expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 9.0", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This index has version: " + OLD_VERSION.toReleaseVersion(), + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and requires action before upgrading to 9.0. " + + "The following transforms are configured to write to this index: [test-transform1, test-transform2]. Refer to the " + + "migration guide to learn more about how to prepare transforms destination indices for your upgrade.", false, Map.of("reindex_required", true, "transform_ids", List.of("test-transform1", "test-transform2")) ); - var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test", List.of("test-transform1", "test-transform2"))) + ); assertEquals(singletonList(expected), issuesByIndex.get("test")); } public void testMultipleOldIndicesCheckWithTransforms() { - var checker = new IndexDeprecationChecker( - indexNameExpressionResolver, - Map.of("test1", List.of("test-transform1"), "test2", List.of("test-transform2")) - ); var indexMetadata1 = indexMetadata("test1", OLD_VERSION); var indexMetadata2 = indexMetadata("test2", OLD_VERSION); var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) @@ -142,9 +160,14 @@ public void testMultipleOldIndicesCheckWithTransforms() { List.of( new DeprecationIssue( DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 9.0", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This index has version: " + OLD_VERSION.toReleaseVersion(), + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and requires action before upgrading to 9.0. 
" + + "The following transforms are configured to write to this index: [test-transform1]. Refer to the " + + "migration guide to learn more about how to prepare transforms destination indices for your upgrade.", false, Map.of("reindex_required", true, "transform_ids", List.of("test-transform1")) ) @@ -153,15 +176,24 @@ public void testMultipleOldIndicesCheckWithTransforms() { List.of( new DeprecationIssue( DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 9.0", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This index has version: " + OLD_VERSION.toReleaseVersion(), + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and requires action before upgrading to 9.0. " + + "The following transforms are configured to write to this index: [test-transform2]. Refer to the " + + "migration guide to learn more about how to prepare transforms destination indices for your upgrade.", false, Map.of("reindex_required", true, "transform_ids", List.of("test-transform2")) ) ) ); - var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test1", List.of("test-transform1"), "test2", List.of("test-transform2"))) + ); assertEquals(expected, issuesByIndex); } @@ -215,7 +247,8 @@ public void testOldIndicesCheckDataStreamIndex() { .build(); Map> issuesByIndex = checker.check( clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat(issuesByIndex.size(), equalTo(0)); } @@ -236,26 +269,21 @@ public void testOldIndicesCheckSnapshotIgnored() { Map> issuesByIndex = checker.check( clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat(issuesByIndex.size(), equalTo(0)); } public void testOldIndicesIgnoredWarningCheck() { - Settings.Builder settings = settings(OLD_VERSION).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); - IndexMetadata indexMetadata = IndexMetadata.builder("test") - .settings(settings) - .numberOfShards(1) - .numberOfReplicas(0) - .state(indexMetdataState) - .build(); + IndexMetadata indexMetadata = readonlyIndexMetadata("test", OLD_VERSION); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) .blocks(clusterBlocksForIndices(indexMetadata)) .build(); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, - "Old index with a compatibility version < 9.0 Has Been Ignored", + "Old index with a compatibility version < 9.0 has been ignored", "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", "This read-only index has version: " + OLD_VERSION.toReleaseVersion() + " and will be supported as read-only in 9.0", false, @@ -263,12 +291,122 @@ public void testOldIndicesIgnoredWarningCheck() { ); Map> issuesByIndex = checker.check( clusterState, - new 
DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertTrue(issuesByIndex.containsKey("test")); assertEquals(List.of(expected), issuesByIndex.get("test")); } + private IndexMetadata readonlyIndexMetadata(String indexName, IndexVersion indexVersion) { + Settings.Builder settings = settings(indexVersion).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); + return IndexMetadata.builder(indexName).settings(settings).numberOfShards(1).numberOfReplicas(0).state(indexMetdataState).build(); + } + + public void testOldTransformIndicesIgnoredCheck() { + var checker = new IndexDeprecationChecker(indexNameExpressionResolver); + var indexMetadata = readonlyIndexMetadata("test", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); + var expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and will be supported as a read-only index in 9.0. " + + "The following transforms are no longer able to write to this index: [test-transform]. Refer to the " + + "migration guide to learn more about how to handle your transforms destination indices.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform")) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test", List.of("test-transform"))) + ); + assertEquals(singletonList(expected), issuesByIndex.get("test")); + } + + public void testOldIndicesIgnoredCheckWithMultipleTransforms() { + var indexMetadata = readonlyIndexMetadata("test", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); + var expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and will be supported as a read-only index in 9.0. " + + "The following transforms are no longer able to write to this index: [test-transform1, test-transform2]. 
Refer to the " + + "migration guide to learn more about how to handle your transforms destination indices.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform1", "test-transform2")) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test", List.of("test-transform1", "test-transform2"))) + ); + assertEquals(singletonList(expected), issuesByIndex.get("test")); + } + + public void testMultipleOldIndicesIgnoredCheckWithTransforms() { + var indexMetadata1 = readonlyIndexMetadata("test1", OLD_VERSION); + var indexMetadata2 = readonlyIndexMetadata("test2", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata1, true).put(indexMetadata2, true)) + .blocks(clusterBlocksForIndices(indexMetadata1, indexMetadata2)) + .build(); + var expected = Map.of( + "test1", + List.of( + new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and will be supported as a read-only index in 9.0. " + + "The following transforms are no longer able to write to this index: [test-transform1]. Refer to the " + + "migration guide to learn more about how to handle your transforms destination indices.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform1")) + ) + ), + "test2", + List.of( + new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and will be supported as a read-only index in 9.0. " + + "The following transforms are no longer able to write to this index: [test-transform2]. 
Refer to the " + + "migration guide to learn more about how to handle your transforms destination indices.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform2")) + ) + ) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test1", List.of("test-transform1"), "test2", List.of("test-transform2"))) + ); + assertEquals(expected, issuesByIndex); + } + public void testTranslogRetentionSettings() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomPositiveTimeValue()); @@ -285,7 +423,8 @@ public void testTranslogRetentionSettings() { .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); List issues = issuesByIndex.get("test"); assertThat( @@ -328,7 +467,8 @@ public void testDefaultTranslogRetentionSettings() { .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat(issuesByIndex.size(), equalTo(0)); } @@ -348,7 +488,8 @@ public void testIndexDataPathSetting() { .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); final String expectedUrl = "https://www.elastic.co/guide/en/elasticsearch/reference/7.13/breaking-changes-7.13.html#deprecate-shared-data-path-setting"; @@ -382,7 +523,8 @@ public void testSimpleFSSetting() { .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat( issuesByIndex.get("test"), @@ -425,7 +567,8 @@ public void testCamelCaseDeprecation() { .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, @@ -456,7 +599,8 @@ public void testLegacyTierIndex() { .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat( issuesByIndex.get("test"), @@ -484,4 +628,23 @@ private ClusterBlocks clusterBlocksForIndices(IndexMetadata... 
indicesMetadatas) } return builder.build(); } + + private TransportDeprecationInfoAction.PrecomputedData createContextWithTransformConfigs(Map<String, List<String>> indexToTransform) { + List<TransformConfig> transforms = new ArrayList<>(); + for (Map.Entry<String, List<String>> entry : indexToTransform.entrySet()) { + String index = entry.getKey(); + for (String transform : entry.getValue()) { + transforms.add( + TransformConfig.builder() + .setId(transform) + .setSource(new SourceConfig(randomAlphaOfLength(10))) + .setDest(new DestConfig(index, List.of(), null)) + .build() + ); + } + } + TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData(); + precomputedData.setOnceTransformConfigs(transforms); + return precomputedData; + } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationCheckerTests.java new file mode 100644 index 0000000000000..78ddba87b9f85 --- /dev/null +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationCheckerTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.deprecation.DeprecationInfoActionResponseTests.createTestDeprecationIssue; +import static org.hamcrest.core.IsEqual.equalTo; + +public class NodeDeprecationCheckerTests extends ESTestCase { + + public void testMergingNodeIssues() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all"); + mapping.field("enabled", false); + mapping.endObject().endObject(); + + DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1") + .name("node1") + .ephemeralId("ephemeralId1") + .address("hostName1", "hostAddress1", new TransportAddress(TransportAddress.META_ADDRESS, 9300)) + .roles(Set.of()) + .build(); + DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2") + .name("node2") + .ephemeralId("ephemeralId2") + .address("hostName2", "hostAddress2", new TransportAddress(TransportAddress.META_ADDRESS, 9500)) + .roles(Set.of()) + .build(); + Map<String, Object> metaMap1 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.1", "setting.2", "setting.3")); + Map<String, Object> metaMap2 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.2", "setting.3")); + DeprecationIssue foundIssue1 = createTestDeprecationIssue(metaMap1); + DeprecationIssue foundIssue2 = createTestDeprecationIssue(foundIssue1, metaMap2); + + NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( + new ClusterName(randomAlphaOfLength(5)), + Arrays.asList( + new
NodesDeprecationCheckAction.NodeResponse(node1, List.of(foundIssue1)), + new NodesDeprecationCheckAction.NodeResponse(node2, List.of(foundIssue2)) + ), + List.of() + ); + + List result = NodeDeprecationChecker.reduceToDeprecationIssues(nodeDeprecationIssues); + + String details = foundIssue1.getDetails() != null ? foundIssue1.getDetails() + " " : ""; + DeprecationIssue mergedFoundIssue = new DeprecationIssue( + foundIssue1.getLevel(), + foundIssue1.getMessage(), + foundIssue1.getUrl(), + details + "(nodes impacted: [" + node1.getName() + ", " + node2.getName() + "])", + foundIssue1.isResolveDuringRollingUpgrade(), + foundIssue2.getMeta() + ); + assertThat(result, equalTo(List.of(mergedFoundIssue))); + } +} diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java index 3aaee0e5cdb52..18b85ff532234 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java @@ -30,9 +30,11 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.NODE_SETTINGS_CHECKS; +import static org.elasticsearch.xpack.deprecation.NodeDeprecationChecks.SINGLE_NODE_CHECKS; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.not; @@ -154,8 +156,8 @@ public void testSharedDataPathSetting() { .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), createTempDir()) .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); final String expectedUrl = @@ -209,8 +211,8 @@ public void testCheckReservedPrefixedRealmNames() { } final Settings settings = builder.build(); - final List deprecationIssues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + final List deprecationIssues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -235,8 +237,8 @@ public void testCheckReservedPrefixedRealmNames() { void monitoringSetting(String settingKey, String value) { Settings settings = Settings.builder().put(settingKey, value).build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings"; @@ -259,8 +261,8 @@ void monitoringExporterSetting(String suffix, String value) { String settingKey = "xpack.monitoring.exporters.test." 
+ suffix; Settings settings = Settings.builder().put(settingKey, value).build(); final XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState) ); final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings"; @@ -284,8 +286,8 @@ void monitoringExporterGroupedSetting(String suffix, String value) { String subSettingKey = settingKey + ".subsetting"; Settings settings = Settings.builder().put(subSettingKey, value).build(); final XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState) ); final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings"; @@ -310,8 +312,8 @@ void monitoringExporterSecureSetting(String suffix, String value) { secureSettings.setString(settingKey, value); Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); final XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState) ); final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings"; @@ -457,8 +459,8 @@ public void testCheckMonitoringSettingCollectionInterval() { public void testExporterUseIngestPipelineSettings() { Settings settings = Settings.builder().put("xpack.monitoring.exporters.test.use_ingest", true).build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -483,8 +485,8 @@ public void testExporterPipelineMasterTimeoutSetting() { .put("xpack.monitoring.exporters.test.index.pipeline.master_timeout", TimeValue.timeValueSeconds(10)) .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -508,8 +510,8 @@ public void testExporterPipelineMasterTimeoutSetting() { public void testExporterCreateLegacyTemplateSetting() { Settings settings = Settings.builder().put("xpack.monitoring.exporters.test.index.template.create_legacy_templates", true).build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -535,8 +537,8 @@ public void testScriptContextCacheSetting() { .put(ScriptService.SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.getKey(), "use-context") .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -564,8 +566,8 @@ public void testScriptContextCompilationsRateLimitSetting() { .put(ScriptService.SCRIPT_MAX_COMPILATIONS_RATE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "456/7m") .build(); - List issues = DeprecationChecks.filterChecks( - 
NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -601,8 +603,8 @@ public void testImplicitScriptContextCacheSetting() { .put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "2453") .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -639,8 +641,8 @@ public void testScriptContextCacheSizeSetting() { .put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), 200) .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -676,8 +678,8 @@ public void testScriptContextCacheExpirationSetting() { .put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "2d") .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -708,8 +710,8 @@ public void testScriptContextCacheExpirationSetting() { public void testEnforceDefaultTierPreferenceSetting() { Settings settings = Settings.builder().put(DataTier.ENFORCE_DEFAULT_TIER_PREFERENCE_SETTING.getKey(), randomBoolean()).build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -731,8 +733,8 @@ public void testEnforceDefaultTierPreferenceSetting() { } private List getDeprecationIssues(Settings settings, PluginsAndModules pluginsAndModules) { - final List issues = DeprecationChecks.filterChecks( - DeprecationChecks.NODE_SETTINGS_CHECKS, + final List issues = filterChecks( + NodeDeprecationChecks.SINGLE_NODE_CHECKS, c -> c.apply(settings, pluginsAndModules, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -799,8 +801,8 @@ public void testDynamicSettings() { } Metadata metadata = metadataBuilder.build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - final List issues = DeprecationChecks.filterChecks( - DeprecationChecks.NODE_SETTINGS_CHECKS, + final List issues = filterChecks( + NodeDeprecationChecks.SINGLE_NODE_CHECKS, c -> c.apply(nodettings, pluginsAndModules, clusterState, licenseState) ); @@ -832,4 +834,8 @@ public void testCheckNodeAttrData() { ); assertThat(issues, hasItem(expected)); } + + static List filterChecks(List checks, Function mapper) { + return checks.stream().map(mapper).filter(Objects::nonNull).toList(); + } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java index 81c0d1c7dc918..4e1b28b341282 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java @@ -48,12 +48,12 @@ public void 
testCheckSourceModeInComponentTemplates() throws IOException { ) .build(); - Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null); + Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING_TITLE, + "https://ela.st/migrate-source-mode", SourceFieldMapper.DEPRECATION_WARNING, - "https://github.com/elastic/elasticsearch/pull/117172", - null, false, null ); @@ -81,7 +81,7 @@ public void testCheckLegacyTiersInComponentTemplates() { ) .build(); - Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null); + Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, "Configuring tiers via filtered allocation is not recommended.", @@ -121,7 +121,7 @@ public void testCheckLegacyTierSettings() { ) .build(); - Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null); + Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, "Configuring tiers via filtered allocation is not recommended.", @@ -164,7 +164,7 @@ public void testComponentAndComposableTemplateWithSameName() { ) .build(); - Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null); + Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expectedIndexTemplateIssue = new DeprecationIssue( DeprecationIssue.Level.WARNING, "Configuring tiers via filtered allocation is not recommended.", diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoActionTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoActionTests.java index 85fa375c09c5f..945068ba3a107 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoActionTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoActionTests.java @@ -6,22 +6,295 @@ */ package org.elasticsearch.xpack.deprecation; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.hamcrest.core.IsEqual;
+import org.junit.Assert; +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.Response.RESERVED_NAMES; +import static org.elasticsearch.xpack.deprecation.DeprecationInfoActionResponseTests.createTestDeprecationIssue; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TransportDeprecationInfoActionTests extends ESTestCase { + public void testCheckAndCreateResponse() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all"); + mapping.field("enabled", false); + mapping.endObject().endObject(); + + Metadata metadata = Metadata.builder() + .put( + IndexMetadata.builder("test") + .putMapping(Strings.toString(mapping)) + .settings(settings(IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + ) + .build(); + + DiscoveryNode discoveryNode = DiscoveryNodeUtils.create("test", new TransportAddress(TransportAddress.META_ADDRESS, 9300)); + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); + boolean clusterIssueFound = randomBoolean(); + boolean nodeIssueFound = randomBoolean(); + boolean indexIssueFound = randomBoolean(); + boolean dataStreamIssueFound = randomBoolean(); + boolean indexTemplateIssueFound = randomBoolean(); + boolean componentTemplateIssueFound = randomBoolean(); + boolean ilmPolicyIssueFound = randomBoolean(); + DeprecationIssue foundIssue = createTestDeprecationIssue(); + ClusterDeprecationChecker clusterDeprecationChecker = mock(ClusterDeprecationChecker.class); + when(clusterDeprecationChecker.check(any(), any())).thenReturn(clusterIssueFound ? List.of(foundIssue) : List.of()); + List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { + if (indexIssueFound) { + return Map.of("test", List.of(foundIssue)); + } + return Map.of(); + }), createResourceChecker("data_streams", (cs, req) -> { + if (dataStreamIssueFound) { + return Map.of("my-ds", List.of(foundIssue)); + } + return Map.of(); + }), createResourceChecker("templates", (cs, req) -> { + Map> issues = new HashMap<>(); + if (componentTemplateIssueFound) { + issues.put("my-component-template", List.of(foundIssue)); + } + if (indexTemplateIssueFound) { + issues.put("my-index-template", List.of(foundIssue)); + } + return issues; + }), createResourceChecker("ilm_policies", (cs, req) -> { + if (ilmPolicyIssueFound) { + return Map.of("my-policy", List.of(foundIssue)); + } + return Map.of(); + })); + + List nodeDeprecationIssues = nodeIssueFound ? 
List.of(foundIssue) : List.of(); + + DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); + TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData(); + precomputedData.setOnceTransformConfigs(List.of()); + precomputedData.setOncePluginIssues(Map.of()); + precomputedData.setOnceNodeSettingsIssues(nodeDeprecationIssues); + DeprecationInfoAction.Response response = TransportDeprecationInfoAction.checkAndCreateResponse( + state, + resolver, + request, + List.of(), + clusterDeprecationChecker, + resourceCheckers, + precomputedData + ); + + if (clusterIssueFound) { + assertThat(response.getClusterSettingsIssues(), IsEqual.equalTo(List.of(foundIssue))); + } else { + assertThat(response.getClusterSettingsIssues(), empty()); + } + + if (nodeIssueFound) { + assertThat(response.getNodeSettingsIssues(), IsEqual.equalTo(List.of(foundIssue))); + } else { + assertTrue(response.getNodeSettingsIssues().isEmpty()); + } + + if (indexIssueFound) { + assertThat(response.getIndexSettingsIssues(), IsEqual.equalTo(Map.of("test", List.of(foundIssue)))); + } else { + assertTrue(response.getIndexSettingsIssues().isEmpty()); + } + if (dataStreamIssueFound) { + assertThat(response.getDataStreamDeprecationIssues(), IsEqual.equalTo(Map.of("my-ds", List.of(foundIssue)))); + } else { + assertTrue(response.getDataStreamDeprecationIssues().isEmpty()); + } + if (ilmPolicyIssueFound) { + assertThat(response.getIlmPolicyDeprecationIssues(), IsEqual.equalTo(Map.of("my-policy", List.of(foundIssue)))); + } else { + assertTrue(response.getIlmPolicyDeprecationIssues().isEmpty()); + } + if (componentTemplateIssueFound == false && indexTemplateIssueFound == false) { + assertTrue(response.getTemplateDeprecationIssues().isEmpty()); + } else { + if (componentTemplateIssueFound) { + assertThat(response.getTemplateDeprecationIssues().get("my-component-template"), IsEqual.equalTo(List.of(foundIssue))); + } + if (indexTemplateIssueFound) { + assertThat(response.getTemplateDeprecationIssues().get("my-index-template"), IsEqual.equalTo(List.of(foundIssue))); + } + + } + } + + public void testRemoveSkippedSettings() { + Settings.Builder settingsBuilder = settings(IndexVersion.current()); + settingsBuilder.put("some.deprecated.property", "someValue1"); + settingsBuilder.put("some.other.bad.deprecated.property", "someValue2"); + settingsBuilder.put("some.undeprecated.property", "someValue3"); + settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); + Settings inputSettings = settingsBuilder.build(); + IndexMetadata dataStreamIndexMetadata = IndexMetadata.builder("ds-test-index-1") + .settings(inputSettings) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + ComponentTemplate componentTemplate = new ComponentTemplate(Template.builder().settings(inputSettings).build(), null, null); + ComposableIndexTemplate indexTemplate = ComposableIndexTemplate.builder() + .template(Template.builder().settings(inputSettings)) + .build(); + Metadata metadata = Metadata.builder() + .put(IndexMetadata.builder("test").settings(inputSettings).numberOfShards(1).numberOfReplicas(0)) + .put(dataStreamIndexMetadata, true) + .put(DataStream.builder("ds-test", List.of(dataStreamIndexMetadata.getIndex())).build()) + .indexTemplates( + Map.of( + "my-index-template", + indexTemplate, + "empty-template", + ComposableIndexTemplate.builder().indexPatterns(List.of("random")).build() + ) + ) + 
.componentTemplates(Map.of("my-component-template", componentTemplate)) + .persistentSettings(inputSettings) + .build(); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); + AtomicReference visibleClusterSettings = new AtomicReference<>(); + ClusterDeprecationChecker clusterDeprecationChecker = mock(ClusterDeprecationChecker.class); + when(clusterDeprecationChecker.check(any(), any())).thenAnswer(invocationOnMock -> { + ClusterState observedState = invocationOnMock.getArgument(0); + visibleClusterSettings.set(observedState.getMetadata().settings()); + return List.of(); + }); + AtomicReference visibleIndexSettings = new AtomicReference<>(); + AtomicReference visibleComponentTemplateSettings = new AtomicReference<>(); + AtomicReference visibleIndexTemplateSettings = new AtomicReference<>(); + AtomicInteger backingIndicesCount = new AtomicInteger(0); + List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { + for (String indexName : resolver.concreteIndexNames(cs, req)) { + visibleIndexSettings.set(cs.metadata().index(indexName).getSettings()); + } + return Map.of(); + }), createResourceChecker("data_streams", (cs, req) -> { + cs.metadata().dataStreams().values().forEach(ds -> backingIndicesCount.set(ds.getIndices().size())); + return Map.of(); + }), createResourceChecker("templates", (cs, req) -> { + cs.metadata() + .componentTemplates() + .values() + .forEach(template -> visibleComponentTemplateSettings.set(template.template().settings())); + cs.metadata().templatesV2().values().forEach(template -> { + if (template.template() != null && template.template().settings() != null) { + visibleIndexTemplateSettings.set(template.template().settings()); + } + }); + return Map.of(); + })); + TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData(); + precomputedData.setOnceTransformConfigs(List.of()); + precomputedData.setOncePluginIssues(Map.of()); + precomputedData.setOnceNodeSettingsIssues(List.of()); + DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); + TransportDeprecationInfoAction.checkAndCreateResponse( + state, + resolver, + request, + List.of("some.deprecated.property", "some.other.*.deprecated.property"), + clusterDeprecationChecker, + resourceCheckers, + precomputedData + ); + + settingsBuilder = settings(IndexVersion.current()); + settingsBuilder.put("some.undeprecated.property", "someValue3"); + settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); + + Settings expectedSettings = settingsBuilder.build(); + Settings resultClusterSettings = visibleClusterSettings.get(); + Assert.assertNotNull(resultClusterSettings); + Assert.assertEquals(expectedSettings, visibleClusterSettings.get()); + + Settings resultIndexSettings = visibleIndexSettings.get(); + Assert.assertNotNull(resultIndexSettings); + Assert.assertEquals("someValue3", resultIndexSettings.get("some.undeprecated.property")); + Assert.assertEquals(resultIndexSettings.getAsList("some.undeprecated.list.property"), List.of("someValue4", "someValue5")); + Assert.assertFalse(resultIndexSettings.hasValue("some.deprecated.property")); + Assert.assertFalse(resultIndexSettings.hasValue("some.other.bad.deprecated.property")); + + assertThat(backingIndicesCount.get(), IsEqual.equalTo(1)); + + 
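// How the skip list is applied in this test: skip patterns may contain wildcards, e.g.
// "some.other.*.deprecated.property". A sketch of the filtering the checkers observe - the
// helper name here is hypothetical, though Settings.filter and Regex.simpleMatch are real APIs:
//
//   static Settings removeSkippedSettings(Settings settings, String[] skipPatterns) {
//       return settings.filter(key -> Regex.simpleMatch(skipPatterns, key) == false);
//   }
//
// That is why the assertions around this note see some.undeprecated.* but neither
// some.deprecated.property nor some.other.bad.deprecated.property.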
Assert.assertNotNull(visibleComponentTemplateSettings.get()); + Assert.assertEquals(expectedSettings, visibleComponentTemplateSettings.get()); + Assert.assertNotNull(visibleIndexTemplateSettings.get()); + Assert.assertEquals(expectedSettings, visibleIndexTemplateSettings.get()); + } + + public void testCtorFailure() { + Map> indexNames = Stream.generate(() -> randomAlphaOfLength(10)) + .limit(10) + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); + Map> dataStreamNames = Stream.generate(() -> randomAlphaOfLength(10)) + .limit(10) + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); + Set shouldCauseFailure = new HashSet<>(RESERVED_NAMES); + for (int i = 0; i < randomIntBetween(1, 100); i++) { + Map> pluginSettingsIssues = randomSubsetOf(3, shouldCauseFailure).stream() + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); + expectThrows( + ElasticsearchStatusException.class, + () -> new DeprecationInfoAction.Response( + List.of(), + List.of(), + Map.of("data_streams", dataStreamNames, "index_settings", indexNames), + pluginSettingsIssues + ) + ); + } + } + public void testPluginSettingIssues() { DeprecationChecker.Components components = new DeprecationChecker.Components(null, Settings.EMPTY, null); PlainActionFuture>> future = new PlainActionFuture<>(); @@ -65,6 +338,28 @@ public void testPluginSettingIssuesWithFailures() { assertThat(exception.getCause().getMessage(), containsString("boom")); } + private static ResourceDeprecationChecker createResourceChecker( + String name, + BiFunction>> check + ) { + return new ResourceDeprecationChecker() { + + @Override + public Map> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { + return check.apply(clusterState, request); + } + + @Override + public String getName() { + return name; + } + }; + } + private static class NamedChecker implements DeprecationChecker { private final String name; diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckActionTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckActionTests.java index 80692efb7474a..a0a37f2bb52d1 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckActionTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckActionTests.java @@ -61,7 +61,7 @@ public void testNodeOperation() { settingsBuilder.put("some.undeprecated.property", "someValue3"); settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); settingsBuilder.putList( - DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), + TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property") ); Settings nodeSettings = settingsBuilder.build(); @@ -73,7 +73,10 @@ public void testNodeOperation() { ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); ClusterService clusterService = Mockito.mock(ClusterService.class); when(clusterService.state()).thenReturn(clusterState); - ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, Set.of(DeprecationChecks.SKIP_DEPRECATIONS_SETTING)); + ClusterSettings clusterSettings = new ClusterSettings( + 
nodeSettings, + Set.of(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING) + ); when((clusterService.getClusterSettings())).thenReturn(clusterSettings); DiscoveryNode node = Mockito.mock(DiscoveryNode.class); when(node.getId()).thenReturn("mock-node"); @@ -98,7 +101,7 @@ public void testNodeOperation() { NodesDeprecationCheckAction.NodeRequest nodeRequest = null; AtomicReference visibleNodeSettings = new AtomicReference<>(); AtomicReference visibleClusterStateMetadataSettings = new AtomicReference<>(); - DeprecationChecks.NodeDeprecationCheck< + NodeDeprecationChecks.NodeDeprecationCheck< Settings, PluginsAndModules, ClusterState, @@ -109,7 +112,7 @@ public void testNodeOperation() { return null; }; java.util.List< - DeprecationChecks.NodeDeprecationCheck< + NodeDeprecationChecks.NodeDeprecationCheck< Settings, PluginsAndModules, ClusterState, @@ -120,7 +123,7 @@ public void testNodeOperation() { settingsBuilder.put("some.undeprecated.property", "someValue3"); settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); settingsBuilder.putList( - DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), + TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property") ); Settings expectedSettings = settingsBuilder.build(); @@ -131,7 +134,7 @@ public void testNodeOperation() { // Testing that the setting is dynamically updatable: Settings newSettings = Settings.builder() - .putList(DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.undeprecated.property")) + .putList(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.undeprecated.property")) .build(); clusterSettings.applySettings(newSettings); transportNodeDeprecationCheckAction.nodeOperation(nodeRequest, nodeSettingsChecks); @@ -141,7 +144,7 @@ public void testNodeOperation() { settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); // This is the node setting (since this is the node deprecation check), not the cluster setting: settingsBuilder.putList( - DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), + TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property") ); expectedSettings = settingsBuilder.build(); diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/80_downsample_aggregate.yml b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/80_downsample_aggregate.yml new file mode 100644 index 0000000000000..991aa3858d8bc --- /dev/null +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/80_downsample_aggregate.yml @@ -0,0 +1,79 @@ +"downsample aggregate field": + - requires: + cluster_features: ["data_stream.downsample.default_aggregate_metric_fix"] + reason: "#119696 fixed" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [sensor_id] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + sensor_id: + type: keyword + time_series_dimension: true + temperature: + type: aggregate_metric_double + metrics: [min, sum, value_count] + default_metric: sum + time_series_metric: gauge + - do: + bulk: + refresh: true + 
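# The five documents below land in three 1h buckets (18:00, 19:00, 20:00), matching the
# hits.total.value of 3 asserted further down. As a worked example of how the pre-aggregated
# metrics would merge (min of mins, sum of sums, sum of value_counts - an assumption
# consistent with the fix this test covers), the 18:00 bucket comes out as:
#   {"temperature": {"min": 24.2, "sum": 124.0, "value_count": 5}}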
index: test + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:00:00Z", "sensor_id": "1", "temperature": {"min": 24.7, "sum": 50.2, "value_count": 2}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:30:00Z", "sensor_id": "1", "temperature": {"min": 24.2, "sum": 73.8, "value_count": 3}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:00:00Z", "sensor_id": "1", "temperature": {"min": 25.1, "sum": 51.0, "value_count": 2}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:30:00Z", "sensor_id": "1", "temperature": {"min": 24.8, "sum": 24.8, "value_count": 1}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:00:00Z", "sensor_id": "1", "temperature": {"min": 24.6, "sum": 49.1, "value_count": 2}}' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + + - do: + indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + search: + index: test-downsample + body: + size: 0 + + - match: + hits.total.value: 3 + + - do: + indices.get_mapping: + index: test-downsample + - match: + test-downsample.mappings.properties.temperature: + type: aggregate_metric_double + metrics: [min, sum, value_count] + default_metric: sum + time_series_metric: gauge diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index afa2e95e1284c..917ce781fb1f8 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -119,7 +119,7 @@ public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { } catch (Exception e) { throw new AssertionError(e); } - }, 60, TimeUnit.SECONDS); + }, 120, TimeUnit.SECONDS); ensureGreen(targetIndex); } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java index a451439fadea1..94650e33a397f 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java @@ -10,18 +10,18 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; public final class AggregateMetricFieldValueFetcher extends FieldValueFetcher { - private final AggregateDoubleMetricFieldType aggMetricFieldType; + private final AggregateMetricDoubleFieldType aggMetricFieldType; private final AbstractDownsampleFieldProducer 
fieldProducer; AggregateMetricFieldValueFetcher( MappedFieldType fieldType, - AggregateDoubleMetricFieldType aggMetricFieldType, + AggregateMetricDoubleFieldType aggMetricFieldType, IndexFieldData fieldData ) { super(fieldType.name(), fieldType, fieldData); @@ -34,7 +34,7 @@ public AbstractDownsampleFieldProducer fieldProducer() { } private AbstractDownsampleFieldProducer createFieldProducer() { - AggregateDoubleMetricFieldMapper.Metric metric = null; + AggregateMetricDoubleFieldMapper.Metric metric = null; for (var e : aggMetricFieldType.getMetricFields().entrySet()) { NumberFieldMapper.NumberFieldType metricSubField = e.getValue(); if (metricSubField.name().equals(name())) { @@ -52,7 +52,7 @@ private AbstractDownsampleFieldProducer createFieldProducer() { case min -> new MetricFieldProducer.Min(); case sum -> new MetricFieldProducer.Sum(); // To compute value_count summary, we must sum all field values - case value_count -> new MetricFieldProducer.Sum(AggregateDoubleMetricFieldMapper.Metric.value_count.name()); + case value_count -> new MetricFieldProducer.Sum(AggregateMetricDoubleFieldMapper.Metric.value_count.name()); }; return new MetricFieldProducer.GaugeMetricFieldProducer(aggMetricFieldType.name(), metricOperation); } else { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java index 3657e4989ccbd..811d36ec1075a 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; import java.util.ArrayList; import java.util.Collections; @@ -82,7 +82,7 @@ static List create(SearchExecutionContext context, String[] f MappedFieldType fieldType = context.getFieldType(field); assert fieldType != null : "Unknown field type for field: [" + field + "]"; - if (fieldType instanceof AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType aggMetricFieldType) { + if (fieldType instanceof AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType aggMetricFieldType) { // If the field is an aggregate_metric_double field, we should load all its subfields // This is a downsample-of-downsample case for (NumberFieldMapper.NumberFieldType metricSubField : aggMetricFieldType.getMetricFields().values()) { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java index b211c5bfb0d12..8a90411bc1c5f 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java @@ -12,7 +12,7 @@ import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.mapper.flattened.FlattenedFieldSyntheticWriterHelper; import org.elasticsearch.xcontent.XContentBuilder; -import 
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 7c26ad60fb13c..2c08dcd9017fd 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -76,7 +76,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.downsample.DownsampleShardPersistentTaskState; import org.elasticsearch.xpack.core.downsample.DownsampleShardTask; @@ -91,6 +91,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -739,6 +740,39 @@ private static void addTimestampField( .endObject(); } + // public for testing + public record AggregateMetricDoubleFieldSupportedMetrics(String defaultMetric, List supportedMetrics) {} + + // public for testing + public static AggregateMetricDoubleFieldSupportedMetrics getSupportedMetrics( + final TimeSeriesParams.MetricType metricType, + final Map fieldProperties + ) { + boolean sourceIsAggregate = fieldProperties.get("type").equals(AggregateMetricDoubleFieldMapper.CONTENT_TYPE); + List supportedAggs = List.of(metricType.supportedAggs()); + + if (sourceIsAggregate) { + @SuppressWarnings("unchecked") + List currentAggs = (List) fieldProperties.get(AggregateMetricDoubleFieldMapper.Names.METRICS); + supportedAggs = supportedAggs.stream().filter(currentAggs::contains).toList(); + } + + assert supportedAggs.size() > 0; + + String defaultMetric = "max"; + if (supportedAggs.contains(defaultMetric) == false) { + defaultMetric = supportedAggs.get(0); + } + if (sourceIsAggregate) { + defaultMetric = Objects.requireNonNullElse( + (String) fieldProperties.get(AggregateMetricDoubleFieldMapper.Names.DEFAULT_METRIC), + defaultMetric + ); + } + + return new AggregateMetricDoubleFieldSupportedMetrics(defaultMetric, supportedAggs); + } + private static void addMetricFieldMapping(final XContentBuilder builder, final String field, final Map fieldProperties) throws IOException { final TimeSeriesParams.MetricType metricType = TimeSeriesParams.MetricType.fromString( @@ -752,12 +786,11 @@ private static void addMetricFieldMapping(final XContentBuilder builder, final S builder.field(fieldProperty, fieldProperties.get(fieldProperty)); } } else { - final String[] supportedAggsArray = metricType.supportedAggs(); - // We choose max as the default metric - final String defaultMetric = List.of(supportedAggsArray).contains("max") ? 
"max" : supportedAggsArray[0]; - builder.field("type", AggregateDoubleMetricFieldMapper.CONTENT_TYPE) - .array(AggregateDoubleMetricFieldMapper.Names.METRICS, supportedAggsArray) - .field(AggregateDoubleMetricFieldMapper.Names.DEFAULT_METRIC, defaultMetric) + var supported = getSupportedMetrics(metricType, fieldProperties); + + builder.field("type", AggregateMetricDoubleFieldMapper.CONTENT_TYPE) + .stringListField(AggregateMetricDoubleFieldMapper.Names.METRICS, supported.supportedMetrics) + .field(AggregateMetricDoubleFieldMapper.Names.DEFAULT_METRIC, supported.defaultMetric) .field(TIME_SERIES_METRIC_PARAM, metricType); } builder.endObject(); diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/TransportDownsampleActionTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/TransportDownsampleActionTests.java index fb699fd7c3418..1b2cc32e12a65 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/TransportDownsampleActionTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/TransportDownsampleActionTests.java @@ -13,12 +13,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import java.util.List; +import java.util.Map; import java.util.UUID; +import static org.hamcrest.Matchers.is; + public class TransportDownsampleActionTests extends ESTestCase { public void testCopyIndexMetadata() { // GIVEN @@ -107,4 +111,25 @@ private static void assertTargetSettings(final IndexMetadata indexMetadata, fina settings.get(IndexMetadata.SETTING_CREATION_DATE) ); } + + public void testGetSupportedMetrics() { + TimeSeriesParams.MetricType metricType = TimeSeriesParams.MetricType.GAUGE; + Map fieldProperties = Map.of( + "type", + "aggregate_metric_double", + "metrics", + List.of("max", "sum"), + "default_metric", + "sum" + ); + + var supported = TransportDownsampleAction.getSupportedMetrics(metricType, fieldProperties); + assertThat(supported.defaultMetric(), is("sum")); + assertThat(supported.supportedMetrics(), is(List.of("max", "sum"))); + + fieldProperties = Map.of("type", "integer"); + supported = TransportDownsampleAction.getSupportedMetrics(metricType, fieldProperties); + assertThat(supported.defaultMetric(), is("max")); + assertThat(supported.supportedMetrics(), is(List.of(metricType.supportedAggs()))); + } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java index cbede5871f275..ccaf4ce3a8861 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java @@ -71,6 +71,9 @@ Collection verify(LogicalPlan plan) { // start bottom-up plan.forEachUp(p -> { + if (p.getClass().equals(Join.class)) { + failures.add(fail(p, "JOIN command is not supported")); + } if (p.analyzed()) { return; } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java index fc41bdd627c95..2e8b8578b5056 100644 --- 
a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java @@ -10,23 +10,21 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import static org.hamcrest.Matchers.equalTo; public abstract class AbstractBWCSerializationTestCase extends AbstractXContentSerializingTestCase { - private static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + private static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - private static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + private static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); protected abstract T mutateInstanceForVersion(T instance, TransportVersion version); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java index 30777f43597c8..76c2b3355e236 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java @@ -10,22 +10,20 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; import java.io.IOException; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import static org.hamcrest.Matchers.equalTo; public abstract class AbstractBWCWireSerializingTestCase extends AbstractWireSerializingTestCase { - private static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + private static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - private static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + private static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); protected abstract T mutateInstanceForVersion(T instance, TransportVersion version); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java index bec71a9846562..887132ab729e9 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java @@ -368,6 +368,13 @@ public void testJoin() { accept(idxr, "foo where serial_event_id == 
0"); } + public void testJoinCommand() { + final IndexResolution idxr = loadIndexResolution("mapping-ip.json"); + + assertEquals("1:1: JOIN command is not supported", error(idxr, "join [any where true] [any where true]")); + assertEquals("1:1: JOIN command is not supported", error(idxr, "join [any where true] [any where true] | tail 3")); + } + public void testMultiField() { final IndexResolution idxr = loadIndexResolution("mapping-multi-field.json"); accept(idxr, "foo where multi_field.raw == \"bar\""); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 0f1cfbb85039c..dc75ac3a96248 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -32,6 +32,7 @@ public class MetadataAttribute extends TypedAttribute { public static final String TIMESTAMP_FIELD = "@timestamp"; public static final String TSID_FIELD = "_tsid"; public static final String SCORE = "_score"; + public static final String INDEX = "_index"; static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Attribute.class, @@ -42,7 +43,7 @@ public class MetadataAttribute extends TypedAttribute { private static final Map> ATTRIBUTES_MAP = Map.of( "_version", tuple(DataType.LONG, false), // _version field is not searchable - "_index", + INDEX, tuple(DataType.KEYWORD, true), IdFieldMapper.NAME, tuple(DataType.KEYWORD, false), // actually searchable, but fielddata access on the _id field is disallowed by default diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java index 73e2d5ec626ac..321c79ee13a83 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java @@ -27,11 +27,12 @@ public class EsField implements Writeable { private static Map> readers = Map.ofEntries( - Map.entry("EsField", EsField::new), Map.entry("DateEsField", DateEsField::new), + Map.entry("EsField", EsField::new), Map.entry("InvalidMappedField", InvalidMappedField::new), Map.entry("KeywordEsField", KeywordEsField::new), Map.entry("MultiTypeEsField", MultiTypeEsField::new), + Map.entry("PotentiallyUnmappedKeywordEsField", PotentiallyUnmappedKeywordEsField::new), Map.entry("TextEsField", TextEsField::new), Map.entry("UnsupportedEsField", UnsupportedEsField::new) ); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java index f83e4652ebebd..f8337d0decae7 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java @@ -45,7 +45,7 @@ public InvalidMappedField(String name, String errorMessage) { * Constructor supporting union types, used in ES|QL. 
*/ public InvalidMappedField(String name, Map> typesToIndices) { - this(name, makeErrorMessage(typesToIndices), new TreeMap<>(), typesToIndices); + this(name, makeErrorMessage(typesToIndices, false), new TreeMap<>(), typesToIndices); } private InvalidMappedField(String name, String errorMessage, Map properties, Map> typesToIndices) { @@ -107,12 +107,21 @@ public Map> getTypesToIndices() { return typesToIndices; } - private static String makeErrorMessage(Map> typesToIndices) { + public static String makeErrorsMessageIncludingInsistKeyword(Map> typesToIndices) { + return makeErrorMessage(typesToIndices, true); + } + + private static String makeErrorMessage(Map> typesToIndices, boolean includeInsistKeyword) { StringBuilder errorMessage = new StringBuilder(); + var isInsistKeywordOnlyKeyword = includeInsistKeyword && typesToIndices.containsKey(DataType.KEYWORD.typeName()) == false; errorMessage.append("mapped as ["); - errorMessage.append(typesToIndices.size()); + errorMessage.append(typesToIndices.size() + (isInsistKeywordOnlyKeyword ? 1 : 0)); errorMessage.append("] incompatible types: "); boolean first = true; + if (isInsistKeywordOnlyKeyword) { + first = false; + errorMessage.append("[keyword] enforced by INSIST command"); + } for (Map.Entry> e : typesToIndices.entrySet()) { if (first) { first = false; @@ -121,7 +130,12 @@ private static String makeErrorMessage(Map> typesToIndices) } errorMessage.append("["); errorMessage.append(e.getKey()); - errorMessage.append("] in "); + errorMessage.append("] "); + if (e.getKey().equals(DataType.KEYWORD.typeName()) && includeInsistKeyword) { + errorMessage.append("enforced by INSIST command and in "); + } else { + errorMessage.append("in "); + } if (e.getValue().size() <= 3) { errorMessage.append(e.getValue()); } else { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/PotentiallyUnmappedKeywordEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/PotentiallyUnmappedKeywordEsField.java new file mode 100644 index 0000000000000..8672b6b61dee7 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/PotentiallyUnmappedKeywordEsField.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.core.type; + +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; + +/** + * This class is used as a marker for fields that may be unmapped, where an unmapped field is a field which exists in the _source but is not + * mapped in the index. Note that this field may be mapped for some indices, but is unmapped in at least one of them. + * For indices where the field is unmapped, we will try to load them directly from _source. 
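+ * A hypothetical example (the field name here is illustrative): if {@code host.name} is mapped as a keyword in one
+ * backing index but absent from the mapping of another, the field resolves as potentially unmapped, and for documents
+ * in the latter index its values are read from _source.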
+ */ +public class PotentiallyUnmappedKeywordEsField extends KeywordEsField { + public PotentiallyUnmappedKeywordEsField(String name) { + super(name); + } + + public PotentiallyUnmappedKeywordEsField(StreamInput in) throws IOException { + super(in); + } + + public String getWriteableName() { + return "PotentiallyUnmappedKeywordEsField"; + } +} diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java index 444dbcc1b9e58..8096153459003 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java @@ -37,11 +37,6 @@ * are ever collected. *
 * <p>
- * The generation code will also look for a method called {@code combineValueCount}
- * which is called once per received block with a count of values. NOTE: We may
- * not need this after we convert AVG into a composite operation.
- * </p>
- * <p>
* The generation code also looks for the optional methods {@code combineIntermediate} * and {@code evaluateFinal} which are used to combine intermediate states and * produce the final output. If the first is missing then the generated code will @@ -63,4 +58,8 @@ */ Class[] warnExceptions() default {}; + /** + * If {@code true} then the @timestamp LongVector will be appended to the input blocks of the aggregation function. + */ + boolean includeTimestamps() default false; } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java index 15fc75a990c42..ec4bf2bb3907b 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java @@ -31,6 +31,7 @@ import static org.elasticsearch.compute.gen.Types.AGGREGATOR_FUNCTION_SUPPLIER; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; +import static org.elasticsearch.compute.gen.Types.LIST_AGG_FUNC_DESC; import static org.elasticsearch.compute.gen.Types.LIST_INTEGER; import static org.elasticsearch.compute.gen.Types.STRING; @@ -66,7 +67,6 @@ public AggregatorFunctionSupplierImplementer( createParameters.addAll(groupingAggregatorImplementer.createParameters()); } this.createParameters = new ArrayList<>(createParameters); - this.createParameters.add(0, new Parameter(LIST_INTEGER, "channels")); this.implementation = ClassName.get( elements.getPackageOf(declarationType).toString(), @@ -98,11 +98,9 @@ private TypeSpec type() { } createParameters.stream().forEach(p -> p.declareField(builder)); builder.addMethod(ctor()); - if (aggregatorImplementer != null) { - builder.addMethod(aggregator()); - } else { - builder.addMethod(unsupportedNonGroupingAggregator()); - } + builder.addMethod(nonGroupingIntermediateStateDesc()); + builder.addMethod(groupingIntermediateStateDesc()); + builder.addMethod(aggregator()); builder.addMethod(groupingAggregator()); builder.addMethod(describe()); return builder.build(); @@ -122,12 +120,28 @@ private MethodSpec ctor() { return builder.build(); } - private MethodSpec unsupportedNonGroupingAggregator() { - MethodSpec.Builder builder = MethodSpec.methodBuilder("aggregator") - .addParameter(DRIVER_CONTEXT, "driverContext") - .returns(Types.AGGREGATOR_FUNCTION); + private MethodSpec nonGroupingIntermediateStateDesc() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("nonGroupingIntermediateStateDesc"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); - builder.addStatement("throw new UnsupportedOperationException($S)", "non-grouping aggregator is not supported"); + builder.returns(LIST_AGG_FUNC_DESC); + + if (aggregatorImplementer == null) { + builder.addStatement("throw new UnsupportedOperationException($S)", "non-grouping aggregator is not supported"); + return builder.build(); + } + + builder.addStatement("return $T.intermediateStateDesc()", aggregatorImplementer.implementation()); + + return builder.build(); + } + + private MethodSpec groupingIntermediateStateDesc() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("groupingIntermediateStateDesc"); + builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); + builder.returns(LIST_AGG_FUNC_DESC); + + builder.addStatement("return 
$T.intermediateStateDesc()", groupingAggregatorImplementer.implementation()); + return builder.build(); } @@ -135,6 +149,14 @@ private MethodSpec aggregator() { MethodSpec.Builder builder = MethodSpec.methodBuilder("aggregator"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); builder.addParameter(DRIVER_CONTEXT, "driverContext"); + builder.addParameter(LIST_INTEGER, "channels"); + + if (aggregatorImplementer == null) { + builder.returns(Types.AGGREGATOR_FUNCTION); + builder.addStatement("throw new UnsupportedOperationException($S)", "non-grouping aggregator is not supported"); + return builder.build(); + } + builder.returns(aggregatorImplementer.implementation()); if (hasWarnings) { @@ -160,6 +182,7 @@ private MethodSpec groupingAggregator() { MethodSpec.Builder builder = MethodSpec.methodBuilder("groupingAggregator"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); builder.addParameter(DRIVER_CONTEXT, "driverContext"); + builder.addParameter(LIST_INTEGER, "channels"); builder.returns(groupingAggregatorImplementer.implementation()); if (hasWarnings) { diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 46881bf337c89..d775a46109214 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -17,12 +17,12 @@ import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.gen.Methods.TypeMatcher; import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Objects; -import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -34,27 +34,24 @@ import javax.lang.model.util.Elements; import static java.util.stream.Collectors.joining; -import static org.elasticsearch.compute.gen.Methods.findMethod; -import static org.elasticsearch.compute.gen.Methods.findRequiredMethod; +import static org.elasticsearch.compute.gen.Methods.requireAnyArgs; +import static org.elasticsearch.compute.gen.Methods.requireAnyType; +import static org.elasticsearch.compute.gen.Methods.requireArgs; +import static org.elasticsearch.compute.gen.Methods.requireName; +import static org.elasticsearch.compute.gen.Methods.requirePrimitiveOrImplements; +import static org.elasticsearch.compute.gen.Methods.requireStaticMethod; +import static org.elasticsearch.compute.gen.Methods.requireType; +import static org.elasticsearch.compute.gen.Methods.requireVoidType; import static org.elasticsearch.compute.gen.Methods.vectorAccessorName; import static org.elasticsearch.compute.gen.Types.AGGREGATOR_FUNCTION; import static org.elasticsearch.compute.gen.Types.BIG_ARRAYS; import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BLOCK_ARRAY; -import static org.elasticsearch.compute.gen.Types.BOOLEAN_BLOCK; import static org.elasticsearch.compute.gen.Types.BOOLEAN_VECTOR; import static org.elasticsearch.compute.gen.Types.BYTES_REF; -import static org.elasticsearch.compute.gen.Types.BYTES_REF_BLOCK; -import static org.elasticsearch.compute.gen.Types.BYTES_REF_VECTOR; -import static org.elasticsearch.compute.gen.Types.DOUBLE_BLOCK; -import static 
org.elasticsearch.compute.gen.Types.DOUBLE_VECTOR; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.ELEMENT_TYPE; -import static org.elasticsearch.compute.gen.Types.FLOAT_BLOCK; -import static org.elasticsearch.compute.gen.Types.FLOAT_VECTOR; import static org.elasticsearch.compute.gen.Types.INTERMEDIATE_STATE_DESC; -import static org.elasticsearch.compute.gen.Types.INT_BLOCK; -import static org.elasticsearch.compute.gen.Types.INT_VECTOR; import static org.elasticsearch.compute.gen.Types.LIST_AGG_FUNC_DESC; import static org.elasticsearch.compute.gen.Types.LIST_INTEGER; import static org.elasticsearch.compute.gen.Types.LONG_BLOCK; @@ -78,46 +75,41 @@ public class AggregatorImplementer { private final List warnExceptions; private final ExecutableElement init; private final ExecutableElement combine; - private final ExecutableElement combineValueCount; - private final ExecutableElement combineIntermediate; - private final ExecutableElement evaluateFinal; + private final List createParameters; private final ClassName implementation; - private final TypeName stateType; - private final boolean stateTypeHasSeen; - private final boolean stateTypeHasFailed; - private final boolean valuesIsBytesRef; - private final boolean valuesIsArray; private final List intermediateState; - private final List createParameters; + private final boolean includeTimestampVector; + + private final AggregationState aggState; + private final AggregationParameter aggParam; public AggregatorImplementer( Elements elements, TypeElement declarationType, IntermediateState[] interStateAnno, - List warnExceptions + List warnExceptions, + boolean includeTimestampVector ) { this.declarationType = declarationType; this.warnExceptions = warnExceptions; - this.init = findRequiredMethod(declarationType, new String[] { "init", "initSingle" }, e -> true); - this.stateType = choseStateType(); - this.stateTypeHasSeen = elements.getAllMembers(elements.getTypeElement(stateType.toString())) - .stream() - .anyMatch(e -> e.toString().equals("seen()")); - this.stateTypeHasFailed = elements.getAllMembers(elements.getTypeElement(stateType.toString())) - .stream() - .anyMatch(e -> e.toString().equals("failed()")); + this.init = requireStaticMethod( + declarationType, + requirePrimitiveOrImplements(elements, Types.AGGREGATOR_STATE), + requireName("init", "initSingle"), + requireAnyArgs("") + ); + this.aggState = AggregationState.create(elements, init.getReturnType(), warnExceptions.isEmpty() == false, false); + + this.combine = requireStaticMethod( + declarationType, + aggState.declaredType().isPrimitive() ? 
requireType(aggState.declaredType()) : requireVoidType(), + requireName("combine"), + combineArgs(aggState, includeTimestampVector) + ); + // TODO support multiple parameters + this.aggParam = AggregationParameter.create(combine.getParameters().getLast().asType()); - this.combine = findRequiredMethod(declarationType, new String[] { "combine" }, e -> { - if (e.getParameters().size() == 0) { - return false; - } - TypeName firstParamType = TypeName.get(e.getParameters().get(0).asType()); - return firstParamType.isPrimitive() || firstParamType.toString().equals(stateType.toString()); - }); - this.combineValueCount = findMethod(declarationType, "combineValueCount"); - this.combineIntermediate = findMethod(declarationType, "combineIntermediate"); - this.evaluateFinal = findMethod(declarationType, "evaluateFinal"); this.createParameters = init.getParameters() .stream() .map(Parameter::from) @@ -128,9 +120,20 @@ public AggregatorImplementer( elements.getPackageOf(declarationType).toString(), (declarationType.getSimpleName() + "AggregatorFunction").replace("AggregatorAggregator", "Aggregator") ); - this.valuesIsBytesRef = BYTES_REF.equals(valueTypeName()); - this.valuesIsArray = TypeKind.ARRAY.equals(valueTypeKind()); - intermediateState = Arrays.stream(interStateAnno).map(IntermediateStateDesc::newIntermediateStateDesc).toList(); + this.intermediateState = Arrays.stream(interStateAnno).map(IntermediateStateDesc::newIntermediateStateDesc).toList(); + this.includeTimestampVector = includeTimestampVector; + } + + private static Methods.ArgumentMatcher combineArgs(AggregationState aggState, boolean includeTimestampVector) { + if (includeTimestampVector) { + return requireArgs( + requireType(aggState.declaredType()), + requireType(TypeName.LONG), // @timestamp + requireAnyType("") + ); + } else { + return requireArgs(requireType(aggState.declaredType()), requireAnyType("")); + } } ClassName implementation() { @@ -141,68 +144,8 @@ List createParameters() { return createParameters; } - private TypeName choseStateType() { - TypeName initReturn = TypeName.get(init.getReturnType()); - if (false == initReturn.isPrimitive()) { - return initReturn; - } - String simpleName = firstUpper(initReturn.toString()); - if (warnExceptions.isEmpty()) { - return ClassName.get("org.elasticsearch.compute.aggregation", simpleName + "State"); - } - return ClassName.get("org.elasticsearch.compute.aggregation", simpleName + "FallibleState"); - } - - static String valueType(ExecutableElement init, ExecutableElement combine) { - if (combine != null) { - // If there's an explicit combine function it's final parameter is the type of the value. 
- return combine.getParameters().get(combine.getParameters().size() - 1).asType().toString(); - } - String initReturn = init.getReturnType().toString(); - switch (initReturn) { - case "double": - return "double"; - case "float": - return "float"; - case "long": - return "long"; - case "int": - return "int"; - case "boolean": - return "boolean"; - default: - throw new IllegalArgumentException("unknown primitive type for " + initReturn); - } - } - - static ClassName valueBlockType(ExecutableElement init, ExecutableElement combine) { - return switch (valueType(init, combine)) { - case "boolean" -> BOOLEAN_BLOCK; - case "double" -> DOUBLE_BLOCK; - case "float" -> FLOAT_BLOCK; - case "long" -> LONG_BLOCK; - case "int", "int[]" -> INT_BLOCK; - case "org.apache.lucene.util.BytesRef" -> BYTES_REF_BLOCK; - default -> throw new IllegalArgumentException("unknown block type for " + valueType(init, combine)); - }; - } - - static ClassName valueVectorType(ExecutableElement init, ExecutableElement combine) { - return switch (valueType(init, combine)) { - case "boolean" -> BOOLEAN_VECTOR; - case "double" -> DOUBLE_VECTOR; - case "float" -> FLOAT_VECTOR; - case "long" -> LONG_VECTOR; - case "int", "int[]" -> INT_VECTOR; - case "org.apache.lucene.util.BytesRef" -> BYTES_REF_VECTOR; - default -> throw new IllegalArgumentException("unknown vector type for " + valueType(init, combine)); - }; - } - - public static String firstUpper(String s) { - String head = s.toString().substring(0, 1).toUpperCase(Locale.ROOT); - String tail = s.toString().substring(1); - return head + tail; + public static String capitalize(String s) { + return Character.toUpperCase(s.charAt(0)) + s.substring(1); } public JavaFile sourceFile() { @@ -232,7 +175,7 @@ private TypeSpec type() { } builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL); - builder.addField(stateType, "state", Modifier.PRIVATE, Modifier.FINAL); + builder.addField(aggState.type, "state", Modifier.PRIVATE, Modifier.FINAL); builder.addField(LIST_INTEGER, "channels", Modifier.PRIVATE, Modifier.FINAL); for (Parameter p : createParameters) { @@ -292,10 +235,10 @@ private CodeBlock callInit() { .map(p -> TypeName.get(p.asType()).equals(BIG_ARRAYS) ? 
"driverContext.bigArrays()" : p.getSimpleName().toString()) .collect(joining(", ")); CodeBlock.Builder builder = CodeBlock.builder(); - if (init.getReturnType().toString().equals(stateType.toString())) { - builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall); + if (aggState.declaredType().isPrimitive()) { + builder.add("new $T($T.$L($L))", aggState.type(), declarationType, init.getSimpleName(), initParametersCall); } else { - builder.add("new $T($T.$L($L))", stateType, declarationType, init.getSimpleName(), initParametersCall); + builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall); } return builder.build(); } @@ -320,7 +263,7 @@ private MethodSpec ctor() { } builder.addParameter(DRIVER_CONTEXT, "driverContext"); builder.addParameter(LIST_INTEGER, "channels"); - builder.addParameter(stateType, "state"); + builder.addParameter(aggState.type, "state"); if (warnExceptions.isEmpty() == false) { builder.addStatement("this.warnings = warnings"); @@ -352,7 +295,7 @@ private MethodSpec intermediateBlockCount() { private MethodSpec addRawInput() { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawInput"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page").addParameter(BOOLEAN_VECTOR, "mask"); - if (stateTypeHasFailed) { + if (aggState.hasFailed()) { builder.beginControlFlow("if (state.failed())"); builder.addStatement("return"); builder.endControlFlow(); @@ -366,43 +309,62 @@ private MethodSpec addRawInput() { builder.beginControlFlow("if (mask.allTrue())"); { builder.addComment("No masking"); - builder.addStatement("$T block = page.getBlock(channels.get(0))", valueBlockType(init, combine)); - builder.addStatement("$T vector = block.asVector()", valueVectorType(init, combine)); + builder.addStatement("$T block = page.getBlock(channels.get(0))", blockType(aggParam.type())); + builder.addStatement("$T vector = block.asVector()", vectorType(aggParam.type())); + if (includeTimestampVector) { + builder.addStatement("$T timestampsBlock = page.getBlock(channels.get(1))", LONG_BLOCK); + builder.addStatement("$T timestampsVector = timestampsBlock.asVector()", LONG_VECTOR); + + builder.beginControlFlow("if (timestampsVector == null) "); + builder.addStatement("throw new IllegalStateException($S)", "expected @timestamp vector; but got a block"); + builder.endControlFlow(); + } builder.beginControlFlow("if (vector != null)"); - builder.addStatement("addRawVector(vector)"); + builder.addStatement(includeTimestampVector ? "addRawVector(vector, timestampsVector)" : "addRawVector(vector)"); builder.nextControlFlow("else"); - builder.addStatement("addRawBlock(block)"); + builder.addStatement(includeTimestampVector ? 
"addRawBlock(block, timestampsVector)" : "addRawBlock(block)"); builder.endControlFlow(); builder.addStatement("return"); } builder.endControlFlow(); builder.addComment("Some positions masked away, others kept"); - builder.addStatement("$T block = page.getBlock(channels.get(0))", valueBlockType(init, combine)); - builder.addStatement("$T vector = block.asVector()", valueVectorType(init, combine)); + builder.addStatement("$T block = page.getBlock(channels.get(0))", blockType(aggParam.type())); + builder.addStatement("$T vector = block.asVector()", vectorType(aggParam.type())); + if (includeTimestampVector) { + builder.addStatement("$T timestampsBlock = page.getBlock(channels.get(1))", LONG_BLOCK); + builder.addStatement("$T timestampsVector = timestampsBlock.asVector()", LONG_VECTOR); + + builder.beginControlFlow("if (timestampsVector == null) "); + builder.addStatement("throw new IllegalStateException($S)", "expected @timestamp vector; but got a block"); + builder.endControlFlow(); + } builder.beginControlFlow("if (vector != null)"); - builder.addStatement("addRawVector(vector, mask)"); + builder.addStatement(includeTimestampVector ? "addRawVector(vector, timestampsVector, mask)" : "addRawVector(vector, mask)"); builder.nextControlFlow("else"); - builder.addStatement("addRawBlock(block, mask)"); + builder.addStatement(includeTimestampVector ? "addRawBlock(block, timestampsVector, mask)" : "addRawBlock(block, mask)"); builder.endControlFlow(); return builder.build(); } private MethodSpec addRawVector(boolean masked) { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawVector"); - builder.addModifiers(Modifier.PRIVATE).addParameter(valueVectorType(init, combine), "vector"); + builder.addModifiers(Modifier.PRIVATE).addParameter(vectorType(aggParam.type()), "vector"); + if (includeTimestampVector) { + builder.addParameter(LONG_VECTOR, "timestamps"); + } if (masked) { builder.addParameter(BOOLEAN_VECTOR, "mask"); } - if (valuesIsArray) { + if (aggParam.isArray()) { builder.addComment("This type does not support vectors because all values are multi-valued"); return builder.build(); } - if (stateTypeHasSeen) { + if (aggState.hasSeen()) { builder.addStatement("state.seen(true)"); } - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { // Add bytes_ref scratch var that will be used for bytes_ref blocks/vectors builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); } @@ -415,20 +377,20 @@ private MethodSpec addRawVector(boolean masked) { combineRawInput(builder, "vector"); } builder.endControlFlow(); - if (combineValueCount != null) { - builder.addStatement("$T.combineValueCount(state, vector.getPositionCount())", declarationType); - } return builder.build(); } private MethodSpec addRawBlock(boolean masked) { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawBlock"); - builder.addModifiers(Modifier.PRIVATE).addParameter(valueBlockType(init, combine), "block"); + builder.addModifiers(Modifier.PRIVATE).addParameter(blockType(aggParam.type()), "block"); + if (includeTimestampVector) { + builder.addParameter(LONG_VECTOR, "timestamps"); + } if (masked) { builder.addParameter(BOOLEAN_VECTOR, "mask"); } - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { // Add bytes_ref scratch var that will only be used for bytes_ref blocks/vectors builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); } @@ -440,16 +402,16 @@ private MethodSpec addRawBlock(boolean masked) { builder.beginControlFlow("if (block.isNull(p))"); builder.addStatement("continue"); 
builder.endControlFlow(); - if (stateTypeHasSeen) { + if (aggState.hasSeen()) { builder.addStatement("state.seen(true)"); } builder.addStatement("int start = block.getFirstValueIndex(p)"); builder.addStatement("int end = start + block.getValueCount(p)"); - if (valuesIsArray) { - String arrayType = valueTypeString(); + if (aggParam.isArray()) { + String arrayType = aggParam.type().toString().replace("[]", ""); builder.addStatement("$L[] valuesArray = new $L[end - start]", arrayType, arrayType); builder.beginControlFlow("for (int i = start; i < end; i++)"); - builder.addStatement("valuesArray[i-start] = $L.get$L(i)", "block", firstUpper(arrayType)); + builder.addStatement("valuesArray[i-start] = $L.get$L(i)", "block", capitalize(arrayType)); builder.endControlFlow(); combineRawInputForArray(builder, "valuesArray"); } else { @@ -459,16 +421,13 @@ private MethodSpec addRawBlock(boolean masked) { } } builder.endControlFlow(); - if (combineValueCount != null) { - builder.addStatement("$T.combineValueCount(state, block.getTotalValueCount())", declarationType); - } return builder.build(); } private void combineRawInput(MethodSpec.Builder builder, String blockVariable) { TypeName returnType = TypeName.get(combine.getReturnType()); warningsBlock(builder, () -> { - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { combineRawInputForBytesRef(builder, blockVariable); } else if (returnType.isPrimitive()) { combineRawInputForPrimitive(returnType, builder, blockVariable); @@ -480,33 +439,57 @@ private void combineRawInput(MethodSpec.Builder builder, String blockVariable) { }); } - private void combineRawInputForPrimitive(TypeName returnType, MethodSpec.Builder builder, String blockVariable) { - builder.addStatement( - "state.$TValue($T.combine(state.$TValue(), $L.get$L(i)))", - returnType, - declarationType, - returnType, - blockVariable, - firstUpper(combine.getParameters().get(1).asType().toString()) - ); + private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable) { + // scratch is a BytesRef var that must have been defined before the iteration starts + if (includeTimestampVector) { + builder.addStatement("$T.combine(state, timestamps.getLong(i), $L.getBytesRef(i, scratch))", declarationType, blockVariable); + } else { + builder.addStatement("$T.combine(state, $L.getBytesRef(i, scratch))", declarationType, blockVariable); + } } - private void combineRawInputForArray(MethodSpec.Builder builder, String arrayVariable) { - warningsBlock(builder, () -> builder.addStatement("$T.combine(state, $L)", declarationType, arrayVariable)); + private void combineRawInputForPrimitive(TypeName returnType, MethodSpec.Builder builder, String blockVariable) { + if (includeTimestampVector) { + builder.addStatement( + "state.$TValue($T.combine(state.$TValue(), timestamps.getLong(i), $L.get$L(i)))", + returnType, + declarationType, + returnType, + blockVariable, + capitalize(combine.getParameters().get(1).asType().toString()) + ); + } else { + builder.addStatement( + "state.$TValue($T.combine(state.$TValue(), $L.get$L(i)))", + returnType, + declarationType, + returnType, + blockVariable, + capitalize(combine.getParameters().get(1).asType().toString()) + ); + } } private void combineRawInputForVoid(MethodSpec.Builder builder, String blockVariable) { - builder.addStatement( - "$T.combine(state, $L.get$L(i))", - declarationType, - blockVariable, - firstUpper(combine.getParameters().get(1).asType().toString()) - ); + if (includeTimestampVector) { + builder.addStatement( + "$T.combine(state, 
timestamps.getLong(i), $L.get$L(i))", + declarationType, + blockVariable, + capitalize(combine.getParameters().get(1).asType().toString()) + ); + } else { + builder.addStatement( + "$T.combine(state, $L.get$L(i))", + declarationType, + blockVariable, + capitalize(combine.getParameters().get(1).asType().toString()) + ); + } } - private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable) { - // scratch is a BytesRef var that must have been defined before the iteration starts - builder.addStatement("$T.combine(state, $L.getBytesRef(i, scratch))", declarationType, blockVariable); + private void combineRawInputForArray(MethodSpec.Builder builder, String arrayVariable) { + warningsBlock(builder, () -> builder.addStatement("$T.combine(state, $L)", declarationType, arrayVariable)); } private void warningsBlock(MethodSpec.Builder builder, Runnable block) { @@ -534,12 +517,7 @@ private MethodSpec addIntermediateInput() { interState.assignToVariable(builder, i); builder.addStatement("assert $L.getPositionCount() == 1", interState.name()); } - if (combineIntermediate != null) { - if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { - builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); - } - builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType); - } else if (hasPrimitiveState()) { + if (aggState.declaredType().isPrimitive()) { if (warnExceptions.isEmpty()) { assert intermediateState.size() == 2; assert intermediateState.get(1).name().equals("seen"); @@ -557,14 +535,36 @@ private MethodSpec addIntermediateInput() { } warningsBlock(builder, () -> { + var primitiveStateMethod = switch (aggState.declaredType().toString()) { + case "boolean" -> "booleanValue"; + case "int" -> "intValue"; + case "long" -> "longValue"; + case "double" -> "doubleValue"; + case "float" -> "floatValue"; + default -> throw new IllegalArgumentException("Unexpected primitive type: [" + aggState.declaredType() + "]"); + }; var state = intermediateState.get(0); var s = "state.$L($T.combine(state.$L(), " + state.name() + "." + vectorAccessorName(state.elementType()) + "(0)))"; - builder.addStatement(s, primitiveStateMethod(), declarationType, primitiveStateMethod()); + builder.addStatement(s, primitiveStateMethod, declarationType, primitiveStateMethod); builder.addStatement("state.seen(true)"); }); builder.endControlFlow(); } else { - throw new IllegalArgumentException("Don't know how to combine intermediate input. 
Define combineIntermediate"); + requireStaticMethod( + declarationType, + requireVoidType(), + requireName("combineIntermediate"), + requireArgs( + Stream.concat( + Stream.of(aggState.declaredType()), // aggState + intermediateState.stream().map(IntermediateStateDesc::combineArgType) // intermediate state + ).map(Methods::requireType).toArray(TypeMatcher[]::new) + ) + ); + if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { + builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); + } + builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType); } return builder.build(); } @@ -573,25 +573,6 @@ String intermediateStateRowAccess() { return intermediateState.stream().map(desc -> desc.access("0")).collect(joining(", ")); } - private String primitiveStateMethod() { - switch (stateType.toString()) { - case "org.elasticsearch.compute.aggregation.BooleanState", "org.elasticsearch.compute.aggregation.BooleanFallibleState": - return "booleanValue"; - case "org.elasticsearch.compute.aggregation.IntState", "org.elasticsearch.compute.aggregation.IntFallibleState": - return "intValue"; - case "org.elasticsearch.compute.aggregation.LongState", "org.elasticsearch.compute.aggregation.LongFallibleState": - return "longValue"; - case "org.elasticsearch.compute.aggregation.DoubleState", "org.elasticsearch.compute.aggregation.DoubleFallibleState": - return "doubleValue"; - case "org.elasticsearch.compute.aggregation.FloatState", "org.elasticsearch.compute.aggregation.FloatFallibleState": - return "floatValue"; - default: - throw new IllegalArgumentException( - "don't know how to fetch primitive values from " + stateType + ". define combineIntermediate." - ); - } - } - private MethodSpec evaluateIntermediate() { MethodSpec.Builder builder = MethodSpec.methodBuilder("evaluateIntermediate"); builder.addAnnotation(Override.class) @@ -610,45 +591,39 @@ private MethodSpec evaluateFinal() { .addParameter(BLOCK_ARRAY, "blocks") .addParameter(TypeName.INT, "offset") .addParameter(DRIVER_CONTEXT, "driverContext"); - if (stateTypeHasSeen || stateTypeHasFailed) { - var condition = Stream.of(stateTypeHasSeen ? "state.seen() == false" : null, stateTypeHasFailed ? 
"state.failed()" : null) - .filter(Objects::nonNull) - .collect(joining(" || ")); - builder.beginControlFlow("if ($L)", condition); + if (aggState.hasSeen() || aggState.hasFailed()) { + builder.beginControlFlow( + "if ($L)", + Stream.concat( + Stream.of("state.seen() == false").filter(c -> aggState.hasSeen()), + Stream.of("state.failed()").filter(c -> aggState.hasFailed()) + ).collect(joining(" || ")) + ); builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1)", BLOCK); builder.addStatement("return"); builder.endControlFlow(); } - if (evaluateFinal == null) { - primitiveStateToResult(builder); + if (aggState.declaredType().isPrimitive()) { + builder.addStatement(switch (aggState.declaredType().toString()) { + case "boolean" -> "blocks[offset] = driverContext.blockFactory().newConstantBooleanBlockWith(state.booleanValue(), 1)"; + case "int" -> "blocks[offset] = driverContext.blockFactory().newConstantIntBlockWith(state.intValue(), 1)"; + case "long" -> "blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1)"; + case "double" -> "blocks[offset] = driverContext.blockFactory().newConstantDoubleBlockWith(state.doubleValue(), 1)"; + case "float" -> "blocks[offset] = driverContext.blockFactory().newConstantFloatBlockWith(state.floatValue(), 1)"; + default -> throw new IllegalArgumentException("Unexpected primitive type: [" + aggState.declaredType() + "]"); + }); } else { + requireStaticMethod( + declarationType, + requireType(BLOCK), + requireName("evaluateFinal"), + requireArgs(requireType(aggState.declaredType()), requireType(DRIVER_CONTEXT)) + ); builder.addStatement("blocks[offset] = $T.evaluateFinal(state, driverContext)", declarationType); } return builder.build(); } - private void primitiveStateToResult(MethodSpec.Builder builder) { - switch (stateType.toString()) { - case "org.elasticsearch.compute.aggregation.BooleanState", "org.elasticsearch.compute.aggregation.BooleanFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantBooleanBlockWith(state.booleanValue(), 1)"); - return; - case "org.elasticsearch.compute.aggregation.IntState", "org.elasticsearch.compute.aggregation.IntFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantIntBlockWith(state.intValue(), 1)"); - return; - case "org.elasticsearch.compute.aggregation.LongState", "org.elasticsearch.compute.aggregation.LongFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1)"); - return; - case "org.elasticsearch.compute.aggregation.DoubleState", "org.elasticsearch.compute.aggregation.DoubleFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantDoubleBlockWith(state.doubleValue(), 1)"); - return; - case "org.elasticsearch.compute.aggregation.FloatState", "org.elasticsearch.compute.aggregation.FloatFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantFloatBlockWith(state.floatValue(), 1)"); - return; - default: - throw new IllegalArgumentException("don't know how to convert state to result: " + stateType); - } - } - private MethodSpec toStringMethod() { MethodSpec.Builder builder = MethodSpec.methodBuilder("toString"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).returns(String.class); @@ -667,14 +642,6 @@ private MethodSpec close() { return builder.build(); } - private static final Pattern 
PRIMITIVE_STATE_PATTERN = Pattern.compile( - "org.elasticsearch.compute.aggregation.(Boolean|Int|Long|Double|Float)(Fallible)?State" - ); - - private boolean hasPrimitiveState() { - return PRIMITIVE_STATE_PATTERN.matcher(stateType.toString()).matches(); - } - record IntermediateStateDesc(String name, String elementType, boolean block) { static IntermediateStateDesc newIntermediateStateDesc(IntermediateState state) { String type = state.type(); @@ -711,22 +678,57 @@ public void assignToVariable(MethodSpec.Builder builder, int offset) { builder.addStatement("$T $L = (($T) $L).asVector()", vectorType(elementType), name, blockType, name + "Uncast"); } } - } - private TypeMirror valueTypeMirror() { - return combine.getParameters().get(combine.getParameters().size() - 1).asType(); + public TypeName combineArgType() { + var type = Types.fromString(elementType); + return block ? blockType(type) : type; + } } - private TypeName valueTypeName() { - return TypeName.get(valueTypeMirror()); + /** + * This represents the type returned by init method used to keep aggregation state + * @param declaredType declared state type as returned by init method + * @param type actual type used (we have some predefined state types for primitive values) + */ + public record AggregationState(TypeName declaredType, TypeName type, boolean hasSeen, boolean hasFailed) { + + public static AggregationState create(Elements elements, TypeMirror mirror, boolean hasFailures, boolean isArray) { + var declaredType = TypeName.get(mirror); + var stateType = declaredType.isPrimitive() + ? ClassName.get("org.elasticsearch.compute.aggregation", primitiveStateStoreClassname(declaredType, hasFailures, isArray)) + : declaredType; + return new AggregationState( + declaredType, + stateType, + hasMethod(elements, stateType, "seen()"), + hasMethod(elements, stateType, "failed()") + ); + } + + private static String primitiveStateStoreClassname(TypeName declaredType, boolean hasFailures, boolean isArray) { + var name = capitalize(declaredType.toString()); + if (hasFailures) { + name += "Fallible"; + } + if (isArray) { + name += "Array"; + } + return name + "State"; + } } - private TypeKind valueTypeKind() { - return valueTypeMirror().getKind(); + public record AggregationParameter(TypeName type, boolean isArray) { + + public static AggregationParameter create(TypeMirror mirror) { + return new AggregationParameter(TypeName.get(mirror), Objects.equals(mirror.getKind(), TypeKind.ARRAY)); + } + + public boolean isBytesRef() { + return Objects.equals(type, BYTES_REF); + } } - private String valueTypeString() { - String valueTypeString = TypeName.get(valueTypeMirror()).toString(); - return valuesIsArray ? 
valueTypeString.substring(0, valueTypeString.length() - 2) : valueTypeString; + private static boolean hasMethod(Elements elements, TypeName type, String name) { + return elements.getAllMembers(elements.getTypeElement(type.toString())).stream().anyMatch(e -> e.toString().equals(name)); } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java index 863db86eb934a..3ad2343ad1658 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java @@ -87,7 +87,13 @@ public boolean process(Set set, RoundEnvironment roundEnv ); if (aggClass.getAnnotation(Aggregator.class) != null) { IntermediateState[] intermediateState = aggClass.getAnnotation(Aggregator.class).value(); - implementer = new AggregatorImplementer(env.getElementUtils(), aggClass, intermediateState, warnExceptionsTypes); + implementer = new AggregatorImplementer( + env.getElementUtils(), + aggClass, + intermediateState, + warnExceptionsTypes, + aggClass.getAnnotation(Aggregator.class).includeTimestamps() + ); write(aggClass, "aggregator", implementer.sourceFile(), env); } GroupingAggregatorImplementer groupingAggregatorImplementer = null; @@ -96,13 +102,12 @@ public boolean process(Set set, RoundEnvironment roundEnv if (intermediateState.length == 0 && aggClass.getAnnotation(Aggregator.class) != null) { intermediateState = aggClass.getAnnotation(Aggregator.class).value(); } - boolean includeTimestamps = aggClass.getAnnotation(GroupingAggregator.class).includeTimestamps(); groupingAggregatorImplementer = new GroupingAggregatorImplementer( env.getElementUtils(), aggClass, intermediateState, warnExceptionsTypes, - includeTimestamps + aggClass.getAnnotation(GroupingAggregator.class).includeTimestamps() ); write(aggClass, "grouping aggregator", groupingAggregatorImplementer.sourceFile(), env); } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index 8224c73936b90..d2b6a0e011687 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -17,28 +17,35 @@ import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.gen.AggregatorImplementer.AggregationParameter; +import org.elasticsearch.compute.gen.AggregatorImplementer.AggregationState; import java.util.Arrays; import java.util.List; import java.util.function.Consumer; -import java.util.regex.Pattern; +import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; -import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; import static java.util.stream.Collectors.joining; -import static org.elasticsearch.compute.gen.AggregatorImplementer.firstUpper; -import static 
org.elasticsearch.compute.gen.AggregatorImplementer.valueBlockType; -import static org.elasticsearch.compute.gen.AggregatorImplementer.valueVectorType; -import static org.elasticsearch.compute.gen.Methods.findMethod; -import static org.elasticsearch.compute.gen.Methods.findRequiredMethod; +import static org.elasticsearch.compute.gen.AggregatorImplementer.capitalize; +import static org.elasticsearch.compute.gen.Methods.requireAnyArgs; +import static org.elasticsearch.compute.gen.Methods.requireAnyType; +import static org.elasticsearch.compute.gen.Methods.requireArgs; +import static org.elasticsearch.compute.gen.Methods.requireName; +import static org.elasticsearch.compute.gen.Methods.requirePrimitiveOrImplements; +import static org.elasticsearch.compute.gen.Methods.requireStaticMethod; +import static org.elasticsearch.compute.gen.Methods.requireType; +import static org.elasticsearch.compute.gen.Methods.requireVoidType; import static org.elasticsearch.compute.gen.Methods.vectorAccessorName; import static org.elasticsearch.compute.gen.Types.BIG_ARRAYS; +import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BLOCK_ARRAY; import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; @@ -55,6 +62,8 @@ import static org.elasticsearch.compute.gen.Types.PAGE; import static org.elasticsearch.compute.gen.Types.SEEN_GROUP_IDS; import static org.elasticsearch.compute.gen.Types.WARNINGS; +import static org.elasticsearch.compute.gen.Types.blockType; +import static org.elasticsearch.compute.gen.Types.vectorType; /** * Implements "GroupingAggregationFunction" from a class containing static methods @@ -70,17 +79,14 @@ public class GroupingAggregatorImplementer { private final List<TypeMirror> warnExceptions; private final ExecutableElement init; private final ExecutableElement combine; - private final ExecutableElement combineStates; - private final ExecutableElement evaluateFinal; - private final ExecutableElement combineIntermediate; - private final TypeName stateType; - private final boolean valuesIsBytesRef; - private final boolean valuesIsArray; private final List<Parameter> createParameters; private final ClassName implementation; private final List<AggregatorImplementer.IntermediateStateDesc> intermediateState; private final boolean includeTimestampVector; + private final AggregationState aggState; + private final AggregationParameter aggParam; + public GroupingAggregatorImplementer( Elements elements, TypeElement declarationType, @@ -91,21 +97,23 @@ public GroupingAggregatorImplementer( this.declarationType = declarationType; this.warnExceptions = warnExceptions; - this.init = findRequiredMethod(declarationType, new String[] { "init", "initGrouping" }, e -> true); - this.stateType = choseStateType(); + this.init = requireStaticMethod( + declarationType, + requirePrimitiveOrImplements(elements, Types.GROUPING_AGGREGATOR_STATE), + requireName("init", "initGrouping"), + requireAnyArgs("") + ); + this.aggState = AggregationState.create(elements, init.getReturnType(), warnExceptions.isEmpty() == false, true); + + this.combine = requireStaticMethod( + declarationType, + aggState.declaredType().isPrimitive() ?
requireType(aggState.declaredType()) : requireVoidType(), + requireName("combine"), + combineArgs(aggState, includeTimestampVector) + ); + // TODO support multiple parameters + this.aggParam = AggregationParameter.create(combine.getParameters().getLast().asType()); - this.combine = findRequiredMethod(declarationType, new String[] { "combine" }, e -> { - if (e.getParameters().size() == 0) { - return false; - } - TypeName firstParamType = TypeName.get(e.getParameters().get(0).asType()); - return firstParamType.isPrimitive() || firstParamType.toString().equals(stateType.toString()); - }); - this.combineStates = findMethod(declarationType, "combineStates"); - this.combineIntermediate = findMethod(declarationType, "combineIntermediate"); - this.evaluateFinal = findMethod(declarationType, "evaluateFinal"); - this.valuesIsBytesRef = BYTES_REF.equals(valueTypeName()); - this.valuesIsArray = TypeKind.ARRAY.equals(valueTypeKind()); this.createParameters = init.getParameters() .stream() .map(Parameter::from) @@ -117,12 +125,31 @@ public GroupingAggregatorImplementer( (declarationType.getSimpleName() + "GroupingAggregatorFunction").replace("AggregatorGroupingAggregator", "GroupingAggregator") ); - intermediateState = Arrays.stream(interStateAnno) + this.intermediateState = Arrays.stream(interStateAnno) .map(AggregatorImplementer.IntermediateStateDesc::newIntermediateStateDesc) .toList(); this.includeTimestampVector = includeTimestampVector; } + private static Methods.ArgumentMatcher combineArgs(AggregationState aggState, boolean includeTimestampVector) { + if (aggState.declaredType().isPrimitive()) { + return requireArgs(requireType(aggState.declaredType()), requireAnyType("")); + } else if (includeTimestampVector) { + return requireArgs( + requireType(aggState.declaredType()), + requireType(TypeName.INT), + requireType(TypeName.LONG), // @timestamp + requireAnyType("") + ); + } else { + return requireArgs( + requireType(aggState.declaredType()), + requireType(TypeName.INT), + requireAnyType("") + ); + } + } + public ClassName implementation() { return implementation; } @@ -131,18 +158,6 @@ List<Parameter> createParameters() { return createParameters; } - private TypeName choseStateType() { - TypeName initReturn = TypeName.get(init.getReturnType()); - if (false == initReturn.isPrimitive()) { - return initReturn; - } - String simpleName = firstUpper(initReturn.toString()); - if (warnExceptions.isEmpty()) { - return ClassName.get("org.elasticsearch.compute.aggregation", simpleName + "ArrayState"); - } - return ClassName.get("org.elasticsearch.compute.aggregation", simpleName + "FallibleArrayState"); - } - public JavaFile sourceFile() { JavaFile.Builder builder = JavaFile.builder(implementation.packageName(), type()); builder.addFileComment(""" @@ -164,7 +179,7 @@ private TypeSpec type() { .initializer(initInterState()) .build() ); - builder.addField(stateType, "state", Modifier.PRIVATE, Modifier.FINAL); + builder.addField(aggState.type(), "state", Modifier.PRIVATE, Modifier.FINAL); if (warnExceptions.isEmpty() == false) { builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); } @@ -180,10 +195,10 @@ private TypeSpec type() { builder.addMethod(intermediateStateDesc()); builder.addMethod(intermediateBlockCount()); builder.addMethod(prepareProcessPage()); - builder.addMethod(addRawInputLoop(INT_VECTOR, valueBlockType(init, combine))); - builder.addMethod(addRawInputLoop(INT_VECTOR, valueVectorType(init, combine))); - builder.addMethod(addRawInputLoop(INT_BLOCK, valueBlockType(init, combine))); -
builder.addMethod(addRawInputLoop(INT_BLOCK, valueVectorType(init, combine))); + builder.addMethod(addRawInputLoop(INT_VECTOR, blockType(aggParam.type()))); + builder.addMethod(addRawInputLoop(INT_VECTOR, vectorType(aggParam.type()))); + builder.addMethod(addRawInputLoop(INT_BLOCK, blockType(aggParam.type()))); + builder.addMethod(addRawInputLoop(INT_BLOCK, vectorType(aggParam.type()))); builder.addMethod(selectedMayContainUnseenGroups()); builder.addMethod(addIntermediateInput()); builder.addMethod(addIntermediateRowInput()); @@ -230,16 +245,16 @@ private CodeBlock callInit() { .map(p -> TypeName.get(p.asType()).equals(BIG_ARRAYS) ? "driverContext.bigArrays()" : p.getSimpleName().toString()) .collect(joining(", ")); CodeBlock.Builder builder = CodeBlock.builder(); - if (init.getReturnType().toString().equals(stateType.toString())) { - builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall); - } else { + if (aggState.declaredType().isPrimitive()) { builder.add( "new $T(driverContext.bigArrays(), $T.$L($L))", - stateType, + aggState.type(), declarationType, init.getSimpleName(), initParametersCall ); + } else { + builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall); } return builder.build(); } @@ -263,7 +278,7 @@ private MethodSpec ctor() { builder.addParameter(WARNINGS, "warnings"); } builder.addParameter(LIST_INTEGER, "channels"); - builder.addParameter(stateType, "state"); + builder.addParameter(aggState.type(), "state"); builder.addParameter(DRIVER_CONTEXT, "driverContext"); if (warnExceptions.isEmpty() == false) { builder.addStatement("this.warnings = warnings"); @@ -301,8 +316,8 @@ private MethodSpec prepareProcessPage() { builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).returns(GROUPING_AGGREGATOR_FUNCTION_ADD_INPUT); builder.addParameter(SEEN_GROUP_IDS, "seenGroupIds").addParameter(PAGE, "page"); - builder.addStatement("$T valuesBlock = page.getBlock(channels.get(0))", valueBlockType(init, combine)); - builder.addStatement("$T valuesVector = valuesBlock.asVector()", valueVectorType(init, combine)); + builder.addStatement("$T valuesBlock = page.getBlock(channels.get(0))", blockType(aggParam.type())); + builder.addStatement("$T valuesVector = valuesBlock.asVector()", vectorType(aggParam.type())); if (includeTimestampVector) { builder.addStatement("$T timestampsBlock = page.getBlock(channels.get(1))", LONG_BLOCK); builder.addStatement("$T timestampsVector = timestampsBlock.asVector()", LONG_VECTOR); @@ -355,18 +370,17 @@ private TypeSpec addInput(Consumer<MethodSpec.Builder> addBlock) { private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { boolean groupsIsBlock = groupsType.toString().endsWith("Block"); boolean valuesIsBlock = valuesType.toString().endsWith("Block"); - String methodName = "addRawInput"; - MethodSpec.Builder builder = MethodSpec.methodBuilder(methodName); + MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawInput"); builder.addModifiers(Modifier.PRIVATE); builder.addParameter(TypeName.INT, "positionOffset").addParameter(groupsType, "groups").addParameter(valuesType, "values"); if (includeTimestampVector) { builder.addParameter(LONG_VECTOR, "timestamps"); } - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { // Add bytes_ref scratch var that will be used for bytes_ref blocks/vectors builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); } - if (valuesIsArray && valuesIsBlock == false) { + if (aggParam.isArray() && valuesIsBlock == false) {
builder.addComment("This type does not support vectors because all values are multi-valued"); return builder.build(); } @@ -397,11 +411,11 @@ private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { builder.endControlFlow(); builder.addStatement("int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset)"); builder.addStatement("int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset)"); - if (valuesIsArray) { - String arrayType = valueTypeString(); + if (aggParam.isArray()) { + String arrayType = aggParam.type().toString().replace("[]", ""); builder.addStatement("$L[] valuesArray = new $L[valuesEnd - valuesStart]", arrayType, arrayType); builder.beginControlFlow("for (int v = valuesStart; v < valuesEnd; v++)"); - builder.addStatement("valuesArray[v-valuesStart] = $L.get$L(v)", "values", firstUpper(arrayType)); + builder.addStatement("valuesArray[v-valuesStart] = $L.get$L(v)", "values", capitalize(arrayType)); builder.endControlFlow(); combineRawInputForArray(builder, "valuesArray"); } else { @@ -422,14 +436,12 @@ private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { } private void combineRawInput(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { - TypeName valueType = valueTypeName(); + TypeName valueType = aggParam.type(); TypeName returnType = TypeName.get(combine.getReturnType()); warningsBlock(builder, () -> { - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { combineRawInputForBytesRef(builder, blockVariable, offsetVariable); - } else if (includeTimestampVector) { - combineRawInputWithTimestamp(builder, offsetVariable); } else if (valueType.isPrimitive() == false) { throw new IllegalArgumentException("second parameter to combine must be a primitive, array or BytesRef: " + valueType); } else if (returnType.isPrimitive()) { @@ -442,48 +454,75 @@ private void combineRawInput(MethodSpec.Builder builder, String blockVariable, S }); } - private void combineRawInputForPrimitive(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { - builder.addStatement( - "state.set(groupId, $T.combine(state.getOrDefault(groupId), $L.get$L($L)))", - declarationType, - blockVariable, - firstUpper(valueTypeName().toString()), - offsetVariable - ); + private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { + // scratch is a BytesRef var that must have been defined before the iteration starts + if (includeTimestampVector) { + if (offsetVariable.contains(" + ")) { + builder.addStatement("var valuePosition = $L", offsetVariable); + offsetVariable = "valuePosition"; + } + builder.addStatement( + "$T.combine(state, groupId, timestamps.getLong($L), $L.getBytesRef($L, scratch))", + declarationType, + offsetVariable, + blockVariable, + offsetVariable + ); + } else { + builder.addStatement("$T.combine(state, groupId, $L.getBytesRef($L, scratch))", declarationType, blockVariable, offsetVariable); + } } - private void combineRawInputForArray(MethodSpec.Builder builder, String arrayVariable) { - warningsBlock(builder, () -> builder.addStatement("$T.combine(state, groupId, $L)", declarationType, arrayVariable)); + private void combineRawInputForPrimitive(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { + if (includeTimestampVector) { + if (offsetVariable.contains(" + ")) { + builder.addStatement("var valuePosition = $L", offsetVariable); + offsetVariable = "valuePosition"; + } + builder.addStatement( 
+ "$T.combine(state, groupId, timestamps.getLong($L), values.get$L($L))", + declarationType, + offsetVariable, + capitalize(aggParam.type().toString()), + offsetVariable + ); + } else { + builder.addStatement( + "state.set(groupId, $T.combine(state.getOrDefault(groupId), $L.get$L($L)))", + declarationType, + blockVariable, + capitalize(aggParam.type().toString()), + offsetVariable + ); + } } private void combineRawInputForVoid(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { - builder.addStatement( - "$T.combine(state, groupId, $L.get$L($L))", - declarationType, - blockVariable, - firstUpper(valueTypeName().toString()), - offsetVariable - ); - } - - private void combineRawInputWithTimestamp(MethodSpec.Builder builder, String offsetVariable) { - String blockType = firstUpper(valueTypeName().toString()); - if (offsetVariable.contains(" + ")) { - builder.addStatement("var valuePosition = $L", offsetVariable); - offsetVariable = "valuePosition"; + if (includeTimestampVector) { + if (offsetVariable.contains(" + ")) { + builder.addStatement("var valuePosition = $L", offsetVariable); + offsetVariable = "valuePosition"; + } + builder.addStatement( + "$T.combine(state, groupId, timestamps.getLong($L), values.get$L($L))", + declarationType, + offsetVariable, + capitalize(aggParam.type().toString()), + offsetVariable + ); + } else { + builder.addStatement( + "$T.combine(state, groupId, $L.get$L($L))", + declarationType, + blockVariable, + capitalize(aggParam.type().toString()), + offsetVariable + ); } - builder.addStatement( - "$T.combine(state, groupId, timestamps.getLong($L), values.get$L($L))", - declarationType, - offsetVariable, - blockType, - offsetVariable - ); } - private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { - // scratch is a BytesRef var that must have been defined before the iteration starts - builder.addStatement("$T.combine(state, groupId, $L.getBytesRef($L, scratch))", declarationType, blockVariable, offsetVariable); + private void combineRawInputForArray(MethodSpec.Builder builder, String arrayVariable) { + warningsBlock(builder, () -> builder.addStatement("$T.combine(state, groupId, $L)", declarationType, arrayVariable)); } private void warningsBlock(MethodSpec.Builder builder, Runnable block) { @@ -539,7 +578,7 @@ private MethodSpec addIntermediateInput() { builder.beginControlFlow("for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++)"); { builder.addStatement("int groupId = groups.getInt(groupPosition)"); - if (hasPrimitiveState()) { + if (aggState.declaredType().isPrimitive()) { if (warnExceptions.isEmpty()) { assert intermediateState.size() == 2; assert intermediateState.get(1).name().equals("seen"); @@ -567,31 +606,33 @@ private MethodSpec addIntermediateInput() { }); builder.endControlFlow(); } else { - builder.addStatement("$T.combineIntermediate(state, groupId, " + intermediateStateRowAccess() + ")", declarationType); + var stateHasBlock = intermediateState.stream().anyMatch(AggregatorImplementer.IntermediateStateDesc::block); + requireStaticMethod( + declarationType, + requireVoidType(), + requireName("combineIntermediate"), + requireArgs( + Stream.of( + Stream.of(aggState.declaredType(), TypeName.INT), // aggState and groupId + intermediateState.stream().map(AggregatorImplementer.IntermediateStateDesc::combineArgType), + Stream.of(TypeName.INT).filter(p -> stateHasBlock) // position + 
).flatMap(Function.identity()).map(Methods::requireType).toArray(Methods.TypeMatcher[]::new) + ) + ); + + builder.addStatement( + "$T.combineIntermediate(state, groupId, " + + intermediateState.stream().map(desc -> desc.access("groupPosition + positionOffset")).collect(joining(", ")) + + (stateHasBlock ? ", groupPosition + positionOffset" : "") + + ")", + declarationType + ); } builder.endControlFlow(); } return builder.build(); } - String intermediateStateRowAccess() { - String rowAccess = intermediateState.stream().map(desc -> desc.access("groupPosition + positionOffset")).collect(joining(", ")); - if (intermediateState.stream().anyMatch(AggregatorImplementer.IntermediateStateDesc::block)) { - rowAccess += ", groupPosition + positionOffset"; - } - return rowAccess; - } - - private void combineStates(MethodSpec.Builder builder) { - if (combineStates == null) { - builder.beginControlFlow("if (inState.hasValue(position))"); - builder.addStatement("state.set(groupId, $T.combine(state.getOrDefault(groupId), inState.get(position)))", declarationType); - builder.endControlFlow(); - return; - } - builder.addStatement("$T.combineStates(state, groupId, inState, position)", declarationType); - } - private MethodSpec addIntermediateRowInput() { MethodSpec.Builder builder = MethodSpec.methodBuilder("addIntermediateRowInput"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); @@ -601,9 +642,26 @@ private MethodSpec addIntermediateRowInput() { builder.addStatement("throw new IllegalArgumentException($S + getClass() + $S + input.getClass())", "expected ", "; got "); } builder.endControlFlow(); - builder.addStatement("$T inState = (($T) input).state", stateType, implementation); + builder.addStatement("$T inState = (($T) input).state", aggState.type(), implementation); builder.addStatement("state.enableGroupIdTracking(new $T.Empty())", SEEN_GROUP_IDS); - combineStates(builder); + if (aggState.declaredType().isPrimitive()) { + builder.beginControlFlow("if (inState.hasValue(position))"); + builder.addStatement("state.set(groupId, $T.combine(state.getOrDefault(groupId), inState.get(position)))", declarationType); + builder.endControlFlow(); + } else { + requireStaticMethod( + declarationType, + requireVoidType(), + requireName("combineStates"), + requireArgs( + requireType(aggState.declaredType()), + requireType(TypeName.INT), + requireType(aggState.declaredType()), + requireType(TypeName.INT) + ) + ); + builder.addStatement("$T.combineStates(state, groupId, inState, position)", declarationType); + } return builder.build(); } @@ -627,9 +685,15 @@ private MethodSpec evaluateFinal() { .addParameter(INT_VECTOR, "selected") .addParameter(DRIVER_CONTEXT, "driverContext"); - if (evaluateFinal == null) { + if (aggState.declaredType().isPrimitive()) { builder.addStatement("blocks[offset] = state.toValuesBlock(selected, driverContext)"); } else { + requireStaticMethod( + declarationType, + requireType(BLOCK), + requireName("evaluateFinal"), + requireArgs(requireType(aggState.declaredType()), requireType(INT_VECTOR), requireType(DRIVER_CONTEXT)) + ); builder.addStatement("blocks[offset] = $T.evaluateFinal(state, selected, driverContext)", declarationType); } return builder.build(); @@ -652,32 +716,4 @@ private MethodSpec close() { builder.addStatement("state.close()"); return builder.build(); } - - private static final Pattern PRIMITIVE_STATE_PATTERN = Pattern.compile( - "org.elasticsearch.compute.aggregation.(Boolean|Int|Long|Double|Float)(Fallible)?ArrayState" - ); - - private boolean 
hasPrimitiveState() { - return PRIMITIVE_STATE_PATTERN.matcher(stateType.toString()).matches(); - } - - private TypeMirror valueTypeMirror() { - return combine.getParameters().get(combine.getParameters().size() - 1).asType(); - } - - private TypeName valueTypeName() { - return TypeName.get(valueTypeMirror()); - } - - private TypeKind valueTypeKind() { - return valueTypeMirror().getKind(); - } - - private String valueTypeString() { - String valueTypeString = TypeName.get(valueTypeMirror()).toString(); - if (valuesIsArray) { - valueTypeString = valueTypeString.substring(0, valueTypeString.length() - 2); - } - return valueTypeString; - } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java index 6f98f1f797ab0..f2fa7b8084448 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java @@ -9,18 +9,22 @@ import com.squareup.javapoet.TypeName; -import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.Set; import java.util.function.Predicate; +import java.util.stream.IntStream; +import java.util.stream.Stream; -import javax.lang.model.element.Element; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; -import javax.lang.model.element.VariableElement; import javax.lang.model.type.DeclaredType; -import javax.lang.model.type.TypeMirror; +import javax.lang.model.type.TypeKind; import javax.lang.model.util.ElementFilter; +import javax.lang.model.util.Elements; +import static java.util.stream.Collectors.joining; import static org.elasticsearch.compute.gen.Types.BOOLEAN_BLOCK; import static org.elasticsearch.compute.gen.Types.BOOLEAN_BLOCK_BUILDER; import static org.elasticsearch.compute.gen.Types.BOOLEAN_VECTOR; @@ -49,30 +53,116 @@ * Finds declared methods for the code generator. */ public class Methods { - static ExecutableElement findRequiredMethod(TypeElement declarationType, String[] names, Predicate<ExecutableElement> filter) { - ExecutableElement result = findMethod(names, filter, declarationType, superClassOf(declarationType)); - if (result == null) { - if (names.length == 1) { - throw new IllegalArgumentException(declarationType + "#" + names[0] + " is required"); - } - throw new IllegalArgumentException("one of " + declarationType + "#" + Arrays.toString(names) + " is required"); + + static ExecutableElement requireStaticMethod( + TypeElement declarationType, + TypeMatcher returnTypeMatcher, + NameMatcher nameMatcher, + ArgumentMatcher argumentMatcher + ) { + return typeAndSuperType(declarationType).flatMap(type -> ElementFilter.methodsIn(type.getEnclosedElements()).stream()) + .filter(method -> method.getModifiers().contains(Modifier.STATIC)) + .filter(method -> nameMatcher.test(method.getSimpleName().toString())) + .filter(method -> returnTypeMatcher.test(TypeName.get(method.getReturnType()))) + .filter(method -> argumentMatcher.test(method.getParameters().stream().map(it -> TypeName.get(it.asType())).toList())) + .findFirst() + .orElseThrow(() -> { + var message = nameMatcher.names.size() == 1 ?
"Requires method: " : "Requires one of methods: "; + var signatures = nameMatcher.names.stream() + .map(name -> "public static " + returnTypeMatcher + " " + declarationType + "#" + name + "(" + argumentMatcher + ")") + .collect(joining(" or ")); + return new IllegalArgumentException(message + signatures); + }); + } + + static NameMatcher requireName(String... names) { + return new NameMatcher(Set.of(names)); + } + + static TypeMatcher requireVoidType() { + return new TypeMatcher(type -> Objects.equals(TypeName.VOID, type), "void"); + } + + static TypeMatcher requireAnyType(String description) { + return new TypeMatcher(type -> true, description); + } + + static TypeMatcher requirePrimitiveOrImplements(Elements elements, TypeName requiredInterface) { + return new TypeMatcher( + type -> type.isPrimitive() || isImplementing(elements, type, requiredInterface), + "[boolean|int|long|float|double|" + requiredInterface + "]" + ); + } + + static TypeMatcher requireType(TypeName requiredType) { + return new TypeMatcher(type -> Objects.equals(requiredType, type), requiredType.toString()); + } + + static ArgumentMatcher requireAnyArgs(String description) { + return new ArgumentMatcher(args -> true, description); + } + + static ArgumentMatcher requireArgs(TypeMatcher... argTypes) { + return new ArgumentMatcher( + args -> args.size() == argTypes.length && IntStream.range(0, argTypes.length).allMatch(i -> argTypes[i].test(args.get(i))), + Stream.of(argTypes).map(TypeMatcher::toString).collect(joining(", ")) + ); + } + + record NameMatcher(Set<String> names) implements Predicate<String> { + @Override + public boolean test(String name) { + return names.contains(name); } - return result; } - static ExecutableElement findMethod(TypeElement declarationType, String name) { - return findMethod(new String[] { name }, e -> true, declarationType, superClassOf(declarationType)); + record TypeMatcher(Predicate<TypeName> matcher, String description) implements Predicate<TypeName> { + @Override + public boolean test(TypeName typeName) { + return matcher.test(typeName); + } + + @Override + public String toString() { + return description; + } } - private static TypeElement superClassOf(TypeElement declarationType) { - TypeMirror superclass = declarationType.getSuperclass(); - if (superclass instanceof DeclaredType declaredType) { - Element superclassElement = declaredType.asElement(); - if (superclassElement instanceof TypeElement) { - return (TypeElement) superclassElement; - } + record ArgumentMatcher(Predicate<List<TypeName>> matcher, String description) implements Predicate<List<TypeName>> { + @Override + public boolean test(List<TypeName> typeName) { + return matcher.test(typeName); + } + + @Override + public String toString() { + return description; + } + } + + private static boolean isImplementing(Elements elements, TypeName type, TypeName requiredInterface) { + return allInterfacesOf(elements, type).anyMatch( + anInterface -> Objects.equals(anInterface.toString(), requiredInterface.toString()) + ); + } + + private static Stream<TypeName> allInterfacesOf(Elements elements, TypeName type) { + var typeElement = elements.getTypeElement(type.toString()); + var superType = Stream.of(typeElement.getSuperclass()).filter(sType -> sType.getKind() != TypeKind.NONE).map(TypeName::get); + var interfaces = typeElement.getInterfaces().stream().map(TypeName::get); + return Stream.concat( + superType.flatMap(sType -> allInterfacesOf(elements, sType)), + interfaces.flatMap(anInterface -> Stream.concat(Stream.of(anInterface), allInterfacesOf(elements, anInterface))) + ); + } + + private static Stream<TypeElement>
typeAndSuperType(TypeElement declarationType) { + if (declarationType.getSuperclass() instanceof DeclaredType declaredType + && declaredType.asElement() instanceof TypeElement superType) { + return Stream.of(declarationType, superType); + } else { + return Stream.of(declarationType); } - return null; } static ExecutableElement findMethod(TypeElement declarationType, String[] names, Predicate<ExecutableElement> filter) { @@ -95,16 +185,6 @@ static ExecutableElement findMethod(String[] names, Predicate<ExecutableElement> return null; } - /** - * Returns the arguments of a method after applying a filter. - */ - static VariableElement[] findMethodArguments(ExecutableElement method, Predicate<VariableElement> filter) { - if (method.getParameters().isEmpty()) { - return new VariableElement[0]; - } - return method.getParameters().stream().filter(filter).toArray(VariableElement[]::new); - } - /** * Returns the name of the method used to add {@code valueType} instances * to vector or block builders. diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java index 8b01d957f3bd2..35c42153f9ad6 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java @@ -15,9 +15,13 @@ import java.util.ArrayDeque; import java.util.Deque; import java.util.List; +import java.util.Map; +import java.util.stream.Stream; import javax.lang.model.type.TypeMirror; +import static java.util.stream.Collectors.toUnmodifiableMap; + /** * Types used by the code generator. */ @@ -75,26 +79,8 @@ public class Types { static final ClassName DOUBLE_VECTOR_FIXED_BUILDER = ClassName.get(DATA_PACKAGE, "DoubleVector", "FixedBuilder"); static final ClassName FLOAT_VECTOR_FIXED_BUILDER = ClassName.get(DATA_PACKAGE, "FloatVector", "FixedBuilder"); - static final ClassName BOOLEAN_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "BooleanArrayVector"); - static final ClassName BYTES_REF_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "BytesRefArrayVector"); - static final ClassName INT_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "IntArrayVector"); - static final ClassName LONG_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "LongArrayVector"); - static final ClassName DOUBLE_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "DoubleArrayVector"); - static final ClassName FLOAT_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "FloatArrayVector"); - - static final ClassName BOOLEAN_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "BooleanArrayBlock"); - static final ClassName BYTES_REF_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "BytesRefArrayBlock"); - static final ClassName INT_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "IntArrayBlock"); - static final ClassName LONG_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "LongArrayBlock"); - static final ClassName DOUBLE_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "DoubleArrayBlock"); - static final ClassName FLOAT_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "FloatArrayBlock"); - - static final ClassName BOOLEAN_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantBooleanVector"); - static final ClassName BYTES_REF_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantBytesRefVector"); - static final ClassName INT_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantIntVector"); - static final ClassName LONG_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantLongVector"); - static final ClassName DOUBLE_CONSTANT_VECTOR =
ClassName.get(DATA_PACKAGE, "ConstantDoubleVector"); - static final ClassName FLOAT_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantFloatVector"); + static final ClassName AGGREGATOR_STATE = ClassName.get(AGGREGATION_PACKAGE, "AggregatorState"); + static final ClassName GROUPING_AGGREGATOR_STATE = ClassName.get(AGGREGATION_PACKAGE, "GroupingAggregatorState"); static final ClassName AGGREGATOR_FUNCTION = ClassName.get(AGGREGATION_PACKAGE, "AggregatorFunction"); static final ClassName AGGREGATOR_FUNCTION_SUPPLIER = ClassName.get(AGGREGATION_PACKAGE, "AggregatorFunctionSupplier"); @@ -138,89 +124,50 @@ public class Types { static final ClassName RELEASABLE = ClassName.get("org.elasticsearch.core", "Releasable"); static final ClassName RELEASABLES = ClassName.get("org.elasticsearch.core", "Releasables"); - static ClassName blockType(TypeName elementType) { - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_BLOCK; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_BLOCK; - } - if (elementType.equals(TypeName.INT)) { - return INT_BLOCK; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_BLOCK; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_BLOCK; + private record TypeDef(TypeName type, String alias, ClassName block, ClassName vector) { + + public static TypeDef of(TypeName type, String alias, String block, String vector) { + return new TypeDef(type, alias, ClassName.get(DATA_PACKAGE, block), ClassName.get(DATA_PACKAGE, vector)); } - throw new IllegalArgumentException("unknown block type for [" + elementType + "]"); + } + + private static final Map<String, TypeDef> TYPES = Stream.of( + TypeDef.of(TypeName.BOOLEAN, "BOOLEAN", "BooleanBlock", "BooleanVector"), + TypeDef.of(TypeName.INT, "INT", "IntBlock", "IntVector"), + TypeDef.of(TypeName.LONG, "LONG", "LongBlock", "LongVector"), + TypeDef.of(TypeName.FLOAT, "FLOAT", "FloatBlock", "FloatVector"), + TypeDef.of(TypeName.DOUBLE, "DOUBLE", "DoubleBlock", "DoubleVector"), + TypeDef.of(BYTES_REF, "BYTES_REF", "BytesRefBlock", "BytesRefVector") + ) + .flatMap(def -> Stream.of(def.type.toString(), def.type + "[]", def.alias).map(alias -> Map.entry(alias, def))) + .collect(toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + + private static TypeDef findRequired(String name, String kind) { + TypeDef typeDef = TYPES.get(name); + if (typeDef == null) { + throw new IllegalArgumentException("unknown " + kind + " type [" + name + "]"); + } + return typeDef; + } + + static TypeName fromString(String type) { + return findRequired(type, "plain").type; + } + + static ClassName blockType(TypeName elementType) { + return blockType(elementType.toString()); } static ClassName blockType(String elementType) { - if (elementType.equalsIgnoreCase(TypeName.BOOLEAN.toString())) { - return BOOLEAN_BLOCK; - } - if (elementType.equalsIgnoreCase("BYTES_REF")) { - return BYTES_REF_BLOCK; - } - if (elementType.equalsIgnoreCase(TypeName.INT.toString())) { - return INT_BLOCK; - } - if (elementType.equalsIgnoreCase(TypeName.LONG.toString())) { - return LONG_BLOCK; - } - if (elementType.equalsIgnoreCase(TypeName.DOUBLE.toString())) { - return DOUBLE_BLOCK; - } - if (elementType.equalsIgnoreCase(TypeName.FLOAT.toString())) { - return FLOAT_BLOCK; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); + return findRequired(elementType, "block").block; } static ClassName vectorType(TypeName elementType) { - if
(elementType.equals(BYTES_REF)) { - return BYTES_REF_VECTOR; - } - if (elementType.equals(TypeName.INT)) { - return INT_VECTOR; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_VECTOR; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_VECTOR; - } - if (elementType.equals(TypeName.FLOAT)) { - return FLOAT_VECTOR; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); + return vectorType(elementType.toString()); } static ClassName vectorType(String elementType) { - if (elementType.equalsIgnoreCase(TypeName.BOOLEAN.toString())) { - return BOOLEAN_VECTOR; - } - if (elementType.equalsIgnoreCase("BYTES_REF")) { - return BYTES_REF_VECTOR; - } - if (elementType.equalsIgnoreCase(TypeName.INT.toString())) { - return INT_VECTOR; - } - if (elementType.equalsIgnoreCase(TypeName.LONG.toString())) { - return LONG_VECTOR; - } - if (elementType.equalsIgnoreCase(TypeName.DOUBLE.toString())) { - return DOUBLE_VECTOR; - } - if (elementType.equalsIgnoreCase(TypeName.FLOAT.toString())) { - return FLOAT_VECTOR; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); + return findRequired(elementType, "vector").vector; } static ClassName builderType(TypeName resultType) { @@ -282,63 +229,6 @@ static ClassName vectorFixedBuilderType(TypeName elementType) { throw new IllegalArgumentException("unknown vector fixed builder type for [" + elementType + "]"); } - static ClassName arrayVectorType(TypeName elementType) { - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_ARRAY_VECTOR; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_ARRAY_VECTOR; - } - if (elementType.equals(TypeName.INT)) { - return INT_ARRAY_VECTOR; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_ARRAY_VECTOR; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_ARRAY_VECTOR; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); - } - - static ClassName arrayBlockType(TypeName elementType) { - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_ARRAY_BLOCK; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_ARRAY_BLOCK; - } - if (elementType.equals(TypeName.INT)) { - return INT_ARRAY_BLOCK; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_ARRAY_BLOCK; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_ARRAY_BLOCK; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); - } - - static ClassName constantVectorType(TypeName elementType) { - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_CONSTANT_VECTOR; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_CONSTANT_VECTOR; - } - if (elementType.equals(TypeName.INT)) { - return INT_CONSTANT_VECTOR; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_CONSTANT_VECTOR; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_CONSTANT_VECTOR; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); - } - static TypeName elementType(TypeName t) { if (t.equals(BOOLEAN_BLOCK) || t.equals(BOOLEAN_VECTOR) || t.equals(BOOLEAN_BLOCK_BUILDER)) { return TypeName.BOOLEAN; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java index cbd20f15c6511..deec1ef04f623 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java @@ -333,7 +333,8 @@ Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateFloatAggregator.java index b50b125d98331..94ad5254bc723 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateFloatAggregator.java @@ -334,7 +334,8 @@ Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java index 01c3e3d7fb8e7..011291dd08c52 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java @@ -334,7 +334,8 @@ Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java index c84985b703aed..9ccb5d3bd1b1a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java @@ -333,7 +333,8 @@ Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBooleanAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBooleanAggregator.java index 32391c4827303..a2e86b3b09340 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBooleanAggregator.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBooleanAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.BooleanBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BooleanBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBytesRefAggregator.java index c9b0e679b3e64..0a965899c0775 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBytesRefAggregator.java @@ -19,7 +19,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.BytesRefBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -78,7 +77,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BytesRefBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -95,7 +94,8 @@ public void merge(int groupId, GroupingState other, int 
otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -103,7 +103,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -113,7 +114,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -128,7 +129,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java index d9a7a302f07c8..6a20ed99bc236 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.DoubleBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final DoubleBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) 
{ @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java index 8b65261e10f46..cf6ad0f9017de 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.FloatBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final FloatBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java index 5c6b79f710af5..f4ac83c438063 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import 
org.elasticsearch.compute.data.sort.IntBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final IntBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIpAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIpAggregator.java index 219f7385b56df..292dd539edeb5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIpAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIpAggregator.java @@ -18,7 +18,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.IpBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -77,7 +76,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final IpBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -92,7 +91,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector 
selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -100,7 +100,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -110,7 +111,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -125,7 +126,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java index 44cef8df7257b..c5af92956bec1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.sort.LongBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, 
DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java index bd77bd7ff1e46..ad0ab2f7189f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java @@ -20,7 +20,6 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -83,14 +82,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BytesRefHash values; private SingleState(BigArrays bigArrays) { values = new BytesRefHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -125,7 +125,7 @@ public void close() { * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. */ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongLongHash values; private final BytesRefHash bytes; @@ -146,7 +146,8 @@ private GroupingState(BigArrays bigArrays) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -190,7 +191,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java index a8409367bc090..271d7120092ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java @@ -18,7 +18,6 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; /** * Aggregates field values for double. 
@@ -77,14 +76,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final LongHash values; private SingleState(BigArrays bigArrays) { values = new LongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -118,14 +118,15 @@ public void close() { * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. */ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongLongHash values; private GroupingState(BigArrays bigArrays) { values = new LongLongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -168,7 +169,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java index f9e5e1b7b283a..b44cad807fba2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.FloatBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; /** * Aggregates field values for float. @@ -82,14 +81,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final LongHash values; private SingleState(BigArrays bigArrays) { values = new LongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -123,14 +123,15 @@ public void close() { * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. 
      */
-    public static class GroupingState implements Releasable {
+    public static class GroupingState implements GroupingAggregatorState {
         private final LongHash values;
 
         private GroupingState(BigArrays bigArrays) {
             values = new LongHash(1, bigArrays);
         }
 
-        void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
+        @Override
+        public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
             blocks[offset] = toBlock(driverContext.blockFactory(), selected);
         }
 
@@ -175,7 +176,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) {
             }
         }
 
-        void enableGroupIdTracking(SeenGroupIds seen) {
+        @Override
+        public void enableGroupIdTracking(SeenGroupIds seen) {
             // we figure out seen values from nulls on the values block
         }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java
index 2420dcee70712..4d0c518245694 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java
@@ -17,7 +17,6 @@
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.operator.DriverContext;
-import org.elasticsearch.core.Releasable;
 
 /**
  * Aggregates field values for int.
@@ -82,14 +81,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive
         return state.toBlock(driverContext.blockFactory(), selected);
     }
 
-    public static class SingleState implements Releasable {
+    public static class SingleState implements AggregatorState {
         private final LongHash values;
 
         private SingleState(BigArrays bigArrays) {
             values = new LongHash(1, bigArrays);
         }
 
-        void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
+        @Override
+        public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
             blocks[offset] = toBlock(driverContext.blockFactory());
         }
 
@@ -123,14 +123,15 @@ public void close() {
      * an {@code O(n^2)} operation for collection to support a {@code O(1)}
      * collector operation. But at least it's fairly simple.
      */
-    public static class GroupingState implements Releasable {
+    public static class GroupingState implements GroupingAggregatorState {
         private final LongHash values;
 
         private GroupingState(BigArrays bigArrays) {
             values = new LongHash(1, bigArrays);
         }
 
-        void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
+        @Override
+        public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
             blocks[offset] = toBlock(driverContext.blockFactory(), selected);
         }
 
@@ -175,7 +176,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) {
             }
         }
 
-        void enableGroupIdTracking(SeenGroupIds seen) {
+        @Override
+        public void enableGroupIdTracking(SeenGroupIds seen) {
             // we figure out seen values from nulls on the values block
         }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java
index 4938b8f15edb0..5471c90147ec4 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.operator.DriverContext;
-import org.elasticsearch.core.Releasable;
 
 /**
  * Aggregates field values for long.
@@ -77,14 +76,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive
         return state.toBlock(driverContext.blockFactory(), selected);
     }
 
-    public static class SingleState implements Releasable {
+    public static class SingleState implements AggregatorState {
         private final LongHash values;
 
         private SingleState(BigArrays bigArrays) {
             values = new LongHash(1, bigArrays);
         }
 
-        void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
+        @Override
+        public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
             blocks[offset] = toBlock(driverContext.blockFactory());
         }
 
@@ -118,14 +118,15 @@ public void close() {
      * an {@code O(n^2)} operation for collection to support a {@code O(1)}
      * collector operation. But at least it's fairly simple.
      */
-    public static class GroupingState implements Releasable {
+    public static class GroupingState implements GroupingAggregatorState {
         private final LongLongHash values;
 
         private GroupingState(BigArrays bigArrays) {
             values = new LongLongHash(1, bigArrays);
         }
 
-        void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
+        @Override
+        public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
             blocks[offset] = toBlock(driverContext.blockFactory(), selected);
         }
 
@@ -168,7 +169,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) {
             }
         }
 
-        void enableGroupIdTracking(SeenGroupIds seen) {
+        @Override
+        public void enableGroupIdTracking(SeenGroupIds seen) {
             // we figure out seen values from nulls on the values block
         }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java
index 63318a2189908..ca89e6f999641 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
 import org.elasticsearch.common.util.DoubleArray;
+import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.IntVector;
@@ -101,7 +102,7 @@ public void collect(double value, int bucket) {
         // Gathering mode
         long requiredSize = rootIndex + bucketSize;
         if (values.size() < requiredSize) {
-            grow(requiredSize);
+            grow(bucket);
         }
         int next = getNextGatherOffset(rootIndex);
         assert 0 <= next && next < bucketSize
@@ -257,19 +258,25 @@ private void swap(long lhs, long rhs) {
     /**
      * Allocate storage for more buckets and store the "next gather offset"
-     * for those new buckets.
+     * for those new buckets. We always grow the storage by whole bucket's
+     * worth of slots at a time. We never allocate space for partial buckets.
      */
-    private void grow(long minSize) {
+    private void grow(int bucket) {
         long oldMax = values.size();
-        values = bigArrays.grow(values, minSize);
+        assert oldMax % bucketSize == 0;
+
+        long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.DOUBLE_PAGE_SIZE, Double.BYTES);
+        // Round up to the next full bucket.
+        newSize = (newSize + bucketSize - 1) / bucketSize;
+        values = bigArrays.resize(values, newSize * bucketSize);
         // Set the next gather offsets for all newly allocated buckets.
-        setNextGatherOffsets(oldMax - (oldMax % getBucketSize()));
+        fillGatherOffsets(oldMax);
     }
 
     /**
      * Maintain the "next gather offsets" for newly allocated buckets.
      */
-    private void setNextGatherOffsets(long startingAt) {
+    private void fillGatherOffsets(long startingAt) {
         int nextOffset = getBucketSize() - 1;
         for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) {
             setNextGatherOffset(bucketRoot, nextOffset);
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java
index b490fe193c33f..2bf8edd99f48c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
 import org.elasticsearch.common.util.FloatArray;
+import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.IntVector;
@@ -101,7 +102,7 @@ public void collect(float value, int bucket) {
         // Gathering mode
         long requiredSize = rootIndex + bucketSize;
         if (values.size() < requiredSize) {
-            grow(requiredSize);
+            grow(bucket);
         }
         int next = getNextGatherOffset(rootIndex);
         assert 0 <= next && next < bucketSize
@@ -257,19 +258,25 @@ private void swap(long lhs, long rhs) {
     /**
      * Allocate storage for more buckets and store the "next gather offset"
-     * for those new buckets.
+     * for those new buckets. We always grow the storage by whole bucket's
+     * worth of slots at a time. We never allocate space for partial buckets.
      */
-    private void grow(long minSize) {
+    private void grow(int bucket) {
         long oldMax = values.size();
-        values = bigArrays.grow(values, minSize);
+        assert oldMax % bucketSize == 0;
+
+        long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.FLOAT_PAGE_SIZE, Float.BYTES);
+        // Round up to the next full bucket.
+        newSize = (newSize + bucketSize - 1) / bucketSize;
+        values = bigArrays.resize(values, newSize * bucketSize);
         // Set the next gather offsets for all newly allocated buckets.
-        setNextGatherOffsets(oldMax - (oldMax % getBucketSize()));
+        fillGatherOffsets(oldMax);
     }
 
     /**
      * Maintain the "next gather offsets" for newly allocated buckets.
      */
-    private void setNextGatherOffsets(long startingAt) {
+    private void fillGatherOffsets(long startingAt) {
         int nextOffset = getBucketSize() - 1;
         for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) {
             setNextGatherOffset(bucketRoot, nextOffset);
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java
index 04a635d75fe52..257dfe2ebb0bd 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
 import org.elasticsearch.common.util.IntArray;
+import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.IntVector;
@@ -101,7 +102,7 @@ public void collect(int value, int bucket) {
         // Gathering mode
         long requiredSize = rootIndex + bucketSize;
         if (values.size() < requiredSize) {
-            grow(requiredSize);
+            grow(bucket);
         }
         int next = getNextGatherOffset(rootIndex);
         assert 0 <= next && next < bucketSize
@@ -257,19 +258,25 @@ private void swap(long lhs, long rhs) {
     /**
      * Allocate storage for more buckets and store the "next gather offset"
-     * for those new buckets.
+     * for those new buckets. We always grow the storage by whole bucket's
+     * worth of slots at a time. We never allocate space for partial buckets.
      */
-    private void grow(long minSize) {
+    private void grow(int bucket) {
         long oldMax = values.size();
-        values = bigArrays.grow(values, minSize);
+        assert oldMax % bucketSize == 0;
+
+        long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.INT_PAGE_SIZE, Integer.BYTES);
+        // Round up to the next full bucket.
+        newSize = (newSize + bucketSize - 1) / bucketSize;
+        values = bigArrays.resize(values, newSize * bucketSize);
         // Set the next gather offsets for all newly allocated buckets.
-        setNextGatherOffsets(oldMax - (oldMax % getBucketSize()));
+        fillGatherOffsets(oldMax);
     }
 
     /**
      * Maintain the "next gather offsets" for newly allocated buckets.
      */
-    private void setNextGatherOffsets(long startingAt) {
+    private void fillGatherOffsets(long startingAt) {
         int nextOffset = getBucketSize() - 1;
         for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) {
             setNextGatherOffset(bucketRoot, nextOffset);
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java
index e08c25256944b..c27467ebb60ff 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
 import org.elasticsearch.common.util.LongArray;
+import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.IntVector;
@@ -101,7 +102,7 @@ public void collect(long value, int bucket) {
         // Gathering mode
         long requiredSize = rootIndex + bucketSize;
         if (values.size() < requiredSize) {
-            grow(requiredSize);
+            grow(bucket);
         }
         int next = getNextGatherOffset(rootIndex);
         assert 0 <= next && next < bucketSize
@@ -257,19 +258,25 @@ private void swap(long lhs, long rhs) {
     /**
      * Allocate storage for more buckets and store the "next gather offset"
-     * for those new buckets.
+     * for those new buckets. We always grow the storage by whole bucket's
+     * worth of slots at a time. We never allocate space for partial buckets.
      */
-    private void grow(long minSize) {
+    private void grow(int bucket) {
         long oldMax = values.size();
-        values = bigArrays.grow(values, minSize);
+        assert oldMax % bucketSize == 0;
+
+        long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.LONG_PAGE_SIZE, Long.BYTES);
+        // Round up to the next full bucket.
+        newSize = (newSize + bucketSize - 1) / bucketSize;
+        values = bigArrays.resize(values, newSize * bucketSize);
         // Set the next gather offsets for all newly allocated buckets.
-        setNextGatherOffsets(oldMax - (oldMax % getBucketSize()));
+        fillGatherOffsets(oldMax);
     }
 
     /**
      * Maintain the "next gather offsets" for newly allocated buckets.
      */
-    private void setNextGatherOffsets(long startingAt) {
+    private void fillGatherOffsets(long startingAt) {
         int nextOffset = getBucketSize() - 1;
         for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) {
             setNextGatherOffset(bucketRoot, nextOffset);
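The rewritten grow(int bucket) in the four BucketedSort diffs above replaces "grow to at least minSize" with "over-allocate, then round up to a whole number of buckets". A standalone sketch of that rounding, using only arithmetic shown in the diff itself (BigArrays.overSize is the existing page-aware over-allocation helper; the helper name below is illustrative):

    // Minimal sketch of the rounding in grow(int bucket).
    // Example: bucketSize = 100 and bucket = 7 need at least 800 slots;
    // if over-allocation pads that to 1024, rounding up to whole buckets
    // yields (1024 + 99) / 100 = 11 buckets, i.e. 1100 slots.
    static long grownSlotCount(int bucketSize, long overSizedSlots) {
        long wholeBuckets = (overSizedSlots + bucketSize - 1) / bucketSize; // round up
        return wholeBuckets * bucketSize; // never a partial bucket
    }

Rounding to whole buckets keeps the invariant asserted at the top of grow (values.size() is always a multiple of bucketSize), so fillGatherOffsets can start exactly at the old end of the array instead of re-deriving the last partial bucket boundary.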
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java
index 6ea78052c5f5b..bbfb2a34f920c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java
@@ -15,20 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class CountDistinctBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public CountDistinctBooleanAggregatorFunctionSupplier() {
+  }
 
-  public CountDistinctBooleanAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return CountDistinctBooleanAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return CountDistinctBooleanGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public CountDistinctBooleanAggregatorFunction aggregator(DriverContext driverContext) {
+  public CountDistinctBooleanAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return CountDistinctBooleanAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
   public CountDistinctBooleanGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return CountDistinctBooleanGroupingAggregatorFunction.create(channels, driverContext);
   }
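Every generated supplier below follows the template just shown for CountDistinctBoolean: the channels list leaves the constructor and becomes an argument of aggregator() and groupingAggregator(), and each supplier now reports its intermediate state layout. A sketch of the revised AggregatorFunctionSupplier interface that these implementations imply; this is pieced together from the diff, not the repo's actual declaration, so return types and any default methods are assumptions:

    // Inferred shape of the revised AggregatorFunctionSupplier.
    public interface AggregatorFunctionSupplier {
        // Layout of the intermediate (partial) aggregation state, now
        // queryable without first building an aggregator.
        List<IntermediateStateDesc> nonGroupingIntermediateStateDesc();

        List<IntermediateStateDesc> groupingIntermediateStateDesc();

        // Channels are bound per aggregator instance, not per supplier.
        AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels);

        GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List<Integer> channels);
    }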
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java
index 9191b7d7cfa5a..cb92d715c91d6 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java
@@ -15,23 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class CountDistinctBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int precision;
 
-  public CountDistinctBytesRefAggregatorFunctionSupplier(List<Integer> channels, int precision) {
-    this.channels = channels;
+  public CountDistinctBytesRefAggregatorFunctionSupplier(int precision) {
     this.precision = precision;
   }
 
   @Override
-  public CountDistinctBytesRefAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return CountDistinctBytesRefAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return CountDistinctBytesRefGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public CountDistinctBytesRefAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return CountDistinctBytesRefAggregatorFunction.create(driverContext, channels, precision);
   }
 
   @Override
   public CountDistinctBytesRefGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return CountDistinctBytesRefGroupingAggregatorFunction.create(channels, driverContext, precision);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java
index 08153afd30d8e..f4d9c2425b4ef 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java
@@ -15,23 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class CountDistinctDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int precision;
 
-  public CountDistinctDoubleAggregatorFunctionSupplier(List<Integer> channels, int precision) {
-    this.channels = channels;
+  public CountDistinctDoubleAggregatorFunctionSupplier(int precision) {
     this.precision = precision;
   }
 
   @Override
-  public CountDistinctDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return CountDistinctDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return CountDistinctDoubleGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public CountDistinctDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
    return CountDistinctDoubleAggregatorFunction.create(driverContext, channels, precision);
   }
 
   @Override
   public CountDistinctDoubleGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return CountDistinctDoubleGroupingAggregatorFunction.create(channels, driverContext, precision);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java
index a107f38d07a55..f4c941d8d7f59 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java
@@ -15,23 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class CountDistinctFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int precision;
 
-  public CountDistinctFloatAggregatorFunctionSupplier(List<Integer> channels, int precision) {
-    this.channels = channels;
+  public CountDistinctFloatAggregatorFunctionSupplier(int precision) {
     this.precision = precision;
   }
 
   @Override
-  public CountDistinctFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return CountDistinctFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return CountDistinctFloatGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public CountDistinctFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return CountDistinctFloatAggregatorFunction.create(driverContext, channels, precision);
   }
 
   @Override
   public CountDistinctFloatGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return CountDistinctFloatGroupingAggregatorFunction.create(channels, driverContext, precision);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java
index 891b2f7f553ed..8a09acde91568 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java
@@ -15,23 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class CountDistinctIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int precision;
 
-  public CountDistinctIntAggregatorFunctionSupplier(List<Integer> channels, int precision) {
-    this.channels = channels;
+  public CountDistinctIntAggregatorFunctionSupplier(int precision) {
     this.precision = precision;
   }
 
   @Override
-  public CountDistinctIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return CountDistinctIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return CountDistinctIntGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public CountDistinctIntAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return CountDistinctIntAggregatorFunction.create(driverContext, channels, precision);
   }
 
   @Override
-  public CountDistinctIntGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+  public CountDistinctIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return CountDistinctIntGroupingAggregatorFunction.create(channels, driverContext, precision);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java
index b9b171c45f883..1443fb6d66e66 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java
@@ -15,23 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class CountDistinctLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int precision;
 
-  public CountDistinctLongAggregatorFunctionSupplier(List<Integer> channels, int precision) {
-    this.channels = channels;
+  public CountDistinctLongAggregatorFunctionSupplier(int precision) {
     this.precision = precision;
   }
 
   @Override
-  public CountDistinctLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return CountDistinctLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return CountDistinctLongGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public CountDistinctLongAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return CountDistinctLongAggregatorFunction.create(driverContext, channels, precision);
   }
 
   @Override
-  public CountDistinctLongGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+  public CountDistinctLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return CountDistinctLongGroupingAggregatorFunction.create(channels, driverContext, precision);
   }
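For parameterized suppliers like the CountDistinct family above, only the algorithm configuration (here the HyperLogLog++ precision) stays in the constructor, so one supplier can stamp out aggregators over different channel sets. A hypothetical call site; the variable names and values are illustrative, not taken from the repo:

    // Hypothetical usage of the refactored supplier API.
    AggregatorFunctionSupplier supplier = new CountDistinctLongAggregatorFunctionSupplier(3000); // precision only
    List<Integer> channels = List.of(0); // input channel(s), chosen at plan time
    AggregatorFunction agg = supplier.aggregator(driverContext, channels);
    GroupingAggregatorFunction groupingAgg = supplier.groupingAggregator(driverContext, channels);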
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java
index d000f49920a3d..e8ccdb92e5198 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MaxBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MaxBooleanAggregatorFunctionSupplier() {
+  }
 
-  public MaxBooleanAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MaxBooleanAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MaxBooleanGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MaxBooleanAggregatorFunction aggregator(DriverContext driverContext) {
+  public MaxBooleanAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxBooleanAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MaxBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MaxBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxBooleanGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java
index 9c97ce88c0063..bc52373bb933f 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MaxBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MaxBytesRefAggregatorFunctionSupplier() {
+  }
 
-  public MaxBytesRefAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MaxBytesRefAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MaxBytesRefGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MaxBytesRefAggregatorFunction aggregator(DriverContext driverContext) {
+  public MaxBytesRefAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxBytesRefAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MaxBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MaxBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxBytesRefGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java
index df4d2749c4361..417cb4b7c9c37 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MaxDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MaxDoubleAggregatorFunctionSupplier() {
+  }
 
-  public MaxDoubleAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MaxDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MaxDoubleGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MaxDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public MaxDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxDoubleAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MaxDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MaxDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxDoubleGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java
index 70628ace17f37..3279506e75afa 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MaxFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MaxFloatAggregatorFunctionSupplier() {
+  }
 
-  public MaxFloatAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MaxFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MaxFloatGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MaxFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public MaxFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxFloatAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MaxFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MaxFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxFloatGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java
index c2c6fdcb8e1a7..0e1dca2b52f0d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java
@@ -15,19 +15,27 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MaxIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MaxIntAggregatorFunctionSupplier() {
+  }
 
-  public MaxIntAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MaxIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MaxIntGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MaxIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public MaxIntAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return MaxIntAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MaxIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MaxIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxIntGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java
index cd08981c7b2ab..fa84acd602af4 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java
@@ -15,19 +15,27 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MaxIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MaxIpAggregatorFunctionSupplier() {
+  }
 
-  public MaxIpAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MaxIpAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MaxIpGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MaxIpAggregatorFunction aggregator(DriverContext driverContext) {
+  public MaxIpAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return MaxIpAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MaxIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MaxIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxIpGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java
index 0a56f31076008..7683622aadd12 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java
@@ -15,19 +15,27 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MaxLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MaxLongAggregatorFunctionSupplier() {
+  }
 
-  public MaxLongAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MaxLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MaxLongGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MaxLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public MaxLongAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return MaxLongAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MaxLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MaxLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MaxLongGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java
index b78346f4b57b2..08ae3c3fe8664 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java
@@ -15,20 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier() {
+  }
 
-  public MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MedianAbsoluteDeviationDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MedianAbsoluteDeviationDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public MedianAbsoluteDeviationDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MedianAbsoluteDeviationDoubleAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
   public MedianAbsoluteDeviationDoubleGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java
index 069f125c0347d..d63c9ce2dcdcd 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java
@@ -15,20 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MedianAbsoluteDeviationFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MedianAbsoluteDeviationFloatAggregatorFunctionSupplier() {
+  }
 
-  public MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MedianAbsoluteDeviationFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MedianAbsoluteDeviationFloatGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MedianAbsoluteDeviationFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public MedianAbsoluteDeviationFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MedianAbsoluteDeviationFloatAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
   public MedianAbsoluteDeviationFloatGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return MedianAbsoluteDeviationFloatGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java
index 147809fae080c..c496749ff19e5 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java
@@ -15,20 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MedianAbsoluteDeviationIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MedianAbsoluteDeviationIntAggregatorFunctionSupplier() {
+  }
 
-  public MedianAbsoluteDeviationIntAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MedianAbsoluteDeviationIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MedianAbsoluteDeviationIntGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MedianAbsoluteDeviationIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public MedianAbsoluteDeviationIntAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MedianAbsoluteDeviationIntAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
   public MedianAbsoluteDeviationIntGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return MedianAbsoluteDeviationIntGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java
index 1246c96941c37..25af01363494d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java
@@ -15,20 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MedianAbsoluteDeviationLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MedianAbsoluteDeviationLongAggregatorFunctionSupplier() {
+  }
 
-  public MedianAbsoluteDeviationLongAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MedianAbsoluteDeviationLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MedianAbsoluteDeviationLongGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MedianAbsoluteDeviationLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public MedianAbsoluteDeviationLongAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MedianAbsoluteDeviationLongAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
   public MedianAbsoluteDeviationLongGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return MedianAbsoluteDeviationLongGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java
index 02d2fda11ff7a..53f80570e3976 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MinBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MinBooleanAggregatorFunctionSupplier() {
+  }
 
-  public MinBooleanAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MinBooleanAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MinBooleanGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MinBooleanAggregatorFunction aggregator(DriverContext driverContext) {
+  public MinBooleanAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinBooleanAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MinBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MinBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinBooleanGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java
index 65e7b4b58e94d..2588947976980 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MinBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MinBytesRefAggregatorFunctionSupplier() {
+  }
 
-  public MinBytesRefAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MinBytesRefAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MinBytesRefGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MinBytesRefAggregatorFunction aggregator(DriverContext driverContext) {
+  public MinBytesRefAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinBytesRefAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MinBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MinBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinBytesRefGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java
index 18aadce9baa58..3af1017b5de2c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MinDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MinDoubleAggregatorFunctionSupplier() {
+  }
 
-  public MinDoubleAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MinDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MinDoubleGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MinDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public MinDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinDoubleAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MinDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MinDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinDoubleGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java
index 04d08ed6ea4b6..c120706ebba29 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MinFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MinFloatAggregatorFunctionSupplier() {
+  }
 
-  public MinFloatAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MinFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MinFloatGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MinFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public MinFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinFloatAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MinFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MinFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinFloatGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java
index 51761433e0254..c44b47bad0cfa 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java
@@ -15,19 +15,27 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MinIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MinIntAggregatorFunctionSupplier() {
+  }
 
-  public MinIntAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MinIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MinIntGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MinIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public MinIntAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return MinIntAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MinIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MinIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinIntGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java
index dd066820b50e7..a00ebdb43e1ac 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java
@@ -15,19 +15,27 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MinIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MinIpAggregatorFunctionSupplier() {
+  }
 
-  public MinIpAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MinIpAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MinIpGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public MinIpAggregatorFunction aggregator(DriverContext driverContext) {
+  public MinIpAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return MinIpAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public MinIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MinIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinIpGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java
index 58311b65589f3..850ae6284e0f5 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java
@@ -15,19 +15,27 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class MinLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public MinLongAggregatorFunctionSupplier() {
+  }

-  public MinLongAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return MinLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return MinLongGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public MinLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public MinLongAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return MinLongAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public MinLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public MinLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return MinLongGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java
index 28fe487f99197..bd50841421a6a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java
@@ -15,23 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class PercentileDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final double percentile;

-  public PercentileDoubleAggregatorFunctionSupplier(List<Integer> channels, double percentile) {
-    this.channels = channels;
+  public PercentileDoubleAggregatorFunctionSupplier(double percentile) {
     this.percentile = percentile;
   }

   @Override
-  public PercentileDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return PercentileDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return PercentileDoubleGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public PercentileDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return PercentileDoubleAggregatorFunction.create(driverContext, channels, percentile);
   }

   @Override
-  public PercentileDoubleGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+  public PercentileDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return PercentileDoubleGroupingAggregatorFunction.create(channels, driverContext, percentile);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java
index 4288d062ec238..d705ad8da70d8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java
@@ -15,22 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class PercentileFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final double percentile;

-  public PercentileFloatAggregatorFunctionSupplier(List<Integer> channels, double percentile) {
-    this.channels = channels;
+  public PercentileFloatAggregatorFunctionSupplier(double percentile) {
     this.percentile = percentile;
   }

   @Override
-  public PercentileFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return PercentileFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return PercentileFloatGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public PercentileFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return PercentileFloatAggregatorFunction.create(driverContext, channels, percentile);
   }

   @Override
-  public PercentileFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public PercentileFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return PercentileFloatGroupingAggregatorFunction.create(channels, driverContext, percentile);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java
index 3a9996aed0d8c..d925ef91ed6ef 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java
@@ -15,22 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class PercentileIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final double percentile;

-  public PercentileIntAggregatorFunctionSupplier(List<Integer> channels, double percentile) {
-    this.channels = channels;
+  public PercentileIntAggregatorFunctionSupplier(double percentile) {
     this.percentile = percentile;
   }

   @Override
-  public PercentileIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return PercentileIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return PercentileIntGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public PercentileIntAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return PercentileIntAggregatorFunction.create(driverContext, channels, percentile);
   }

   @Override
-  public PercentileIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public PercentileIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return PercentileIntGroupingAggregatorFunction.create(channels, driverContext, percentile);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java
index d3cdf57a1862f..36d2ed23cee94 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java
@@ -15,22 +15,31 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class PercentileLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final double percentile;

-  public PercentileLongAggregatorFunctionSupplier(List<Integer> channels, double percentile) {
-    this.channels = channels;
+  public PercentileLongAggregatorFunctionSupplier(double percentile) {
     this.percentile = percentile;
   }

   @Override
-  public PercentileLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return PercentileLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return PercentileLongGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public PercentileLongAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return PercentileLongAggregatorFunction.create(driverContext, channels, percentile);
   }

   @Override
-  public PercentileLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public PercentileLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return PercentileLongGroupingAggregatorFunction.create(channels, driverContext, percentile);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java
index 92d73864fa772..d2dd780bf43a5 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java
@@ -15,22 +15,30 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class RateDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final long unitInMillis;

-  public RateDoubleAggregatorFunctionSupplier(List<Integer> channels, long unitInMillis) {
-    this.channels = channels;
+  public RateDoubleAggregatorFunctionSupplier(long unitInMillis) {
     this.unitInMillis = unitInMillis;
   }

   @Override
-  public AggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    throw new UnsupportedOperationException("non-grouping aggregator is not supported");
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return RateDoubleGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     throw new UnsupportedOperationException("non-grouping aggregator is not supported");
   }

   @Override
-  public RateDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public RateDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return RateDoubleGroupingAggregatorFunction.create(channels, driverContext, unitInMillis);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java
index d4914ba36e803..be8456b28b3fe 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java
@@ -15,22 +15,30 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class RateFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final long unitInMillis;

-  public RateFloatAggregatorFunctionSupplier(List<Integer> channels, long unitInMillis) {
-    this.channels = channels;
+  public RateFloatAggregatorFunctionSupplier(long unitInMillis) {
     this.unitInMillis = unitInMillis;
   }

   @Override
-  public AggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    throw new UnsupportedOperationException("non-grouping aggregator is not supported");
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return RateFloatGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     throw new UnsupportedOperationException("non-grouping aggregator is not supported");
   }

   @Override
-  public RateFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public RateFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return RateFloatGroupingAggregatorFunction.create(channels, driverContext, unitInMillis);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java
index 6c0fd0ed21957..c9c6ce5a55bed 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java
@@ -15,22 +15,30 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class RateIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final long unitInMillis;

-  public RateIntAggregatorFunctionSupplier(List<Integer> channels, long unitInMillis) {
-    this.channels = channels;
+  public RateIntAggregatorFunctionSupplier(long unitInMillis) {
     this.unitInMillis = unitInMillis;
   }

   @Override
-  public AggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    throw new UnsupportedOperationException("non-grouping aggregator is not supported");
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return RateIntGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     throw new UnsupportedOperationException("non-grouping aggregator is not supported");
   }

   @Override
-  public RateIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public RateIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return RateIntGroupingAggregatorFunction.create(channels, driverContext, unitInMillis);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java
index 311616effba37..a1f503b726aa4 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java
@@ -15,22 +15,30 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class RateLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final long unitInMillis;

-  public RateLongAggregatorFunctionSupplier(List<Integer> channels, long unitInMillis) {
-    this.channels = channels;
+  public RateLongAggregatorFunctionSupplier(long unitInMillis) {
     this.unitInMillis = unitInMillis;
   }

   @Override
-  public AggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    throw new UnsupportedOperationException("non-grouping aggregator is not supported");
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return RateLongGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     throw new UnsupportedOperationException("non-grouping aggregator is not supported");
   }

   @Override
-  public RateLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public RateLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return RateLongGroupingAggregatorFunction.create(channels, driverContext, unitInMillis);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java
index caf53dad23b0d..5310a11c1fddb 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class StdDevDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public StdDevDoubleAggregatorFunctionSupplier() {
+  }

-  public StdDevDoubleAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return StdDevDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return StdDevDoubleGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public StdDevDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public StdDevDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevDoubleAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public StdDevDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public StdDevDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevDoubleGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java
index c807c1582e1ca..52ffb0f5d580d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class StdDevFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public StdDevFloatAggregatorFunctionSupplier() {
+  }

-  public StdDevFloatAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return StdDevFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return StdDevFloatGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public StdDevFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public StdDevFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevFloatAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public StdDevFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public StdDevFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevFloatGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java
index 36560af8557e2..2f43a867bf83e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class StdDevIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public StdDevIntAggregatorFunctionSupplier() {
+  }

-  public StdDevIntAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return StdDevIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return StdDevIntGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public StdDevIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public StdDevIntAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevIntAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public StdDevIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public StdDevIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevIntGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java
index dc6ed063031ed..364fc4820c283 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class StdDevLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public StdDevLongAggregatorFunctionSupplier() {
+  }

-  public StdDevLongAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return StdDevLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return StdDevLongGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public StdDevLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public StdDevLongAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevLongAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public StdDevLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public StdDevLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevLongGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java
index a88b6ddc3bf5b..6fbe13d696ec9 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SumDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SumDoubleAggregatorFunctionSupplier() {
+  }

-  public SumDoubleAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SumDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SumDoubleGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public SumDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public SumDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumDoubleAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public SumDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public SumDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumDoubleGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java
index 5a01eaeaafd39..9b9d863fc8171 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SumFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SumFloatAggregatorFunctionSupplier() {
+  }

-  public SumFloatAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SumFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SumFloatGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public SumFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public SumFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumFloatAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public SumFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public SumFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumFloatGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java
index bef192a06c3df..ef48162d214b6 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java
@@ -15,19 +15,27 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SumIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SumIntAggregatorFunctionSupplier() {
+  }

-  public SumIntAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SumIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SumIntGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public SumIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public SumIntAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return SumIntAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public SumIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public SumIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumIntGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java
index 9f5f3d7d493aa..fe666c535f63a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java
@@ -15,19 +15,27 @@
  * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SumLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SumLongAggregatorFunctionSupplier() {
+  }

-  public SumLongAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SumLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SumLongGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public SumLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public SumLongAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return SumLongAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public SumLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public SumLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumLongGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java
index aebe53c4c4fbf..5ee84e44bad68 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java
@@ -15,26 +15,34 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;

   private final boolean ascending;

-  public TopBooleanAggregatorFunctionSupplier(List<Integer> channels, int limit,
-      boolean ascending) {
-    this.channels = channels;
+  public TopBooleanAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }

   @Override
-  public TopBooleanAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopBooleanAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopBooleanGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopBooleanAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopBooleanAggregatorFunction.create(driverContext, channels, limit, ascending);
   }

   @Override
-  public TopBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopBooleanGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java
index 9108cfcef1892..89417c4a98ad6 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java
@@ -15,26 +15,34 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;

   private final boolean ascending;

-  public TopBytesRefAggregatorFunctionSupplier(List<Integer> channels, int limit,
-      boolean ascending) {
-    this.channels = channels;
+  public TopBytesRefAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }

   @Override
-  public TopBytesRefAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopBytesRefAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopBytesRefGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopBytesRefAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopBytesRefAggregatorFunction.create(driverContext, channels, limit, ascending);
   }

   @Override
-  public TopBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopBytesRefGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java
index 3e65be2efb210..0aa10d3cc48ed 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java
@@ -15,25 +15,34 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;

   private final boolean ascending;

-  public TopDoubleAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) {
-    this.channels = channels;
+  public TopDoubleAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }

   @Override
-  public TopDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopDoubleGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopDoubleAggregatorFunction.create(driverContext, channels, limit, ascending);
   }

   @Override
-  public TopDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopDoubleGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
index 79561a349cef1..52e2dbc304955 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
@@ -15,25 +15,34 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;

   private final boolean ascending;

-  public TopFloatAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) {
-    this.channels = channels;
+  public TopFloatAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }

   @Override
-  public TopFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopFloatGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopFloatAggregatorFunction.create(driverContext, channels, limit, ascending);
   }

   @Override
-  public TopFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopFloatGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
index cd7690f189007..88919a4b25ce4 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
@@ -15,25 +15,33 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;

   private final boolean ascending;

-  public TopIntAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) {
-    this.channels = channels;
+  public TopIntAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }

   @Override
-  public TopIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopIntGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopIntAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return TopIntAggregatorFunction.create(driverContext, channels, limit, ascending);
   }

   @Override
-  public TopIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopIntGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java
index 9b137b39d8e89..461f9809b673e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java
@@ -15,25 +15,33 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;

   private final boolean ascending;

-  public TopIpAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) {
-    this.channels = channels;
+  public TopIpAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }

   @Override
-  public TopIpAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopIpAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopIpGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopIpAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return TopIpAggregatorFunction.create(driverContext, channels, limit, ascending);
   }

   @Override
-  public TopIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopIpGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java
index 8fd7f59135986..cefd6082c22ec 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java
@@ -15,25 +15,33 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;

   private final boolean ascending;

-  public TopLongAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) {
-    this.channels = channels;
+  public TopLongAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }

   @Override
-  public TopLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopLongGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopLongAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return TopLongAggregatorFunction.create(driverContext, channels, limit, ascending);
   }

   @Override
-  public TopLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopLongGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java
index b15dd0ed696ab..80279a8d6c731 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public ValuesBooleanAggregatorFunctionSupplier() {
+  }

-  public ValuesBooleanAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return ValuesBooleanAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return ValuesBooleanGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public ValuesBooleanAggregatorFunction aggregator(DriverContext driverContext) {
+  public ValuesBooleanAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesBooleanAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public ValuesBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public ValuesBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesBooleanGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java
index c09331bf19709..16fa41876122a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public ValuesBytesRefAggregatorFunctionSupplier() {
+  }

-  public ValuesBytesRefAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return ValuesBytesRefAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return ValuesBytesRefGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public ValuesBytesRefAggregatorFunction aggregator(DriverContext driverContext) {
+  public ValuesBytesRefAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesBytesRefAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public ValuesBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public ValuesBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesBytesRefGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java
index c8f93159eb3c1..0a70a3d71ef9c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public ValuesDoubleAggregatorFunctionSupplier() {
+  }

-  public ValuesDoubleAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return ValuesDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return ValuesDoubleGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public ValuesDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public ValuesDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesDoubleAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public ValuesDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public ValuesDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesDoubleGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java
index 7802a06a6935f..f8c395b01b5ce 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public ValuesFloatAggregatorFunctionSupplier() {
+  }

-  public ValuesFloatAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return ValuesFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return ValuesFloatGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public ValuesFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public ValuesFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesFloatAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public ValuesFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public ValuesFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesFloatGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java
index a86b3838d7c92..f4aa9722bff7a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public ValuesIntAggregatorFunctionSupplier() {
+  }

-  public ValuesIntAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return ValuesIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return ValuesIntGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public ValuesIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public ValuesIntAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesIntAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public ValuesIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public ValuesIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesIntGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java
index dd302cc4eb69e..9f3bbf2b3122a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java
@@ -15,19 +15,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public ValuesLongAggregatorFunctionSupplier() {
+  }

-  public ValuesLongAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return ValuesLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return ValuesLongGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
-  public ValuesLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public ValuesLongAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesLongAggregatorFunction.create(driverContext, channels);
   }

   @Override
-  public ValuesLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public ValuesLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return ValuesLongGroupingAggregatorFunction.create(channels, driverContext);
   }

diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java
index 19139c22863d9..593e7c9d42916 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java
@@ -9,6 +9,7 @@
 import java.lang.String;
 import java.util.List;
 import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier;
+import org.elasticsearch.compute.aggregation.IntermediateStateDesc;
 import org.elasticsearch.compute.operator.DriverContext;

 /**
@@ -16,21 +17,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier() {
+  }

-  public SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SpatialCentroidCartesianPointDocValuesAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.intermediateStateDesc();
   }

   @Override
   public SpatialCentroidCartesianPointDocValuesAggregatorFunction aggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return SpatialCentroidCartesianPointDocValuesAggregatorFunction.create(driverContext, channels);
   }

   @Override
   public SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
+      DriverContext driverContext, List<Integer> channels) {
     return SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.create(channels,
         driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java
index b43fb64f6730b..8ae5fc6180d97 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java
@@ -9,6 +9,7 @@
 import java.lang.String;
 import java.util.List;
 import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier;
+import org.elasticsearch.compute.aggregation.IntermediateStateDesc;
 import org.elasticsearch.compute.operator.DriverContext;

 /**
@@ -16,22 +17,28 @@
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
*/ public final class SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier() { + } - public SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier( - List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialCentroidCartesianPointSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialCentroidCartesianPointSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialCentroidCartesianPointSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java index 34414a9e9c5c3..ae38d6d91ab82 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,21 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier() { + } - public SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialCentroidGeoPointDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialCentroidGeoPointDocValuesAggregatorFunction aggregator( - DriverContext driverContext) { + public SpatialCentroidGeoPointDocValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialCentroidGeoPointDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java index 80f608a10a6fb..e10e2b50ef615 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,21 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier() { + } - public SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialCentroidGeoPointSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialCentroidGeoPointSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialCentroidGeoPointSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java index c9447dfce0f19..0d41ea3bf7e80 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,21 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier() { + } - public SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentCartesianPointDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialExtentCartesianPointDocValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianPointDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java index d0cd2e33fe0f8..1cccb66bfa0ea 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,21 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier() { + } - public SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentCartesianPointSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialExtentCartesianPointSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianPointSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java index 2fa68f5226488..40432cfb548e1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,21 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier() { + } - public SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentCartesianShapeDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialExtentCartesianShapeDocValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianShapeDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java index 822a10fbe4794..dfb2aaee9aff9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,21 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier() { + } - public SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentCartesianShapeSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialExtentCartesianShapeSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianShapeSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java index 3c5d7c8355133..1f2dfb378498f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,20 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier() { + } - public SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentGeoPointDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialExtentGeoPointDocValuesAggregatorFunction aggregator(DriverContext driverContext) { + public SpatialExtentGeoPointDocValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialExtentGeoPointDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentGeoPointDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java index 8018b7d8d829b..e97d858511c04 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,21 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier() { + } - public SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentGeoPointSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialExtentGeoPointSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + public SpatialExtentGeoPointSourceValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialExtentGeoPointSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java index cd36ee8fd14a2..9582411551572 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,20 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier() { + } - public SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentGeoShapeDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialExtentGeoShapeDocValuesAggregatorFunction aggregator(DriverContext driverContext) { + public SpatialExtentGeoShapeDocValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialExtentGeoShapeDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java index 95aa4f3d30070..be425646e90ed 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** @@ -16,21 +17,28 @@ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier() { + } - public SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentGeoShapeSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialExtentGeoShapeSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + public SpatialExtentGeoShapeSourceValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialExtentGeoShapeSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/EsqlRefCountingListener.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/EsqlRefCountingListener.java index 69df0fb8ceff1..2dfc60744be2e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/EsqlRefCountingListener.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/EsqlRefCountingListener.java @@ -34,7 +34,8 @@ public EsqlRefCountingListener(ActionListener delegate) { } public ActionListener acquire() { - return refs.acquireListener().delegateResponse((l, e) -> { + var listener = ActionListener.assertAtLeastOnce(refs.acquireListener()); + return listener.delegateResponse((l, e) -> { failureCollector.unwrapAndCollect(e); l.onFailure(e); }); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java index 5fa1394e8cf96..9886e0c1af306 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java @@ -37,6 +37,7 @@ public boolean hasValue(int groupId) { * idempotent and fast if already tracking so it's safe to, say, call it once * for every block of values that arrives containing {@code null}. 
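[Editor's note] One non-generated change is buried in the hunk above: EsqlRefCountingListener.acquire() now wraps each acquired ref in ActionListener.assertAtLeastOnce before attaching the failure-collecting delegate, so (when assertions are enabled) a listener that is dropped without ever being completed is reported instead of silently leaking a ref. A minimal usage sketch, with stand-in handlers:

    import org.elasticsearch.action.ActionListener;

    class GuardedListenerExample {
        // Sketch: with assertions enabled (-ea), the wrapper trips if the
        // returned listener never sees onResponse or onFailure.
        static ActionListener<Void> guarded() {
            ActionListener<Void> inner = ActionListener.wrap(
                response -> {}, // stand-in success handler
                failure -> {}   // stand-in failure handler
            );
            return ActionListener.assertAtLeastOnce(inner);
        }
    }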
*/ + @Override public final void enableGroupIdTracking(SeenGroupIds seenGroupIds) { if (seen == null) { seen = seenGroupIds.seenGroupIds(bigArrays); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunctionSupplier.java index 9f2395960477d..e192d1b2de7f8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunctionSupplier.java @@ -10,19 +10,25 @@ import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.operator.DriverContext; +import java.util.List; + /** * Builds aggregation implementations, closing over any state required to do so. */ public interface AggregatorFunctionSupplier extends Describable { - AggregatorFunction aggregator(DriverContext driverContext); + List nonGroupingIntermediateStateDesc(); + + List groupingIntermediateStateDesc(); + + AggregatorFunction aggregator(DriverContext driverContext, List channels); - GroupingAggregatorFunction groupingAggregator(DriverContext driverContext); + GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels); - default Aggregator.Factory aggregatorFactory(AggregatorMode mode) { + default Aggregator.Factory aggregatorFactory(AggregatorMode mode, List channels) { return new Aggregator.Factory() { @Override public Aggregator apply(DriverContext driverContext) { - return new Aggregator(aggregator(driverContext), mode); + return new Aggregator(aggregator(driverContext, channels), mode); } @Override @@ -32,11 +38,11 @@ public String describe() { }; } - default GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode) { + default GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode, List channels) { return new GroupingAggregator.Factory() { @Override public GroupingAggregator apply(DriverContext driverContext) { - return new GroupingAggregator(groupingAggregator(driverContext), mode); + return new GroupingAggregator(groupingAggregator(driverContext, channels), mode); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/BytesRefArrayState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/BytesRefArrayState.java index eb0a992c8610f..18b92c5447076 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/BytesRefArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/BytesRefArrayState.java @@ -138,7 +138,8 @@ boolean hasValue(int groupId) { * stores a flag to know if optimizations can be made. *
</p>
*/ - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { this.groupIdTrackingEnabled = true; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java index c6416f6d075db..a9d21babfbd9c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java @@ -19,15 +19,25 @@ import java.util.List; public class CountAggregatorFunction implements AggregatorFunction { - public static AggregatorFunctionSupplier supplier(List channels) { + public static AggregatorFunctionSupplier supplier() { return new AggregatorFunctionSupplier() { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return CountAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return CountGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { return CountAggregatorFunction.create(channels); } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels) { return CountGroupingAggregatorFunction.create(driverContext, channels); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java index ed63a283b3568..eab897fe24fc2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import java.util.List; + /** * A {@link AggregatorFunctionSupplier} that wraps another, filtering which positions * are supplied to the aggregator. 
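[Editor's note] CountAggregatorFunction.supplier() above is now a no-argument factory, and FilteredAggregatorFunctionSupplier (whose hunks follow) stays a pure decorator: the descriptors and the channel-taking methods all forward to `next`. A hedged usage sketch combining the two; `filterFactory` is a hypothetical ExpressionEvaluator.Factory and the channel list is illustrative:

    import java.util.List;
    import org.elasticsearch.compute.aggregation.AggregatorFunction;
    import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier;
    import org.elasticsearch.compute.aggregation.CountAggregatorFunction;
    import org.elasticsearch.compute.aggregation.FilteredAggregatorFunctionSupplier;
    import org.elasticsearch.compute.operator.DriverContext;
    import org.elasticsearch.compute.operator.EvalOperator;

    class FilteredCountExample {
        // Sketch: count only the positions the filter passes. Channels arrive
        // when the concrete aggregator is built, not when the supplier is.
        static AggregatorFunction filteredCount(
            DriverContext driverContext,
            EvalOperator.ExpressionEvaluator.Factory filterFactory // hypothetical filter
        ) {
            AggregatorFunctionSupplier count = CountAggregatorFunction.supplier();
            AggregatorFunctionSupplier filtered = new FilteredAggregatorFunctionSupplier(count, filterFactory);
            return filtered.aggregator(driverContext, List.of(0));
        }
    }

The default aggregatorFactory(mode, channels) and groupingAggregatorFactory(mode, channels) helpers shown earlier thread the same channel list through to these calls.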
@@ -20,8 +22,18 @@ public record FilteredAggregatorFunctionSupplier(AggregatorFunctionSupplier next AggregatorFunctionSupplier { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { - AggregatorFunction next = this.next.aggregator(driverContext); + public List nonGroupingIntermediateStateDesc() { + return next.nonGroupingIntermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return next.groupingIntermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { + AggregatorFunction next = this.next.aggregator(driverContext, channels); EvalOperator.ExpressionEvaluator filter = null; try { filter = this.filter.get(driverContext); @@ -35,8 +47,8 @@ public AggregatorFunction aggregator(DriverContext driverContext) { } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - GroupingAggregatorFunction next = this.next.groupingAggregator(driverContext); + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels) { + GroupingAggregatorFunction next = this.next.groupingAggregator(driverContext, channels); EvalOperator.ExpressionEvaluator filter = null; try { filter = this.filter.get(driverContext); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorState.java index 7c644342598dc..0e65164665808 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorState.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorState.java @@ -17,4 +17,5 @@ public interface GroupingAggregatorState extends Releasable { /** Extracts an intermediate view of the contents of this state. 
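[Editor's note] The GroupingAggregatorState change just below declares enableGroupIdTracking on the interface itself, which is why every state class in the rest of this diff swaps its package-private variant for a public @Override. A minimal conforming state, assuming the interface shape shown in this diff (toIntermediate plus the newly hoisted method, with close() inherited from Releasable):

    package org.elasticsearch.compute.aggregation;

    import org.elasticsearch.compute.data.Block;
    import org.elasticsearch.compute.data.IntVector;
    import org.elasticsearch.compute.operator.DriverContext;

    // Sketch: the smallest state satisfying the widened interface.
    final class NoopGroupingState implements GroupingAggregatorState {
        @Override
        public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
            // A real state materializes its per-group data into blocks[offset..].
        }

        @Override
        public void enableGroupIdTracking(SeenGroupIds seenGroupIds) {
            // A no-op is legal: several states in this diff track seen groups
            // by other means (counts, or nulls in the values block).
        }

        @Override
        public void close() {}
    }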
*/ void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext); + void enableGroupIdTracking(SeenGroupIds seenGroupIds); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java index 3d8d04d7dc7e3..64a970c2acc07 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java @@ -138,7 +138,8 @@ static class GroupingState implements GroupingAggregatorState { this.hll = new HyperLogLogPlusPlus(HyperLogLogPlusPlus.precisionFromThreshold(precision), bigArrays, 1); } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // Nothing to do } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregator.java index 144214f93571e..049642c350917 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -71,7 +70,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(selected, driverContext); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BytesRefArrayState internalState; private GroupingState(BigArrays bigArrays, CircuitBreaker breaker) { @@ -90,7 +89,8 @@ public void combine(int groupId, GroupingState otherState, int otherGroupId) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { internalState.toIntermediate(blocks, offset, selected, driverContext); } @@ -98,7 +98,8 @@ Block toBlock(IntVector selected, DriverContext driverContext) { return internalState.toValuesBlock(selected, driverContext); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { internalState.enableGroupIdTracking(seen); } @@ -108,7 +109,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BreakingBytesRefBuilder internalState; private boolean seen; @@ -128,7 +129,8 @@ public void add(BytesRef value) { } } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = driverContext.blockFactory().newConstantBytesRefBlockWith(internalState.bytesRefView(), 1); blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxIpAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxIpAggregator.java index 1ddce7674ae7b..43b4a4a2fe0a1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxIpAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxIpAggregator.java @@ -15,7 +15,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @Aggregator({ @IntermediateState(name = "max", type = "BYTES_REF"), @IntermediateState(name = "seen", type = "BOOLEAN") }) @@ -67,7 +66,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(selected, driverContext); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BytesRef scratch = new BytesRef(); private final IpArrayState internalState; @@ -87,7 +86,8 @@ public void combine(int groupId, GroupingState otherState, int otherGroupId) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { internalState.toIntermediate(blocks, offset, selected, driverContext); } @@ -95,7 +95,8 @@ Block toBlock(IntVector selected, DriverContext driverContext) { return internalState.toValuesBlock(selected, driverContext); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { internalState.enableGroupIdTracking(seen); } @@ -105,7 +106,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BytesRef internalState; private boolean seen; @@ -121,7 +122,8 @@ public void add(BytesRef value) { } } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = driverContext.blockFactory().newConstantBytesRefBlockWith(internalState, 1); blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregator.java index 830900702a371..677b38a9af3a7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -71,7 +70,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(selected, driverContext); } - public static class GroupingState implements Releasable { + public 
static class GroupingState implements GroupingAggregatorState { private final BytesRefArrayState internalState; private GroupingState(BigArrays bigArrays, CircuitBreaker breaker) { @@ -90,7 +89,8 @@ public void combine(int groupId, GroupingState otherState, int otherGroupId) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { internalState.toIntermediate(blocks, offset, selected, driverContext); } @@ -98,7 +98,8 @@ Block toBlock(IntVector selected, DriverContext driverContext) { return internalState.toValuesBlock(selected, driverContext); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { internalState.enableGroupIdTracking(seen); } @@ -108,7 +109,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BreakingBytesRefBuilder internalState; private boolean seen; @@ -128,7 +129,8 @@ public void add(BytesRef value) { } } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = driverContext.blockFactory().newConstantBytesRefBlockWith(internalState.bytesRefView(), 1); blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinIpAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinIpAggregator.java index 8313756851c1f..c4ee93db89cf8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinIpAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinIpAggregator.java @@ -15,7 +15,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @Aggregator({ @IntermediateState(name = "max", type = "BYTES_REF"), @IntermediateState(name = "seen", type = "BOOLEAN") }) @@ -67,7 +66,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(selected, driverContext); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BytesRef scratch = new BytesRef(); private final IpArrayState internalState; @@ -87,7 +86,8 @@ public void combine(int groupId, GroupingState otherState, int otherGroupId) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { internalState.toIntermediate(blocks, offset, selected, driverContext); } @@ -95,7 +95,8 @@ Block toBlock(IntVector selected, DriverContext driverContext) { return internalState.toValuesBlock(selected, driverContext); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { internalState.enableGroupIdTracking(seen); } @@ -105,7 +106,7 @@ public void close() { } } - public static 
class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BytesRef internalState; private boolean seen; @@ -121,7 +122,8 @@ public void add(BytesRef value) { } } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = driverContext.blockFactory().newConstantBytesRefBlockWith(internalState, 1); blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java index 329e798dcb3f0..d5ea72ed23e5e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java @@ -146,7 +146,8 @@ void add(int groupId, TDigestState other) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // We always enable. } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java index bff8903fd3bec..5b48498d83294 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java @@ -204,7 +204,8 @@ public void close() { Releasables.close(states); } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java index 252436ad9634f..e19d3107172e3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -84,11 +83,12 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private boolean seenFalse; private boolean seenTrue; - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -113,14 +113,15 @@ Block toBlock(BlockFactory blockFactory) { public void close() {} } - public static class GroupingState implements Releasable { + public static class 
GroupingState implements GroupingAggregatorState { private final BitArray values; private GroupingState(BigArrays bigArrays) { values = new BitArray(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -155,7 +156,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we don't need to track which values have been seen because we don't do anything special for groups without values } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st index 2581d3ebbf80b..a0b4ed8bd6337 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st @@ -338,7 +338,8 @@ public class Rate$Type$Aggregator { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st index 18d573eea4a4c..761b70791e946 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st @@ -28,7 +28,6 @@ import org.elasticsearch.compute.data.$Type$Block; $endif$ import org.elasticsearch.compute.data.sort.$Name$BucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -99,7 +98,7 @@ $endif$ return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final $Name$BucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -120,7 +119,8 @@ $endif$ sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -128,7 +128,8 @@ $endif$ return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -138,7 +139,7 @@ $endif$ } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, 
int limit, boolean ascending) { @@ -153,7 +154,8 @@ $endif$ internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st index 1cef234b2238f..3006af595be1f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st @@ -35,7 +35,6 @@ $if(long)$ import org.elasticsearch.compute.data.LongBlock; $endif$ import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; $if(BytesRef)$ import org.elasticsearch.core.Releasables; @@ -155,7 +154,7 @@ $endif$ return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { $if(BytesRef)$ private final BytesRefHash values; @@ -171,7 +170,8 @@ $else$ $endif$ } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -228,7 +228,7 @@ $endif$ * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. */ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { $if(long||double)$ private final LongLongHash values; @@ -263,7 +263,8 @@ $elseif(int||float)$ $endif$ } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -324,7 +325,8 @@ $endif$ } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java index 47d927fda91b5..c3b07d069cf11 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java @@ -260,7 +260,8 @@ boolean hasValue(int index) { } /** Needed for generated code that does null tracking, which we do not need because we use count */ - final void enableGroupIdTracking(SeenGroupIds ignore) {} + @Override + public final void enableGroupIdTracking(SeenGroupIds ignore) {} private void ensureCapacity(int groupId) { if (groupId >= xValues.size()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java index 6dca94b9bc79a..63d79a9198622 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java @@ -8,10 +8,12 @@ package org.elasticsearch.compute.data.sort; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -29,6 +31,11 @@ /** * Aggregates the top N variable length {@link BytesRef} values per bucket. * See {@link BucketedSort} for more information. + *
<p>
+ * This is substantially different from {@link IpBucketedSort} because + * this has to handle variable length byte strings. To do that it allocates + * a heap of {@link BreakingBytesRefBuilder}s. + *
</p>
*/ public class BytesRefBucketedSort implements Releasable { private final BucketedSortCommon common; @@ -123,7 +130,7 @@ public void collect(BytesRef value, int bucket) { // Gathering mode long requiredSize = common.endIndex(rootIndex); if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); common.assertValidNextOffset(next); @@ -271,13 +278,23 @@ private void swap(long lhs, long rhs) { /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long requiredSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = common.bigArrays.grow(values, requiredSize); + assert oldMax % common.bucketSize == 0; + + long newSize = BigArrays.overSize( + ((long) bucket + 1) * common.bucketSize, + PageCacheRecycler.OBJECT_PAGE_SIZE, + RamUsageEstimator.NUM_BYTES_OBJECT_REF + ); + // Round up to the next full bucket. + newSize = (newSize + common.bucketSize - 1) / common.bucketSize; + values = common.bigArrays.resize(values, newSize * common.bucketSize); // Set the next gather offsets for all newly allocated buckets. - fillGatherOffsets(oldMax - (oldMax % common.bucketSize)); + fillGatherOffsets(oldMax); } /** @@ -296,6 +313,7 @@ private void fillGatherOffsets(long startingAt) { bytes.grow(Integer.BYTES); bytes.setLength(Integer.BYTES); ByteUtils.writeIntLE(nextOffset, bytes.bytes(), 0); + checkInvariant(Math.toIntExact(bucketRoot / common.bucketSize)); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java index 4eb31ea30db22..4392d3994886c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.ByteUtils; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -26,6 +27,11 @@ /** * Aggregates the top N IP values per bucket. * See {@link BucketedSort} for more information. + *
<p>
+ * This is substantially different from {@link BytesRefBucketedSort} because + * this takes advantage of IPs having a fixed length and allocates a dense + * storage for them. + *
</p>
*/ public class IpBucketedSort implements Releasable { private static final int IP_LENGTH = 16; // Bytes. It's ipv6. @@ -101,7 +107,7 @@ public void collect(BytesRef value, int bucket) { // Gathering mode long requiredSize = common.endIndex(rootIndex) * IP_LENGTH; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); common.assertValidNextOffset(next); @@ -268,17 +274,23 @@ private void swap(long lhs, long rhs) { * Allocate storage for more buckets and store the "next gather offset" * for those new buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size() / IP_LENGTH; - values = common.bigArrays.grow(values, minSize); + assert oldMax % common.bucketSize == 0; + + int bucketBytes = common.bucketSize * IP_LENGTH; + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketBytes, PageCacheRecycler.BYTE_PAGE_SIZE, 1); + // Round up to the next full bucket. + newSize = (newSize + bucketBytes - 1) / bucketBytes; + values = common.bigArrays.resize(values, newSize * bucketBytes); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % common.bucketSize)); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = common.bucketSize - 1; for (long bucketRoot = startingAt; bucketRoot < values.size() / IP_LENGTH; bucketRoot += common.bucketSize) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st index 6587743e34b6f..095d48021e9c1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st @@ -10,6 +10,7 @@ package org.elasticsearch.compute.data.sort; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.$Type$Array; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -101,7 +102,7 @@ public class $Type$BucketedSort implements Releasable { // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize @@ -261,19 +262,25 @@ $endif$ /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = bigArrays.grow(values, minSize); + assert oldMax % bucketSize == 0; + + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.$TYPE$_PAGE_SIZE, $BYTES$); + // Round up to the next full bucket. 
+ newSize = (newSize + bucketSize - 1) / bucketSize; + values = bigArrays.resize(values, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % getBucketSize())); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 78572f55cd5eb..c0d220fda5d4e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -52,6 +52,13 @@ public class Driver implements Releasable, Describable { private final String sessionId; + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + private final String taskDescription; + /** * The wall clock time when this driver was created in milliseconds since epoch. * Compared to {@link #startNanos} this is less accurate and is measured by a @@ -96,6 +103,10 @@ public class Driver implements Releasable, Describable { /** * Creates a new driver with a chain of operators. * @param sessionId session Id + * @param taskDescription Description of the task this driver is running. This + * description should be short and meaningful as a grouping + * identifier. We use the phase of the query right now: + * "data", "node_reduce", "final". * @param driverContext the driver context * @param source source operator * @param intermediateOperators the chain of operators to execute @@ -105,6 +116,7 @@ public class Driver implements Releasable, Describable { */ public Driver( String sessionId, + String taskDescription, long startTime, long startNanos, DriverContext driverContext, @@ -116,6 +128,7 @@ public Driver( Releasable releasable ) { this.sessionId = sessionId; + this.taskDescription = taskDescription; this.startTime = startTime; this.startNanos = startNanos; this.driverContext = driverContext; @@ -129,6 +142,7 @@ public Driver( this.status = new AtomicReference<>( new DriverStatus( sessionId, + taskDescription, startTime, System.currentTimeMillis(), 0, @@ -150,6 +164,7 @@ public Driver( * @param releasable a {@link Releasable} to invoked once the chain of operators has run to completion */ public Driver( + String taskDescription, DriverContext driverContext, SourceOperator source, List intermediateOperators, @@ -158,6 +173,7 @@ public Driver( ) { this( "unset", + taskDescription, System.currentTimeMillis(), System.nanoTime(), driverContext, @@ -485,6 +501,7 @@ public DriverProfile profile() { throw new IllegalStateException("can only get profile from finished driver"); } return new DriverProfile( + status.taskDescription(), status.started(), status.lastUpdated(), finishNanos - startNanos, @@ -531,6 +548,7 @@ private void updateStatus(long extraCpuNanos, int extraIterations, DriverStatus. 
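The grow() rewrites above (BytesRefBucketedSort, IpBucketedSort, and the X-BucketedSort template) share one idea: instead of calling BigArrays#grow with an arbitrary minimum size, they over-allocate with BigArrays.overSize and then round up so the backing array always holds a whole number of buckets. A minimal standalone sketch of that arithmetic, with illustrative class and method names that are not part of the patch:

    import org.elasticsearch.common.util.BigArrays;

    final class WholeBucketGrowth {
        // Mirrors the arithmetic in the grow() bodies above; names are illustrative.
        static long capacityFor(int bucket, int bucketSize, int pageSize, int bytesPerSlot) {
            // Over-allocate for amortized growth, exactly as the patch does.
            long overSized = BigArrays.overSize(((long) bucket + 1) * bucketSize, pageSize, bytesPerSlot);
            // Round up so the capacity is a whole multiple of bucketSize and
            // no bucket straddles the end of the array.
            long buckets = (overSized + bucketSize - 1) / bucketSize;
            return buckets * bucketSize;
        }
    }

Because the array now grows in whole-bucket steps, oldMax % bucketSize == 0 holds on entry (hence the new assert), so fillGatherOffsets can start exactly at oldMax instead of rounding it down first.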
return new DriverStatus( sessionId, + taskDescription, startTime, now, prev.cpuNanos() + extraCpuNanos, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index 59ecdde230413..7aeee7a3757f5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -27,6 +27,13 @@ * Profile results from a single {@link Driver}. */ public class DriverProfile implements Writeable, ChunkedToXContentObject { + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + private final String taskDescription; + /** * Millis since epoch when the driver started. */ @@ -62,6 +69,7 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { private final DriverSleeps sleeps; public DriverProfile( + String taskDescription, long startMillis, long stopMillis, long tookNanos, @@ -70,6 +78,7 @@ public DriverProfile( List operators, DriverSleeps sleeps ) { + this.taskDescription = taskDescription; this.startMillis = startMillis; this.stopMillis = stopMillis; this.tookNanos = tookNanos; @@ -80,6 +89,7 @@ public DriverProfile( } public DriverProfile(StreamInput in) throws IOException { + this.taskDescription = in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90) ? in.readString() : ""; if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.startMillis = in.readVLong(); this.stopMillis = in.readVLong(); @@ -102,6 +112,9 @@ public DriverProfile(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { + out.writeString(taskDescription); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVLong(startMillis); out.writeVLong(stopMillis); @@ -115,6 +128,13 @@ public void writeTo(StreamOutput out) throws IOException { sleeps.writeTo(out); } + /** + * Description of the task this driver is running. + */ + public String taskDescription() { + return taskDescription; + } + /** * Millis since epoch when the driver started. 
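DriverProfile above (and DriverStatus below) gate the new taskDescription field on TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90: the writer emits it only when the receiver is new enough, and the reader falls back to an empty string when the sender was too old. A reduced sketch of this wire-compatibility pattern, using a placeholder version constant rather than the real one:

    import org.elasticsearch.TransportVersion;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;
    import org.elasticsearch.common.io.stream.Writeable;

    import java.io.IOException;

    // Sketch only: GATE stands in for TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90.
    final class GatedExample implements Writeable {
        static final TransportVersion GATE = TransportVersion.current(); // placeholder
        final String taskDescription;

        GatedExample(StreamInput in) throws IOException {
            // Older senders never wrote the field; default it instead of failing.
            this.taskDescription = in.getTransportVersion().onOrAfter(GATE) ? in.readString() : "";
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // Only emit the field when the peer knows how to parse it.
            if (out.getTransportVersion().onOrAfter(GATE)) {
                out.writeString(taskDescription);
            }
        }
    }

Order matters here: the reader must consume fields in exactly the order the writer emitted them, which is why the same check sits in the same position on both the read and write paths.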
*/ @@ -169,6 +189,7 @@ public DriverSleeps sleeps() { @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat(ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> { + b.field("task_description", taskDescription); b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); b.field("took_nanos", tookNanos); @@ -197,7 +218,8 @@ public boolean equals(Object o) { return false; } DriverProfile that = (DriverProfile) o; - return startMillis == that.startMillis + return taskDescription.equals(that.taskDescription) + && startMillis == that.startMillis && stopMillis == that.stopMillis && tookNanos == that.tookNanos && cpuNanos == that.cpuNanos @@ -208,7 +230,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); + return Objects.hash(taskDescription, startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java index 05fe38007a929..9d82f73f3105f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java @@ -7,7 +7,9 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import java.util.List; import java.util.concurrent.Executor; @@ -21,7 +23,7 @@ */ final class DriverScheduler { private final AtomicReference delayedTask = new AtomicReference<>(); - private final AtomicReference scheduledTask = new AtomicReference<>(); + private final AtomicReference scheduledTask = new AtomicReference<>(); private final AtomicBoolean completing = new AtomicBoolean(); void addOrRunDelayedTask(Runnable task) { @@ -35,22 +37,32 @@ void addOrRunDelayedTask(Runnable task) { } } - void scheduleOrRunTask(Executor executor, Runnable task) { - final Runnable existing = scheduledTask.getAndSet(task); + void scheduleOrRunTask(Executor executor, AbstractRunnable task) { + final AbstractRunnable existing = scheduledTask.getAndSet(task); assert existing == null : existing; final Executor executorToUse = completing.get() ? EsExecutors.DIRECT_EXECUTOR_SERVICE : executor; - executorToUse.execute(() -> { - final Runnable next = scheduledTask.getAndSet(null); - if (next != null) { - assert next == task; - next.run(); + executorToUse.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + assert e instanceof EsRejectedExecutionException : new AssertionError(e); + if (scheduledTask.getAndUpdate(t -> t == task ? 
null : t) == task) { + task.onFailure(e); + } + } + + @Override + protected void doRun() { + AbstractRunnable toRun = scheduledTask.getAndSet(null); + if (toRun == task) { + task.run(); + } } }); } void runPendingTasks() { completing.set(true); - for (var taskHolder : List.of(delayedTask, scheduledTask)) { + for (var taskHolder : List.of(scheduledTask, delayedTask)) { final Runnable task = taskHolder.getAndSet(null); if (task != null) { task.run(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java index 42e3908231206..2148cb159893e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java @@ -42,6 +42,11 @@ public class DriverStatus implements Task.Status { */ private final String sessionId; + /** + * Description of the task this driver is running. + */ + private final String taskDescription; + /** * Milliseconds since epoch when this driver started. */ @@ -83,6 +88,7 @@ public class DriverStatus implements Task.Status { DriverStatus( String sessionId, + String taskDescription, long started, long lastUpdated, long cpuTime, @@ -93,6 +99,7 @@ public class DriverStatus implements Task.Status { DriverSleeps sleeps ) { this.sessionId = sessionId; + this.taskDescription = taskDescription; this.started = started; this.lastUpdated = lastUpdated; this.cpuNanos = cpuTime; @@ -105,6 +112,7 @@ public class DriverStatus implements Task.Status { public DriverStatus(StreamInput in) throws IOException { this.sessionId = in.readString(); + this.taskDescription = in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90) ? in.readString() : ""; this.started = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readLong() : 0; this.lastUpdated = in.readLong(); this.cpuNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0; @@ -122,6 +130,9 @@ public DriverStatus(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(sessionId); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { + out.writeString(taskDescription); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { out.writeLong(started); } @@ -150,6 +161,15 @@ public String sessionId() { return sessionId; } + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + public String taskDescription() { + return taskDescription; + } + /** * When this {@link Driver} was started. 
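The DriverScheduler change above hands the AbstractRunnable to the executor through an AtomicReference and lets both the success path and the rejection path claim it atomically, so exactly one of task.run() or task.onFailure() fires even when the executor rejects the wrapper. A reduced, self-contained sketch of that claim-before-run idiom (class and method names are illustrative):

    import java.util.concurrent.atomic.AtomicReference;

    // Whoever swaps the task out of the slot owns it; the loser sees a
    // mismatch and does nothing. This is what makes the callbacks one-shot.
    final class OneShotSlot {
        private final AtomicReference<Runnable> slot = new AtomicReference<>();

        void submit(Runnable task) {
            slot.set(task);
        }

        // Normal path: claim unconditionally, run only if it was our task.
        void runIfPending(Runnable task) {
            if (slot.getAndSet(null) == task) {
                task.run();
            }
        }

        // Rejection path: claim only while our task is still the one pending.
        boolean failIfPending(Runnable task) {
            return slot.getAndUpdate(t -> t == task ? null : t) == task;
        }
    }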
*/ @@ -211,7 +231,8 @@ public List activeOperators() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("sessionId", sessionId); + builder.field("session_id", sessionId); + builder.field("task_description", taskDescription); builder.field("started", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(started)); builder.field("last_updated", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(lastUpdated)); builder.field("cpu_nanos", cpuNanos); @@ -240,6 +261,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DriverStatus that = (DriverStatus) o; return sessionId.equals(that.sessionId) + && taskDescription.equals(that.taskDescription) && started == that.started && lastUpdated == that.lastUpdated && cpuNanos == that.cpuNanos @@ -252,7 +274,18 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(sessionId, started, lastUpdated, cpuNanos, iterations, status, completedOperators, activeOperators, sleeps); + return Objects.hash( + sessionId, + taskDescription, + started, + lastUpdated, + cpuNanos, + iterations, + status, + completedOperators, + activeOperators, + sleeps + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java index 1e9ea88b2f1d7..3b011d4a682ff 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java @@ -41,22 +41,24 @@ */ public final class TimeSeriesAggregationOperatorFactories { + public record SupplierWithChannels(AggregatorFunctionSupplier supplier, List channels) {} + public record Initial( int tsHashChannel, int timeBucketChannel, List groupings, - List rates, - List nonRates, + List rates, + List nonRates, int maxPageSize ) implements Operator.OperatorFactory { @Override public Operator get(DriverContext driverContext) { List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); - for (AggregatorFunctionSupplier f : rates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + for (SupplierWithChannels f : rates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.INITIAL, f.channels)); } - for (AggregatorFunctionSupplier f : nonRates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + for (SupplierWithChannels f : nonRates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.INITIAL, f.channels)); } aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); return new HashAggregationOperator( @@ -76,18 +78,18 @@ public record Intermediate( int tsHashChannel, int timeBucketChannel, List groupings, - List rates, - List nonRates, + List rates, + List nonRates, int maxPageSize ) implements Operator.OperatorFactory { @Override public Operator get(DriverContext driverContext) { List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); - for (AggregatorFunctionSupplier f : rates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + for (SupplierWithChannels f : rates) { + 
aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.FINAL, f.channels)); } - for (AggregatorFunctionSupplier f : nonRates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE)); + for (SupplierWithChannels f : nonRates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE, f.channels)); } aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); List hashGroups = List.of( @@ -109,18 +111,18 @@ public String describe() { public record Final( List groupings, - List outerRates, - List nonRates, + List outerRates, + List nonRates, int maxPageSize ) implements Operator.OperatorFactory { @Override public Operator get(DriverContext driverContext) { List aggregators = new ArrayList<>(outerRates.size() + nonRates.size()); - for (AggregatorFunctionSupplier f : outerRates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.SINGLE)); + for (SupplierWithChannels f : outerRates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.SINGLE, f.channels)); } - for (AggregatorFunctionSupplier f : nonRates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + for (SupplierWithChannels f : nonRates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.FINAL, f.channels)); } return new HashAggregationOperator( aggregators, @@ -139,17 +141,17 @@ static List valuesAggregatorForGroupings(List aggregators = new ArrayList<>(); for (BlockHash.GroupSpec g : groupings) { if (g.channel() != timeBucketChannel) { - final List channels = List.of(g.channel()); // TODO: perhaps introduce a specialized aggregator for this? var aggregatorSupplier = (switch (g.elementType()) { - case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(channels); - case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(channels); - case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(channels); - case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(channels); - case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(channels); + case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(); + case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(); + case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(); + case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(); + case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(); case FLOAT, NULL, DOC, COMPOSITE, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); }); - aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE)); + final List channels = List.of(g.channel()); + aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE, channels)); } } return aggregators; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeBuffer.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeBuffer.java index ce400ddbdd6f9..23c98a1df193d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeBuffer.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeBuffer.java @@ -47,7 +47,17 @@ void addPage(Page page) { notifyNotEmpty(); } if (noMoreInputs) { - discardPages(); + // O(N) but acceptable because it only occurs with the stop API, and the queue size should be very small. + if (queue.removeIf(p -> p == page)) { + page.releaseBlocks(); + final int size = queueSize.decrementAndGet(); + if (size == maxSize - 1) { + notifyNotFull(); + } + if (size == 0) { + completionFuture.onResponse(null); + } + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index ac02273a48ee4..dd36a6f455e8b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -366,7 +367,13 @@ private void doFetchPageAsync(boolean allSourcesFinished, ActionListener 0) { // This doesn't fully protect ESQL from OOM, but reduces the likelihood. - blockFactory.breaker().addEstimateBytesAndMaybeBreak(reservedBytes, "fetch page"); + try { + blockFactory.breaker().addEstimateBytesAndMaybeBreak(reservedBytes, "fetch page"); + } catch (Exception e) { + assert e instanceof CircuitBreakingException : new AssertionError(e); + listener.onFailure(e); + return; + } listener = ActionListener.runAfter(listener, () -> blockFactory.breaker().addWithoutBreaking(-reservedBytes)); } transportService.sendChildRequest( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index db9a62da5d9ea..68f684cdf9dcd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -7,19 +7,16 @@ package org.elasticsearch.compute.operator.exchange; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.FailureCollector; import org.elasticsearch.compute.operator.IsBlockedResult; import org.elasticsearch.core.Releasable; +import org.elasticsearch.tasks.TaskCancelledException; -import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; @@ -38,10 +35,9 @@ public 
final class ExchangeSourceHandler { private final PendingInstances outstandingSinks; private final PendingInstances outstandingSources; - // Collect failures that occur while fetching pages from the remote sink with `failFast=true`. - // The exchange source will stop fetching and abort as soon as any failure is added to this failure collector. - // The final failure collected will be notified to callers via the {@code completionListener}. - private final FailureCollector failure = new FailureCollector(); + // Track if this exchange source should abort. There is no need to track the actual failure since the actual failure + // should be notified via #addRemoteSink(RemoteSink, boolean, Runnable, int, ActionListener). + private volatile boolean aborted = false; private final AtomicInteger nextSinkId = new AtomicInteger(); private final Map remoteSinks = ConcurrentCollections.newConcurrentMap(); @@ -52,35 +48,18 @@ public final class ExchangeSourceHandler { * @param maxBufferSize the maximum size of the exchange buffer. A larger buffer reduces ``pauses`` but uses more memory, * which could otherwise be allocated for other purposes. * @param fetchExecutor the executor used to fetch pages. - * @param completionListener a listener that will be notified when the exchange source handler fails or completes */ - public ExchangeSourceHandler(int maxBufferSize, Executor fetchExecutor, ActionListener completionListener) { + public ExchangeSourceHandler(int maxBufferSize, Executor fetchExecutor) { this.buffer = new ExchangeBuffer(maxBufferSize); this.fetchExecutor = fetchExecutor; this.outstandingSinks = new PendingInstances(() -> buffer.finish(false)); - final PendingInstances closingSinks = new PendingInstances(() -> {}); - closingSinks.trackNewInstance(); - this.outstandingSources = new PendingInstances(() -> finishEarly(true, ActionListener.running(closingSinks::finishInstance))); - buffer.addCompletionListener(ActionListener.running(() -> { - final ActionListener listener = ActionListener.assertAtLeastOnce(completionListener); - try (RefCountingRunnable refs = new RefCountingRunnable(() -> { - final Exception e = failure.getFailure(); - if (e != null) { - listener.onFailure(e); - } else { - listener.onResponse(null); - } - })) { - closingSinks.completion.addListener(refs.acquireListener()); - for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { - // Create an outstanding instance and then finish to complete the completionListener - // if we haven't registered any instances of exchange sinks or exchange sources before. - pending.trackNewInstance(); - pending.completion.addListener(refs.acquireListener()); - pending.finishInstance(); - } - } - })); + this.outstandingSources = new PendingInstances(() -> finishEarly(true, ActionListener.noop())); + } + + private void checkFailure() { + if (aborted) { + throw new TaskCancelledException("remote sinks failed"); + } } private class ExchangeSourceImpl implements ExchangeSource { @@ -90,13 +69,6 @@ private class ExchangeSourceImpl implements ExchangeSource { outstandingSources.trackNewInstance(); } - private void checkFailure() { - Exception e = failure.getFailure(); - if (e != null) { - throw ExceptionsHelper.convertToRuntime(e); - } - } - @Override public Page pollPage() { checkFailure(); @@ -201,7 +173,7 @@ void fetchPage() { while (loopControl.isRunning()) { loopControl.exiting(); // finish other sinks if one of them failed or source no longer need pages. 
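ExchangeSourceHandler drops the FailureCollector in favor of a single volatile flag: any failFast remote-sink failure sets it, and sources surface it as a TaskCancelledException on their next poll, while the actual exception still reaches callers through each sink's listener. Trimmed to just the flag, the mechanism looks like this (illustrative wrapper class, not the actual handler):

    import org.elasticsearch.tasks.TaskCancelledException;

    final class AbortFlag {
        // volatile: set from fetch threads, read from driver threads.
        private volatile boolean aborted = false;

        void abort() {
            aborted = true; // flipped on any failFast sink failure
        }

        void checkFailure() {
            // Sources call this before handing out pages; the actual cause
            // is reported separately via each remote sink's listener.
            if (aborted) {
                throw new TaskCancelledException("remote sinks failed");
            }
        }
    }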
- boolean toFinishSinks = buffer.noMoreInputs() || failure.hasFailure(); + boolean toFinishSinks = buffer.noMoreInputs() || aborted; remoteSink.fetchPageAsync(toFinishSinks, ActionListener.wrap(resp -> { Page page = resp.takePage(); if (page != null) { @@ -231,7 +203,7 @@ void fetchPage() { void onSinkFailed(Exception e) { if (failFast) { - failure.unwrapAndCollect(e); + aborted = true; } buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading if (finished == false) { @@ -260,12 +232,12 @@ void onSinkComplete() { * - If {@code false}, failures from this remote sink will not cause the exchange source to abort. * Callers must handle these failures notified via {@code listener}. * - If {@code true}, failures from this remote sink will cause the exchange source to abort. - * Callers can safely ignore failures notified via this listener, as they are collected and - * reported by the exchange source. + * * @param onPageFetched a callback that will be called when a page is fetched from the remote sink * @param instances the number of concurrent ``clients`` that this handler should use to fetch pages. * More clients reduce latency, but add overhead. - * @param listener a listener that will be notified when the sink fails or completes + * @param listener a listener that will be notified when the sink fails or completes. Callers must handle failures notified via + * this listener. * @see ExchangeSinkHandler#fetchPageAsync(boolean, ActionListener) */ public void addRemoteSink( @@ -280,11 +252,17 @@ public void addRemoteSink( final ActionListener sinkListener = ActionListener.assertAtLeastOnce( ActionListener.notifyOnce(ActionListener.runBefore(listener, () -> remoteSinks.remove(sinkId))) ); + final Releasable emptySink = addEmptySink(); fetchExecutor.execute(new AbstractRunnable() { + @Override + public void onAfter() { + emptySink.close(); + } + @Override public void onFailure(Exception e) { if (failFast) { - failure.unwrapAndCollect(e); + aborted = true; } buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading remoteSink.close(ActionListener.running(() -> sinkListener.onFailure(e))); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index a5061b8cf6d32..401fa0d14cd9f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -123,7 +123,7 @@ public void testQueryOperator() throws IOException { } }); DriverContext driverContext = driverContext(); - drivers.add(new Driver(driverContext, factory.get(driverContext), List.of(), docCollector, () -> {})); + drivers.add(new Driver("test", driverContext, factory.get(driverContext), List.of(), docCollector, () -> {})); } OperatorTestCase.runDriver(drivers); Set expectedDocIds = searchForDocIds(reader, query); @@ -197,14 +197,14 @@ public String toString() { ElementType.BYTES_REF, 0, gField, - List.of(CountAggregatorFunction.supplier(List.of(1)).groupingAggregatorFactory(INITIAL)), + List.of(CountAggregatorFunction.supplier().groupingAggregatorFactory(INITIAL, List.of(1))), randomPageSize(), driverContext ) ); operators.add( new HashAggregationOperator( - List.of(CountAggregatorFunction.supplier(List.of(1, 2)).groupingAggregatorFactory(FINAL)), + 
List.of(CountAggregatorFunction.supplier().groupingAggregatorFactory(FINAL, List.of(1, 2))), () -> BlockHash.build( List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF)), driverContext.blockFactory(), @@ -215,6 +215,7 @@ public String toString() { ) ); Driver driver = new Driver( + "test", driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT).get(driverContext), operators, @@ -248,6 +249,7 @@ public void testLimitOperator() { DriverContext driverContext = driverContext(); try ( var driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of((new LimitOperator.Factory(limit)).get(driverContext)), @@ -335,6 +337,7 @@ public void testHashLookup() { var actualPrimeOrds = new ArrayList<>(); try ( var driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index 3eaf85c27e596..abac7a4cd47e3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -47,10 +47,10 @@ import static org.hamcrest.Matchers.hasSize; public abstract class AggregatorFunctionTestCase extends ForkingOperatorTestCase { - protected abstract AggregatorFunctionSupplier aggregatorFunction(List inputChannels); + protected abstract AggregatorFunctionSupplier aggregatorFunction(); protected final int aggregatorIntermediateBlockCount() { - try (var agg = aggregatorFunction(List.of()).aggregator(driverContext())) { + try (var agg = aggregatorFunction().aggregator(driverContext(), List.of())) { return agg.intermediateBlockCount(); } } @@ -69,8 +69,8 @@ private Operator.OperatorFactory simpleWithMode( Function wrap ) { List channels = mode.isInputPartial() ? range(0, aggregatorIntermediateBlockCount()).boxed().toList() : List.of(0); - AggregatorFunctionSupplier supplier = aggregatorFunction(channels); - Aggregator.Factory factory = wrap.apply(supplier).aggregatorFactory(mode); + AggregatorFunctionSupplier supplier = aggregatorFunction(); + Aggregator.Factory factory = wrap.apply(supplier).aggregatorFactory(mode, channels); return new AggregationOperator.AggregationOperatorFactory(List.of(factory), mode); } @@ -111,6 +111,7 @@ public final void testIgnoresNulls() { try ( Driver d = new Driver( + "test", driverContext, new NullInsertingSourceOperator(new CannedSourceOperator(input.iterator()), blockFactory), List.of(simple().get(driverContext)), @@ -223,7 +224,7 @@ public void testSomeFiltered() { // Returns an intermediate state that is equivalent to what the local execution planner will emit // if it determines that certain shards have no relevant data. 
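The long run of test updates below all follow from one API move: input channels are no longer baked into the AggregatorFunctionSupplier at construction time but are passed when the aggregator or factory is created, so a single supplier can be bound to different channel lists. Schematically, based on the call sites in this diff (packages as used elsewhere in the patch):

    import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier;
    import org.elasticsearch.compute.aggregation.AggregatorMode;
    import org.elasticsearch.compute.aggregation.GroupingAggregator;
    import org.elasticsearch.compute.aggregation.SumIntAggregatorFunctionSupplier;

    import java.util.List;

    class ChannelsAtFactoryTime {
        // Before: channels fixed at supplier construction.
        //   var supplier = new SumIntAggregatorFunctionSupplier(List.of(0));
        //   var factory  = supplier.groupingAggregatorFactory(AggregatorMode.INITIAL);

        // After: one channel-free supplier, channels bound per factory call.
        static void example() {
            AggregatorFunctionSupplier supplier = new SumIntAggregatorFunctionSupplier();
            GroupingAggregator.Factory a = supplier.groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(0));
            GroupingAggregator.Factory b = supplier.groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1));
        }
    }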
List nullIntermediateState(BlockFactory blockFactory) { - try (var agg = aggregatorFunction(List.of()).aggregator(driverContext())) { + try (var agg = aggregatorFunction().aggregator(driverContext(), List.of())) { var method = agg.getClass().getMethod("intermediateStateDesc"); @SuppressWarnings("unchecked") List intermediateStateDescs = (List) method.invoke(null); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java index 452fa206a5590..e30082c843b19 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return CountAggregatorFunction.supplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return CountAggregatorFunction.supplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java index 1c0f3c4f64cb5..d91fdce409835 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctBooleanAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java index c39fe32620ff9..f86c296878772 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java @@ -25,8 +25,8 @@ public class CountDistinctBooleanGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctBooleanAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java index 
e8e51c2adf291..d01cbb39bf470 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctBytesRefAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java index dd739d2189ba8..c430249ffceb4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java @@ -27,8 +27,8 @@ public class CountDistinctBytesRefGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctBytesRefAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java index a3e7a6a6d70f5..d1e845fba40ca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java @@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctDoubleAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctDoubleAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java index 7b6f928d57ddb..9b45c8dd6e50b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java @@ -26,8 +26,8 @@ public class CountDistinctDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier 
aggregatorFunction(List inputChannels) { - return new CountDistinctDoubleAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctDoubleAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java index bbd61455a3053..7c0d7c1e3d2fd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java @@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctFloatAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctFloatAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunctionTests.java index 6b4a8f2900aaa..d536affb34a0d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunctionTests.java @@ -26,8 +26,8 @@ public class CountDistinctFloatGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctFloatAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctFloatAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 5bd9ecc931cf2..8657caafef409 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -34,8 +34,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctIntAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctIntAggregatorFunctionSupplier(40000); } @Override @@ -65,6 +65,7 @@ public void testRejectsDouble() { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), 
List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java index cfd3357a14c03..88f594b5a6d6d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java @@ -26,8 +26,8 @@ public class CountDistinctIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctIntAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctIntAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 70662efae688f..55f522f31b28a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -35,8 +35,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctLongAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctLongAggregatorFunctionSupplier(40000); } @Override @@ -66,6 +66,7 @@ public void testRejectsDouble() { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java index 55be7fe9a8ed3..db08fd0428e7b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java @@ -25,8 +25,8 @@ public class CountDistinctLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctLongAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctLongAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java index 06c267ff2d6ab..06a0666586290 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java @@ -25,8 +25,8 @@ public class CountGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return CountAggregatorFunction.supplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return CountAggregatorFunction.supplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java index 35ecced470e01..a4411d92c6c29 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java @@ -28,10 +28,10 @@ public class FilteredAggregatorFunctionTests extends AggregatorFunctionTestCase private final List unclosed = Collections.synchronizedList(new ArrayList<>()); @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + protected AggregatorFunctionSupplier aggregatorFunction() { return new FilteredAggregatorFunctionSupplier( - new SumIntAggregatorFunctionSupplier(inputChannels), - new FilteredGroupingAggregatorFunctionTests.AnyGreaterThanFactory(unclosed, inputChannels) + new SumIntAggregatorFunctionSupplier(), + new FilteredGroupingAggregatorFunctionTests.AnyGreaterThanFactory(unclosed, List.of(0)) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java index 26971dc927cd1..efe7fccd4f06a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java @@ -34,10 +34,10 @@ public class FilteredGroupingAggregatorFunctionTests extends GroupingAggregatorF private final List unclosed = Collections.synchronizedList(new ArrayList<>()); @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + protected AggregatorFunctionSupplier aggregatorFunction() { return new FilteredAggregatorFunctionSupplier( - new SumIntAggregatorFunctionSupplier(inputChannels), - new AnyGreaterThanFactory(unclosed, inputChannels) + new SumIntAggregatorFunctionSupplier(), + new AnyGreaterThanFactory(unclosed, List.of(1)) ); } @@ -112,11 +112,12 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { */ public void testAddIntermediateRowInput() { DriverContext ctx = driverContext(); - AggregatorFunctionSupplier supplier = aggregatorFunction(channels(AggregatorMode.SINGLE)); + AggregatorFunctionSupplier supplier = aggregatorFunction(); + List channels = channels(AggregatorMode.SINGLE); Block[] results = new 
Block[2]; try ( - GroupingAggregatorFunction main = supplier.groupingAggregator(ctx); - GroupingAggregatorFunction leaf = supplier.groupingAggregator(ctx); + GroupingAggregatorFunction main = supplier.groupingAggregator(ctx, channels); + GroupingAggregatorFunction leaf = supplier.groupingAggregator(ctx, channels); SourceOperator source = simpleInput(ctx.blockFactory(), 10); ) { Page p; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index ff96336dc0bb4..d82a8487b5390 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -59,10 +59,10 @@ * Shared tests for testing grouped aggregations. */ public abstract class GroupingAggregatorFunctionTestCase extends ForkingOperatorTestCase { - protected abstract AggregatorFunctionSupplier aggregatorFunction(List inputChannels); + protected abstract AggregatorFunctionSupplier aggregatorFunction(); protected final int aggregatorIntermediateBlockCount() { - try (var agg = aggregatorFunction(List.of()).groupingAggregator(driverContext())) { + try (var agg = aggregatorFunction().groupingAggregator(driverContext(), List.of())) { return agg.intermediateBlockCount(); } } @@ -98,14 +98,14 @@ private Operator.OperatorFactory simpleWithMode( ) { int emitChunkSize = between(100, 200); - AggregatorFunctionSupplier supplier = wrap.apply(aggregatorFunction(channels(mode))); + AggregatorFunctionSupplier supplier = wrap.apply(aggregatorFunction()); if (randomBoolean()) { supplier = chunkGroups(emitChunkSize, supplier); } return new HashAggregationOperator.HashAggregationOperatorFactory( List.of(new BlockHash.GroupSpec(0, ElementType.LONG)), mode, - List.of(supplier.groupingAggregatorFactory(mode)), + List.of(supplier.groupingAggregatorFactory(mode, channels(mode))), randomPageSize(), null ); @@ -619,14 +619,24 @@ protected static LongStream allLongs(Page page, Long group) { private AggregatorFunctionSupplier chunkGroups(int emitChunkSize, AggregatorFunctionSupplier supplier) { return new AggregatorFunctionSupplier() { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { - return supplier.aggregator(driverContext); + public List nonGroupingIntermediateStateDesc() { + return supplier.nonGroupingIntermediateStateDesc(); } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public List groupingIntermediateStateDesc() { + return supplier.groupingIntermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { + return supplier.aggregator(driverContext, channels); + } + + @Override + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels) { return new GroupingAggregatorFunction() { - GroupingAggregatorFunction delegate = supplier.groupingAggregator(driverContext); + GroupingAggregatorFunction delegate = supplier.groupingAggregator(driverContext, channels); BitArray seenGroupIds = new BitArray(0, nonBreakingBigArrays()); @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionTests.java index 11119aade12ff..a7164740af009 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxBooleanAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxBooleanAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionTests.java index adc891a6a977d..54b82dcbc5008 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunctionTests.java index 75a6a839ea62d..97d3126fa7673 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunctionTests.java @@ -40,8 +40,8 @@ protected DataType acceptedDataType() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java index 9d638fae4e822..fee5950c08257 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxDoubleAggregatorFunctionSupplier(); } @Override 
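
The same mechanical rewrite repeats through the rest of the aggregation test files below: each supplier constructor drops its input-channel list and keeps only genuine configuration (the percentile for the Percentile* suppliers, LIMIT and sort order for the Top* suppliers), while the channels move to the call that actually builds the aggregator. A minimal sketch of the new calling convention, using only signatures visible in these hunks; the channel value and the choice of MaxIntAggregatorFunctionSupplier are illustrative, and ctx stands for a DriverContext obtained as in the test base class:

    // Sketch, not verbatim from this diff: wiring channels under the new API.
    void example(DriverContext ctx) {
        // The supplier is now channel-free; it carries configuration only.
        AggregatorFunctionSupplier supplier = new MaxIntAggregatorFunctionSupplier();
        // Channels are chosen when the aggregator is created instead.
        List<Integer> channels = List.of(0); // read the aggregated value from channel 0
        try (GroupingAggregatorFunction fn = supplier.groupingAggregator(ctx, channels)) {
            // feed pages and read results, as testAddIntermediateRowInput above does
        }
        // The factory path takes channels the same way:
        //     supplier.groupingAggregatorFactory(mode, channels)
    }

Further down, the Sum* test hunks additionally pass a new leading description argument ("test") to the Driver constructor, reflecting a Driver API whose first parameter is now a task name.
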
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java index 18aec87a9d07b..0e7d716d2c0cd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionTests.java index 5e14a99fd0fa2..a1f13566a069a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java index e4da581a59136..62fe712beb4e8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java index af198e3aec9d5..5507e2c261e97 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java @@ -25,8 +25,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new 
MaxIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java index 372015ebd767c..da59a0f91ccdd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java @@ -23,8 +23,8 @@ public class MaxIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionTests.java index 84488b5115e5d..b39b5fe384961 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxIpAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxIpAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunctionTests.java index 12e34fcf9a50e..2e6210c701367 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunctionTests.java @@ -42,8 +42,8 @@ protected DataType acceptedDataType() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxIpAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxIpAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java index 27a6fb0660461..081ef44f37047 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected 
AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java index 1bf7cd9eea27d..6d6c37fb306a0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java @@ -23,8 +23,8 @@ public class MaxLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java index 1d105430ce1db..db9ab2c998103 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java @@ -29,8 +29,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java index a6ca769036e54..75305708bd933 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java @@ -46,8 +46,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionTests.java index 786603e12f9c8..fb70fa6385d74 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionTests.java @@ -29,8 +29,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunctionTests.java index 14416b3aec1ee..a0b5495d53bdd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunctionTests.java @@ -46,8 +46,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java index fa396d7dcf7a6..fca7ec47b05a5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java @@ -29,8 +29,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java index 8a8b051528195..d5e5f0869988b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java @@ -46,8 +46,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java index 0f570adfc6fd8..f700d4270f4d7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java @@ -29,8 +29,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java index 818150d3234aa..55895ceadd52c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java @@ -46,8 +46,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionTests.java index 74cdca31da34b..186d9edf2a9e0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinBooleanAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinBooleanAggregatorFunctionSupplier(); } @Override diff 
--git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionTests.java index b4383d6b0f56e..678f7259f7843 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunctionTests.java index d4cfca819f3b7..7d099e7606843 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunctionTests.java @@ -40,8 +40,8 @@ protected DataType acceptedDataType() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java index e92b98ebf91d0..7f7095d13aa46 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java index 62cf954a1909e..756d19345aa9c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new 
MinDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionTests.java index 59a09569c65a2..ef98a2dd7b954 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunctionTests.java index be41e058f60da..9044732c1b8cc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java index ffa2189f96b66..e7296a5b08f4d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java @@ -25,8 +25,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java index a7644c8bb26a9..d77b63bbb54c5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java @@ -23,8 +23,8 @@ public class MinIntGroupingAggregatorFunctionTests extends 
GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionTests.java index 17e9812d2e4e8..9072702178316 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinIpAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinIpAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunctionTests.java index f51662ffee352..86d7d0e961a1d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunctionTests.java @@ -42,8 +42,8 @@ protected DataType acceptedDataType() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinIpAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinIpAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java index 2ce7aab455c53..8bb82a149f45e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java index 5591fb57a8f2d..da8a63a429200 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java @@ 
-23,8 +23,8 @@ public class MinLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java index b9ee31fb481f5..aa18c47733ff5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java @@ -32,8 +32,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileDoubleAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileDoubleAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java index d000fba1ee299..8a44fba3bfa18 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileDoubleAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileDoubleAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionTests.java index da69e11734b36..d23436310cff7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionTests.java @@ -32,8 +32,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileFloatAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileFloatAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunctionTests.java index 917f6b6a0b643..c338dc38395f1 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileFloatAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileFloatAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java index 4b8ef49e09d97..278c37c02be35 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java @@ -31,8 +31,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileIntAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileIntAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java index 15cf0c9202527..ef8fa6eab85fe 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileIntAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileIntAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java index 664fe1edc6ad9..b1896025c363e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java @@ -31,8 +31,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileLongAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileLongAggregatorFunctionSupplier(percentile); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java index f09d395c877c6..55065129df0ce 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileLongAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileLongAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index 003dc415c6194..a64ec4e155ad0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -33,8 +33,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumDoubleAggregatorFunctionSupplier(); } @Override @@ -53,6 +53,7 @@ public void testOverflowSucceeds() { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(Double.MAX_VALUE - 1, 2)), List.of(simple().get(driverContext)), @@ -71,6 +72,7 @@ public void testSummationAccuracy() { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator( driverContext.blockFactory(), @@ -100,6 +102,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(values)), List.of(simple().get(driverContext)), @@ -122,6 +125,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple().get(driverContext)), @@ -141,6 +145,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java index f982ee6cd58d6..8e6970ebdd109 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java index 521c1e261cc62..11205907acb2d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java @@ -33,8 +33,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumFloatAggregatorFunctionSupplier(); } @Override @@ -53,6 +53,7 @@ public void testOverflowSucceeds() { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(Float.MAX_VALUE - 1, 2f)), List.of(simple().get(driverContext)), @@ -71,6 +72,7 @@ public void testSummationAccuracy() { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator( driverContext.blockFactory(), @@ -100,6 +102,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(values)), List.of(simple().get(driverContext)), @@ -122,6 +125,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(largeValues)), List.of(simple().get(driverContext)), @@ -141,6 +145,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(largeValues)), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunctionTests.java index 54bd92cbfff21..008b8a18a6b0e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected 
AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index 8c5e4430128b7..6484382d5ff50 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumIntAggregatorFunctionSupplier(); } @Override @@ -52,6 +52,7 @@ public void testRejectsDouble() { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java index 3dfa4e9332a08..d83357940d99f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java @@ -22,8 +22,8 @@ public class SumIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index 00cdbedef54d6..c2b805291f4f6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumLongAggregatorFunctionSupplier(); } @Override @@ -51,6 +51,7 @@ public void testOverflowFails() { DriverContext driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), 
LongStream.of(Long.MAX_VALUE - 1, 2)), List.of(simple().get(driverContext)), @@ -68,6 +69,7 @@ public void testRejectsDouble() { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java index f41a5cbef94fb..f289686f8e844 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java @@ -22,8 +22,8 @@ public class SumLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionTests.java index 662b963d32473..cfb91acb1cf20 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionTests.java @@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopBooleanAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopBooleanAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionTests.java index 732229c98f9c7..e1f38692877a2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionTests.java @@ -9,8 +9,6 @@ import org.apache.lucene.util.BytesRef; -import java.util.List; - public class TopBytesRefAggregatorFunctionTests extends AbstractTopBytesRefAggregatorFunctionTests { @Override protected BytesRef randomValue() { @@ -18,8 +16,8 @@ protected BytesRef randomValue() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopBytesRefAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopBytesRefAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunctionTests.java
index 4932e1abef46d..0c27a5f386811 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunctionTests.java
@@ -10,8 +10,6 @@
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 
-import java.util.List;
-
 public class TopBytesRefGroupingAggregatorFunctionTests extends AbstractTopBytesRefGroupingAggregatorFunctionTests {
     @Override
     protected BytesRef randomValue() {
@@ -19,8 +17,8 @@ protected BytesRef randomValue() {
     }
 
     @Override
-    protected final AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new TopBytesRefAggregatorFunctionSupplier(inputChannels, LIMIT, true);
+    protected final AggregatorFunctionSupplier aggregatorFunction() {
+        return new TopBytesRefAggregatorFunctionSupplier(LIMIT, true);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java
index 817df4ba47130..04c2000d2e2d7 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java
@@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new TopDoubleAggregatorFunctionSupplier(inputChannels, LIMIT, true);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new TopDoubleAggregatorFunctionSupplier(LIMIT, true);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java
index c565a13fb73d4..8dd2d5d82f815 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java
@@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new TopFloatAggregatorFunctionSupplier(inputChannels, LIMIT, true);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new TopFloatAggregatorFunctionSupplier(LIMIT, true);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java
index a0ac1a685413e..b52439dc98263 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java
@@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new TopIntAggregatorFunctionSupplier(inputChannels, LIMIT, true);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new TopIntAggregatorFunctionSupplier(LIMIT, true);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionTests.java
index 840e4cf9af961..c9ec81cb5981d 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionTests.java
@@ -10,8 +10,6 @@
 import org.apache.lucene.document.InetAddressPoint;
 import org.apache.lucene.util.BytesRef;
 
-import java.util.List;
-
 public class TopIpAggregatorFunctionTests extends AbstractTopBytesRefAggregatorFunctionTests {
     @Override
     protected BytesRef randomValue() {
@@ -19,8 +17,8 @@ protected BytesRef randomValue() {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new TopIpAggregatorFunctionSupplier(inputChannels, LIMIT, true);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new TopIpAggregatorFunctionSupplier(LIMIT, true);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunctionTests.java
index 02bf6b667192b..3c0577f7a1a99 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunctionTests.java
@@ -11,8 +11,6 @@
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 
-import java.util.List;
-
 public class TopIpGroupingAggregatorFunctionTests extends AbstractTopBytesRefGroupingAggregatorFunctionTests {
     @Override
     protected BytesRef randomValue() {
@@ -20,8 +18,8 @@ protected BytesRef randomValue() {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new TopIpAggregatorFunctionSupplier(inputChannels, LIMIT, true);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new TopIpAggregatorFunctionSupplier(LIMIT, true);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java
index cb42be67844dc..4ff27b092a183 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java
@@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new TopLongAggregatorFunctionSupplier(inputChannels, LIMIT, true);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new TopLongAggregatorFunctionSupplier(LIMIT, true);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java
index c0a91fe22b87b..7c5e3f3861161 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java
@@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesBytesRefAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java
index fc9bc90828df3..a1367bee53340 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java
@@ -28,8 +28,8 @@ public class ValuesBytesRefGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase {
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesBytesRefAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java
index e5bb8e3138e25..497813e058e67 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java
@@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesDoubleAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesDoubleAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java
index a4b1a3c028e43..b89612a52c682 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java
@@ -27,8 +27,8 @@ public class ValuesDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase {
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesDoubleAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesDoubleAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionTests.java
index 67068ce10c997..0cf536d3e0eca 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionTests.java
@@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesFloatAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesFloatAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunctionTests.java
index e25d7567a1933..7dc550abd4e49 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunctionTests.java
@@ -27,8 +27,8 @@ public class ValuesFloatGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase {
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesFloatAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesFloatAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java
index c60707046a0b1..9e4d56a962b2a 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java
@@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesIntAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesIntAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java
index 154b076d6a246..7368ed285ddb6 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java
@@ -27,8 +27,8 @@ public class ValuesIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase {
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesIntAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesIntAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java
index 933058d8d8e13..32609edd2b8fe 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java
@@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
     }
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesLongAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesLongAggregatorFunctionSupplier();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java
index 8259d84d955ef..3180ac53f6efc 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java
@@ -27,8 +27,8 @@ public class ValuesLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase {
 
     @Override
-    protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
-        return new ValuesLongAggregatorFunctionSupplier(inputChannels);
+    protected AggregatorFunctionSupplier aggregatorFunction() {
+        return new ValuesLongAggregatorFunctionSupplier();
     }
 
     @Override
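Every hunk above is the same mechanical migration: AggregatorFunctionSupplier implementations no longer take their input channels in the constructor, so the abstract aggregatorFunction() hook loses its List<Integer> parameter (which is why several files can drop the java.util.List import). The channels move to the call that builds a concrete aggregator or factory. A minimal sketch of the before/after shapes, using only names visible in these hunks (the local variable names are illustrative):

    // Before: channels were baked into the supplier at construction time.
    //   new ValuesLongAggregatorFunctionSupplier(List.of(0)).aggregator(driverContext);

    // After: one channel-free supplier, channels chosen per call site.
    AggregatorFunctionSupplier supplier = new ValuesLongAggregatorFunctionSupplier();
    AggregatorFunction perDriver = supplier.aggregator(driverContext, List.of(0));
    var groupingFactory = supplier.groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1));

The effect is that suppliers become reusable, channel-agnostic descriptions of an aggregation, and the channel mapping is decided wherever the aggregator is instantiated.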
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java
index 914d29bb8ba25..42e9fc8deafc1 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java
@@ -416,6 +416,7 @@ public void testCategorize_withDriver() {
         List<Page> intermediateOutput = new ArrayList<>();
 
         Driver driver = new Driver(
+            "test",
             driverContext,
             new LocalSourceOperator(input1),
             List.of(
@@ -423,8 +424,8 @@ public void testCategorize_withDriver() {
                 List.of(makeGroupSpec()),
                 AggregatorMode.INITIAL,
                 List.of(
-                    new SumLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL),
-                    new MaxLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL)
+                    new SumLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1)),
+                    new MaxLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1))
                 ),
                 16 * 1024,
                 analysisRegistry
@@ -436,6 +437,7 @@ public void testCategorize_withDriver() {
         runDriver(driver);
 
         driver = new Driver(
+            "test",
             driverContext,
             new LocalSourceOperator(input2),
             List.of(
@@ -443,8 +445,8 @@ public void testCategorize_withDriver() {
                 List.of(makeGroupSpec()),
                 AggregatorMode.INITIAL,
                 List.of(
-                    new SumLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL),
-                    new MaxLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL)
+                    new SumLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1)),
+                    new MaxLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1))
                 ),
                 16 * 1024,
                 analysisRegistry
@@ -458,6 +460,7 @@ public void testCategorize_withDriver() {
         List<Page> finalOutput = new ArrayList<>();
 
         driver = new Driver(
+            "test",
             driverContext,
             new CannedSourceOperator(intermediateOutput.iterator()),
             List.of(
@@ -465,8 +468,8 @@ public void testCategorize_withDriver() {
                 List.of(makeGroupSpec()),
                 AggregatorMode.FINAL,
                 List.of(
-                    new SumLongAggregatorFunctionSupplier(List.of(1, 2)).groupingAggregatorFactory(AggregatorMode.FINAL),
-                    new MaxLongAggregatorFunctionSupplier(List.of(3, 4)).groupingAggregatorFactory(AggregatorMode.FINAL)
+                    new SumLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.FINAL, List.of(1, 2)),
+                    new MaxLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.FINAL, List.of(3, 4))
                 ),
                 16 * 1024,
                 analysisRegistry
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java
index 5f868f51f06e2..9c89317e4c359 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java
@@ -137,13 +137,14 @@ public void testCategorize_withDriver() {
         List<Page> intermediateOutput = new ArrayList<>();
 
         Driver driver = new Driver(
+            "test",
             driverContext,
             new LocalSourceOperator(input1),
             List.of(
                 new HashAggregationOperator.HashAggregationOperatorFactory(
                     groupSpecs,
                     AggregatorMode.INITIAL,
-                    List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(0)).groupingAggregatorFactory(AggregatorMode.INITIAL)),
+                    List.of(new ValuesBytesRefAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(0))),
                     16 * 1024,
                     analysisRegistry
                 ).get(driverContext)
@@ -154,13 +155,14 @@ public void testCategorize_withDriver() {
         runDriver(driver);
 
         driver = new Driver(
+            "test",
             driverContext,
             new LocalSourceOperator(input2),
             List.of(
                 new HashAggregationOperator.HashAggregationOperatorFactory(
                     groupSpecs,
                     AggregatorMode.INITIAL,
-                    List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(0)).groupingAggregatorFactory(AggregatorMode.INITIAL)),
+                    List.of(new ValuesBytesRefAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(0))),
                     16 * 1024,
                     analysisRegistry
                 ).get(driverContext)
@@ -173,13 +175,14 @@ public void testCategorize_withDriver() {
         List<Page> finalOutput = new ArrayList<>();
 
         driver = new Driver(
+            "test",
             driverContext,
             new CannedSourceOperator(intermediateOutput.iterator()),
             List.of(
                 new HashAggregationOperator.HashAggregationOperatorFactory(
                     groupSpecs,
                     AggregatorMode.FINAL,
-                    List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(2)).groupingAggregatorFactory(AggregatorMode.FINAL)),
+                    List.of(new ValuesBytesRefAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.FINAL, List.of(2))),
                     16 * 1024,
                     analysisRegistry
                 ).get(driverContext)
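The other change threaded through the rest of this diff starts here: every Driver constructor call gains a leading task-description string ("test" throughout these tests). Judging from the DriverStatusTests and DriverProfileTests hunks further down, that string is carried into the driver's status and profile output as task_description. The short constructor form, as these call sites now read (the argument-role comments are inferred from the call sites; the constructor declaration itself is not part of this diff):

    Driver driver = new Driver(
        "test",                                        // new: task description
        driverContext,
        factory.get(driverContext),                    // source operator
        List.of(),                                     // intermediate operators
        new TestResultPageSinkOperator(results::add),  // sink operator
        () -> {}                                       // close/release hook
    );

The long form used by DriverTests and ExchangeServiceTests below keeps the session id ("unset", "test-session:1") as the first argument and takes the description second.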
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java
index 78ed096c10b3f..2358643dc089e 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java
@@ -409,6 +409,42 @@ public final void testMergeThisBigger() {
         }
     }
 
+    public final void testMergePastEnd() {
+        int buckets = 10000;
+        int bucketSize = between(1, 1000);
+        int target = between(0, buckets);
+        List<V> values = randomList(buckets, buckets, this::randomValue);
+        Collections.sort(values);
+        try (T sort = build(SortOrder.ASC, bucketSize)) {
+            // Add a single value to each bucket of the main sort.
+            for (int b = 0; b < buckets; b++) {
+                collect(sort, values.get(b), b);
+            }
+
+            try (T other = build(SortOrder.ASC, bucketSize)) {
+                // Add every value except the target's own to the target bucket of the secondary sort.
+                for (int i = 0; i < values.size(); i++) {
+                    if (i != target) {
+                        collect(other, values.get(i), target);
+                    }
+                }
+
+                // Merge all buckets pairwise. Most of the secondary ones are empty.
+                for (int b = 0; b < buckets; b++) {
+                    merge(sort, b, other, b);
+                }
+            }
+
+            for (int b = 0; b < buckets; b++) {
+                if (b == target) {
+                    assertBlock(sort, b, values.subList(0, bucketSize));
+                } else {
+                    assertBlock(sort, b, List.of(values.get(b)));
+                }
+            }
+        }
+    }
+
     protected void assertBlock(T sort, int groupId, List<V> values) {
         var blockFactory = TestBlockFactory.getNonBreakingInstance();
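testMergePastEnd pins down BucketedSort merge semantics when the secondary sort's buckets are almost all empty and one bucket holds far more than bucketSize values. A toy model of the property the assertions encode, in plain collections (BucketedSort itself is an off-heap structure; this mirrors only the observable ascending-order behavior, and mergeAsc is a name invented for this sketch):

    // After merge(sort, b, other, b), bucket b holds the smallest bucketSize
    // values drawn from both sides; merging an empty secondary bucket must
    // leave the main bucket untouched.
    static List<Long> mergeAsc(List<Long> mine, List<Long> theirs, int bucketSize) {
        List<Long> all = new ArrayList<>(mine);
        all.addAll(theirs); // usually empty in this test
        Collections.sort(all);
        return all.subList(0, Math.min(bucketSize, all.size()));
    }

So after the pairwise merge every non-target bucket still holds exactly its single original value, while the target bucket holds the first bucketSize values of the whole sorted list.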
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
index 1f5b5bf9b9337..61c7582c74245 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
@@ -151,7 +151,7 @@ private void testCount(Supplier<DriverContext> contexts, int size, int limit) {
         int taskConcurrency = between(1, 8);
         for (int i = 0; i < taskConcurrency; i++) {
             DriverContext ctx = contexts.get();
-            drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {}));
+            drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {}));
         }
         OperatorTestCase.runDriver(drivers);
         assertThat(results.size(), lessThanOrEqualTo(taskConcurrency));
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxDoubleOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxDoubleOperatorTests.java
index 4cb113457b23f..49d7e42e49df7 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxDoubleOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxDoubleOperatorTests.java
@@ -70,7 +70,7 @@ public void assertPage(Page page) {
 
             @Override
             public AggregatorFunction newAggregatorFunction(DriverContext context) {
-                return new MaxDoubleAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context);
+                return new MaxDoubleAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
             }
 
             @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxFloatOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxFloatOperatorTests.java
index 4a009a2d84c66..7651cf5c0b876 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxFloatOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxFloatOperatorTests.java
@@ -70,7 +70,7 @@ public void assertPage(Page page) {
 
             @Override
             public AggregatorFunction newAggregatorFunction(DriverContext context) {
-                return new MaxFloatAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context);
+                return new MaxFloatAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
             }
 
             @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxIntOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxIntOperatorTests.java
index a6118481ca43d..f26274be6f810 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxIntOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxIntOperatorTests.java
@@ -69,7 +69,7 @@ public void assertPage(Page page) {
 
             @Override
             public AggregatorFunction newAggregatorFunction(DriverContext context) {
-                return new MaxIntAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context);
+                return new MaxIntAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
             }
 
             @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java
index 894c8e862123e..ae096b5e3630c 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java
@@ -69,7 +69,7 @@ public void assertPage(Page page) {
 
             @Override
             public AggregatorFunction newAggregatorFunction(DriverContext context) {
-                return new MaxLongAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context);
+                return new MaxLongAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
             }
 
             @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java
index b65da5aba7588..f6fba20a28889 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java
@@ -166,7 +166,7 @@ private void testMax(Supplier<DriverContext> contexts, int size, int limit) {
         int taskConcurrency = between(1, 8);
         for (int i = 0; i < taskConcurrency; i++) {
             DriverContext ctx = contexts.get();
-            drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {}));
+            drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {}));
         }
         OperatorTestCase.runDriver(drivers);
         assertThat(results.size(), lessThanOrEqualTo(taskConcurrency));
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinDoubleOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinDoubleOperatorTests.java
index 5fef2d4897030..ce212392ef888 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinDoubleOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinDoubleOperatorTests.java
@@ -70,7 +70,7 @@ public void assertPage(Page page) {
 
             @Override
             public AggregatorFunction newAggregatorFunction(DriverContext context) {
-                return new MinDoubleAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context);
+                return new MinDoubleAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
             }
 
             @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinFloatOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinFloatOperatorTests.java
index 41c8751c08a96..9500879f450b3 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinFloatOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinFloatOperatorTests.java
@@ -71,7 +71,7 @@ public void assertPage(Page page) {
 
             @Override
             public AggregatorFunction newAggregatorFunction(DriverContext context) {
-                return new MinFloatAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context);
+                return new MinFloatAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
             }
 
             @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinIntegerOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinIntegerOperatorTests.java
index 5d2c867f4f660..e800619ef747d 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinIntegerOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinIntegerOperatorTests.java
@@ -69,7 +69,7 @@ public void assertPage(Page page) {
 
             @Override
             public AggregatorFunction newAggregatorFunction(DriverContext context) {
-                return new MinIntAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context);
+                return new MinIntAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
             }
 
             @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinLongOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinLongOperatorTests.java
index 15c34f5853ae2..a20d90f1fcb5b 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinLongOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinLongOperatorTests.java
@@ -69,7 +69,7 @@ public void assertPage(Page page) {
 
             @Override
             public AggregatorFunction newAggregatorFunction(DriverContext context) {
-                return new MinLongAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context);
+                return new MinLongAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
             }
 
             @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java
index f57bbd8c5ddb5..3033efa50f373 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java
@@ -166,7 +166,7 @@ private void testMin(Supplier<DriverContext> contexts, int size, int limit) {
         int taskConcurrency = between(1, 8);
         for (int i = 0; i < taskConcurrency; i++) {
             DriverContext ctx = contexts.get();
-            drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {}));
+            drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {}));
         }
         OperatorTestCase.runDriver(drivers);
         assertThat(results.size(), lessThanOrEqualTo(taskConcurrency));
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java
index 54b33732aa425..4a628d596f142 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java
@@ -210,6 +210,7 @@ private List runQuery(Set values, Query query, boolean shuffleDocs
         operators.add(new EvalOperator(blockFactory, luceneQueryEvaluator));
         List<Page> results = new ArrayList<>();
         Driver driver = new Driver(
+            "test",
             driverContext,
             luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT, scoring).get(driverContext),
             operators,
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
index b7114bb4e9b54..574f9b25ff146 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
@@ -160,7 +160,7 @@ private void testSimple(DriverContext ctx, int size, int limit) {
 
         List<Page> results = new ArrayList<>();
         OperatorTestCase.runDriver(
-            new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {})
+            new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {})
         );
         OperatorTestCase.assertDriverContext(ctx);
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java
index 20af40bcc6840..3af21ba37d088 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java
@@ -127,7 +127,7 @@ protected void testSimple(DriverContext ctx, int size, int limit) {
 
         List<Page> results = new ArrayList<>();
         OperatorTestCase.runDriver(
-            new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {})
+            new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {})
        );
         OperatorTestCase.assertDriverContext(ctx);
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java
index a6d652d499d84..92eaa78eedcd7 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java
@@ -187,7 +187,7 @@ protected void testSimple(DriverContext ctx, int size, int limit) {
 
         List<Page> results = new ArrayList<>();
         OperatorTestCase.runDriver(
-            new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {})
+            new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {})
         );
         OperatorTestCase.assertDriverContext(ctx);
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java
index feba401d445e7..934fbcc0b897e 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java
@@ -174,6 +174,7 @@ record Doc(int host, long timestamp, long metric) {}
         var metricField = new NumberFieldMapper.NumberFieldType("metric", NumberFieldMapper.NumberType.LONG);
         OperatorTestCase.runDriver(
             new Driver(
+                "test",
                 driverContext,
                 timeSeriesFactory.get(driverContext),
                 List.of(ValuesSourceReaderOperatorTests.factory(reader, metricField, ElementType.LONG).get(driverContext)),
@@ -248,6 +249,7 @@ public void testMatchNone() throws Exception {
         List<Page> results = new ArrayList<>();
         OperatorTestCase.runDriver(
             new Driver(
+                "test",
                 driverContext,
                 timeSeriesFactory.get(driverContext),
                 List.of(),
@@ -306,6 +308,7 @@ List<Page> runDriver(int limit, int maxPageSize, boolean forceMerge, int numTime
         var hostnameField = new KeywordFieldMapper.KeywordFieldType("hostname");
         OperatorTestCase.runDriver(
             new Driver(
+                "test",
                 ctx,
                 timeSeriesFactory.get(ctx),
                 List.of(
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java
index 910541607d83f..32164c7954dda 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java
@@ -1299,6 +1299,7 @@ public void testWithNulls() throws IOException {
         var vsShardContext = new ValuesSourceReaderOperator.ShardContext(reader(indexKey), () -> SourceLoader.FROM_STORED_SOURCE);
         try (
             Driver driver = new Driver(
+                "test",
                 driverContext,
                 luceneFactory.get(driverContext),
                 List.of(
@@ -1376,6 +1377,7 @@ public void testNullsShared() {
         int[] pages = new int[] { 0 };
         try (
             Driver d = new Driver(
+                "test",
                 driverContext,
                 simpleInput(driverContext, 10),
                 List.of(
@@ -1497,6 +1499,7 @@ protected final List<Page> drive(List<Operator> operators, Iterator<Page> input,
         boolean success = false;
         try (
             Driver d = new Driver(
+                "test",
                 driverContext,
                 new CannedSourceOperator(input),
                 operators,
@@ -1524,6 +1527,7 @@ public static void runDriver(List<Driver> drivers) {
         for (int i = 0; i < dummyDrivers; i++) {
             drivers.add(
                 new Driver(
+                    "test",
                     "dummy-session",
                     0,
                     0,
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java
index 2661ff665831f..07a66a473f3b1 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java
@@ -1307,6 +1307,7 @@ public void testWithNulls() throws IOException {
         );
         try (
             Driver driver = new Driver(
+                "test",
                 driverContext,
                 luceneFactory.get(driverContext),
                 List.of(
@@ -1409,6 +1410,7 @@ public void testNullsShared() {
         int[] pages = new int[] { 0 };
         try (
             Driver d = new Driver(
+                "test",
                 driverContext,
                 simpleInput(driverContext.blockFactory(), 10),
                 List.of(
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java
index 5e16fce2af00b..6fbef583cbefa 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java
@@ -48,8 +48,8 @@ protected Operator.OperatorFactory simpleWithMode(AggregatorMode mode) {
 
         return new AggregationOperator.AggregationOperatorFactory(
             List.of(
-                new SumLongAggregatorFunctionSupplier(sumChannels).aggregatorFactory(mode),
-                new MaxLongAggregatorFunctionSupplier(maxChannels).aggregatorFactory(mode)
+                new SumLongAggregatorFunctionSupplier().aggregatorFactory(mode, sumChannels),
+                new MaxLongAggregatorFunctionSupplier().aggregatorFactory(mode, maxChannels)
             ),
             mode
         );
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java
index f017fed16cc96..e94864b9530bc 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java
@@ -165,7 +165,14 @@ public void doClose() {
             }
         });
         PlainActionFuture<Void> future = new PlainActionFuture<>();
-        Driver driver = new Driver(driverContext, sourceOperator, intermediateOperators, outputOperator, () -> assertFalse(it.hasNext()));
+        Driver driver = new Driver(
+            "test",
+            driverContext,
+            sourceOperator,
+            intermediateOperators,
+            outputOperator,
+            () -> assertFalse(it.hasNext())
+        );
         Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 10000), future);
         future.actionGet();
         Releasables.close(localBreaker);
@@ -295,7 +302,7 @@ protected void doClose() {
         };
         SinkOperator outputOperator = new PageConsumerOperator(Page::releaseBlocks);
         PlainActionFuture<Void> future = new PlainActionFuture<>();
-        Driver driver = new Driver(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker);
+        Driver driver = new Driver("test", driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker);
         Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 1000), future);
         assertBusy(() -> assertTrue(future.isDone()));
         if (failed.get()) {
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java
index 27083ea0fcd13..a39aa10af5f31 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java
@@ -27,6 +27,7 @@ public class DriverProfileTests extends AbstractWireSerializingTestCase<DriverProfile> {
     public void testToXContent() {
         DriverProfile status = new DriverProfile(
+            "test",
             123413220000L,
             123413243214L,
             10012,
@@ -44,6 +45,7 @@ public void testToXContent() {
         );
         assertThat(Strings.toString(status, true, true), equalTo("""
             {
+              "task_description" : "test",
               "start" : "1973-11-29T09:27:00.000Z",
               "start_millis" : 123413220000,
               "stop" : "1973-11-29T09:27:23.214Z",
@@ -101,6 +103,7 @@ protected Writeable.Reader<DriverProfile> instanceReader() {
     @Override
     protected DriverProfile createTestInstance() {
         return new DriverProfile(
+            DriverStatusTests.randomTaskDescription(),
             randomNonNegativeLong(),
             randomNonNegativeLong(),
             randomNonNegativeLong(),
@@ -113,6 +116,7 @@ protected DriverProfile createTestInstance() {
     @Override
     protected DriverProfile mutateInstance(DriverProfile instance) throws IOException {
+        String taskDescription = instance.taskDescription();
         long startMillis = instance.startMillis();
         long stopMillis = instance.stopMillis();
         long tookNanos = instance.tookNanos();
@@ -120,17 +124,18 @@ protected DriverProfile mutateInstance(DriverProfile instance) throws IOException {
         long iterations = instance.iterations();
         var operators = instance.operators();
         var sleeps = instance.sleeps();
-        switch (between(0, 6)) {
-            case 0 -> startMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong);
-            case 1 -> stopMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong);
-            case 2 -> tookNanos = randomValueOtherThan(tookNanos, ESTestCase::randomNonNegativeLong);
-            case 3 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong);
-            case 4 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong);
-            case 5 -> operators = randomValueOtherThan(operators, DriverStatusTests::randomOperatorStatuses);
-            case 6 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps);
+        switch (between(0, 7)) {
+            case 0 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomTaskDescription);
+            case 1 -> startMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong);
+            case 2 -> stopMillis = randomValueOtherThan(stopMillis, ESTestCase::randomNonNegativeLong);
+            case 3 -> tookNanos = randomValueOtherThan(tookNanos, ESTestCase::randomNonNegativeLong);
+            case 4 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong);
+            case 5 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong);
+            case 6 -> operators = randomValueOtherThan(operators, DriverStatusTests::randomOperatorStatuses);
+            case 7 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps);
             default -> throw new UnsupportedOperationException();
         }
-        return new DriverProfile(startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps);
+        return new DriverProfile(taskDescription, startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverSchedulerTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverSchedulerTests.java
new file mode 100644
index 0000000000000..ec6bf38e557a9
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverSchedulerTests.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.operator;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.AbstractRunnable;
+import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.FixedExecutorBuilder;
+import org.elasticsearch.threadpool.TestThreadPool;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executor;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class DriverSchedulerTests extends ESTestCase {
+
+    public void testClearPendingTaskOnRejection() {
+        DriverScheduler scheduler = new DriverScheduler();
+        AtomicInteger counter = new AtomicInteger();
+        var threadPool = new TestThreadPool(
+            "test",
+            new FixedExecutorBuilder(Settings.EMPTY, "test", 1, 2, "test", EsExecutors.TaskTrackingConfig.DEFAULT)
+        );
+        CountDownLatch latch = new CountDownLatch(1);
+        Executor executor = threadPool.executor("test");
+        try {
+            for (int i = 0; i < 10; i++) {
+                try {
+                    executor.execute(() -> safeAwait(latch));
+                } catch (EsRejectedExecutionException e) {
+                    break;
+                }
+            }
+            scheduler.scheduleOrRunTask(executor, new AbstractRunnable() {
+                @Override
+                public void onFailure(Exception e) {
+                    counter.incrementAndGet();
+                }
+
+                @Override
+                protected void doRun() {
+                    counter.incrementAndGet();
+                }
+            });
+            scheduler.runPendingTasks();
+            assertThat(counter.get(), equalTo(1));
+        } finally {
+            latch.countDown();
+            terminate(threadPool);
+        }
+    }
+}
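DriverSchedulerTests is new in this change and documents a narrow contract: a task handed to scheduleOrRunTask must go through exactly one of its doRun()/onFailure() paths, even when the underlying executor rejects it because its queue is full. Compressed, the sequence the test exercises is the following; the method names are exactly those used above, while saturatedExecutor and task are hypothetical stand-ins for the test's setup:

    DriverScheduler scheduler = new DriverScheduler();
    // The queue is full, so this submission is rejected by the executor.
    scheduler.scheduleOrRunTask(saturatedExecutor, task);
    // Draining pending work must not execute the same task a second time;
    // the test's counter asserts exactly one invocation overall.
    scheduler.runPendingTasks();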
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java
index b46d9f3f4add7..83deb57a3ba7c 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java
@@ -32,6 +32,7 @@ public class DriverStatusTests extends AbstractWireSerializingTestCase<DriverStatus> {
@@ ... @@ protected Writeable.Reader<DriverStatus> instanceReader() {
 
     protected DriverStatus createTestInstance() {
         return new DriverStatus(
             randomSessionId(),
+            randomTaskDescription(),
             randomNonNegativeLong(),
             randomNonNegativeLong(),
             randomNonNegativeLong(),
@@ -129,6 +132,10 @@ private String randomSessionId() {
         return RandomStrings.randomAsciiLettersOfLengthBetween(random(), 1, 15);
     }
 
+    public static String randomTaskDescription() {
+        return RandomStrings.randomAsciiLettersOfLength(random(), 5);
+    }
+
     private DriverStatus.Status randomStatus() {
         return randomFrom(DriverStatus.Status.values());
     }
@@ -150,6 +157,7 @@ private static DriverStatus.OperatorStatus randomOperatorStatus() {
 
     @Override
     protected DriverStatus mutateInstance(DriverStatus instance) throws IOException {
         var sessionId = instance.sessionId();
+        var taskDescription = instance.taskDescription();
         long started = instance.started();
         long lastUpdated = instance.lastUpdated();
         long cpuNanos = instance.cpuNanos();
@@ -158,19 +166,31 @@ protected DriverStatus mutateInstance(DriverStatus instance) throws IOException {
         var completedOperators = instance.completedOperators();
         var activeOperators = instance.activeOperators();
         var sleeps = instance.sleeps();
-        switch (between(0, 8)) {
+        switch (between(0, 9)) {
             case 0 -> sessionId = randomValueOtherThan(sessionId, this::randomSessionId);
-            case 1 -> started = randomValueOtherThan(started, ESTestCase::randomNonNegativeLong);
-            case 2 -> lastUpdated = randomValueOtherThan(lastUpdated, ESTestCase::randomNonNegativeLong);
-            case 3 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong);
-            case 4 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong);
-            case 5 -> status = randomValueOtherThan(status, this::randomStatus);
-            case 6 -> completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses);
-            case 7 -> activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses);
-            case 8 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps);
+            case 1 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomTaskDescription);
+            case 2 -> started = randomValueOtherThan(started, ESTestCase::randomNonNegativeLong);
+            case 3 -> lastUpdated = randomValueOtherThan(lastUpdated, ESTestCase::randomNonNegativeLong);
+            case 4 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong);
+            case 5 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong);
+            case 6 -> status = randomValueOtherThan(status, this::randomStatus);
+            case 7 -> completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses);
+            case 8 -> activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses);
+            case 9 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps);
             default -> throw new UnsupportedOperationException();
         }
-        return new DriverStatus(sessionId, started, lastUpdated, cpuNanos, iterations, status, completedOperators, activeOperators, sleeps);
+        return new DriverStatus(
+            sessionId,
+            taskDescription,
+            started,
+            lastUpdated,
+            cpuNanos,
+            iterations,
+            status,
+            completedOperators,
+            activeOperators,
+            sleeps
+        );
     }
 
     @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java
index e715b94bc55e5..a0b04668b7307 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java
@@ -67,6 +67,7 @@ public void testProfileAndStatusFinishInOneRound() {
 
         Driver driver = new Driver(
             "unset",
+            "test",
             startEpoch,
             startNanos,
             driverContext,
@@ -116,6 +117,7 @@ public void testProfileAndStatusOneIterationAtATime() {
 
         Driver driver = new Driver(
             "unset",
+            "test",
             startEpoch,
             startNanos,
             driverContext,
@@ -166,6 +168,7 @@ public void testProfileAndStatusTimeout() {
 
         Driver driver = new Driver(
             "unset",
+            "test",
             startEpoch,
             startNanos,
             driverContext,
@@ -231,7 +234,7 @@ public void testThreadContext() throws Exception {
         WarningsOperator warning1 = new WarningsOperator(threadPool);
         WarningsOperator warning2 = new WarningsOperator(threadPool);
         CyclicBarrier allPagesProcessed = new CyclicBarrier(2);
-        Driver driver = new Driver(driverContext, new CannedSourceOperator(inPages.iterator()) {
+        Driver driver = new Driver("test", driverContext, new CannedSourceOperator(inPages.iterator()) {
             @Override
             public Page getOutput() {
                 assertRunningWithRegularUser(threadPool);
@@ -315,7 +318,7 @@ public void close() {
             }
         });
 
-        Driver driver = new Driver(driverContext, sourceOperator, List.of(delayOperator), sinkOperator, () -> {});
+        Driver driver = new Driver("test", driverContext, sourceOperator, List.of(delayOperator), sinkOperator, () -> {});
 
         ThreadContext threadContext = threadPool.getThreadContext();
         PlainActionFuture<Void> future = new PlainActionFuture<>();
@@ -331,12 +334,11 @@ public void testResumeOnEarlyFinish() throws Exception {
         DriverContext driverContext = driverContext();
         ThreadPool threadPool = threadPool();
         try {
-            PlainActionFuture<Void> sourceFuture = new PlainActionFuture<>();
-            var sourceHandler = new ExchangeSourceHandler(between(1, 5), threadPool.executor("esql"), sourceFuture);
+            var sourceHandler = new ExchangeSourceHandler(between(1, 5), threadPool.executor("esql"));
             var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), between(1, 5), System::currentTimeMillis);
             var sourceOperator = new ExchangeSourceOperator(sourceHandler.createExchangeSource());
             var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(() -> {}), Function.identity());
-            Driver driver = new Driver(driverContext, sourceOperator, List.of(), sinkOperator, () -> {});
+            Driver driver = new Driver("test", driverContext, sourceOperator, List.of(), sinkOperator, () -> {});
             PlainActionFuture<Void> future = new PlainActionFuture<>();
             Driver.start(threadPool.getThreadContext(), threadPool.executor("esql"), driver, between(1, 1000), future);
             assertBusy(
@@ -348,7 +350,6 @@ public void testResumeOnEarlyFinish() throws Exception {
             sinkHandler.fetchPageAsync(true, ActionListener.noop());
             future.actionGet(5, TimeUnit.SECONDS);
             assertThat(driver.status().status(), equalTo(DriverStatus.Status.DONE));
-            sourceFuture.actionGet(5, TimeUnit.SECONDS);
         } finally {
             terminate(threadPool);
         }
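testResumeOnEarlyFinish also picks up the second API migration in this diff: ExchangeSourceHandler no longer accepts a whole-handler completion listener as a constructor argument, so the per-test sourceFuture disappears. Completion is now observed per remote sink, through the listener passed to addRemoteSink. A sketch of the new wiring as the surrounding tests use it (argument roles are inferred from these call sites; bufferSize, executor and sinkHandler are illustrative):

    var sourceHandler = new ExchangeSourceHandler(bufferSize, executor); // no completion listener
    PlainActionFuture<Void> remoteSinkFuture = new PlainActionFuture<>();
    sourceHandler.addRemoteSink(
        sinkHandler::fetchPageAsync,
        true,             // fail fast on sink errors (randomBoolean() in the tests)
        () -> {},         // per-page callback
        1,                // concurrent fetchers
        remoteSinkFuture  // completes when this remote sink is done
    );
    safeGet(remoteSinkFuture);

When several sinks are attached, the tests aggregate the per-sink listeners with a RefCountingListener so a single future still reports overall completion, as testConcurrentWithHandlers does in ExchangeServiceTests below.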
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java
index 744121a3807c3..f08552913963d 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java
@@ -68,6 +68,7 @@ public final void testInitialFinal() {
         List<Page> results = new ArrayList<>();
         try (
             Driver d = new Driver(
+                "test",
                 driverContext,
                 new CannedSourceOperator(input.iterator()),
                 List.of(simpleWithMode(AggregatorMode.INITIAL).get(driverContext), simpleWithMode(AggregatorMode.FINAL).get(driverContext)),
@@ -89,6 +90,7 @@ public final void testManyInitialFinal() {
         List<Page> results = new ArrayList<>();
         try (
             Driver d = new Driver(
+                "test",
                 driverContext,
                 new CannedSourceOperator(partials.iterator()),
                 List.of(simpleWithMode(AggregatorMode.FINAL).get(driverContext)),
@@ -110,6 +112,7 @@ public final void testInitialIntermediateFinal() {
 
         try (
             Driver d = new Driver(
+                "test",
                 driverContext,
                 new CannedSourceOperator(input.iterator()),
                 List.of(
@@ -142,6 +145,7 @@ public final void testManyInitialManyPartialFinal() {
         List<Page> results = new ArrayList<>();
         try (
             Driver d = new Driver(
+                "test",
                 driverContext,
                 new CannedSourceOperator(intermediates.iterator()),
                 List.of(simpleWithMode(AggregatorMode.FINAL).get(driverContext)),
@@ -212,11 +216,7 @@ List<Driver> createDriversForInput(List<Page> input, List<Page> results, boolean
             randomIntBetween(2, 10),
             threadPool.relativeTimeInMillisSupplier()
         );
-        ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(
-            randomIntBetween(1, 4),
-            threadPool.executor(ESQL_TEST_EXECUTOR),
-            ActionListener.noop()
-        );
+        ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(randomIntBetween(1, 4), threadPool.executor(ESQL_TEST_EXECUTOR));
         sourceExchanger.addRemoteSink(
             sinkExchanger::fetchPageAsync,
             randomBoolean(),
@@ -240,6 +240,7 @@ List<Driver> createDriversForInput(List<Page> input, List<Page> results, boolean
         DriverContext driver1Context = driverContext();
         drivers.add(
             new Driver(
+                "test",
                 driver1Context,
                 new CannedSourceOperator(pages.iterator()),
                 List.of(
@@ -257,6 +258,7 @@ List<Driver> createDriversForInput(List<Page> input, List<Page> results, boolean
         DriverContext driver2Context = driverContext();
         drivers.add(
             new Driver(
+                "test",
                 driver2Context,
                 new ExchangeSourceOperator(sourceExchanger.createExchangeSource()),
                 List.of(
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java
index 953c7d1c313f1..30579f864abcb 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java
@@ -56,8 +56,8 @@ protected Operator.OperatorFactory simpleWithMode(AggregatorMode mode) {
             List.of(new BlockHash.GroupSpec(0, ElementType.LONG)),
             mode,
             List.of(
-                new SumLongAggregatorFunctionSupplier(sumChannels).groupingAggregatorFactory(mode),
-                new MaxLongAggregatorFunctionSupplier(maxChannels).groupingAggregatorFactory(mode)
+                new SumLongAggregatorFunctionSupplier().groupingAggregatorFactory(mode, sumChannels),
+                new MaxLongAggregatorFunctionSupplier().groupingAggregatorFactory(mode, maxChannels)
             ),
             randomPageSize(),
             null
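With channels gone from the suppliers, the time-series factories need to carry a supplier together with the channels it reads. That is what the SupplierWithChannels import in the next file provides; its definition lives in TimeSeriesAggregationOperatorFactories, outside this diff. The call shape changes from channels-inside-the-supplier to an explicit pairing:

    // Old: channels rode inside the supplier.
    List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis))

    // New: a channel-free supplier paired with its input channels.
    List.of(new SupplierWithChannels(new RateLongAggregatorFunctionSupplier(unitInMillis), List.of(4, 2)))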
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java
index afd4695db932f..103a6a35651c7 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java
@@ -41,6 +41,7 @@
 
 import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.createTimeSeriesSourceOperator;
 import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.writeTS;
+import static org.elasticsearch.compute.operator.TimeSeriesAggregationOperatorFactories.SupplierWithChannels;
 import static org.hamcrest.Matchers.equalTo;
 
 public class TimeSeriesAggregationOperatorTests extends ComputeTestCase {
@@ -269,7 +270,7 @@ public void close() {
             1,
             3,
             IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(),
-            List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis)),
+            List.of(new SupplierWithChannels(new RateLongAggregatorFunctionSupplier(unitInMillis), List.of(4, 2))),
             List.of(),
             between(1, 100)
         ).get(ctx);
@@ -279,7 +280,7 @@ public void close() {
             0,
             1,
             IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(),
-            List.of(new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis)),
+            List.of(new SupplierWithChannels(new RateLongAggregatorFunctionSupplier(unitInMillis), List.of(2, 3, 4))),
             List.of(),
             between(1, 100)
         ).get(ctx);
@@ -295,7 +296,7 @@ public void close() {
         }
         Operator finalAgg = new TimeSeriesAggregationOperatorFactories.Final(
             finalGroups,
-            List.of(new SumDoubleAggregatorFunctionSupplier(List.of(2))),
+            List.of(new SupplierWithChannels(new SumDoubleAggregatorFunctionSupplier(), List.of(2))),
             List.of(),
             between(1, 100)
         ).get(ctx);
@@ -303,6 +304,7 @@ public void close() {
         List<Page> results = new ArrayList<>();
         OperatorTestCase.runDriver(
             new Driver(
+                "test",
                 ctx,
                 sourceOperatorFactory.get(ctx),
                 CollectionUtils.concatLists(intermediateOperators, List.of(intialAgg, intermediateAgg, finalAgg)),
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java
index bd5b53fb25c8b..7213e0b27aea0 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java
@@ -66,6 +66,25 @@ public void testDrainPages() throws Exception {
         blockFactory.ensureAllBlocksAreReleased();
     }
 
+    public void testOutstandingPages() throws Exception {
+        ExchangeBuffer buffer = new ExchangeBuffer(randomIntBetween(1000, 10000));
+        var blockFactory = blockFactory();
+        Page p1 = randomPage(blockFactory);
+        Page p2 = randomPage(blockFactory);
+        buffer.addPage(p1);
+        buffer.addPage(p2);
+        buffer.finish(false);
+        buffer.addPage(randomPage(blockFactory));
+        assertThat(buffer.size(), equalTo(2));
+        assertSame(buffer.pollPage(), p1);
+        p1.releaseBlocks();
+        assertSame(buffer.pollPage(), p2);
+        p2.releaseBlocks();
+        assertNull(buffer.pollPage());
+        assertTrue(buffer.isFinished());
+        blockFactory.ensureAllBlocksAreReleased();
+    }
+
     private static MockBlockFactory blockFactory() {
         BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking();
         CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java
index fffeeac4e4cc2..57dfe65ca485f 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java
@@ -7,15 +7,18 @@
 
 package org.elasticsearch.compute.operator.exchange;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.action.support.RefCountingListener;
 import org.elasticsearch.action.support.SubscribableListener;
 import org.elasticsearch.cluster.ClusterModule;
 import org.elasticsearch.cluster.node.VersionInformation;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
@@ -23,6 +26,7 @@ import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.compute.EsqlRefCountingListener;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BlockWritables;
 import org.elasticsearch.compute.data.IntBlock;
@@ -37,6 +41,7 @@ import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskCancellationService;
+import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.test.transport.StubbableTransport;
@@ -69,6 +74,7 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.instanceOf;
 
 public class ExchangeServiceTests extends ESTestCase {
@@ -100,16 +106,16 @@ public void testBasic() throws Exception {
         AtomicInteger pagesAddedToSink = new AtomicInteger();
         ExchangeSink sink1 = sinkExchanger.createExchangeSink(pagesAddedToSink::incrementAndGet);
         ExchangeSink sink2 = sinkExchanger.createExchangeSink(pagesAddedToSink::incrementAndGet);
-        PlainActionFuture<Void> sourceCompletion = new PlainActionFuture<>();
-        ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR), sourceCompletion);
+        ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR));
         ExchangeSource source = sourceExchanger.createExchangeSource();
         AtomicInteger pagesAddedToSource = new AtomicInteger();
+        PlainActionFuture<Void> remoteSinkFuture = new PlainActionFuture<>();
         sourceExchanger.addRemoteSink(
             sinkExchanger::fetchPageAsync,
             randomBoolean(),
             pagesAddedToSource::incrementAndGet,
             1,
-            ActionListener.noop()
+            remoteSinkFuture
         );
         SubscribableListener<Void> waitForReading = source.waitForReading().listener();
         assertFalse(waitForReading.isDone());
@@ -156,13 +162,12 @@ public void testBasic() throws Exception {
         sink2.finish();
         assertTrue(sink2.isFinished());
         assertTrue(source.isFinished());
-        assertFalse(sourceCompletion.isDone());
         source.finish();
-        sourceCompletion.actionGet(10, TimeUnit.SECONDS);
         ESTestCase.terminate(threadPool);
         for (Page page : pages) {
             page.releaseBlocks();
         }
+        safeGet(remoteSinkFuture);
     }
 
     /**
@@ -300,6 +305,7 @@ Set<Integer> runConcurrentTest(
             DriverContext dc = driverContext();
             Driver d = new Driver(
                 "test-session:1",
+                "test",
                 0,
                 0,
                 dc,
@@ -318,6 +324,7 @@ Set<Integer> runConcurrentTest(
             DriverContext dc = driverContext();
             Driver d = new Driver(
                 "test-session:2",
+                "test",
                 0,
                 0,
                 dc,
@@ -343,47 +350,45 @@ protected void start(Driver driver, ActionListener<Void> listener) {
 
     public void testConcurrentWithHandlers() {
         BlockFactory blockFactory = blockFactory();
-        PlainActionFuture<Void> sourceCompletionFuture = new PlainActionFuture<>();
-        var sourceExchanger = new ExchangeSourceHandler(
-            randomExchangeBuffer(),
-            threadPool.executor(ESQL_TEST_EXECUTOR),
-            sourceCompletionFuture
-        );
-        List<ExchangeSinkHandler> sinkHandlers = new ArrayList<>();
-        Supplier<ExchangeSink> exchangeSink = () -> {
-            final ExchangeSinkHandler sinkHandler;
-            if (sinkHandlers.isEmpty() == false && randomBoolean()) {
-                sinkHandler = randomFrom(sinkHandlers);
-            } else {
-                sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier());
-                sourceExchanger.addRemoteSink(
-                    sinkHandler::fetchPageAsync,
-                    randomBoolean(),
-                    () -> {},
-                    randomIntBetween(1, 3),
-                    ActionListener.noop()
-                );
-                sinkHandlers.add(sinkHandler);
-            }
-            return sinkHandler.createExchangeSink(() -> {});
-        };
-        final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000);
-        final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000);
-        Set<Integer> actualSeqNos = runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink);
-        var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet());
-        assertThat(actualSeqNos, hasSize(expectedSeqNos.size()));
-        assertThat(actualSeqNos, equalTo(expectedSeqNos));
-        sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS);
+        var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR));
+        PlainActionFuture<Void> remoteSinksFuture = new PlainActionFuture<>();
+        try (RefCountingListener refs = new RefCountingListener(remoteSinksFuture)) {
+            List<ExchangeSinkHandler> sinkHandlers = new ArrayList<>();
+            Supplier<ExchangeSink> exchangeSink = () -> {
+                final ExchangeSinkHandler sinkHandler;
+                if (sinkHandlers.isEmpty() == false && randomBoolean()) {
+                    sinkHandler = randomFrom(sinkHandlers);
+                } else {
+                    sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier());
+                    sourceExchanger.addRemoteSink(
+                        sinkHandler::fetchPageAsync,
+                        randomBoolean(),
+                        () -> {},
+                        randomIntBetween(1, 3),
+                        refs.acquire()
+                    );
+                    sinkHandlers.add(sinkHandler);
+                }
+                return sinkHandler.createExchangeSink(() -> {});
+            };
+            final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000);
+            final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000);
+            Set<Integer> actualSeqNos = runConcurrentTest(
+                maxInputSeqNo,
+                maxOutputSeqNo,
+                sourceExchanger::createExchangeSource,
+                exchangeSink
+            );
+            var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet());
+            assertThat(actualSeqNos, hasSize(expectedSeqNos.size()));
+            assertThat(actualSeqNos, equalTo(expectedSeqNos));
+        }
+        safeGet(remoteSinksFuture);
     }
 
     public void testExchangeSourceContinueOnFailure() {
         BlockFactory blockFactory = blockFactory();
-        PlainActionFuture<Void> sourceCompletionFuture = new PlainActionFuture<>();
-        var exchangeSourceHandler = new ExchangeSourceHandler(
-            randomExchangeBuffer(),
-            threadPool.executor(ESQL_TEST_EXECUTOR),
-            sourceCompletionFuture
-        );
+        var exchangeSourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR));
         final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000);
         final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000);
         Set<Integer> expectedSeqNos = ConcurrentCollections.newConcurrentSet();
@@ -391,57 +396,65 @@ public void testExchangeSourceContinueOnFailure() {
         AtomicInteger totalSinks = new AtomicInteger();
         AtomicInteger failedSinks = new AtomicInteger();
         AtomicInteger completedSinks = new AtomicInteger();
-        Supplier<ExchangeSink> exchangeSink = () -> {
-            var sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier());
-            int failAfter = randomBoolean() ?
Integer.MAX_VALUE : randomIntBetween(0, 100); - AtomicInteger fetched = new AtomicInteger(); - int instance = randomIntBetween(1, 3); - totalSinks.incrementAndGet(); - AtomicBoolean sinkFailed = new AtomicBoolean(); - exchangeSourceHandler.addRemoteSink((allSourcesFinished, listener) -> { - if (fetched.incrementAndGet() > failAfter) { - sinkHandler.fetchPageAsync(true, listener.delegateFailure((l, r) -> { - failedRequests.incrementAndGet(); - sinkFailed.set(true); - listener.onFailure(new CircuitBreakingException("simulated", CircuitBreaker.Durability.PERMANENT)); - })); - } else { - sinkHandler.fetchPageAsync(allSourcesFinished, listener.delegateFailure((l, r) -> { - Page page = r.takePage(); - if (page != null) { - IntBlock block = page.getBlock(0); - for (int i = 0; i < block.getPositionCount(); i++) { - int v = block.getInt(i); - if (v < maxOutputSeqNo) { - expectedSeqNos.add(v); + PlainActionFuture remoteSinksFuture = new PlainActionFuture<>(); + try (RefCountingListener refs = new RefCountingListener(remoteSinksFuture)) { + Supplier exchangeSink = () -> { + var sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier()); + int failAfter = randomBoolean() ? Integer.MAX_VALUE : randomIntBetween(0, 100); + AtomicInteger fetched = new AtomicInteger(); + int instance = randomIntBetween(1, 3); + totalSinks.incrementAndGet(); + AtomicBoolean sinkFailed = new AtomicBoolean(); + ActionListener oneSinkListener = refs.acquire(); + exchangeSourceHandler.addRemoteSink((allSourcesFinished, listener) -> { + if (fetched.incrementAndGet() > failAfter) { + sinkHandler.fetchPageAsync(true, listener.delegateFailure((l, r) -> { + failedRequests.incrementAndGet(); + sinkFailed.set(true); + listener.onFailure(new CircuitBreakingException("simulated", CircuitBreaker.Durability.PERMANENT)); + })); + } else { + sinkHandler.fetchPageAsync(allSourcesFinished, listener.delegateFailure((l, r) -> { + Page page = r.takePage(); + if (page != null) { + IntBlock block = page.getBlock(0); + for (int i = 0; i < block.getPositionCount(); i++) { + int v = block.getInt(i); + if (v < maxOutputSeqNo) { + expectedSeqNos.add(v); + } } } - } - l.onResponse(new ExchangeResponse(blockFactory, page, r.finished())); - })); - } - }, false, () -> {}, instance, ActionListener.wrap(r -> { - assertFalse(sinkFailed.get()); - completedSinks.incrementAndGet(); - }, e -> { - assertTrue(sinkFailed.get()); - failedSinks.incrementAndGet(); - })); - return sinkHandler.createExchangeSink(() -> {}); - }; - Set actualSeqNos = runConcurrentTest( - maxInputSeqNo, - maxOutputSeqNo, - exchangeSourceHandler::createExchangeSource, - exchangeSink - ); - assertThat(actualSeqNos, equalTo(expectedSeqNos)); - assertThat(completedSinks.get() + failedSinks.get(), equalTo(totalSinks.get())); - sourceCompletionFuture.actionGet(); + l.onResponse(new ExchangeResponse(blockFactory, page, r.finished())); + })); + } + }, false, () -> {}, instance, ActionListener.wrap(r -> { + assertFalse(sinkFailed.get()); + completedSinks.incrementAndGet(); + oneSinkListener.onResponse(null); + }, e -> { + assertTrue(sinkFailed.get()); + failedSinks.incrementAndGet(); + oneSinkListener.onFailure(e); + })); + return sinkHandler.createExchangeSink(() -> {}); + }; + Set actualSeqNos = runConcurrentTest( + maxInputSeqNo, + maxOutputSeqNo, + exchangeSourceHandler::createExchangeSource, + exchangeSink + ); + assertThat(actualSeqNos, equalTo(expectedSeqNos)); + } if (failedRequests.get() > 0) { + 
expectThrows(CircuitBreakingException.class, () -> remoteSinksFuture.actionGet(1, TimeUnit.MINUTES)); assertThat(failedSinks.get(), greaterThan(0)); + assertThat(completedSinks.get() + failedSinks.get(), equalTo(totalSinks.get())); } else { + safeGet(remoteSinksFuture); assertThat(failedSinks.get(), equalTo(0)); + assertThat(completedSinks.get(), equalTo(totalSinks.get())); } } @@ -458,7 +471,7 @@ public void testClosingSinks() { assertFalse(sink.waitForWriting().listener().isDone()); PlainActionFuture future = new PlainActionFuture<>(); sinkExchanger.fetchPageAsync(true, future); - ExchangeResponse resp = future.actionGet(); + ExchangeResponse resp = safeGet(future); assertTrue(resp.finished()); assertNull(resp.takePage()); assertTrue(sink.waitForWriting().listener().isDone()); @@ -466,7 +479,7 @@ public void testClosingSinks() { } public void testFinishEarly() throws Exception { - ExchangeSourceHandler sourceHandler = new ExchangeSourceHandler(20, threadPool.generic(), ActionListener.noop()); + ExchangeSourceHandler sourceHandler = new ExchangeSourceHandler(20, threadPool.generic()); Semaphore permits = new Semaphore(between(1, 5)); BlockFactory blockFactory = blockFactory(); Queue pages = ConcurrentCollections.newQueue(); @@ -537,12 +550,7 @@ public void testConcurrentWithTransportActions() { try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); - var sourceHandler = new ExchangeSourceHandler( - randomExchangeBuffer(), - threadPool.executor(ESQL_TEST_EXECUTOR), - sourceCompletionFuture - ); + var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); sourceHandler.addRemoteSink( @@ -563,7 +571,6 @@ public void testConcurrentWithTransportActions() { var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet()); assertThat(actualSeqNos, hasSize(expectedSeqNos.size())); assertThat(actualSeqNos, equalTo(expectedSeqNos)); - sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } @@ -613,22 +620,18 @@ public void sendResponse(TransportResponse transportResponse) { try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); - var sourceHandler = new ExchangeSourceHandler( - randomIntBetween(1, 128), - threadPool.executor(ESQL_TEST_EXECUTOR), - sourceCompletionFuture - ); + var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); + PlainActionFuture remoteSinkFuture = new PlainActionFuture<>(); sourceHandler.addRemoteSink( exchange0.newRemoteSink(task, exchangeId, node0, connection), true, () -> {}, randomIntBetween(1, 5), - ActionListener.noop() + remoteSinkFuture ); - Exception err = expectThrows( + Exception driverException = expectThrows( Exception.class, () -> runConcurrentTest( maxSeqNo, @@ -637,13 +640,36 @@ public void sendResponse(TransportResponse 
transportResponse) { () -> sinkHandler.createExchangeSink(() -> {}) ) ); - Throwable cause = ExceptionsHelper.unwrap(err, IOException.class); + assertThat(driverException, instanceOf(TaskCancelledException.class)); + var sinkException = expectThrows(Exception.class, remoteSinkFuture::actionGet); + Throwable cause = ExceptionsHelper.unwrap(sinkException, IOException.class); assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); PlainActionFuture sinkCompletionFuture = new PlainActionFuture<>(); sinkHandler.addCompletionListener(sinkCompletionFuture); - assertBusy(() -> assertTrue(sinkCompletionFuture.isDone())); - expectThrows(Exception.class, () -> sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS)); + safeGet(sinkCompletionFuture); + } + } + + public void testNoCyclicException() throws Exception { + PlainActionFuture future = new PlainActionFuture<>(); + try (EsqlRefCountingListener refs = new EsqlRefCountingListener(future)) { + var exchangeSourceHandler = new ExchangeSourceHandler(between(10, 100), threadPool.generic()); + int numSinks = between(5, 10); + for (int i = 0; i < numSinks; i++) { + RemoteSink remoteSink = (allSourcesFinished, listener) -> threadPool.schedule( + () -> listener.onFailure(new IOException("simulated")), + TimeValue.timeValueMillis(1), + threadPool.generic() + ); + exchangeSourceHandler.addRemoteSink(remoteSink, randomBoolean(), () -> {}, between(1, 3), refs.acquire()); + } + } + Exception err = expectThrows(Exception.class, () -> future.actionGet(10, TimeUnit.SECONDS)); + assertThat(ExceptionsHelper.unwrap(err, IOException.class).getMessage(), equalTo("simulated")); + try (BytesStreamOutput output = new BytesStreamOutput()) { + // ensure no cyclic exception + ElasticsearchException.writeException(err, output); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index e63e8b63d6ee9..49d91df556d14 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -543,6 +543,7 @@ public void testCollectAllValues() { List> actualTop = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -633,6 +634,7 @@ public void testCollectAllValues_RandomMultiValues() { List> actualTop = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -668,6 +670,7 @@ private List> topNTwoColumns( List> outputValues = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new TupleBlockSourceOperator(driverContext.blockFactory(), inputValues, randomIntBetween(1, 1000)), List.of( @@ -938,6 +941,7 @@ private void assertSortingOnMV( int topCount = randomIntBetween(1, values.size()); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(page).iterator()), List.of( @@ -1112,6 +1116,7 @@ public void testIPSortingSingleValue() throws UnknownHostException { List> actual = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( @@ 
-1239,6 +1244,7 @@ private void assertIPSortingOnMultiValues( DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( @@ -1327,6 +1333,7 @@ public void testZeroByte() { DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -1367,6 +1374,7 @@ public void testErrorBeforeFullyDraining() { DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.range(0, docCount)), List.of( diff --git a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java index a46dca4ae38cf..d9fca11ecdcf2 100644 --- a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java @@ -190,6 +190,7 @@ protected final List oneDriverPerPageList(Iterator> source, Sup List in = source.next(); try ( Driver d = new Driver( + "test", driverContext(), new CannedSourceOperator(in.iterator()), operators.get(), @@ -264,6 +265,7 @@ protected final List drive(List operators, Iterator input, boolean success = false; try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input), operators, @@ -291,6 +293,7 @@ public static void runDriver(List drivers) { for (int i = 0; i < dummyDrivers; i++) { drivers.add( new Driver( + "test", "dummy-session", 0, 0, diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 790b12346bb14..71da79063548d 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -85,6 +85,11 @@ protected boolean supportsIndexModeLookup() throws IOException { return hasCapabilities(List.of(JOIN_LOOKUP_V12.capabilityName())); } + @Override + protected boolean supportsSourceFieldMapping() { + return false; + } + @Override protected boolean deduplicateExactWarnings() { /* diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 4d06db94801bf..5ad10f6949be1 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -48,9 +48,11 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; 
+import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V3; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V12; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.UNMAPPED_FIELDS; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; @@ -124,7 +126,10 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); + assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V3.capabilityName())); assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V12.capabilityName())); + // Unmapped fields require a correct capability response from every cluster, which isn't currently implemented. + assumeFalse("UNMAPPED FIELDS not yet supported in CCS", testCase.requiredCapabilities.contains(UNMAPPED_FIELDS.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { @@ -287,4 +292,9 @@ protected boolean supportsIndexModeLookup() throws IOException { // return hasCapabilities(List.of(JOIN_LOOKUP_V10.capabilityName())); return false; } + + @Override + protected boolean supportsSourceFieldMapping() { + return false; + } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index b838d8ae284a4..791f5dacdce64 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -38,7 +38,10 @@ import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.esql.ccq.Clusters.REMOTE_CLUSTER_NAME; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.any; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasKey; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class MultiClustersIT extends ESRestTestCase { diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java index 7c81f97714a6f..d8c68dd5281aa 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java +++
b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java @@ -14,9 +14,12 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.esql.qa.rest.RequestIndexFilteringTestCase; +import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -25,6 +28,12 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class RequestIndexFilteringIT extends RequestIndexFilteringTestCase { @@ -49,6 +58,8 @@ public void setRemoteClient() throws IOException { } } + private boolean isCCSRequest; + @AfterClass public static void closeRemoteClients() throws IOException { try { @@ -66,13 +77,20 @@ protected void indexTimestampData(int docs, String indexName, String date, Strin @Override protected String from(String... indexName) { - if (randomBoolean()) { + isCCSRequest = randomBoolean(); + if (isCCSRequest) { return "FROM *:" + String.join(",*:", indexName); } else { return "FROM " + String.join(",", indexName); } } + @Override + public Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { + requestObject.includeCCSMetadata(true); + return super.runEsql(requestObject); + } + @After public void wipeRemoteTestData() throws IOException { try { @@ -82,4 +100,35 @@ public void wipeRemoteTestData() throws IOException { assertEquals(404, re.getResponse().getStatusLine().getStatusCode()); } } + + private MapMatcher getClustersMetadataMatcher() { + MapMatcher mapMatcher = matchesMap(); + mapMatcher = mapMatcher.entry("running", 0); + mapMatcher = mapMatcher.entry("total", 1); + mapMatcher = mapMatcher.entry("failed", 0); + mapMatcher = mapMatcher.entry("partial", 0); + mapMatcher = mapMatcher.entry("successful", 1); + mapMatcher = mapMatcher.entry("skipped", 0); + mapMatcher = mapMatcher.entry( + "details", + matchesMap().entry( + Clusters.REMOTE_CLUSTER_NAME, + matchesMap().entry("_shards", matchesMap().extraOk()) + .entry("took", greaterThanOrEqualTo(0)) + .entry("indices", instanceOf(String.class)) + .entry("status", "successful") + ) + ); + return mapMatcher; + } + + @Override + protected void assertQueryResult(Map result, Matcher columnMatcher, Matcher valuesMatcher) { + var matcher = getResultMatcher(result).entry("columns", columnMatcher).entry("values", valuesMatcher); + if (isCCSRequest) { + matcher = matcher.entry("_clusters", getClustersMetadataMatcher()); + } + assertMap(result, matcher); + } + } diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java index 64c113345bd53..4ef14a4fdf70c 100644 --- 
a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java @@ -37,4 +37,9 @@ public EsqlSpecIT( protected boolean enableRoundingDoubleValuesOnAsserting() { return true; } + + @Override + protected boolean supportsSourceFieldMapping() { + return false; + } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java index 368eebe808eee..2e7aed670fd13 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java @@ -42,4 +42,5 @@ protected boolean enableRoundingDoubleValuesOnAsserting() { // This suite runs with more than one node and three shards in serverless return cluster.getNumNodes() > 1; } + } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java index 9b98c29f5c3e3..d322263ce9182 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.qa.rest.generative.GenerativeRestTest; import org.junit.ClassRule; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102084") +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/121754") @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class GenerativeIT extends GenerativeRestTest { @ClassRule diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 601ce819224b5..58c82d800954c 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -40,8 +40,10 @@ import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.any; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; @@ -287,7 +289,6 @@ public void testProfile() throws IOException { equalTo(List.of(List.of(499.5d))) ); - List> signatures = new ArrayList<>(); @SuppressWarnings("unchecked") List> profiles = (List>) ((Map) result.get("profile")).get("drivers"); for (Map p : profiles) { @@ -299,26 
+300,34 @@ public void testProfile() throws IOException { for (Map o : operators) { sig.add(checkOperatorProfile(o)); } - signatures.add(sig); + String taskDescription = p.get("task_description").toString(); + switch (taskDescription) { + case "data" -> assertMap( + sig, + matchesList().item("LuceneSourceOperator") + .item("ValuesSourceReaderOperator") + .item("AggregationOperator") + .item("ExchangeSinkOperator") + ); + case "node_reduce" -> assertThat( + sig, + either(matchesList().item("ExchangeSourceOperator").item("ExchangeSinkOperator")).or( + matchesList().item("ExchangeSourceOperator").item("AggregationOperator").item("ExchangeSinkOperator") + ) + ); + case "final" -> assertMap( + sig, + matchesList().item("ExchangeSourceOperator") + .item("AggregationOperator") + .item("ProjectOperator") + .item("LimitOperator") + .item("EvalOperator") + .item("ProjectOperator") + .item("OutputOperator") + ); + default -> throw new IllegalArgumentException("can't match " + taskDescription); + } } - var readProfile = matchesList().item("LuceneSourceOperator") - .item("ValuesSourceReaderOperator") - .item("AggregationOperator") - .item("ExchangeSinkOperator"); - var mergeProfile = matchesList().item("ExchangeSourceOperator") - .item("AggregationOperator") - .item("ProjectOperator") - .item("LimitOperator") - .item("EvalOperator") - .item("ProjectOperator") - .item("OutputOperator"); - var emptyReduction = matchesList().item("ExchangeSourceOperator").item("ExchangeSinkOperator"); - var reduction = matchesList().item("ExchangeSourceOperator").item("AggregationOperator").item("ExchangeSinkOperator"); - assertThat( - signatures, - Matchers.either(containsInAnyOrder(readProfile, reduction, mergeProfile)) - .or(containsInAnyOrder(readProfile, emptyReduction, mergeProfile)) - ); } public void testProfileOrdinalsGroupingOperator() throws IOException { @@ -391,6 +400,7 @@ public void testInlineStatsProfile() throws IOException { } signatures.add(sig); } + // TODO adapt this to use task_description once this is reenabled assertThat( signatures, containsInAnyOrder( @@ -491,10 +501,10 @@ public void testForceSleepsProfile() throws IOException { MapMatcher sleepMatcher = matchesMap().entry("reason", "exchange empty") .entry("sleep_millis", greaterThan(0L)) .entry("wake_millis", greaterThan(0L)); - if (operators.contains("LuceneSourceOperator")) { - assertMap(sleeps, matchesMap().entry("counts", Map.of()).entry("first", List.of()).entry("last", List.of())); - } else if (operators.contains("ExchangeSourceOperator")) { - if (operators.contains("ExchangeSinkOperator")) { + String taskDescription = p.get("task_description").toString(); + switch (taskDescription) { + case "data" -> assertMap(sleeps, matchesMap().entry("counts", Map.of()).entry("first", List.of()).entry("last", List.of())); + case "node_reduce" -> { assertMap(sleeps, matchesMap().entry("counts", matchesMap().entry("exchange empty", greaterThan(0))).extraOk()); @SuppressWarnings("unchecked") List> first = (List>) sleeps.get("first"); @@ -506,8 +516,8 @@ public void testForceSleepsProfile() throws IOException { for (Map s : last) { assertMap(s, sleepMatcher); } - - } else { + } + case "final" -> { assertMap( sleeps, matchesMap().entry("counts", matchesMap().entry("exchange empty", 1)) @@ -515,14 +525,14 @@ public void testForceSleepsProfile() throws IOException { .entry("last", List.of(sleepMatcher)) ); } - } else { - fail("unknown signature: " + operators); + default -> throw new IllegalArgumentException("unknown task: " + taskDescription); } } } 
private MapMatcher commonProfile() { - return matchesMap().entry("start_millis", greaterThan(0L)) + return matchesMap().entry("task_description", any(String.class)) + .entry("start_millis", greaterThan(0L)) .entry("stop_millis", greaterThan(0L)) .entry("iterations", greaterThan(0L)) .entry("cpu_nanos", greaterThan(0L)) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 18bfb6b8676ce..3b9d2e7a20f88 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.esql.AssertWarnings; import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase; import org.elasticsearch.xpack.esql.CsvTestUtils; +import org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.SpecReader; import org.elasticsearch.xpack.esql.plugin.EsqlFeatures; @@ -61,7 +62,6 @@ import static org.elasticsearch.xpack.esql.CsvAssert.assertData; import static org.elasticsearch.xpack.esql.CsvAssert.assertMetadata; import static org.elasticsearch.xpack.esql.CsvSpecReader.specParser; -import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.availableDatasetsForEs; @@ -70,6 +70,7 @@ import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.deleteInferenceEndpoint; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.loadDataSetIntoEs; import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.SOURCE_FIELD_MAPPING; // This test can run very long in serverless configurations @TimeoutSuite(millis = 30 * TimeUnits.MINUTE) @@ -132,8 +133,10 @@ public void setup() throws IOException { createInferenceEndpoint(client()); } - if (indexExists(availableDatasetsForEs(client(), supportsIndexModeLookup()).iterator().next().indexName()) == false) { - loadDataSetIntoEs(client(), supportsIndexModeLookup()); + boolean supportsLookup = supportsIndexModeLookup(); + boolean supportsSourceMapping = supportsSourceFieldMapping(); + if (indexExists(availableDatasetsForEs(client(), supportsLookup, supportsSourceMapping).iterator().next().indexName()) == false) { + loadDataSetIntoEs(client(), supportsLookup, supportsSourceMapping); } } @@ -172,6 +175,9 @@ protected void shouldSkipTest(String testName) throws IOException { } checkCapabilities(adminClient(), testFeatureService, testName, testCase); assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, instructions, Version.CURRENT)); + if (supportsSourceFieldMapping() == false) { + assumeFalse("source mapping tests are muted", testCase.requiredCapabilities.contains(SOURCE_FIELD_MAPPING.capabilityName())); + } } protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase) @@ -229,10 +235,14 @@ protected boolean supportsIndexModeLookup() throws IOException { return true; } + protected boolean supportsSourceFieldMapping() throws IOException { + 
return true; + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - if (testCase.query.toUpperCase(Locale.ROOT).contains("LOOKUP")) { + if (testCase.query.toUpperCase(Locale.ROOT).contains("LOOKUP_\uD83D\uDC14")) { builder.tables(tables()); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java index ad61c52775eb9..1fdc11174ee09 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -17,6 +17,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.AssertWarnings; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.Assert; @@ -62,7 +63,7 @@ public void testTimestampFilterFromQuery() throws IOException { // filter includes both indices in the result (all columns, all rows) RestEsqlTestCase.RequestObjectBuilder builder = timestampFilter("gte", "2023-01-01").query(from("test*")); - assertResultMap( + assertQueryResult( runEsql(builder), matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -73,7 +74,7 @@ public void testTimestampFilterFromQuery() throws IOException { // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)! builder = timestampFilter("gte", "2024-01-01").query(from("test*")); - assertResultMap( + assertQueryResult( runEsql(builder), matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -84,7 +85,7 @@ public void testTimestampFilterFromQuery() throws IOException { // filter excludes both indices (no rows); the first analysis step fails because there are no columns, a second attempt succeeds // after eliminating the index filter. All columns are returned. builder = timestampFilter("gte", "2025-01-01").query(from("test*")); - assertResultMap( + assertQueryResult( runEsql(builder), matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -102,7 +103,7 @@ public void testFieldExistsFilter_KeepWildcard() throws IOException { // filter includes only test1. Columns and rows of test2 are filtered out RestEsqlTestCase.RequestObjectBuilder builder = existsFilter("id1").query(from("test*")); - assertResultMap( + assertQueryResult( runEsql(builder), matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -113,7 +114,7 @@ public void testFieldExistsFilter_KeepWildcard() throws IOException { // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)! 
builder = existsFilter("id1").query(from("test*") + " METADATA _index | KEEP _index, id*"); Map result = runEsql(builder); - assertResultMap( + assertQueryResult( result, matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword")) .item(matchesMap().entry("name", "id1").entry("type", "integer")), @@ -138,7 +139,7 @@ public void testFieldExistsFilter_With_ExplicitUseOfDiscardedIndexFields() throw from("test*") + " METADATA _index | SORT id2 | KEEP _index, id*" ); Map result = runEsql(builder); - assertResultMap( + assertQueryResult( result, matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -298,4 +299,9 @@ protected void indexTimestampDataForClient(RestClient client, int docs, String i Assert.assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)); } } + + protected void assertQueryResult(Map result, Matcher columnMatcher, Matcher valuesMatcher) { + assertResultMap(result, columnMatcher, valuesMatcher); + } + } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 66333421eeb75..dc979806370f0 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -33,7 +33,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.AssertWarnings; import org.elasticsearch.xpack.esql.EsqlTestUtils; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -660,10 +659,6 @@ public void testErrorMessageForArrayValuesInParams() throws IOException { } public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOException { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); bulkLoadTestData(10); // positive var query = requestObjectBuilder().query( @@ -773,6 +768,33 @@ public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOExcepti } } + public void testErrorMessageForMissingParams() throws IOException { + ResponseException re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1").params("[]")) + ); + assertThat( + EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", ""), + containsString("line 1:23: Unknown query parameter [n1]") + ); + + re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1 and y == ?n2").params("[{\"n\" : \"v\"}]")) + ); + assertThat(EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", ""), containsString(""" + line 1:23: Unknown query parameter [n1], did you mean [n]?; line 1:36: Unknown query parameter [n2], did you mean [n]?""")); + + re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1 and y == ?n2").params("[{\"n1\" : \"v1\"}]")) + ); + assertThat( + EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", ""), + containsString("line 1:36: Unknown query 
parameter [n2], did you mean [n1]") + ); + } + public void testErrorMessageForLiteralDateMathOverflow() throws IOException { List dateMathOverflowExpressions = List.of( "2147483647 day + 1 day", diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java index 844ccd3802bf1..5bf13d2d9c762 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java @@ -28,10 +28,13 @@ public record Column(String name, String type) {} public record QueryExecuted(String query, int depth, List outputSchema, Exception exception) {} public static String sourceCommand(List availabeIndices) { - return switch (randomIntBetween(0, 2)) { + return switch (randomIntBetween(0, 1)) { case 0 -> from(availabeIndices); - case 1 -> metaFunctions(); - default -> row(); + // case 1 -> metaFunctions(); + default -> from(availabeIndices); + // TODO re-enable ROW. + // now it crashes nodes in some cases: exiting java.lang.AssertionError: estimated row size [0] wasn't set + // default -> row(); }; } @@ -41,8 +44,12 @@ public static String sourceCommand(List availabeIndices) { * @param policies * @return a new command that can process it as input */ - public static String pipeCommand(List previousOutput, List policies) { - return switch (randomIntBetween(0, 11)) { + public static String pipeCommand( + List previousOutput, + List policies, + List lookupIndices + ) { + return switch (randomIntBetween(0, 12)) { case 0 -> dissect(previousOutput); case 1 -> drop(previousOutput); case 2 -> enrich(previousOutput, policies); @@ -54,10 +61,26 @@ public static String pipeCommand(List previousOutput, List rename(previousOutput); case 9 -> sort(previousOutput); case 10 -> stats(previousOutput); + case 11 -> join(previousOutput, lookupIndices); default -> where(previousOutput); }; } + private static String join(List previousOutput, List lookupIndices) { + + GenerativeRestTest.LookupIdx lookupIdx = randomFrom(lookupIndices); + String lookupIdxName = lookupIdx.idxName(); + String idxKey = lookupIdx.key(); + String keyType = lookupIdx.keyType(); + + var candidateKeys = previousOutput.stream().filter(x -> x.type.equals(keyType)).toList(); + if (candidateKeys.isEmpty()) { + return ""; + } + Column key = randomFrom(candidateKeys); + return "| rename " + key.name + " as " + idxKey + " | lookup join " + lookupIdxName + " on " + idxKey; + } + private static String where(List previousOutput) { // TODO more complex conditions StringBuilder result = new StringBuilder(" | where "); @@ -191,7 +214,53 @@ private static String keep(List previousOutput) { } private static String randomName(List previousOutput) { - return previousOutput.get(randomIntBetween(0, previousOutput.size() - 1)).name(); + // we need to exclude + // https://github.com/elastic/elasticsearch/issues/121741 + return randomFrom(previousOutput.stream().filter(x -> x.name().equals("") == false).toList()).name(); + } + + private static String randomGroupableName(List previousOutput) { + // we need to exclude + // https://github.com/elastic/elasticsearch/issues/121741 + var candidates = previousOutput.stream() + .filter(EsqlQueryGenerator::groupable) + .filter(x -> x.name().equals("") == false) + .toList(); + if 
(candidates.isEmpty()) { + return null; + } + return randomFrom(candidates).name(); + } + + private static boolean groupable(Column col) { + return col.type.equals("keyword") + || col.type.equals("text") + || col.type.equals("long") + || col.type.equals("integer") + || col.type.equals("ip") + || col.type.equals("version"); + } + + private static String randomSortableName(List previousOutput) { + // we need to exclude + // https://github.com/elastic/elasticsearch/issues/121741 + var candidates = previousOutput.stream() + .filter(EsqlQueryGenerator::sortable) + .filter(x -> x.name().equals("") == false) + .toList(); + if (candidates.isEmpty()) { + return null; + } + return randomFrom(candidates).name(); + } + + private static boolean sortable(Column col) { + return col.type.equals("keyword") + || col.type.equals("text") + || col.type.equals("long") + || col.type.equals("integer") + || col.type.equals("ip") + || col.type.equals("version"); } private static String rename(List previousOutput) { @@ -199,7 +268,12 @@ private static String rename(List previousOutput) { List proj = new ArrayList<>(); List names = new ArrayList<>(previousOutput.stream().map(Column::name).collect(Collectors.toList())); for (int i = 0; i < n; i++) { - String name = names.remove(randomIntBetween(0, names.size() - 1)); + var colN = randomIntBetween(0, names.size() - 1); + if (previousOutput.get(colN).type().endsWith("_range")) { + // ranges are not fully supported yet + continue; + } + String name = names.remove(colN); String newName; if (names.isEmpty() || randomBoolean()) { newName = randomAlphaOfLength(5); @@ -209,6 +283,9 @@ private static String rename(List previousOutput) { names.add(newName); proj.add(name + " AS " + newName); } + if (proj.isEmpty()) { + return ""; + } return " | rename " + proj.stream().collect(Collectors.joining(", ")); } @@ -227,7 +304,7 @@ private static String drop(List previousOutput) { name = "*" + name.substring(randomIntBetween(1, name.length() - 1)); } } - proj.add(name); + proj.add(name.contains("*") ? 
name : "`" + name + "`"); } return " | drop " + proj.stream().collect(Collectors.joining(", ")); } @@ -236,7 +313,11 @@ private static String sort(List previousOutput) { int n = randomIntBetween(1, previousOutput.size()); Set proj = new HashSet<>(); for (int i = 0; i < n; i++) { - proj.add(randomName(previousOutput)); + String col = randomSortableName(previousOutput); + if (col == null) { + return "";// no sortable columns + } + proj.add(col); } return " | sort " + proj.stream() @@ -295,9 +376,10 @@ private static String stats(List previousOutput) { cmd.append(expression); } if (randomBoolean()) { - cmd.append(" by "); - - cmd.append(randomName(nonNull)); + var col = randomGroupableName(nonNull); + if (col != null) { + cmd.append(" by " + col); + } } return cmd.toString(); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index 588d5870d89ec..0ceeb132f5b5c 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -35,7 +35,18 @@ public abstract class GenerativeRestTest extends ESRestTestCase { public static final Set ALLOWED_ERRORS = Set.of( "Reference \\[.*\\] is ambiguous", "Cannot use field \\[.*\\] due to ambiguities", - "cannot sort on .*" + "cannot sort on .*", + "argument of \\[count_distinct\\(.*\\)\\] must", + "Cannot use field \\[.*\\] with unsupported type \\[.*_range\\]", + // warnings + "Field '.*' shadowed by field at line .*", + "evaluation of \\[.*\\] failed, treating result as null", // TODO investigate? 
+ // Awaiting fixes + "estimated row size \\[0\\] wasn't set", // https://github.com/elastic/elasticsearch/issues/121739 + "unknown physical plan node \\[OrderExec\\]", // https://github.com/elastic/elasticsearch/issues/120817 + "Unknown column \\[\\]", // https://github.com/elastic/elasticsearch/issues/121741 + // + "The incoming YAML document exceeds the limit:" // still to investigate, but it seems to be specific to the test framework ); public static final Set ALLOWED_ERROR_PATTERNS = ALLOWED_ERRORS.stream() @@ -46,7 +57,7 @@ public abstract class GenerativeRestTest extends ESRestTestCase { @Before public void setup() throws IOException { if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) { - loadDataSetIntoEs(client(), true); + loadDataSetIntoEs(client(), true, true); } } @@ -64,6 +75,7 @@ public static void wipeTestData() throws IOException { public void test() { List indices = availableIndices(); + List lookupIndices = lookupIndices(); List policies = availableEnrichPolicies(); for (int i = 0; i < ITERATIONS; i++) { String command = EsqlQueryGenerator.sourceCommand(indices); @@ -76,7 +88,7 @@ public void test() { if (result.outputSchema().isEmpty()) { break; } - command = EsqlQueryGenerator.pipeCommand(result.outputSchema(), policies); + command = EsqlQueryGenerator.pipeCommand(result.outputSchema(), policies, lookupIndices); result = execute(result.query() + command, result.depth() + 1); if (result.exception() != null) { checkException(result); @@ -102,6 +114,9 @@ private EsqlQueryGenerator.QueryExecuted execute(String command, int depth) { return new EsqlQueryGenerator.QueryExecuted(command, depth, outputSchema, null); } catch (Exception e) { return new EsqlQueryGenerator.QueryExecuted(command, depth, null, e); + } catch (AssertionError ae) { + // this is for ensureNoWarnings() + return new EsqlQueryGenerator.QueryExecuted(command, depth, null, new RuntimeException(ae.getMessage())); } } @@ -116,7 +131,23 @@ private List outputSchema(Map a) { } private List availableIndices() { - return new ArrayList<>(CSV_DATASET_MAP.keySet()); + return new ArrayList<>( + CSV_DATASET_MAP.entrySet() + .stream() + .filter(x -> x.getValue().requiresInferenceEndpoint() == false) + .map(Map.Entry::getKey) + .toList() + ); + } + + record LookupIdx(String idxName, String key, String keyType) {} + + private List lookupIndices() { + List result = new ArrayList<>(); + // we don't have key info from the dataset loader, let's hardcode it for now + result.add(new LookupIdx("languages_lookup", "language_code", "integer")); + result.add(new LookupIdx("message_types_lookup", "message", "keyword")); + return result; } List availableEnrichPolicies() { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index fbd4f9feca78d..c66ffb37184ef 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -42,6 +42,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.CsvTestUtils.COMMA_ESCAPING_REGEX; @@ -83,6 +85,22 @@ public class CsvTestsDataLoader { 
.withData("sample_data_ts_nanos.csv") .withTypeMapping(Map.of("@timestamp", "date_nanos")); private static final TestDataset MISSING_IP_SAMPLE_DATA = new TestDataset("missing_ip_sample_data"); + private static final TestDataset SAMPLE_DATA_PARTIAL_MAPPING = new TestDataset("partial_mapping_sample_data"); + private static final TestDataset SAMPLE_DATA_NO_MAPPING = new TestDataset( + "no_mapping_sample_data", + "mapping-no_mapping_sample_data.json", + "partial_mapping_sample_data.csv" + ).withTypeMapping(Stream.of("timestamp", "client_ip", "event_duration").collect(Collectors.toMap(k -> k, k -> "keyword"))); + private static final TestDataset SAMPLE_DATA_PARTIAL_MAPPING_NO_SOURCE = new TestDataset( + "partial_mapping_no_source_sample_data", + "mapping-partial_mapping_no_source_sample_data.json", + "partial_mapping_sample_data.csv" + ).withSetting("source_parameters-settings.json"); + private static final TestDataset SAMPLE_DATA_PARTIAL_MAPPING_EXCLUDED_SOURCE = new TestDataset( + "partial_mapping_excluded_source_sample_data", + "mapping-partial_mapping_excluded_source_sample_data.json", + "partial_mapping_sample_data.csv" + ).withSetting("source_parameters-settings.json"); private static final TestDataset CLIENT_IPS = new TestDataset("clientips"); private static final TestDataset CLIENT_IPS_LOOKUP = CLIENT_IPS.withIndex("clientips_lookup") .withSetting("clientips_lookup-settings.json"); @@ -128,6 +146,10 @@ public class CsvTestsDataLoader { Map.entry(LANGUAGES_NESTED_FIELDS.indexName, LANGUAGES_NESTED_FIELDS), Map.entry(UL_LOGS.indexName, UL_LOGS), Map.entry(SAMPLE_DATA.indexName, SAMPLE_DATA), + Map.entry(SAMPLE_DATA_PARTIAL_MAPPING.indexName, SAMPLE_DATA_PARTIAL_MAPPING), + Map.entry(SAMPLE_DATA_NO_MAPPING.indexName, SAMPLE_DATA_NO_MAPPING), + Map.entry(SAMPLE_DATA_PARTIAL_MAPPING_NO_SOURCE.indexName, SAMPLE_DATA_PARTIAL_MAPPING_NO_SOURCE), + Map.entry(SAMPLE_DATA_PARTIAL_MAPPING_EXCLUDED_SOURCE.indexName, SAMPLE_DATA_PARTIAL_MAPPING_EXCLUDED_SOURCE), Map.entry(MV_SAMPLE_DATA.indexName, MV_SAMPLE_DATA), Map.entry(ALERTS.indexName, ALERTS), Map.entry(SAMPLE_DATA_STR.indexName, SAMPLE_DATA_STR), @@ -248,7 +270,7 @@ public static void main(String[] args) throws IOException { } try (RestClient client = builder.build()) { - loadDataSetIntoEs(client, true, (restClient, indexName, indexMapping, indexSettings) -> { + loadDataSetIntoEs(client, true, true, (restClient, indexName, indexMapping, indexSettings) -> { // don't use ESRestTestCase methods here or, if you do, test running the main method before making the change StringBuilder jsonBody = new StringBuilder("{"); if (indexSettings != null && indexSettings.isEmpty() == false) { @@ -267,14 +289,19 @@ public static void main(String[] args) throws IOException { } } - public static Set availableDatasetsForEs(RestClient client, boolean supportsIndexModeLookup) throws IOException { + public static Set availableDatasetsForEs( + RestClient client, + boolean supportsIndexModeLookup, + boolean supportsSourceFieldMapping + ) throws IOException { boolean inferenceEnabled = clusterHasInferenceEndpoint(client); Set testDataSets = new HashSet<>(); for (TestDataset dataset : CSV_DATASET_MAP.values()) { if ((inferenceEnabled || dataset.requiresInferenceEndpoint == false) - && (supportsIndexModeLookup || isLookupDataset(dataset) == false)) { + && (supportsIndexModeLookup || isLookupDataset(dataset) == false) + && (supportsSourceFieldMapping || isSourceMappingDataset(dataset) == false)) { testDataSets.add(dataset); } } @@ -282,24 +309,44 @@ public static Set 
availableDatasetsForEs(RestClient client, boolean return testDataSets; } - public static boolean isLookupDataset(TestDataset dataset) throws IOException { + private static boolean isLookupDataset(TestDataset dataset) throws IOException { Settings settings = dataset.readSettingsFile(); String mode = settings.get("index.mode"); return (mode != null && mode.equalsIgnoreCase("lookup")); } - public static void loadDataSetIntoEs(RestClient client, boolean supportsIndexModeLookup) throws IOException { - loadDataSetIntoEs(client, supportsIndexModeLookup, (restClient, indexName, indexMapping, indexSettings) -> { - ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null); - }); + private static boolean isSourceMappingDataset(TestDataset dataset) throws IOException { + if (dataset.mappingFileName() == null) { + return true; + } + String mappingJsonText = readTextFile(getResource("/" + dataset.mappingFileName())); + JsonNode mappingNode = new ObjectMapper().readTree(mappingJsonText); + // BWC tests don't support _source field mappings, so don't load those datasets. + return mappingNode.get("_source") != null; } - private static void loadDataSetIntoEs(RestClient client, boolean supportsIndexModeLookup, IndexCreator indexCreator) + public static void loadDataSetIntoEs(RestClient client, boolean supportsIndexModeLookup, boolean supportsSourceFieldMapping) throws IOException { + loadDataSetIntoEs( + client, + supportsIndexModeLookup, + supportsSourceFieldMapping, + (restClient, indexName, indexMapping, indexSettings) -> { + ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null); + } + ); + } + + private static void loadDataSetIntoEs( + RestClient client, + boolean supportsIndexModeLookup, + boolean supportsSourceFieldMapping, + IndexCreator indexCreator + ) throws IOException { Logger logger = LogManager.getLogger(CsvTestsDataLoader.class); Set loadedDatasets = new HashSet<>(); - for (var dataset : availableDatasetsForEs(client, supportsIndexModeLookup)) { + for (var dataset : availableDatasetsForEs(client, supportsIndexModeLookup, supportsSourceFieldMapping)) { load(client, dataset, logger, indexCreator); loadedDatasets.add(dataset.indexName); } @@ -351,10 +398,7 @@ public static boolean clusterHasInferenceEndpoint(RestClient client) throws IOEx } private static void loadEnrichPolicy(RestClient client, String policyName, String policyFileName, Logger logger) throws IOException { - URL policyMapping = CsvTestsDataLoader.class.getResource("/" + policyFileName); - if (policyMapping == null) { - throw new IllegalArgumentException("Cannot find resource " + policyFileName); - } + URL policyMapping = getResource("/" + policyFileName); String entity = readTextFile(policyMapping); Request request = new Request("PUT", "/_enrich/policy/" + policyName); request.setJsonEntity(entity); @@ -364,17 +408,17 @@ private static void loadEnrichPolicy(RestClient client, String policyName, Strin client.performRequest(request); } - private static void load(RestClient client, TestDataset dataset, Logger logger, IndexCreator indexCreator) throws IOException { - final String mappingName = "/" + dataset.mappingFileName; - URL mapping = CsvTestsDataLoader.class.getResource(mappingName); - if (mapping == null) { - throw new IllegalArgumentException("Cannot find resource " + mappingName); - } - final String dataName = "/data/" + dataset.dataFileName; - URL data = CsvTestsDataLoader.class.getResource(dataName); - if (data == null) { - throw new IllegalArgumentException("Cannot 
find resource " + dataName); + private static URL getResource(String name) { + URL result = CsvTestsDataLoader.class.getResource(name); + if (result == null) { + throw new IllegalArgumentException("Cannot find resource " + name); } + return result; + } + + private static void load(RestClient client, TestDataset dataset, Logger logger, IndexCreator indexCreator) throws IOException { + URL mapping = getResource("/" + dataset.mappingFileName); + URL data = getResource("/data/" + dataset.dataFileName); Settings indexSettings = dataset.readSettingsFile(); indexCreator.createIndex(client, dataset.indexName, readMappingFile(mapping, dataset.typeMapping), indexSettings); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 3e072e9a05c20..3af9871893207 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -96,6 +96,7 @@ import java.time.Duration; import java.time.Period; import java.util.ArrayList; +import java.util.Collection; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; @@ -137,6 +138,8 @@ import static org.elasticsearch.xpack.esql.parser.ParserUtils.ParamClassification.IDENTIFIER; import static org.elasticsearch.xpack.esql.parser.ParserUtils.ParamClassification.PATTERN; import static org.elasticsearch.xpack.esql.parser.ParserUtils.ParamClassification.VALUE; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -829,4 +832,9 @@ public static void assertEsqlFailure(Exception e) { ExceptionsHelper.unwrapCausesAndSuppressed(e, t -> t instanceof RemoteTransportException) .ifPresent(transportFailure -> assertNull("remote transport exception must be unwrapped", transportFailure.getCause())); } + + public static T singleValue(Collection collection) { + assertThat(collection, hasSize(1)); + return collection.iterator().next(); + } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/partial_mapping_sample_data.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/partial_mapping_sample_data.csv new file mode 100644 index 0000000000000..a7782a3c429ae --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/partial_mapping_sample_data.csv @@ -0,0 +1,8 @@ +@timestamp:date,client_ip:ip,event_duration:long,message:keyword,unmapped_message:keyword,unmapped_event_duration:keyword,unmapped.nested:keyword +2024-10-23T13:55:01.543Z,173.21.3.15,1756466,Connected to 10.1.0.1!,Disconnected from 10.1.0.1,1756468,a +2024-10-23T13:53:55.832Z,173.21.3.15,5033754,Connection error?,Disconnection error,5033756,b +2024-10-23T13:52:55.015Z,173.21.3.15,8268152,Connection error?,Disconnection error,8268154,c +2024-10-23T13:51:54.732Z,173.21.3.15,725447,Connection error?,Disconnection error,725449,d +2024-10-23T13:33:34.937Z,173.21.0.5,1232381,42,43,1232383,e +2024-10-23T12:27:28.948Z,173.21.2.113,2764888,Connected to 10.1.0.2!,Disconnected from 10.1.0.2,2764890,f +2024-10-23T12:15:03.360Z,173.21.2.162,3450232,Connected to 10.1.0.3!,Disconnected from 10.1.0.3,3450234,g diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec similarity index 97% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec index 91075691a6a1c..cf2d44665bd5a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec @@ -2,8 +2,8 @@ // TODO: re-enable the commented tests once the Join functionality stabilizes // -maxOfInt-Ignore -required_capability: join_planning_v1 +maxOfInt +required_capability: inlinestats_v3 // tag::max-languages[] FROM employees | KEEP emp_no, languages @@ -25,7 +25,7 @@ emp_no:integer | languages:integer | max_lang:integer ; maxOfIntByKeyword -required_capability: join_planning_v1 +required_capability: inlinestats_v3 FROM employees | KEEP emp_no, languages, gender @@ -43,7 +43,7 @@ emp_no:integer | languages:integer | gender:keyword | max_lang:integer ; maxOfLongByKeyword -required_capability: join_planning_v1 +required_capability: inlinestats_v3 FROM employees | KEEP emp_no, avg_worked_seconds, gender @@ -57,8 +57,8 @@ emp_no:integer | avg_worked_seconds:long | gender:keyword | max_avg_worked_secon 10030 | 394597613 | M | 394597613 ; -maxOfLong-Ignore -required_capability: join_planning_v1 +maxOfLong +required_capability: inlinestats_v3 FROM employees | KEEP emp_no, avg_worked_seconds, gender @@ -71,7 +71,7 @@ emp_no:integer | avg_worked_seconds:long | gender:keyword | max_avg_worked_secon ; maxOfLongByCalculatedKeyword -required_capability: join_planning_v1 +required_capability: inlinestats_v3 // tag::longest-tenured-by-first[] FROM employees @@ -94,7 +94,7 @@ emp_no:integer | avg_worked_seconds:long | last_name:keyword | SUBSTRING(last_na ; maxOfLongByCalculatedNamedKeyword -required_capability: join_planning_v1 +required_capability: inlinestats_v3 FROM employees | KEEP emp_no, avg_worked_seconds, last_name @@ -112,7 +112,7 @@ emp_no:integer | avg_worked_seconds:long | last_name:keyword | l:keyword | max_a 10087 | 305782871 | Eugenio | E | 305782871 ; -maxOfLongByCalculatedDroppedKeyword +maxOfLongByCalculatedDroppedKeyword-Ignore required_capability: join_planning_v1 FROM employees @@ -132,7 +132,7 @@ emp_no:integer | avg_worked_seconds:long | last_name:keyword | max_avg_worked_se ; maxOfLongByEvaledKeyword -required_capability: join_planning_v1 +required_capability: inlinestats_v3 FROM employees | EVAL l = SUBSTRING(last_name, 0, 1) @@ -152,7 +152,7 @@ emp_no:integer | avg_worked_seconds:long | l:keyword | max_avg_worked_seconds:lo ; maxOfLongByInt -required_capability: join_planning_v1 +required_capability: inlinestats_v3 FROM employees | KEEP emp_no, avg_worked_seconds, languages @@ -170,7 +170,7 @@ emp_no:integer | avg_worked_seconds:long | languages:integer | max_avg_worked_se ; maxOfLongByIntDouble -required_capability: join_planning_v1 +required_capability: inlinestats_v3 FROM employees | KEEP emp_no, avg_worked_seconds, languages, height @@ -205,7 +205,7 @@ emp_no:integer | languages:integer | avg_worked_seconds:long | gender:keyword | 10007 | 4 | 393084805 | F | 2.863684210555556E8 | 5 ; -byMultivaluedSimple +byMultivaluedSimple-Ignore required_capability: join_planning_v1 // tag::mv-group[] @@ -223,7 +223,7 @@ abbrev:keyword | type:keyword | scalerank:integer | min_scalerank:integer // 
end::mv-group-result[] ; -byMultivaluedMvExpand +byMultivaluedMvExpand-Ignore required_capability: join_planning_v1 // tag::mv-expand[] @@ -243,7 +243,7 @@ abbrev:keyword | type:keyword | scalerank:integer | min_scalerank:integer // end::mv-expand-result[] ; -byMvExpand +byMvExpand-Ignore required_capability: join_planning_v1 // tag::extreme-airports[] @@ -307,7 +307,7 @@ count:long | country:keyword | avg:double 17 | United Kingdom | 4.455 ; -afterWhere +afterWhere-Ignore required_capability: join_planning_v1 FROM airports @@ -366,8 +366,8 @@ abbrev:keyword | city:keyword | region:text | "COUNT(*)":long FUK | Fukuoka | 中央区 | 2 ; -beforeStats-Ignore -required_capability: join_planning_v1 +beforeStats +required_capability: inlinestats_v3 FROM airports | EVAL lat = ST_Y(location) @@ -379,7 +379,7 @@ northern:long | southern:long 520 | 371 ; -beforeKeepSort +beforeKeepSort-Ignore required_capability: join_planning_v1 FROM employees @@ -394,7 +394,7 @@ emp_no:integer | languages:integer | max_salary:integer 10003 | 4 | 74572 ; -beforeKeepWhere +beforeKeepWhere-Ignore required_capability: join_planning_v1 FROM employees @@ -537,8 +537,8 @@ emp_no:integer | one:integer 10005 | 1 ; -percentile-Ignore -required_capability: join_planning_v1 +percentile +required_capability: inlinestats_v3 FROM employees | KEEP emp_no, salary diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index d4a98fdc70a9a..8ca4292f97faa 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -1346,3 +1346,50 @@ language_code:integer | language_name:keyword | country:text 1 | English | United States of America 1 | English | null ; + + +sortBeforeAndAfterJoin +required_capability: join_lookup_v12 +required_capability: remove_redundant_sort + +FROM employees +| sort first_name +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| WHERE emp_no >= 10091 AND emp_no < 10094 +| SORT emp_no +| KEEP emp_no, language_code, language_name +; + +emp_no:integer | language_code:integer | language_name:keyword +10091 | 3 | Spanish +10092 | 1 | English +10093 | 3 | Spanish +; + + + +sortBeforeAndAfterMultipleJoinAndMvExpand +required_capability: join_lookup_v12 +required_capability: remove_redundant_sort + +FROM employees +| sort first_name +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| WHERE emp_no >= 10091 AND emp_no < 10094 +| SORT language_name +| MV_EXPAND first_name +| SORT first_name +| MV_EXPAND last_name +| SORT last_name +| LOOKUP JOIN languages_lookup ON language_code +| SORT emp_no +| KEEP emp_no, language_code, language_name +; + +emp_no:integer | language_code:integer | language_name:keyword +10091 | 3 | Spanish +10092 | 1 | English +10093 | 3 | Spanish +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-no_mapping_sample_data.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-no_mapping_sample_data.json new file mode 100644 index 0000000000000..d2ae900835e44 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-no_mapping_sample_data.json @@ -0,0 +1,4 @@ +{ + "dynamic": "false", + "properties": {} +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_excluded_source_sample_data.json 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_excluded_source_sample_data.json new file mode 100644 index 0000000000000..0f77e59f4dc39 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_excluded_source_sample_data.json @@ -0,0 +1,13 @@ +{ + "dynamic": "false", + "properties": { + "@timestamp": { + "type": "date" + } + }, + "_source": { + "excludes": [ + "message" + ] + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_no_source_sample_data.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_no_source_sample_data.json new file mode 100644 index 0000000000000..64f209e8d64a4 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_no_source_sample_data.json @@ -0,0 +1,11 @@ +{ + "dynamic": "false", + "properties": { + "@timestamp": { + "type": "date" + } + }, + "_source": { + "enabled": false + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_sample_data.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_sample_data.json new file mode 100644 index 0000000000000..bb86a1428f59b --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-partial_mapping_sample_data.json @@ -0,0 +1,17 @@ +{ + "dynamic": "false", + "properties": { + "@timestamp": { + "type": "date" + }, + "client_ip": { + "type": "ip" + }, + "event_duration": { + "type": "long" + }, + "message": { + "type": "keyword" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec index 4d7ee9b1b5af6..88c4fbf7de6cc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec @@ -39,7 +39,7 @@ max:integer |_index:keyword ; metaIndexAliasedInAggs -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: metadata_fields_remote_test from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i | SORT _i; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec index a213c378d33d8..1f41ffdb60691 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec @@ -40,7 +40,7 @@ max:integer |_index:keyword ; metaIndexSorted -required_capability: metadata_fields +required_capability: index_metadata_field from employees metadata _index | sort _index, emp_no desc | keep emp_no, _index | limit 2; @@ -50,7 +50,7 @@ emp_no:integer |_index:keyword ; metaIndexWithInPredicate -required_capability: metadata_fields +required_capability: index_metadata_field from employees metadata _index | where _index in ("employees", "foobar") | sort emp_no desc | keep emp_no, _index | limit 2; @@ -60,7 +60,7 @@ emp_no:integer |_index:keyword ; metaIndexAliasedInAggs -required_capability: metadata_fields +required_capability: index_metadata_field from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec index 2a7c092798404..1b4c1f0bc2b6c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec @@ -404,3 +404,17 @@ from employees | where emp_no == 10003 | mv_expand first_name | keep first_name first_name:keyword Parto ; + + +sortBeforeAndAfterMvExpand +from employees +| sort first_name +| mv_expand job_positions +| sort emp_no, job_positions +| keep emp_no, job_positions +| limit 2; + +emp_no:integer | job_positions:keyword +10001 | Accountant +10001 | Senior Python Developer +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index a2f491e20e3b9..8b19bc589fcff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -133,7 +133,7 @@ mc:l | count:l multiIndexIpString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: casting_operator required_capability: union_types_remove_fields @@ -162,7 +162,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringRename required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: casting_operator required_capability: union_types_remove_fields @@ -191,7 +191,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringRenameToString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_str METADATA _index @@ -219,7 +219,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexWhereIpString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_str METADATA _index @@ -237,7 +237,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 3450233 | Connected multiIndexWhereIpStringLike required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_str METADATA _index @@ -445,7 +445,7 @@ count:long | message:keyword multiIndexMissingIpToString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_missing_field FROM sample_data, sample_data_str, missing_ip_sample_data METADATA _index @@ -480,7 +480,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450 multiIndexMissingIpToIp required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_missing_field FROM sample_data, sample_data_str, missing_ip_sample_data METADATA _index @@ -515,7 +515,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_long METADATA 
_index @@ -543,7 +543,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexTsLongRename required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_long METADATA _index @@ -573,7 +573,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexTsNanosRename required_capability: to_date_nanos required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -602,7 +602,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexTsNanosRenameToNanos required_capability: to_date_nanos required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -631,7 +631,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360123456Z | 172.21.2.162 | 34502 multiIndex sort millis and nanos as nanos required_capability: to_date_nanos required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -660,7 +660,7 @@ sample_data | 2023-10-23T12:15:03.360000000Z | 172.21.2.162 | 34502 multiIndex sort millis and nanos as millis required_capability: to_date_nanos required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -691,7 +691,7 @@ multiIndexTsNanosRenameToNanosWithFiltering required_capability: to_date_nanos required_capability: date_nanos_binary_comparison required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -716,7 +716,7 @@ sample_data_ts_nanos | 2023-10-23T13:33:34.937123456Z | 172.21.0.5 | 12323 multiIndexTsLongRenameToString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_long METADATA _index @@ -744,7 +744,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexWhereTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_long METADATA _index @@ -979,7 +979,7 @@ count:long | message:keyword multiIndexIpStringTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1022,7 +1022,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongDropped required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: to_date_nanos FROM sample_data* METADATA _index @@ -1064,7 +1064,7 @@ sample_data_ts_nanos | 8268153 | Connection error 
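The long run of metadata_fields to index_metadata_field edits in these specs is a pure re-gating exercise: every csv-spec test lists the capabilities it needs, and the runner skips the test unless the cluster under test advertises all of them, so renaming a capability re-points the gate without touching the test body. A rough sketch of that contract, with hypothetical names rather than the actual runner API:

import java.util.List;
import java.util.Set;

class CapabilityGateSketch {
    // A spec executes only when every required_capability from its header is
    // advertised by the cluster; otherwise it is skipped, not failed.
    static boolean shouldRun(List<String> requiredCapabilities, Set<String> clusterCapabilities) {
        return clusterCapabilities.containsAll(requiredCapabilities);
    }
}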
multiIndexIpStringTsLongRename required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1107,7 +1107,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongRenameDropped required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: to_date_nanos FROM sample_data* METADATA _index @@ -1149,7 +1149,7 @@ sample_data_ts_nanos | 8268153 | Connection error multiIndexIpStringTsLongRenameToString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1192,7 +1192,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexWhereIpStringTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1226,7 +1226,7 @@ count:long | message:keyword multiIndexWhereIpStringLikeTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1260,7 +1260,7 @@ count:long | message:keyword multiIndexMultiColumnTypesRename required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1279,7 +1279,7 @@ null | null | 8268153 | Connectio multiIndexMultiColumnTypesRenameAndKeep required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1299,7 +1299,7 @@ sample_data_ts_nanos | 2023-10-23T13:52:55.015Z | 2023-10-23T13:52:55.015123456 multiIndexMultiColumnTypesRenameAndDrop required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1591,7 +1591,7 @@ FROM sample_data, sample_data_ts_long shortIntegerWidening required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: casting_operator required_capability: union_types_numeric_widening diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unmapped_fields.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unmapped_fields.csv-spec new file mode 100644 index 0000000000000..a0828ff628a6d --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unmapped_fields.csv-spec @@ -0,0 +1,582 @@ +###################### +# Single index tests # +###################### + +// This one is more of a test of the configuration than the unmapped fields feature. +doesNotLoadUnmappedFields +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| SORT @timestamp DESC +; + +@timestamp:datetime | client_ip:ip | event_duration:long | message:keyword +2024-10-23T13:55:01.543Z | 173.21.3.15 | 1756466 | Connected to 10.1.0.1! +2024-10-23T13:53:55.832Z | 173.21.3.15 | 5033754 | Connection error? 
+2024-10-23T13:52:55.015Z | 173.21.3.15 | 8268152 | Connection error? +2024-10-23T13:51:54.732Z | 173.21.3.15 | 725447 | Connection error? +2024-10-23T13:33:34.937Z | 173.21.0.5 | 1232381 | 42 +2024-10-23T12:27:28.948Z | 173.21.2.113 | 2764888 | Connected to 10.1.0.2! +2024-10-23T12:15:03.360Z | 173.21.2.162 | 3450232 | Connected to 10.1.0.3! +; + +fieldIsMappedToNonKeywordSingleIndex +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 client_ip +| KEEP @timestamp, client_ip +| SORT @timestamp DESC +; + +@timestamp:date | client_ip:ip +2024-10-23T13:55:01.543Z | 173.21.3.15 +2024-10-23T13:53:55.832Z | 173.21.3.15 +2024-10-23T13:52:55.015Z | 173.21.3.15 +2024-10-23T13:51:54.732Z | 173.21.3.15 +2024-10-23T13:33:34.937Z | 173.21.0.5 +2024-10-23T12:27:28.948Z | 173.21.2.113 +2024-10-23T12:15:03.360Z | 173.21.2.162 +; + +fieldIsMappedToKeywordSingleIndex +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 message +| KEEP @timestamp, message +| SORT @timestamp DESC +; + +@timestamp:datetime | message:keyword +2024-10-23T13:55:01.543Z | Connected to 10.1.0.1! +2024-10-23T13:53:55.832Z | Connection error? +2024-10-23T13:52:55.015Z | Connection error? +2024-10-23T13:51:54.732Z | Connection error? +2024-10-23T13:33:34.937Z | 42 +2024-10-23T12:27:28.948Z | Connected to 10.1.0.2! +2024-10-23T12:15:03.360Z | Connected to 10.1.0.3! +; + +unmappedFieldAppearsLast +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 event_duration +| SORT @timestamp DESC +| Limit 1 +; + +@timestamp:date | client_ip:ip | message:keyword | event_duration:long +2024-10-23T13:55:01.543Z | 173.21.3.15 | Connected to 10.1.0.1! | 1756466 +; + +fieldDoesNotExistSingleIndex +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 foo +| KEEP @timestamp, foo +| SORT @timestamp DESC +; + +@timestamp:date | foo:keyword +2024-10-23T13:55:01.543Z | null +2024-10-23T13:53:55.832Z | null +2024-10-23T13:52:55.015Z | null +2024-10-23T13:51:54.732Z | null +2024-10-23T13:33:34.937Z | null +2024-10-23T12:27:28.948Z | null +2024-10-23T12:15:03.360Z | null +; + +fieldIsUnmappedSingleIndex +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 unmapped_message +| KEEP @timestamp, message, unmapped_message +| SORT @timestamp DESC +; + +@timestamp:date | message:keyword | unmapped_message:keyword +2024-10-23T13:55:01.543Z | Connected to 10.1.0.1! | Disconnected from 10.1.0.1 +2024-10-23T13:53:55.832Z | Connection error? | Disconnection error +2024-10-23T13:52:55.015Z | Connection error? | Disconnection error +2024-10-23T13:51:54.732Z | Connection error? | Disconnection error +2024-10-23T13:33:34.937Z | 42 | 43 +2024-10-23T12:27:28.948Z | Connected to 10.1.0.2! | Disconnected from 10.1.0.2 +2024-10-23T12:15:03.360Z | Connected to 10.1.0.3! 
| Disconnected from 10.1.0.3 +; + +fieldIsUnmappedButSourceIsDisabledSingleIndex +required_capability: source_field_mapping +required_capability: unmapped_fields +FROM partial_mapping_no_source_sample_data +| INSIST_🐔 message +; + +@timestamp:date | message:keyword +2024-10-23T13:55:01.543Z | null +2024-10-23T13:53:55.832Z | null +2024-10-23T13:52:55.015Z | null +2024-10-23T13:51:54.732Z | null +2024-10-23T13:33:34.937Z | null +2024-10-23T12:27:28.948Z | null +2024-10-23T12:15:03.360Z | null +; + +fieldIsUnmappedButExcludedFromSourceSingleIndex +required_capability: source_field_mapping +required_capability: unmapped_fields +FROM partial_mapping_excluded_source_sample_data +| INSIST_🐔 message +| SORT @timestamp DESC +; + +@timestamp:date | message:keyword +2024-10-23T13:55:01.543Z | null +2024-10-23T13:53:55.832Z | null +2024-10-23T13:52:55.015Z | null +2024-10-23T13:51:54.732Z | null +2024-10-23T13:33:34.937Z | null +2024-10-23T12:27:28.948Z | null +2024-10-23T12:15:03.360Z | null +; + +fieldIsNestedAndMapped +required_capability: unmapped_fields +FROM addresses +| INSIST_🐔 city.name +| KEEP city.name +| SORT city.name DESC +; + +city.name:keyword +Tokyo +San Francisco +Amsterdam +; + +fieldIsNestedAndUnmapped +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 unmapped.nested +| KEEP @timestamp, unmapped.nested +| SORT @timestamp +; + +@timestamp:date | unmapped.nested:keyword +2024-10-23T12:15:03.360Z | g +2024-10-23T12:27:28.948Z | f +2024-10-23T13:33:34.937Z | e +2024-10-23T13:51:54.732Z | d +2024-10-23T13:52:55.015Z | c +2024-10-23T13:53:55.832Z | b +2024-10-23T13:55:01.543Z | a +; + +fieldIsNestedAndNonExistent +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 unmapped.nested.nonexistent +| KEEP @timestamp, unmapped.nested.nonexistent +| SORT @timestamp +; + +@timestamp:date | unmapped.nested.nonexistent:keyword +2024-10-23T12:15:03.360Z | null +2024-10-23T12:27:28.948Z | null +2024-10-23T13:33:34.937Z | null +2024-10-23T13:51:54.732Z | null +2024-10-23T13:52:55.015Z | null +2024-10-23T13:53:55.832Z | null +2024-10-23T13:55:01.543Z | null +; + +######################### +# Multi-parameter tests # +######################### + +noFieldExistsMultiParametersSingleIndex +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 foo, bar, bazz +| KEEP @timestamp, foo, bar, bazz +| SORT @timestamp DESC +; + +@timestamp:date | foo:keyword | bar:keyword | bazz:keyword +2024-10-23T13:55:01.543Z | null | null | null +2024-10-23T13:53:55.832Z | null | null | null +2024-10-23T13:52:55.015Z | null | null | null +2024-10-23T13:51:54.732Z | null | null | null +2024-10-23T13:33:34.937Z | null | null | null +2024-10-23T12:27:28.948Z | null | null | null +2024-10-23T12:15:03.360Z | null | null | null +; + +mixedFieldsMultiParametersSingleIndex +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 foo, message, unmapped_message +| KEEP @timestamp, foo, message, unmapped_message +| SORT @timestamp DESC +; + +@timestamp:date | foo:keyword | message:keyword | unmapped_message:keyword +2024-10-23T13:55:01.543Z | null | Connected to 10.1.0.1! | Disconnected from 10.1.0.1 +2024-10-23T13:53:55.832Z | null | Connection error? | Disconnection error +2024-10-23T13:52:55.015Z | null | Connection error? | Disconnection error +2024-10-23T13:51:54.732Z | null | Connection error? 
| Disconnection error +2024-10-23T13:33:34.937Z | null | 42 | 43 +2024-10-23T12:27:28.948Z | null | Connected to 10.1.0.2! | Disconnected from 10.1.0.2 +2024-10-23T12:15:03.360Z | null | Connected to 10.1.0.3! | Disconnected from 10.1.0.3 +; + +repeatedInsistFieldsUseTheLastEntry +required_capability: unmapped_fields +FROM partial_mapping_sample_data +| INSIST_🐔 unmapped_message, foo, message, foo, message, unmapped_message +| KEEP @timestamp, foo, message, unmapped_message +| SORT @timestamp DESC +; + +@timestamp:date | foo:keyword | message:keyword | unmapped_message:keyword +2024-10-23T13:55:01.543Z | null | Connected to 10.1.0.1! | Disconnected from 10.1.0.1 +2024-10-23T13:53:55.832Z | null | Connection error? | Disconnection error +2024-10-23T13:52:55.015Z | null | Connection error? | Disconnection error +2024-10-23T13:51:54.732Z | null | Connection error? | Disconnection error +2024-10-23T13:33:34.937Z | null | 42 | 43 +2024-10-23T12:27:28.948Z | null | Connected to 10.1.0.2! | Disconnected from 10.1.0.2 +2024-10-23T12:15:03.360Z | null | Connected to 10.1.0.3! | Disconnected from 10.1.0.3 +; + +##################### +# Multi index tests # +##################### + +mixedFieldsMultiParametersMultiIndex +required_capability: unmapped_fields +required_capability: index_metadata_field +FROM partial_mapping_sample_data, sample_data METADATA _index +| INSIST_🐔 foo, message, unmapped_message +| KEEP _index, @timestamp, foo, message, unmapped_message +| SORT @timestamp DESC +; + +_index:keyword | @timestamp:datetime | foo:keyword | message:keyword | unmapped_message:keyword +partial_mapping_sample_data | 2024-10-23T13:55:01.543Z | null | Connected to 10.1.0.1! | Disconnected from 10.1.0.1 +partial_mapping_sample_data | 2024-10-23T13:53:55.832Z | null | Connection error? | Disconnection error +partial_mapping_sample_data | 2024-10-23T13:52:55.015Z | null | Connection error? | Disconnection error +partial_mapping_sample_data | 2024-10-23T13:51:54.732Z | null | Connection error? | Disconnection error +partial_mapping_sample_data | 2024-10-23T13:33:34.937Z | null | 42 | 43 +partial_mapping_sample_data | 2024-10-23T12:27:28.948Z | null | Connected to 10.1.0.2! | Disconnected from 10.1.0.2 +partial_mapping_sample_data | 2024-10-23T12:15:03.360Z | null | Connected to 10.1.0.3! | Disconnected from 10.1.0.3 +sample_data | 2023-10-23T13:55:01.543Z | null | Connected to 10.1.0.1 | null +sample_data | 2023-10-23T13:53:55.832Z | null | Connection error | null +sample_data | 2023-10-23T13:52:55.015Z | null | Connection error | null +sample_data | 2023-10-23T13:51:54.732Z | null | Connection error | null +sample_data | 2023-10-23T13:33:34.937Z | null | Disconnected | null +sample_data | 2023-10-23T12:27:28.948Z | null | Connected to 10.1.0.2 | null +sample_data | 2023-10-23T12:15:03.360Z | null | Connected to 10.1.0.3 | null +; + +insistOnTopOfInsistMultiIndex +required_capability: unmapped_fields +required_capability: index_metadata_field +FROM partial_mapping_sample_data, sample_data METADATA _index +| INSIST_🐔 foo, message +| INSIST_🐔 unmapped_message +| KEEP _index, @timestamp, foo, message, unmapped_message +| SORT @timestamp DESC +; + +_index:keyword | @timestamp:datetime | foo:keyword | message:keyword | unmapped_message:keyword +partial_mapping_sample_data | 2024-10-23T13:55:01.543Z | null | Connected to 10.1.0.1! | Disconnected from 10.1.0.1 +partial_mapping_sample_data | 2024-10-23T13:53:55.832Z | null | Connection error? 
| Disconnection error +partial_mapping_sample_data | 2024-10-23T13:52:55.015Z | null | Connection error? | Disconnection error +partial_mapping_sample_data | 2024-10-23T13:51:54.732Z | null | Connection error? | Disconnection error +partial_mapping_sample_data | 2024-10-23T13:33:34.937Z | null | 42 | 43 +partial_mapping_sample_data | 2024-10-23T12:27:28.948Z | null | Connected to 10.1.0.2! | Disconnected from 10.1.0.2 +partial_mapping_sample_data | 2024-10-23T12:15:03.360Z | null | Connected to 10.1.0.3! | Disconnected from 10.1.0.3 +sample_data | 2023-10-23T13:55:01.543Z | null | Connected to 10.1.0.1 | null +sample_data | 2023-10-23T13:53:55.832Z | null | Connection error | null +sample_data | 2023-10-23T13:52:55.015Z | null | Connection error | null +sample_data | 2023-10-23T13:51:54.732Z | null | Connection error | null +sample_data | 2023-10-23T13:33:34.937Z | null | Disconnected | null +sample_data | 2023-10-23T12:27:28.948Z | null | Connected to 10.1.0.2 | null +sample_data | 2023-10-23T12:15:03.360Z | null | Connected to 10.1.0.3 | null +; + +fieldDoesNotExistMultiIndex +required_capability: index_metadata_field +required_capability: unmapped_fields +FROM partial_mapping_sample_data, sample_data METADATA _index +| INSIST_🐔 foo +| KEEP _index, @timestamp, foo +| SORT @timestamp DESC +; + +_index:keyword | @timestamp:date | foo:keyword +partial_mapping_sample_data | 2024-10-23T13:55:01.543Z | null +partial_mapping_sample_data | 2024-10-23T13:53:55.832Z | null +partial_mapping_sample_data | 2024-10-23T13:52:55.015Z | null +partial_mapping_sample_data | 2024-10-23T13:51:54.732Z | null +partial_mapping_sample_data | 2024-10-23T13:33:34.937Z | null +partial_mapping_sample_data | 2024-10-23T12:27:28.948Z | null +partial_mapping_sample_data | 2024-10-23T12:15:03.360Z | null +sample_data | 2023-10-23T13:55:01.543Z | null +sample_data | 2023-10-23T13:53:55.832Z | null +sample_data | 2023-10-23T13:52:55.015Z | null +sample_data | 2023-10-23T13:51:54.732Z | null +sample_data | 2023-10-23T13:33:34.937Z | null +sample_data | 2023-10-23T12:27:28.948Z | null +sample_data | 2023-10-23T12:15:03.360Z | null +; + +fieldIsUnmappedMultiIndex +required_capability: index_metadata_field +required_capability: unmapped_fields +FROM partial_mapping_sample_data, sample_data METADATA _index +| INSIST_🐔 unmapped_message +| KEEP @timestamp, message, unmapped_message, _index +| SORT @timestamp DESC +; + +@timestamp:date | message:keyword | unmapped_message:keyword | _index:keyword +2024-10-23T13:55:01.543Z | Connected to 10.1.0.1! | Disconnected from 10.1.0.1 | partial_mapping_sample_data +2024-10-23T13:53:55.832Z | Connection error? | Disconnection error | partial_mapping_sample_data +2024-10-23T13:52:55.015Z | Connection error? | Disconnection error | partial_mapping_sample_data +2024-10-23T13:51:54.732Z | Connection error? | Disconnection error | partial_mapping_sample_data +2024-10-23T13:33:34.937Z | 42 | 43 | partial_mapping_sample_data +2024-10-23T12:27:28.948Z | Connected to 10.1.0.2! | Disconnected from 10.1.0.2 | partial_mapping_sample_data +2024-10-23T12:15:03.360Z | Connected to 10.1.0.3! 
| Disconnected from 10.1.0.3 | partial_mapping_sample_data +2023-10-23T13:55:01.543Z | Connected to 10.1.0.1 | null | sample_data +2023-10-23T13:53:55.832Z | Connection error | null | sample_data +2023-10-23T13:52:55.015Z | Connection error | null | sample_data +2023-10-23T13:51:54.732Z | Connection error | null | sample_data +2023-10-23T13:33:34.937Z | Disconnected | null | sample_data +2023-10-23T12:27:28.948Z | Connected to 10.1.0.2 | null | sample_data +2023-10-23T12:15:03.360Z | Connected to 10.1.0.3 | null | sample_data +; + + +fieldIsMappedToDifferentTypesMultiIndex +required_capability: index_metadata_field +required_capability: unmapped_fields +FROM sample_data_ts_long, sample_data METADATA _index +| INSIST_🐔 @timestamp +| KEEP _index, @timestamp +| SORT _index +; + +_index:keyword | @timestamp:unsupported +sample_data | null +sample_data | null +sample_data | null +sample_data | null +sample_data | null +sample_data | null +sample_data | null +sample_data_ts_long | null +sample_data_ts_long | null +sample_data_ts_long | null +sample_data_ts_long | null +sample_data_ts_long | null +sample_data_ts_long | null +sample_data_ts_long | null +; + +fieldIsMappedToDifferentTypesButDropped +required_capability: index_metadata_field +required_capability: unmapped_fields +FROM sample_data_ts_long, sample_data METADATA _index +| INSIST_🐔 @timestamp +| KEEP _index, @timestamp +| DROP @timestamp +| EVAL @timestamp = 42 +| SORT _index +; + +_index:keyword | @timestamp:integer +sample_data | 42 +sample_data | 42 +sample_data | 42 +sample_data | 42 +sample_data | 42 +sample_data | 42 +sample_data | 42 +sample_data_ts_long | 42 +sample_data_ts_long | 42 +sample_data_ts_long | 42 +sample_data_ts_long | 42 +sample_data_ts_long | 42 +sample_data_ts_long | 42 +sample_data_ts_long | 42 +; + +fieldIsPartiallyUnmappedMultiIndex +required_capability: index_metadata_field +required_capability: unmapped_fields +FROM sample_data, no_mapping_sample_data METADATA _index +| INSIST_🐔 message +| KEEP _index, message +| SORT _index, message DESC +; + +_index:keyword | message:keyword +no_mapping_sample_data | Connection error? +no_mapping_sample_data | Connection error? +no_mapping_sample_data | Connection error? +no_mapping_sample_data | Connected to 10.1.0.3! +no_mapping_sample_data | Connected to 10.1.0.2! +no_mapping_sample_data | Connected to 10.1.0.1! +no_mapping_sample_data | 42 +sample_data | Disconnected +sample_data | Connection error +sample_data | Connection error +sample_data | Connection error +sample_data | Connected to 10.1.0.3 +sample_data | Connected to 10.1.0.2 +sample_data | Connected to 10.1.0.1 +; + +fieldIsPartiallyUnmappedAndRenamedMultiIndex +required_capability: unmapped_fields +FROM sample_data, no_mapping_sample_data +| INSIST_🐔 message +| KEEP message +| RENAME message AS msg +| SORT msg DESC +; + +msg:keyword +Disconnected +Connection error? +Connection error? +Connection error? +Connection error +Connection error +Connection error +Connected to 10.1.0.3! +Connected to 10.1.0.3 +Connected to 10.1.0.2! +Connected to 10.1.0.2 +Connected to 10.1.0.1! 
+Connected to 10.1.0.1 +42 +; + +fieldIsPartiallyUnmappedPartiallySourceIsDisabledMultiIndex +required_capability: index_metadata_field +required_capability: source_field_mapping +required_capability: unmapped_fields +FROM partial_mapping_sample_data,partial_mapping_no_source_sample_data METADATA _index +| INSIST_🐔 message +| KEEP _index, @timestamp, message +| SORT _index, @timestamp +; + +_index:keyword | @timestamp:date | message:keyword +partial_mapping_no_source_sample_data | 2024-10-23T12:15:03.360Z | null +partial_mapping_no_source_sample_data | 2024-10-23T12:27:28.948Z | null +partial_mapping_no_source_sample_data | 2024-10-23T13:33:34.937Z | null +partial_mapping_no_source_sample_data | 2024-10-23T13:51:54.732Z | null +partial_mapping_no_source_sample_data | 2024-10-23T13:52:55.015Z | null +partial_mapping_no_source_sample_data | 2024-10-23T13:53:55.832Z | null +partial_mapping_no_source_sample_data | 2024-10-23T13:55:01.543Z | null +partial_mapping_sample_data | 2024-10-23T12:15:03.360Z | Connected to 10.1.0.3! +partial_mapping_sample_data | 2024-10-23T12:27:28.948Z | Connected to 10.1.0.2! +partial_mapping_sample_data | 2024-10-23T13:33:34.937Z | 42 +partial_mapping_sample_data | 2024-10-23T13:51:54.732Z | Connection error? +partial_mapping_sample_data | 2024-10-23T13:52:55.015Z | Connection error? +partial_mapping_sample_data | 2024-10-23T13:53:55.832Z | Connection error? +partial_mapping_sample_data | 2024-10-23T13:55:01.543Z | Connected to 10.1.0.1! +; + +partialMappingStats +required_capability: index_metadata_field +required_capability: source_field_mapping +required_capability: unmapped_fields +FROM partial_mapping_sample_data,partial_mapping_excluded_source_sample_data METADATA _index +| INSIST_🐔 message +| SORT message, @timestamp +| STATS max(@timestamp), count(*) BY message +; + +max(@timestamp):date | count(*):long | message:keyword +2024-10-23T13:55:01.543Z | 7 | null +2024-10-23T13:33:34.937Z | 1 | 42 +2024-10-23T13:55:01.543Z | 1 | Connected to 10.1.0.1! +2024-10-23T12:27:28.948Z | 1 | Connected to 10.1.0.2! +2024-10-23T12:15:03.360Z | 1 | Connected to 10.1.0.3! +2024-10-23T13:53:55.832Z | 3 | Connection error? +; + +partialMappingCoalesce +required_capability: index_metadata_field +required_capability: source_field_mapping +required_capability: unmapped_fields +FROM partial_mapping_sample_data,partial_mapping_excluded_source_sample_data METADATA _index +| INSIST_🐔 message +| EVAL actual_value = COALESCE(message, "no _source") +| DROP message +| KEEP @timestamp, _index, actual_value +| SORT _index, @timestamp ASC +; + +@timestamp:date | _index:keyword | actual_value:keyword +2024-10-23T12:15:03.360Z | partial_mapping_excluded_source_sample_data | no _source +2024-10-23T12:27:28.948Z | partial_mapping_excluded_source_sample_data | no _source +2024-10-23T13:33:34.937Z | partial_mapping_excluded_source_sample_data | no _source +2024-10-23T13:51:54.732Z | partial_mapping_excluded_source_sample_data | no _source +2024-10-23T13:52:55.015Z | partial_mapping_excluded_source_sample_data | no _source +2024-10-23T13:53:55.832Z | partial_mapping_excluded_source_sample_data | no _source +2024-10-23T13:55:01.543Z | partial_mapping_excluded_source_sample_data | no _source +2024-10-23T12:15:03.360Z | partial_mapping_sample_data | Connected to 10.1.0.3! +2024-10-23T12:27:28.948Z | partial_mapping_sample_data | Connected to 10.1.0.2! +2024-10-23T13:33:34.937Z | partial_mapping_sample_data | 42 +2024-10-23T13:51:54.732Z | partial_mapping_sample_data | Connection error? 
+2024-10-23T13:52:55.015Z | partial_mapping_sample_data | Connection error? +2024-10-23T13:53:55.832Z | partial_mapping_sample_data | Connection error? +2024-10-23T13:55:01.543Z | partial_mapping_sample_data | Connected to 10.1.0.1! +; + +partialMappingUnionTypes +required_capability: index_metadata_field +required_capability: source_field_mapping +required_capability: unmapped_fields +FROM partial_mapping_sample_data,partial_mapping_excluded_source_sample_data METADATA _index +| INSIST_🐔 message +| EVAL actual_value = message::STRING +| KEEP @timestamp, _index, actual_value +| SORT actual_value, @timestamp ASC +; + +@timestamp:date | _index:keyword | actual_value:string +2024-10-23T13:33:34.937Z | partial_mapping_sample_data | 42 +2024-10-23T13:55:01.543Z | partial_mapping_sample_data | Connected to 10.1.0.1! +2024-10-23T12:27:28.948Z | partial_mapping_sample_data | Connected to 10.1.0.2! +2024-10-23T12:15:03.360Z | partial_mapping_sample_data | Connected to 10.1.0.3! +2024-10-23T13:51:54.732Z | partial_mapping_sample_data | Connection error? +2024-10-23T13:52:55.015Z | partial_mapping_sample_data | Connection error? +2024-10-23T13:53:55.832Z | partial_mapping_sample_data | Connection error? +2024-10-23T12:15:03.360Z | partial_mapping_excluded_source_sample_data | null +2024-10-23T12:27:28.948Z | partial_mapping_excluded_source_sample_data | null +2024-10-23T13:33:34.937Z | partial_mapping_excluded_source_sample_data | null +2024-10-23T13:51:54.732Z | partial_mapping_excluded_source_sample_data | null +2024-10-23T13:52:55.015Z | partial_mapping_excluded_source_sample_data | null +2024-10-23T13:53:55.832Z | partial_mapping_excluded_source_sample_data | null +2024-10-23T13:55:01.543Z | partial_mapping_excluded_source_sample_data | null +; + +partialMappingStatsAfterCast +required_capability: index_metadata_field +required_capability: source_field_mapping +required_capability: unmapped_fields +FROM partial_mapping_sample_data,partial_mapping_excluded_source_sample_data +| INSIST_🐔 message +| STATS count(*) BY message::INT +; +warningRegex: Line 3:21: evaluation of \[message::INT\] failed, treating result as null. Only first 20 failures recorded. 
+warningRegex: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number \[.*\] + +count(*):long | message::INT:integer +13 | null +1 | 42 +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java index 99a81c60a9ad2..59cb6eff68831 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java @@ -8,9 +8,13 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.compute.operator.DriverStatus; +import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.async.AsyncStopRequest; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -31,7 +35,9 @@ import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId; import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQuery; import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.waitForCluster; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; // This tests if enrich after stop works correctly public class CrossClusterAsyncEnrichStopIT extends AbstractEnrichBasedCrossClusterTestCase { @@ -87,10 +93,27 @@ public void testEnrichAfterStop() throws Exception { // wait until c1 is done waitForCluster(client(), "c1", asyncExecutionId); waitForCluster(client(), LOCAL_CLUSTER, asyncExecutionId); + // wait until remote reduce task starts on c2 + assertBusy(() -> { + List<TaskInfo> tasks = getDriverTasks(client(REMOTE_CLUSTER_2)); + List<TaskInfo> reduceTasks = tasks.stream() + .filter(t -> t.status() instanceof DriverStatus ds && ds.taskDescription().equals("remote_reduce")) + .toList(); + assertThat(reduceTasks, not(empty())); + }); // Run the stop request var stopRequest = new AsyncStopRequest(asyncExecutionId); var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest); + // wait until remote reduce tasks are gone + assertBusy(() -> { + List<TaskInfo> tasks = getDriverTasks(client(REMOTE_CLUSTER_2)); + List<TaskInfo> reduceTasks = tasks.stream() + .filter(t -> t.status() instanceof DriverStatus ds && ds.taskDescription().equals("remote_reduce")) + .toList(); + assertThat(reduceTasks, empty()); + }); + // Allow the processing to proceed SimplePauseFieldPlugin.allowEmitting.countDown(); @@ -153,4 +176,8 @@ record Event(long timestamp, String user, String host) {} } client.admin().indices().prepareRefresh("events").get(); } + + static List<TaskInfo> getDriverTasks(Client client) { + return client.admin().cluster().prepareListTasks().setActions(DriverTaskRunner.ACTION_NAME).setDetailed(true).get().getTasks(); + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 2d0a15436bf82..b15e4cfe739f0 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -38,6 +38,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.hamcrest.Matcher; import org.junit.Before; import java.io.IOException; @@ -75,9 +76,6 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { private static final Logger LOGGER = LogManager.getLogger(EsqlActionTaskIT.class); - private String READ_DESCRIPTION; - private String MERGE_DESCRIPTION; - private String REDUCE_DESCRIPTION; private boolean nodeLevelReduction; /** @@ -89,21 +87,6 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); nodeLevelReduction = randomBoolean(); - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES] - \\_ValuesSourceReaderOperator[fields = [pause_me]] - \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_AggregationOperator[mode = FINAL, aggs = sum of longs] - \\_ProjectOperator[projection = [0]] - \\_LimitOperator[limit = 1000] - \\_OutputOperator[columns = [sum(pause_me)]]"""; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction ? 
"\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n" : "") - + "\\_ExchangeSinkOperator"; - } public void testTaskContents() throws Exception { @@ -120,9 +103,11 @@ public void testTaskContents() throws Exception { for (TaskInfo task : foundTasks) { DriverStatus status = (DriverStatus) task.status(); assertThat(status.sessionId(), not(emptyOrNullString())); + String taskDescription = status.taskDescription(); for (DriverStatus.OperatorStatus o : status.activeOperators()) { logger.info("status {}", o); if (o.operator().startsWith("LuceneSourceOperator[maxPageSize = " + pageSize())) { + assertThat(taskDescription, equalTo("data")); LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status(); assertThat(oStatus.processedSlices(), lessThanOrEqualTo(oStatus.totalSlices())); assertThat(oStatus.processedQueries(), equalTo(Set.of("*:*"))); @@ -142,6 +127,7 @@ public void testTaskContents() throws Exception { continue; } if (o.operator().equals("ValuesSourceReaderOperator[fields = [pause_me]]")) { + assertThat(taskDescription, equalTo("data")); ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status(); assertMap( oStatus.readersBuilt(), @@ -152,6 +138,7 @@ public void testTaskContents() throws Exception { continue; } if (o.operator().equals("ExchangeSourceOperator")) { + assertThat(taskDescription, either(equalTo("node_reduce")).or(equalTo("final"))); ExchangeSourceOperator.Status oStatus = (ExchangeSourceOperator.Status) o.status(); assertThat(oStatus.pagesWaiting(), greaterThanOrEqualTo(0)); assertThat(oStatus.pagesEmitted(), greaterThanOrEqualTo(0)); @@ -159,6 +146,7 @@ public void testTaskContents() throws Exception { continue; } if (o.operator().equals("ExchangeSinkOperator")) { + assertThat(taskDescription, either(equalTo("data")).or(equalTo("node_reduce"))); ExchangeSinkOperator.Status oStatus = (ExchangeSinkOperator.Status) o.status(); assertThat(oStatus.pagesReceived(), greaterThanOrEqualTo(0)); exchangeSinks++; @@ -169,6 +157,29 @@ public void testTaskContents() throws Exception { assertThat(valuesSourceReaders, equalTo(1)); assertThat(exchangeSinks, greaterThanOrEqualTo(1)); assertThat(exchangeSources, equalTo(2)); + assertThat( + dataTasks(foundTasks).get(0).description(), + equalTo( + """ + \\_LuceneSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] + \\_ExchangeSinkOperator""".replace( + "sourceStatus", + "dataPartitioning = SHARD, maxPageSize = " + pageSize() + ", limit = 2147483647, scoreMode = COMPLETE_NO_SCORES" + ) + ) + ); + assertThat( + nodeReduceTasks(foundTasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(foundTasks, "\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n") + ); + assertThat(coordinatorTasks(foundTasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_AggregationOperator[mode = FINAL, aggs = sum of longs] + \\_ProjectOperator[projection = [0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [sum(pause_me)]]""")); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -181,7 +192,7 @@ public void testCancelRead() throws Exception { ActionFuture response = startEsql(); try { List infos = getTasksStarting(); - TaskInfo running = infos.stream().filter(t -> t.description().equals(READ_DESCRIPTION)).findFirst().get(); + TaskInfo running = infos.stream().filter(t -> ((DriverStatus) 
t.status()).taskDescription().equals("data")).findFirst().get(); cancelTask(running.taskId()); assertCancelled(response); } finally { @@ -193,7 +204,7 @@ public void testCancelMerge() throws Exception { ActionFuture response = startEsql(); try { List infos = getTasksStarting(); - TaskInfo running = infos.stream().filter(t -> t.description().equals(MERGE_DESCRIPTION)).findFirst().get(); + TaskInfo running = infos.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("final")).findFirst().get(); cancelTask(running.taskId()); assertCancelled(response); } finally { @@ -277,8 +288,8 @@ private List getTasksStarting() throws Exception { for (TaskInfo task : tasks) { assertThat(task.action(), equalTo(DriverTaskRunner.ACTION_NAME)); DriverStatus status = (DriverStatus) task.status(); - logger.info("task {} {}", task.description(), status); - assertThat(task.description(), anyOf(equalTo(READ_DESCRIPTION), equalTo(MERGE_DESCRIPTION), equalTo(REDUCE_DESCRIPTION))); + logger.info("task {} {} {}", status.taskDescription(), task.description(), status); + assertThat(status.taskDescription(), anyOf(equalTo("data"), equalTo("node_reduce"), equalTo("final"))); /* * Accept tasks that are either starting or have gone * immediately async. The coordinating task is likely @@ -302,8 +313,8 @@ private List getTasksRunning() throws Exception { for (TaskInfo task : tasks) { assertThat(task.action(), equalTo(DriverTaskRunner.ACTION_NAME)); DriverStatus status = (DriverStatus) task.status(); - assertThat(task.description(), anyOf(equalTo(READ_DESCRIPTION), equalTo(MERGE_DESCRIPTION), equalTo(REDUCE_DESCRIPTION))); - if (task.description().equals(READ_DESCRIPTION)) { + assertThat(status.taskDescription(), anyOf(equalTo("data"), equalTo("node_reduce"), equalTo("final"))); + if (status.taskDescription().equals("data")) { assertThat(status.status(), equalTo(DriverStatus.Status.RUNNING)); } else { assertThat(status.status(), equalTo(DriverStatus.Status.ASYNC)); @@ -328,23 +339,26 @@ private List getDriverTasks() throws Exception { .get() .getTasks(); assertThat(tasks, hasSize(equalTo(3))); - List readTasks = tasks.stream().filter(t -> t.description().equals(READ_DESCRIPTION)).toList(); - List mergeTasks = tasks.stream().filter(t -> t.description().equals(MERGE_DESCRIPTION)).toList(); - assertThat(readTasks, hasSize(1)); - assertThat(mergeTasks, hasSize(1)); - // node-level reduction is disabled when the target data node is also the coordinator - if (readTasks.get(0).node().equals(mergeTasks.get(0).node())) { - REDUCE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_ExchangeSinkOperator"""; - } - List reduceTasks = tasks.stream().filter(t -> t.description().equals(REDUCE_DESCRIPTION)).toList(); - assertThat(reduceTasks, hasSize(1)); + assertThat(dataTasks(tasks), hasSize(1)); + assertThat(nodeReduceTasks(tasks), hasSize(1)); + assertThat(coordinatorTasks(tasks), hasSize(1)); foundTasks.addAll(tasks); }); return foundTasks; } + private List dataTasks(List tasks) { + return tasks.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("data")).toList(); + } + + private List nodeReduceTasks(List tasks) { + return tasks.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("node_reduce")).toList(); + } + + private List coordinatorTasks(List tasks) { + return tasks.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("final")).toList(); + } + private void assertCancelled(ActionFuture response) throws Exception { Exception e = 
expectThrows(Exception.class, response); Throwable cancelException = ExceptionsHelper.unwrap(e, TaskCancelledException.class); @@ -462,7 +476,9 @@ protected void doRun() throws Exception { } Exception failure = expectThrows(Exception.class, () -> future.actionGet().close()); EsqlTestUtils.assertEsqlFailure(failure); - assertThat(failure.getMessage(), containsString("failed to fetch pages")); + Throwable cause = ExceptionsHelper.unwrap(failure, IOException.class); + assertNotNull(cause); + assertThat(cause.getMessage(), containsString("failed to fetch pages")); // If we proceed without waiting for pages, we might cancel the main request before starting the data-node request. // As a result, the exchange sinks on data-nodes won't be removed until the inactive_timeout elapses, which is // longer than the assertBusy timeout. @@ -475,30 +491,41 @@ protected void doRun() throws Exception { } public void testTaskContentsForTopNQuery() throws Exception { - READ_DESCRIPTION = ("\\_LuceneTopNSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 1000, " - + "scoreMode = TOP_DOCS, " - + "sorts = [{\"pause_me\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"long\"}}]]\n" - + "\\_ValuesSourceReaderOperator[fields = [pause_me]]\n" - + "\\_ProjectOperator[projection = [1]]\n" - + "\\_ExchangeSinkOperator").replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " - + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" - + "\\_ProjectOperator[projection = [0]]\n" - + "\\_OutputOperator[columns = [pause_me]]"; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction - ? 
"\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " - + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" - : "") - + "\\_ExchangeSinkOperator"; - ActionFuture response = startEsql("from test | sort pause_me | keep pause_me"); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize()); - getTasksRunning(); + List tasks = getTasksRunning(); + String sortStatus = """ + [{"pause_me":{"order":"asc","missing":"_last","unmapped_type":"long"}}]"""; + String sourceStatus = "dataPartitioning = SHARD, maxPageSize = " + + pageSize() + + ", limit = 1000, scoreMode = TOP_DOCS, sorts = " + + sortStatus; + assertThat(dataTasks(tasks).get(0).description(), equalTo(""" + \\_LuceneTopNSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_ProjectOperator[projection = [1]] + \\_ExchangeSinkOperator""".replace("sourceStatus", sourceStatus))); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher( + tasks, + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + ) + ); + assertThat( + coordinatorTasks(tasks).get(0).description(), + equalTo( + "\\_ExchangeSourceOperator[]\n" + + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + + "\\_ProjectOperator[projection = [0]]\n" + + "\\_OutputOperator[columns = [pause_me]]" + ) + ); } finally { // each scripted field "emit" is called by LuceneTopNSourceOperator and by ValuesSourceReaderOperator scriptPermits.release(2 * numberOfDocs()); @@ -510,26 +537,26 @@ public void testTaskContentsForTopNQuery() throws Exception { public void testTaskContentsForLimitQuery() throws Exception { String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit(), scoreMode = COMPLETE_NO_SCORES] - \\_ValuesSourceReaderOperator[fields = [pause_me]] - \\_ProjectOperator[projection = [1]] - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_LimitOperator[limit = limit()] - \\_ProjectOperator[projection = [0]] - \\_OutputOperator[columns = [pause_me]]""".replace("limit()", limit); - REDUCE_DESCRIPTION = ("\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction ? 
"\\_LimitOperator[limit = limit()]\n" : "") - + "\\_ExchangeSinkOperator").replace("limit()", limit); - ActionFuture response = startEsql("from test | keep pause_me | limit " + limit); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize() - prereleasedDocs); - getTasksRunning(); + List tasks = getTasksRunning(); + assertThat(dataTasks(tasks).get(0).description(), equalTo(""" + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit(), scoreMode = COMPLETE_NO_SCORES] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_ProjectOperator[projection = [1]] + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit))); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(tasks, "\\_LimitOperator[limit = " + limit + "]\n") + ); + assertThat(coordinatorTasks(tasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_LimitOperator[limit = limit()] + \\_ProjectOperator[projection = [0]] + \\_OutputOperator[columns = [pause_me]]""".replace("limit()", limit))); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -539,27 +566,35 @@ public void testTaskContentsForLimitQuery() throws Exception { } public void testTaskContentsForGroupingStatsQuery() throws Exception { - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES] - \\_ValuesSourceReaderOperator[fields = [foo]] - \\_OrdinalsGroupingOperator(aggs = max of longs) - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_HashAggregationOperator[mode = , aggs = max of longs] - \\_ProjectOperator[projection = [1, 0]] - \\_LimitOperator[limit = 1000] - \\_OutputOperator[columns = [max(foo), pause_me]]"""; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction ? 
"\\_HashAggregationOperator[mode = , aggs = max of longs]\n" : "") - + "\\_ExchangeSinkOperator"; - ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize()); - getTasksRunning(); + List tasks = getTasksRunning(); + String sourceStatus = "dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES" + .replace("pageSize()", Integer.toString(pageSize())); + assertThat( + dataTasks(tasks).get(0).description(), + equalTo( + """ + \\_LuceneSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [foo]] + \\_OrdinalsGroupingOperator(aggs = max of longs) + \\_ExchangeSinkOperator""".replace("sourceStatus", sourceStatus) + + ) + ); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(tasks, "\\_HashAggregationOperator[mode = , aggs = max of longs]\n") + ); + assertThat(coordinatorTasks(tasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_HashAggregationOperator[mode = , aggs = max of longs] + \\_ProjectOperator[projection = [1, 0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [max(foo), pause_me]]""")); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -570,6 +605,13 @@ public void testTaskContentsForGroupingStatsQuery() throws Exception { } } + private Matcher nodeLevelReduceDescriptionMatcher(List tasks, String nodeReduce) { + boolean matchNodeReduction = nodeLevelReduction + // If the data node and the coordinator are the same node then we don't reduce aggs in it. + && false == dataTasks(tasks).get(0).node().equals(coordinatorTasks(tasks).get(0).node()); + return equalTo("\\_ExchangeSourceOperator[]\n" + (matchNodeReduction ? 
nodeReduce : "") + "\\_ExchangeSinkOperator"); + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java index 1e34421097aac..1118121b0becb 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.FailingFieldPlugin; @@ -27,9 +29,23 @@ */ @ESIntegTestCase.ClusterScope(minNumDataNodes = 2) public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase { + @Override protected Collection> nodePlugins() { - return CollectionUtils.appendToCopy(super.nodePlugins(), FailingFieldPlugin.class); + var plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(FailingFieldPlugin.class); + plugins.add(InternalExchangePlugin.class); + return plugins; + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + Settings settings = Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(3000, 4000))) + .build(); + logger.info("settings {}", settings); + return settings; } /** @@ -49,7 +65,7 @@ public void testFailureLoadingFields() throws IOException { mapping.endObject(); client().admin().indices().prepareCreate("fail").setSettings(indexSettings(1, 0)).setMapping(mapping.endObject()).get(); - int docCount = 100; + int docCount = 50; List docs = new ArrayList<>(docCount); for (int d = 0; d < docCount; d++) { docs.add(client().prepareIndex("ok").setSource("foo", d)); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index 15bbc06836def..1bbcc46c0555f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -227,6 +227,7 @@ private void runLookup(DataType keyType, PopulateIndices populateIndices) throws DriverContext driverContext = driverContext(); try ( var driver = new Driver( + "test", driverContext, source.get(driverContext), List.of(reader.get(driverContext), lookup.get(driverContext)), diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java index 7036216ebbbcf..c8dc134e0e706 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java @@ -142,11 +142,9 @@ public static Iterable parameters() { | EVAL y = to_str(host) | LOOKUP JOIN lookup_idx ON host """, - Build.current().isSnapshot() - ? Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1), Map.entry("LOOKUP JOIN", 1)) - : Collections.emptyMap(), - Build.current().isSnapshot() ? Map.ofEntries(Map.entry("TO_STRING", 1)) : Collections.emptyMap(), - Build.current().isSnapshot() + Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1), Map.entry("LOOKUP JOIN", 1)), + Map.ofEntries(Map.entry("TO_STRING", 1)), + true ) }, new Object[] { new Test( diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 5b731b5dac9d2..67dad1d61d4c3 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -73,6 +73,7 @@ SHOW : 'show' -> pushMode(SHOW_MODE); SORT : 'sort' -> pushMode(EXPRESSION_MODE); STATS : 'stats' -> pushMode(EXPRESSION_MODE); WHERE : 'where' -> pushMode(EXPRESSION_MODE); +JOIN_LOOKUP : 'lookup' -> pushMode(JOIN_MODE); // // in development // @@ -85,14 +86,13 @@ WHERE : 'where' -> pushMode(EXPRESSION_MODE); // main section while preserving alphabetical order: // MYCOMMAND : 'mycommand' -> ... DEV_INLINESTATS : {this.isDevVersion()}? 'inlinestats' -> pushMode(EXPRESSION_MODE); +DEV_INSIST : {this.isDevVersion()}? 'insist_🐔' -> pushMode(PROJECT_MODE); DEV_LOOKUP : {this.isDevVersion()}? 'lookup_🐔' -> pushMode(LOOKUP_MODE); DEV_METRICS : {this.isDevVersion()}? 'metrics' -> pushMode(METRICS_MODE); // list of all JOIN commands -DEV_JOIN : {this.isDevVersion()}? 'join' -> pushMode(JOIN_MODE); DEV_JOIN_FULL : {this.isDevVersion()}? 'full' -> pushMode(JOIN_MODE); DEV_JOIN_LEFT : {this.isDevVersion()}? 'left' -> pushMode(JOIN_MODE); DEV_JOIN_RIGHT : {this.isDevVersion()}? 'right' -> pushMode(JOIN_MODE); -DEV_JOIN_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(JOIN_MODE); // @@ -308,15 +308,16 @@ FROM_MULTILINE_COMMENT FROM_WS : WS -> channel(HIDDEN) ; + // -// DROP, KEEP +// DROP, KEEP, INSIST // mode PROJECT_MODE; PROJECT_PIPE : PIPE -> type(PIPE), popMode; PROJECT_DOT: DOT -> type(DOT); PROJECT_COMMA : COMMA -> type(COMMA); -PROJECT_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); -PROJECT_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +PROJECT_PARAM : PARAM -> type(PARAM); +PROJECT_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); fragment UNQUOTED_ID_BODY_WITH_PATTERN : (LETTER | DIGIT | UNDERSCORE | ASTERISK) @@ -350,8 +351,8 @@ RENAME_PIPE : PIPE -> type(PIPE), popMode; RENAME_ASSIGN : ASSIGN -> type(ASSIGN); RENAME_COMMA : COMMA -> type(COMMA); RENAME_DOT: DOT -> type(DOT); -RENAME_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); -RENAME_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +RENAME_PARAM : PARAM -> type(PARAM); +RENAME_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); AS : 'as'; @@ -423,8 +424,8 @@ ENRICH_FIELD_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) ; -ENRICH_FIELD_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); -ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? 
NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +ENRICH_FIELD_PARAM : PARAM -> type(PARAM); +ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); ENRICH_FIELD_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) @@ -441,8 +442,8 @@ ENRICH_FIELD_WS mode MVEXPAND_MODE; MVEXPAND_PIPE : PIPE -> type(PIPE), popMode; MVEXPAND_DOT: DOT -> type(DOT); -MVEXPAND_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); -MVEXPAND_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +MVEXPAND_PARAM : PARAM -> type(PARAM); +MVEXPAND_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); MVEXPAND_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) @@ -556,7 +557,7 @@ LOOKUP_FIELD_WS // mode JOIN_MODE; JOIN_PIPE : PIPE -> type(PIPE), popMode; -JOIN_JOIN : DEV_JOIN -> type(DEV_JOIN); +JOIN : 'join'; JOIN_AS : AS -> type(AS); JOIN_ON : ON -> type(ON), popMode, pushMode(EXPRESSION_MODE); USING : 'USING' -> popMode, pushMode(EXPRESSION_MODE); @@ -639,3 +640,14 @@ CLOSING_METRICS_BY CLOSING_METRICS_PIPE : PIPE -> type(PIPE), popMode ; + +// +// INSIST command +// +mode INSIST_MODE; +INSIST_PIPE : PIPE -> type(PIPE), popMode; +INSIST_IDENTIFIER: UNQUOTED_IDENTIFIER -> type(UNQUOTED_IDENTIFIER); + +INSIST_WS : WS -> channel(HIDDEN); +INSIST_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN); +INSIST_MULTILINE_COMMENT : MULTILINE_COMMENT -> channel(HIDDEN); diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 218884913960f..ff27d74c959cd 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -14,14 +14,14 @@ SHOW=13 SORT=14 STATS=15 WHERE=16 -DEV_INLINESTATS=17 -DEV_LOOKUP=18 -DEV_METRICS=19 -DEV_JOIN=20 -DEV_JOIN_FULL=21 -DEV_JOIN_LEFT=22 -DEV_JOIN_RIGHT=23 -DEV_JOIN_LOOKUP=24 +JOIN_LOOKUP=17 +DEV_INLINESTATS=18 +DEV_INSIST=19 +DEV_LOOKUP=20 +DEV_METRICS=21 +DEV_JOIN_FULL=22 +DEV_JOIN_LEFT=23 +DEV_JOIN_RIGHT=24 UNKNOWN_CMD=25 LINE_COMMENT=26 MULTILINE_COMMENT=27 @@ -118,16 +118,20 @@ LOOKUP_WS=117 LOOKUP_FIELD_LINE_COMMENT=118 LOOKUP_FIELD_MULTILINE_COMMENT=119 LOOKUP_FIELD_WS=120 -USING=121 -JOIN_LINE_COMMENT=122 -JOIN_MULTILINE_COMMENT=123 -JOIN_WS=124 -METRICS_LINE_COMMENT=125 -METRICS_MULTILINE_COMMENT=126 -METRICS_WS=127 -CLOSING_METRICS_LINE_COMMENT=128 -CLOSING_METRICS_MULTILINE_COMMENT=129 -CLOSING_METRICS_WS=130 +JOIN=121 +USING=122 +JOIN_LINE_COMMENT=123 +JOIN_MULTILINE_COMMENT=124 +JOIN_WS=125 +METRICS_LINE_COMMENT=126 +METRICS_MULTILINE_COMMENT=127 +METRICS_WS=128 +CLOSING_METRICS_LINE_COMMENT=129 +CLOSING_METRICS_MULTILINE_COMMENT=130 +CLOSING_METRICS_WS=131 +INSIST_WS=132 +INSIST_LINE_COMMENT=133 +INSIST_MULTILINE_COMMENT=134 'dissect'=1 'drop'=2 'enrich'=3 @@ -144,6 +148,7 @@ CLOSING_METRICS_WS=130 'sort'=14 'stats'=15 'where'=16 +'lookup'=17 '|'=29 'by'=33 'and'=34 @@ -189,4 +194,5 @@ CLOSING_METRICS_WS=130 'on'=95 'with'=96 'info'=107 -'USING'=121 +'join'=121 +'USING'=122 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index c66da879a5709..f0bfb91c776c2 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -51,10 +51,11 @@ processingCommand | grokCommand | enrichCommand | mvExpandCommand + | joinCommand // in development | {this.isDevVersion()}? 
inlinestatsCommand | {this.isDevVersion()}? lookupCommand - | {this.isDevVersion()}? joinCommand + | {this.isDevVersion()}? insistCommand ; whereCommand @@ -143,6 +144,7 @@ indexPattern clusterString : UNQUOTED_SOURCE + | QUOTED_STRING ; indexString @@ -193,7 +195,7 @@ identifier identifierPattern : ID_PATTERN - | {this.isDevVersion()}? parameter + | parameter ; constant @@ -216,7 +218,7 @@ parameter identifierOrParameter : identifier - | {this.isDevVersion()}? parameter + | parameter ; limitCommand @@ -324,11 +326,11 @@ inlinestatsCommand ; joinCommand - : type=(DEV_JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT)? DEV_JOIN joinTarget joinCondition + : type=(JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT) JOIN joinTarget joinCondition ; joinTarget - : index=indexPattern (AS alias=identifier)? + : index=indexPattern ; joinCondition @@ -338,3 +340,7 @@ joinCondition joinPredicate : valueExpression ; + +insistCommand + : DEV_INSIST qualifiedNamePatterns + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 218884913960f..ff27d74c959cd 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -14,14 +14,14 @@ SHOW=13 SORT=14 STATS=15 WHERE=16 -DEV_INLINESTATS=17 -DEV_LOOKUP=18 -DEV_METRICS=19 -DEV_JOIN=20 -DEV_JOIN_FULL=21 -DEV_JOIN_LEFT=22 -DEV_JOIN_RIGHT=23 -DEV_JOIN_LOOKUP=24 +JOIN_LOOKUP=17 +DEV_INLINESTATS=18 +DEV_INSIST=19 +DEV_LOOKUP=20 +DEV_METRICS=21 +DEV_JOIN_FULL=22 +DEV_JOIN_LEFT=23 +DEV_JOIN_RIGHT=24 UNKNOWN_CMD=25 LINE_COMMENT=26 MULTILINE_COMMENT=27 @@ -118,16 +118,20 @@ LOOKUP_WS=117 LOOKUP_FIELD_LINE_COMMENT=118 LOOKUP_FIELD_MULTILINE_COMMENT=119 LOOKUP_FIELD_WS=120 -USING=121 -JOIN_LINE_COMMENT=122 -JOIN_MULTILINE_COMMENT=123 -JOIN_WS=124 -METRICS_LINE_COMMENT=125 -METRICS_MULTILINE_COMMENT=126 -METRICS_WS=127 -CLOSING_METRICS_LINE_COMMENT=128 -CLOSING_METRICS_MULTILINE_COMMENT=129 -CLOSING_METRICS_WS=130 +JOIN=121 +USING=122 +JOIN_LINE_COMMENT=123 +JOIN_MULTILINE_COMMENT=124 +JOIN_WS=125 +METRICS_LINE_COMMENT=126 +METRICS_MULTILINE_COMMENT=127 +METRICS_WS=128 +CLOSING_METRICS_LINE_COMMENT=129 +CLOSING_METRICS_MULTILINE_COMMENT=130 +CLOSING_METRICS_WS=131 +INSIST_WS=132 +INSIST_LINE_COMMENT=133 +INSIST_MULTILINE_COMMENT=134 'dissect'=1 'drop'=2 'enrich'=3 @@ -144,6 +148,7 @@ CLOSING_METRICS_WS=130 'sort'=14 'stats'=15 'where'=16 +'lookup'=17 '|'=29 'by'=33 'and'=34 @@ -189,4 +194,5 @@ CLOSING_METRICS_WS=130 'on'=95 'with'=96 'info'=107 -'USING'=121 +'join'=121 +'USING'=122 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 47a8a586bf1df..ed1f940f20518 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -20,6 +20,8 @@ import java.util.Locale; import java.util.Set; +import static org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin.AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG; + /** * A {@link Set} of "capabilities" supported by the {@link RestEsqlQueryAction} * and {@link RestEsqlAsyncQueryAction} APIs. These are exposed over the @@ -119,12 +121,17 @@ public enum Cap { * Cast string literals to a desired data type for IN predicate and more types for BinaryComparison. 
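* An assumed illustration, not taken from this change: with this capability, a filter such as
* {@code WHERE hire_date IN ("2024-01-01", "2024-01-02")} can cast its string literals to the
* field's date type instead of failing the comparison.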
*/ STRING_LITERAL_AUTO_CASTING_EXTENDED, - /** * Support for metadata fields. */ METADATA_FIELDS, + /** + * Support specifically for *just* the _index METADATA field. Used by CsvTests, since that is the only metadata field currently + * supported. + */ + INDEX_METADATA_FIELD, + /** * Support for timespan units abbreviations */ @@ -296,6 +303,11 @@ public enum Cap { */ UNION_TYPES, + /** + * Support unmapped fields using the INSIST keyword. + */ + UNMAPPED_FIELDS(Build.current().isSnapshot()), + /** * Support for function {@code ST_DISTANCE}. Done in #108764. */ @@ -615,6 +627,11 @@ public enum Cap { */ SORT_RETURNING_SOURCE_OK, + /** + * _source field mapping directives: https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html + */ + SOURCE_FIELD_MAPPING, + /** * Allow filter per individual aggregation. */ @@ -668,7 +685,7 @@ public enum Cap { /** * Support simplified syntax for named parameters for field and function names. */ - NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX(Build.current().isSnapshot()), + NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX(), /** * Fix pushdown of LIMIT past MV_EXPAND */ @@ -694,7 +711,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V12(Build.current().isSnapshot()), + JOIN_LOOKUP_V12, /** * LOOKUP JOIN with TEXT fields on the right (right side of the join) (#119473) @@ -784,7 +801,20 @@ public enum Cap { /** * Support for aggregate_metric_double type */ - AGGREGATE_METRIC_DOUBLE; + AGGREGATE_METRIC_DOUBLE(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG.isEnabled()), + + /** + * Fix for https://github.com/elastic/elasticsearch/issues/120817 + * and https://github.com/elastic/elasticsearch/issues/120803 + * Support for queries that have multiple SORTs that cannot become TopN + */ + REMOVE_REDUNDANT_SORT, + + /** + * Fixes a series of issues with inlinestats which had an incomplete implementation after lookup and inlinestats + * were refactored.
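* As an assumed illustration of the command this capability gates (the query is an editor-provided
* example, not from this change): {@code FROM employees | INLINESTATS m = MAX(salary) BY dept}
* computes the aggregate and adds it as a column without collapsing the input rows.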
+ */ + INLINESTATS_V3(EsqlPlugin.INLINESTATS_FEATURE_FLAG); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index d8904288523a7..e77d7b41aaca6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -171,8 +171,7 @@ private static QueryParams parseParams(XContentParser p) throws IOException { String paramName = entry.getKey(); checkParamNameValidity(paramName, errors, loc); - if (EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - && entry.getValue() instanceof Map value) {// parameter specified as a key:value pair + if (entry.getValue() instanceof Map value) {// parameter specified as a key:value pair checkParamValueSize(paramName, value, loc, errors); for (Object keyName : value.keySet()) { classification = getParamClassification(keyName.toString(), errors, loc); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 1351b5ce51f44..74242cf9be3f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -9,13 +9,13 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexMode; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.analysis.AnalyzerRules.BaseAnalyzerRule; import org.elasticsearch.xpack.esql.analysis.AnalyzerRules.ParameterizedAnalyzerRule; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; +import org.elasticsearch.xpack.esql.core.type.PotentiallyUnmappedKeywordEsField; import org.elasticsearch.xpack.esql.core.type.UnsupportedEsField; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -70,6 +71,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Insist; import org.elasticsearch.xpack.esql.plan.logical.Keep; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; @@ -430,9 +432,9 @@ private LocalRelation tableMapAsRelation(Source source, Map mapT } } - public static class ResolveRefs extends BaseAnalyzerRule { + public static class ResolveRefs extends ParameterizedAnalyzerRule { @Override - protected LogicalPlan doRule(LogicalPlan plan) { + protected LogicalPlan rule(LogicalPlan plan, AnalyzerContext context) { if 
(plan.childrenResolved() == false) { return plan; } @@ -479,6 +481,10 @@ protected LogicalPlan doRule(LogicalPlan plan) { return resolveLookupJoin(j); } + if (plan instanceof Insist i) { + return resolveInsist(i, childrenOutput, context.indexResolution()); + } + return plan.transformExpressionsOnly(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); } @@ -662,13 +668,13 @@ private List resolveUsingColumns(List cols, List resolved = new ArrayList<>(cols.size()); for (Attribute col : cols) { if (col instanceof UnresolvedAttribute ua) { - Attribute resolvedCol = maybeResolveAttribute(ua, output); - if (resolvedCol instanceof UnresolvedAttribute ucol) { + Attribute resolvedField = maybeResolveAttribute(ua, output); + if (resolvedField instanceof UnresolvedAttribute ucol) { String message = ua.unresolvedMessage(); String match = "column [" + ucol.name() + "]"; - resolvedCol = ucol.withUnresolvedMessage(message.replace(match, match + " in " + side + " side of join")); + resolvedField = ucol.withUnresolvedMessage(message.replace(match, match + " in " + side + " side of join")); } - resolved.add(resolvedCol); + resolved.add(resolvedField); } else { throw new IllegalStateException( "Surprised to discover column [ " + col.name() + "] already resolved when resolving JOIN keys" @@ -678,6 +684,49 @@ private List resolveUsingColumns(List cols, List childrenOutput, IndexResolution indexResolution) { + List list = new ArrayList<>(); + for (Attribute a : insist.insistedAttributes()) { + list.add(resolveInsistAttribute(a, childrenOutput, indexResolution)); + } + return insist.withAttributes(list); + } + + private Attribute resolveInsistAttribute(Attribute attribute, List childrenOutput, IndexResolution indexResolution) { + Attribute resolvedCol = maybeResolveAttribute((UnresolvedAttribute) attribute, childrenOutput); + // Field isn't mapped anywhere. + if (resolvedCol instanceof UnresolvedAttribute) { + return insistKeyword(attribute); + } + + // Field is partially unmapped. + if (resolvedCol instanceof FieldAttribute fa && indexResolution.get().isPartiallyUnmappedField(fa.name())) { + return fa.dataType() == KEYWORD ? insistKeyword(fa) : invalidInsistAttribute(fa); + } + + // Either the field is mapped everywhere and we can just use the resolved column, or the INSIST clause isn't on top of a FROM + // clause—for example, it might be on top of a ROW clause—so the verifier will catch it and fail. + return resolvedCol; + } + + private static Attribute invalidInsistAttribute(FieldAttribute fa) { + var name = fa.name(); + EsField field = fa.field() instanceof InvalidMappedField imf + ? 
new InvalidMappedField(name, InvalidMappedField.makeErrorsMessageIncludingInsistKeyword(imf.getTypesToIndices())) + : new InvalidMappedField( + name, + Strings.format( + "mapped as [2] incompatible types: [keyword] enforced by INSIST command, and [%s] in index mappings", + fa.dataType().typeName() + ) + ); + return new FieldAttribute(fa.source(), name, field); + } + + private static FieldAttribute insistKeyword(Attribute attribute) { + return new FieldAttribute(attribute.source(), attribute.name(), new PotentiallyUnmappedKeywordEsField(attribute.name())); + } + private Attribute maybeResolveAttribute(UnresolvedAttribute ua, List childrenOutput) { return maybeResolveAttribute(ua, childrenOutput, log); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index c2663650685eb..87e555e8d2f7f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -29,6 +29,8 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.Insist; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.Project; @@ -95,6 +97,7 @@ Collection verify(LogicalPlan plan, BitSet partialMetrics) { checkOperationsOnUnsignedLong(p, failures); checkBinaryComparison(p, failures); + checkInsist(p, failures); }); if (failures.hasFailures() == false) { @@ -132,7 +135,7 @@ else if (p.resolved()) { e.forEachUp(ae -> { // Special handling for Project and unsupported/union types: disallow renaming them but pass them through otherwise. 
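// Insist performs projection-like attribute handling (see the Analyzer changes above), so the
// rename restrictions in this block now apply to Insist as well as Project.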
- if (p instanceof Project) { + if (p instanceof Project || p instanceof Insist) { if (ae instanceof Alias as && as.child() instanceof UnsupportedAttribute ua) { failures.add(fail(ae, ua.unresolvedMessage())); } @@ -231,6 +234,15 @@ private static void checkBinaryComparison(LogicalPlan p, Failures failures) { }); } + private static void checkInsist(LogicalPlan p, Failures failures) { + if (p instanceof Insist i) { + LogicalPlan child = i.child(); + if ((child instanceof EsRelation || child instanceof Insist) == false) { + failures.add(fail(i, "[insist] can only be used after [from] or [insist] commands, but was [{}]", child.sourceText())); + } + } + } + private void licenseCheck(LogicalPlan plan, Failures failures) { Consumer> licenseCheck = n -> { if (n instanceof LicenseAware la && la.licenseCheck(licenseState) == false) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index cb2582db2ad33..d4d9372214753 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -327,6 +327,7 @@ private void doLookup(T request, CancellableTask task, ActionListener releasables.add(outputOperator); Driver driver = new Driver( "enrich-lookup:" + request.sessionId, + "enrich", System.currentTimeMillis(), System.nanoTime(), driverContext, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index a614a473ebe41..0535beab3e780 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -583,7 +583,7 @@ public record FunctionDescription( String[] returnType, String description, boolean variadic, - boolean isAggregation + FunctionType type ) { /** * The name of every argument. @@ -626,7 +626,7 @@ private static DataType getTargetType(String[] names) { public static FunctionDescription description(FunctionDefinition def) { Constructor constructor = constructorFor(def.clazz()); if (constructor == null) { - return new FunctionDescription(def.name(), List.of(), null, null, false, false); + return new FunctionDescription(def.name(), List.of(), null, null, false, FunctionType.SCALAR); } FunctionInfo functionInfo = functionInfo(def); String functionDescription = functionInfo == null ? 
"" : functionInfo.description().replace('\n', ' '); @@ -635,7 +635,6 @@ public static FunctionDescription description(FunctionDefinition def) { List args = new ArrayList<>(params.length); boolean variadic = false; - boolean isAggregation = functionInfo != null && functionInfo.isAggregation(); for (int i = 1; i < params.length; i++) { // skipping 1st argument, the source if (Configuration.class.isAssignableFrom(params[i].getType()) == false) { variadic |= List.class.isAssignableFrom(params[i].getType()); @@ -648,7 +647,7 @@ public static FunctionDescription description(FunctionDefinition def) { } } } - return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, isAggregation); + return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, functionInfo.type()); } public static ArgSignature param(Param param) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDoc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDoc.java deleted file mode 100644 index f6514c2a44ecd..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDoc.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.expression.function; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Retention(RetentionPolicy.RUNTIME) -@Target({ ElementType.TYPE }) -public @interface FunctionDoc { - enum FunctionType { - AGGREGATE, - DATE_TIME, - MATH, - STRING, - } - - FunctionType type(); - - String description(); - - String synopsis(); - - String[] arguments() default {}; - - String output(); - - String examples(); - -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index f3cdd324769e5..5c5c2f26279a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -66,9 +66,9 @@ String appendix() default ""; /** - * Is this an aggregation (true) or a scalar function (false). + * The position the function can appear in the language. */ - boolean isAggregation() default false; + FunctionType type() default FunctionType.SCALAR; /** * Examples of using this function that are rendered in the docs. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionType.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionType.java new file mode 100644 index 0000000000000..876ac48eacaf4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionType.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +/** + * The position the function can appear in the language. + */ +public enum FunctionType { + /** + * Functions that can appear anywhere. For example, {@code LENGTH} in + * {@code | STATS MAX(LENGTH(string))} and {@code | EVAL l = LENGTH(string)}. + */ + SCALAR, + /** + * Functions that can only appear in the "aggregate" position of a {@code STATS}. + * For example, {@code MAX} in {@code | STATS MAX(LENGTH(string))}. + */ + AGGREGATE, + /** + * Functions that can only appear in the "grouping" position of a {@code STATS}. + * For example, {@code CATEGORIZE} in {@code | STATS MAX(a) BY CATEGORIZE(message)}. + */ + GROUPING, +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 82c0f9d24899e..41feee0e63661 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; @@ -34,7 +35,7 @@ public class Avg extends AggregateFunction implements SurrogateExpression { @FunctionInfo( returnType = "double", description = "The average of a numeric field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "avg"), @Example( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index 5ce43c7b3872d..1d6a88ddcec3c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; @@ -42,7 +43,7 @@ public class Count extends AggregateFunction implements ToAggregator, SurrogateE @FunctionInfo( returnType = "long", description = "Returns the total number (count) of input values.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "count"), @Example(description = "To count the number of rows, use `COUNT()` or `COUNT(*)`", file = "docs", tag = 
"countAll"), @@ -126,8 +127,8 @@ public DataType dataType() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { - return CountAggregatorFunction.supplier(inputChannels); + public AggregatorFunctionSupplier supplier() { + return CountAggregatorFunction.supplier(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 3170ae8f132c2..f97ead54c7be9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; @@ -40,7 +41,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.BiFunction; +import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -56,9 +57,9 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument CountDistinct::new ); - private static final Map, Integer, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + private static final Map> SUPPLIERS = Map.ofEntries( // Booleans ignore the precision because there are only two possible values anyway - Map.entry(DataType.BOOLEAN, (inputChannels, precision) -> new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels)), + Map.entry(DataType.BOOLEAN, (precision) -> new CountDistinctBooleanAggregatorFunctionSupplier()), Map.entry(DataType.LONG, CountDistinctLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATETIME, CountDistinctLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATE_NANOS, CountDistinctLongAggregatorFunctionSupplier::new), @@ -101,7 +102,7 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument maximum supported value is 40000, thresholds above this number will have the same effect as a threshold of 40000. The default value is `3000`. """, - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats_count_distinct", tag = "count-distinct"), @Example( @@ -209,7 +210,7 @@ protected TypeResolution resolveType() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); int precision = this.precision == null ? 
DEFAULT_PRECISION @@ -218,7 +219,7 @@ public AggregatorFunctionSupplier supplier(List<Integer> inputChannels) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); } - return SUPPLIERS.get(type).apply(inputChannels, precision); + return SUPPLIERS.get(type).apply(precision); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java index a67b87c7617c4..bb9ed1780053f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java @@ -19,6 +19,7 @@ import org.elasticsearch.compute.aggregation.FromPartialGroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -110,38 +111,44 @@ public FromPartial withFilter(Expression filter) { } @Override - public AggregatorFunctionSupplier supplier(List<Integer> inputChannels) { - final ToAggregator toAggregator = (ToAggregator) function; - if (inputChannels.size() != 1) { - assert false : "from_partial aggregation requires exactly one input channel; got " + inputChannels; - throw new IllegalArgumentException("from_partial aggregation requires exactly one input channel; got " + inputChannels); - } - final int inputChannel = inputChannels.get(0); + public AggregatorFunctionSupplier supplier() { + final AggregatorFunctionSupplier supplier = ((ToAggregator) function).supplier(); return new AggregatorFunctionSupplier() { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return FromPartialAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return FromPartialGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) { assert false : "aggregatorFactory() is override"; throw new UnsupportedOperationException(); } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List<Integer> channels) { assert false : "groupingAggregatorFactory() is override"; throw new UnsupportedOperationException(); } @Override - public Aggregator.Factory aggregatorFactory(AggregatorMode mode) { - final AggregatorFunctionSupplier supplier; - try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { - var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); - supplier = toAggregator.supplier(intermediateChannels); + public Aggregator.Factory aggregatorFactory(AggregatorMode mode, List<Integer> channels) { + if (channels.size() != 1) { + assert false : "from_partial aggregation requires exactly one input channel; got " + channels; + throw new IllegalArgumentException("from_partial aggregation requires exactly
one input channel; got " + channels); } + final int inputChannel = channels.get(0); + var intermediateChannels = IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size()).boxed().toList(); return new Aggregator.Factory() { @Override public Aggregator apply(DriverContext driverContext) { // use groupingAggregator since we can receive intermediate output from a grouping aggregate - final var groupingAggregator = supplier.groupingAggregator(driverContext); + final var groupingAggregator = supplier.groupingAggregator(driverContext, intermediateChannels); return new Aggregator(new FromPartialAggregatorFunction(driverContext, groupingAggregator, inputChannel), mode); } @@ -153,16 +160,17 @@ public String describe() { } @Override - public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode) { - final AggregatorFunctionSupplier supplier; - try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { - var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); - supplier = toAggregator.supplier(intermediateChannels); + public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode, List channels) { + if (channels.size() != 1) { + assert false : "from_partial aggregation requires exactly one input channel; got " + channels; + throw new IllegalArgumentException("from_partial aggregation requires exactly one input channel; got " + channels); } + final int inputChannel = channels.get(0); + var intermediateChannels = IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size()).boxed().toList(); return new GroupingAggregator.Factory() { @Override public GroupingAggregator apply(DriverContext driverContext) { - final GroupingAggregatorFunction aggregator = supplier.groupingAggregator(driverContext); + final GroupingAggregatorFunction aggregator = supplier.groupingAggregator(driverContext, intermediateChannels); return new GroupingAggregator(new FromPartialGroupingAggregatorFunction(aggregator, inputChannel), mode); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index 6a8ce792ec8c1..be08627a4fd6d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; @@ -35,7 +36,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.Function; +import java.util.function.Supplier; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -43,7 +44,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = 
new NamedWriteableRegistry.Entry(Expression.class, "Max", Max::new); - private static final Map<DataType, Function<List<Integer>, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + private static final Map<DataType, Supplier<AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( Map.entry(DataType.BOOLEAN, MaxBooleanAggregatorFunctionSupplier::new), Map.entry(DataType.LONG, MaxLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATETIME, MaxLongAggregatorFunctionSupplier::new), @@ -60,7 +61,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExp @FunctionInfo( returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" }, description = "The maximum value of a field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "max"), @Example( @@ -141,13 +142,13 @@ public DataType dataType() { } @Override - public final AggregatorFunctionSupplier supplier(List<Integer> inputChannels) { + public final AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (SUPPLIERS.containsKey(type) == false) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); } - return SUPPLIERS.get(type).apply(inputChannels); + return SUPPLIERS.get(type).get(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index c47fa612c1c49..41f3ea0efea06 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMedian; @@ -44,7 +45,7 @@ public class Median extends AggregateFunction implements SurrogateExpression { `MEDIAN` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. This means you can get slightly different results using the same data.
====""", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats_percentile", tag = "median"), @Example( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index 42960cafdfd3a..5c7db4e31502a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMedianAbsoluteDeviation; @@ -54,7 +55,7 @@ public class MedianAbsoluteDeviation extends NumericAggregate implements Surroga `MEDIAN_ABSOLUTE_DEVIATION` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. This means you can get slightly different results using the same data. ====""", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "median_absolute_deviation", tag = "median-absolute-deviation"), @Example( @@ -99,18 +100,18 @@ public MedianAbsoluteDeviation withFilter(Expression filter) { } @Override - protected AggregatorFunctionSupplier longSupplier(List inputChannels) { - return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier longSupplier() { + return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(); } @Override - protected AggregatorFunctionSupplier intSupplier(List inputChannels) { - return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier intSupplier() { + return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(); } @Override - protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { - return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier doubleSupplier() { + return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index f2ae1292e47e8..1b1c4ea7b0296 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; @@ -35,7 +36,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.Function; +import java.util.function.Supplier; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -43,7 +44,7 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Min", Min::new); - private static final Map, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + private static final Map> SUPPLIERS = Map.ofEntries( Map.entry(DataType.BOOLEAN, MinBooleanAggregatorFunctionSupplier::new), Map.entry(DataType.LONG, MinLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATETIME, MinLongAggregatorFunctionSupplier::new), @@ -60,7 +61,7 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExp @FunctionInfo( returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" }, description = "The minimum value of a field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "min"), @Example( @@ -141,13 +142,13 @@ public DataType dataType() { } @Override - public final AggregatorFunctionSupplier supplier(List inputChannels) { + public final AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (SUPPLIERS.containsKey(type) == false) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); } - return SUPPLIERS.get(type).apply(inputChannels); + return SUPPLIERS.get(type).get(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java index 5c639c465c649..3289e1aded4ea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java @@ -92,26 +92,26 @@ public DataType dataType() { } @Override - public final AggregatorFunctionSupplier supplier(List inputChannels) { + public final AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (supportsDates() && type == DataType.DATETIME) { - return longSupplier(inputChannels); + return longSupplier(); } if (type == DataType.LONG) { - return longSupplier(inputChannels); + return longSupplier(); } if (type == DataType.INTEGER) { - return intSupplier(inputChannels); + return intSupplier(); } if (type == DataType.DOUBLE) { - return doubleSupplier(inputChannels); + return doubleSupplier(); } throw EsqlIllegalArgumentException.illegalDataType(type); } - protected abstract AggregatorFunctionSupplier longSupplier(List inputChannels); + protected abstract AggregatorFunctionSupplier longSupplier(); - protected abstract AggregatorFunctionSupplier intSupplier(List inputChannels); + protected abstract AggregatorFunctionSupplier intSupplier(); - protected abstract AggregatorFunctionSupplier doubleSupplier(List inputChannels); + protected 
abstract AggregatorFunctionSupplier doubleSupplier(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 8c943c991d501..fb61db603486b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvPercentile; @@ -65,7 +66,7 @@ public class Percentile extends NumericAggregate implements SurrogateExpression This means you can get slightly different results using the same data. ==== """, - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats_percentile", tag = "percentile"), @Example( @@ -156,18 +157,18 @@ protected TypeResolution resolveType() { } @Override - protected AggregatorFunctionSupplier longSupplier(List inputChannels) { - return new PercentileLongAggregatorFunctionSupplier(inputChannels, percentileValue()); + protected AggregatorFunctionSupplier longSupplier() { + return new PercentileLongAggregatorFunctionSupplier(percentileValue()); } @Override - protected AggregatorFunctionSupplier intSupplier(List inputChannels) { - return new PercentileIntAggregatorFunctionSupplier(inputChannels, percentileValue()); + protected AggregatorFunctionSupplier intSupplier() { + return new PercentileIntAggregatorFunctionSupplier(percentileValue()); } @Override - protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { - return new PercentileDoubleAggregatorFunctionSupplier(inputChannels, percentileValue()); + protected AggregatorFunctionSupplier doubleSupplier() { + return new PercentileDoubleAggregatorFunctionSupplier(percentileValue()); } private int percentileValue() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index 85ae65b6c5dc3..ae385da4c86e3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -49,7 +50,7 @@ public class Rate extends AggregateFunction implements OptionalArgument, ToAggre @FunctionInfo( returnType = { "double" }, description = "compute the rate of a counter field. 
Available in METRICS command only", - isAggregation = true + type = FunctionType.AGGREGATE ) public Rate( Source source, @@ -168,16 +169,13 @@ long unitInMillis() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { - if (inputChannels.size() != 2 && inputChannels.size() != 3) { - throw new IllegalArgumentException("rate requires two for raw input or three channels for partial input; got " + inputChannels); - } + public AggregatorFunctionSupplier supplier() { final long unitInMillis = unitInMillis(); final DataType type = field().dataType(); return switch (type) { - case COUNTER_LONG -> new RateLongAggregatorFunctionSupplier(inputChannels, unitInMillis); - case COUNTER_INTEGER -> new RateIntAggregatorFunctionSupplier(inputChannels, unitInMillis); - case COUNTER_DOUBLE -> new RateDoubleAggregatorFunctionSupplier(inputChannels, unitInMillis); + case COUNTER_LONG -> new RateLongAggregatorFunctionSupplier(unitInMillis); + case COUNTER_INTEGER -> new RateIntAggregatorFunctionSupplier(unitInMillis); + case COUNTER_DOUBLE -> new RateDoubleAggregatorFunctionSupplier(unitInMillis); default -> throw EsqlIllegalArgumentException.illegalDataType(type); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java index fad308e38cb26..70f264129a06c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -45,7 +46,7 @@ public class SpatialCentroid extends SpatialAggregateFunction implements ToAggre @FunctionInfo( returnType = { "geo_point", "cartesian_point" }, description = "Calculate the spatial centroid over a field with spatial point geometry type.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "spatial", tag = "st_centroid_agg-airports") ) public SpatialCentroid(Source source, @Param(name = "field", type = { "geo_point", "cartesian_point" }) Expression field) { @@ -98,16 +99,16 @@ public SpatialCentroid replaceChildren(List newChildren) { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); return switch (type) { case DataType.GEO_POINT -> switch (fieldExtractPreference) { - case DOC_VALUES -> new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + case DOC_VALUES -> new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(); + case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(); }; case DataType.CARTESIAN_POINT -> switch (fieldExtractPreference) { - case DOC_VALUES -> new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); - case 
NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + case DOC_VALUES -> new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(); + case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(); }; default -> throw EsqlIllegalArgumentException.illegalDataType(type); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java index 5d56fe1e1169a..419c1a8416c9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -48,7 +49,7 @@ public final class SpatialExtent extends SpatialAggregateFunction implements ToA @FunctionInfo( returnType = { "geo_shape", "cartesian_shape" }, description = "Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "spatial", tag = "st_extent_agg-airports") ) public SpatialExtent( @@ -102,25 +103,25 @@ public SpatialExtent replaceChildren(List newChildren) { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); return switch (type) { case DataType.GEO_POINT -> switch (fieldExtractPreference) { - case DOC_VALUES -> new SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + case DOC_VALUES -> new SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(); + case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(); }; case DataType.CARTESIAN_POINT -> switch (fieldExtractPreference) { - case DOC_VALUES -> new SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + case DOC_VALUES -> new SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(); + case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(); }; case DataType.GEO_SHAPE -> switch (fieldExtractPreference) { - case EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE -> new SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier(inputChannels); + case EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier(); + case NONE -> new SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier(); case DOC_VALUES -> throw new EsqlIllegalArgumentException("Illegal field extract preference: " + 
fieldExtractPreference); }; case DataType.CARTESIAN_SHAPE -> switch (fieldExtractPreference) { - case EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE -> new SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier(inputChannels); + case EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier(); + case NONE -> new SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier(); case DOC_VALUES -> throw new EsqlIllegalArgumentException("Illegal field extract preference: " + fieldExtractPreference); }; default -> throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java index 189b6a81912cb..19365c3166d13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -37,7 +38,7 @@ public class StdDev extends AggregateFunction implements ToAggregator { @FunctionInfo( returnType = "double", description = "The standard deviation of a numeric field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "stdev"), @Example( @@ -96,16 +97,16 @@ public StdDev withFilter(Expression filter) { } @Override - public final AggregatorFunctionSupplier supplier(List inputChannels) { + public final AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (type == DataType.LONG) { - return new StdDevLongAggregatorFunctionSupplier(inputChannels); + return new StdDevLongAggregatorFunctionSupplier(); } if (type == DataType.INTEGER) { - return new StdDevIntAggregatorFunctionSupplier(inputChannels); + return new StdDevIntAggregatorFunctionSupplier(); } if (type == DataType.DOUBLE) { - return new StdDevDoubleAggregatorFunctionSupplier(inputChannels); + return new StdDevDoubleAggregatorFunctionSupplier(); } throw EsqlIllegalArgumentException.illegalDataType(type); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 1c69edb9f0da9..f8fe28d85a929 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; @@ -48,7 +49,7 @@ public class Sum extends NumericAggregate implements SurrogateExpression { @FunctionInfo( returnType = { "long", "double" }, description = "The sum of a numeric expression.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "sum"), @Example( @@ -98,18 +99,18 @@ public DataType dataType() { } @Override - protected AggregatorFunctionSupplier longSupplier(List inputChannels) { - return new SumLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier longSupplier() { + return new SumLongAggregatorFunctionSupplier(); } @Override - protected AggregatorFunctionSupplier intSupplier(List inputChannels) { - return new SumIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier intSupplier() { + return new SumIntAggregatorFunctionSupplier(); } @Override - protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { - return new SumDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier doubleSupplier() { + return new SumDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java index a2856f60e4c51..04dadb5e3bb91 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.aggregation.FromPartialGroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.aggregation.ToPartialAggregatorFunction; import org.elasticsearch.compute.aggregation.ToPartialGroupingAggregatorFunction; import org.elasticsearch.compute.operator.DriverContext; @@ -127,37 +128,41 @@ protected NodeInfo info() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { - final ToAggregator toAggregator = (ToAggregator) function; + public AggregatorFunctionSupplier supplier() { + final AggregatorFunctionSupplier supplier = ((ToAggregator) function).supplier(); return new AggregatorFunctionSupplier() { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return ToPartialAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return ToPartialGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { assert false : "aggregatorFactory() is override"; throw new UnsupportedOperationException(); } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels) { assert false : "groupingAggregatorFactory() is override"; throw new UnsupportedOperationException(); } @Override - public 
Aggregator.Factory aggregatorFactory(AggregatorMode mode) { - final AggregatorFunctionSupplier supplier; - if (mode.isInputPartial()) { - try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { - var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); - supplier = toAggregator.supplier(intermediateChannels); - } - } else { - supplier = toAggregator.supplier(inputChannels); - } + public Aggregator.Factory aggregatorFactory(AggregatorMode mode, List channels) { + List intermediateChannels = mode.isInputPartial() + ? IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size()).boxed().toList() + : channels; return new Aggregator.Factory() { @Override public Aggregator apply(DriverContext driverContext) { - final AggregatorFunction aggregatorFunction = supplier.aggregator(driverContext); - return new Aggregator(new ToPartialAggregatorFunction(aggregatorFunction, inputChannels), mode); + final AggregatorFunction aggregatorFunction = supplier.aggregator(driverContext, intermediateChannels); + return new Aggregator(new ToPartialAggregatorFunction(aggregatorFunction, channels), mode); } @Override @@ -168,21 +173,18 @@ public String describe() { } @Override - public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode) { - final AggregatorFunctionSupplier supplier; - if (mode.isInputPartial()) { - try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { - var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); - supplier = toAggregator.supplier(intermediateChannels); - } - } else { - supplier = toAggregator.supplier(inputChannels); - } + public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode, List channels) { + List intermediateChannels = mode.isInputPartial() + ? 
IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size()).boxed().toList() + : channels; return new GroupingAggregator.Factory() { @Override public GroupingAggregator apply(DriverContext driverContext) { - final GroupingAggregatorFunction aggregatorFunction = supplier.groupingAggregator(driverContext); - return new GroupingAggregator(new ToPartialGroupingAggregatorFunction(aggregatorFunction, inputChannels), mode); + final GroupingAggregatorFunction aggregatorFunction = supplier.groupingAggregator( + driverContext, + intermediateChannels + ); + return new GroupingAggregator(new ToPartialGroupingAggregatorFunction(aggregatorFunction, channels), mode); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java index 9be8c94266ee8..f31153d228e74 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -54,7 +55,7 @@ public class Top extends AggregateFunction implements ToAggregator, SurrogateExp @FunctionInfo( returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword" }, description = "Collects the top values for a field. 
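Before this change, ToPartial and FromPartial had to construct (and close) a throwaway aggregator just to learn how many intermediate blocks it produced before they could number the intermediate channels. With the state descriptions exposed on the supplier itself, that count is known statically. A compact sketch of the new derivation, with an assumed minimal supplier interface standing in for the real one (which returns List<IntermediateStateDesc>):

    import java.util.List;
    import java.util.stream.IntStream;

    final class IntermediateChannels {
        // Assumed stand-in; the element type is simplified to String here.
        interface StateDescribingSupplier {
            List<String> nonGroupingIntermediateStateDesc();
        }

        // One input channel per intermediate state block: [0, 1, ..., n-1].
        static List<Integer> of(StateDescribingSupplier supplier) {
            return IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size())
                .boxed()
                .toList();
        }
    }

The removed try-with-resources over a dummy aggregator disappears along with the allocation it implied.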
Includes repeated values.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "stats_top", tag = "top") ) public Top( @@ -188,25 +189,25 @@ public Top replaceChildren(List newChildren) { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (type == DataType.LONG || type == DataType.DATETIME) { - return new TopLongAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopLongAggregatorFunctionSupplier(limitValue(), orderValue()); } if (type == DataType.INTEGER) { - return new TopIntAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopIntAggregatorFunctionSupplier(limitValue(), orderValue()); } if (type == DataType.DOUBLE) { - return new TopDoubleAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopDoubleAggregatorFunctionSupplier(limitValue(), orderValue()); } if (type == DataType.BOOLEAN) { - return new TopBooleanAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopBooleanAggregatorFunctionSupplier(limitValue(), orderValue()); } if (type == DataType.IP) { - return new TopIpAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopIpAggregatorFunctionSupplier(limitValue(), orderValue()); } if (DataType.isString(type)) { - return new TopBytesRefAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopBytesRefAggregatorFunctionSupplier(limitValue(), orderValue()); } throw EsqlIllegalArgumentException.illegalDataType(type); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 5260b3e8fa279..4dbe0e93b5017 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -24,13 +24,14 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.Function; +import java.util.function.Supplier; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -38,7 +39,7 @@ public class Values extends AggregateFunction implements ToAggregator { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Values", Values::new); - private static final Map, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + private static final Map> SUPPLIERS = Map.ofEntries( Map.entry(DataType.INTEGER, ValuesIntAggregatorFunctionSupplier::new), Map.entry(DataType.LONG, ValuesLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATETIME, ValuesLongAggregatorFunctionSupplier::new), @@ -66,7 +67,7 @@ public class Values extends AggregateFunction implements ToAggregator { collects too many values it 
will fail the query with a <>. ====""", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "string", tag = "values-grouped") ) public Values( @@ -124,12 +125,12 @@ protected TypeResolution resolveType() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (SUPPLIERS.containsKey(type) == false) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); } - return SUPPLIERS.get(type).apply(inputChannels); + return SUPPLIERS.get(type).get(); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java index bab65653ba576..c58bc997527b0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; @@ -48,7 +49,7 @@ public class WeightedAvg extends AggregateFunction implements SurrogateExpressio @FunctionInfo( returnType = "double", description = "The weighted average of a numeric expression.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "stats", tag = "weighted-avg") ) public WeightedAvg( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index 3223e96da7136..e5fb46209c5e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -251,7 +251,7 @@ public Match( valueHint = { "none", "all" }, description = "Number of beginning characters left unchanged for fuzzy matching." ) }, - description = "Match additional options as <>." + description = "(Optional) Match additional options as <>." 
+ " See <> for more information.", optional = true ) Expression options diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 90b35e469ddce..ac54e3d6484d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.TwoOptionalArguments; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; @@ -175,7 +176,8 @@ Sometimes you need to change the start value of each bucket by a given duration inserting a negative offset of `1 hour` to buckets of `1 year` looks like this:""", file = "bucket", tag = "bucketWithOffset" - ) } + ) }, + type = FunctionType.GROUPING ) public Bucket( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index 570ce7a96dd68..6cad20b4e28ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -60,7 +61,8 @@ public class Categorize extends GroupingFunction { tag = "docsCategorize", description = "This example categorizes server logs messages into categories and aggregates their counts. " ) }, - preview = true + preview = true, + type = FunctionType.GROUPING ) public Categorize( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java index 1edab8ce0e4a6..09c3eda3dd69e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java @@ -19,18 +19,29 @@ import static java.util.stream.Collectors.toMap; -public record EsIndex(String name, Map mapping, Map indexNameWithModes) implements Writeable { +public record EsIndex( + String name, + Map mapping, + Map indexNameWithModes, + /** Fields mapped only in some (but *not* all) indices. Since this is only used by the analyzer, it is not serialized. 
*/ + Set partiallyUnmappedFields +) implements Writeable { public EsIndex { assert name != null; assert mapping != null; + assert partiallyUnmappedFields != null; + } + + public EsIndex(String name, Map mapping, Map indexNameWithModes) { + this(name, mapping, indexNameWithModes, Set.of()); } /** * Intended for tests. Returns an index with an empty index mode map. */ public EsIndex(String name, Map mapping) { - this(name, mapping, Map.of()); + this(name, mapping, Map.of(), Set.of()); } public static EsIndex readFrom(StreamInput in) throws IOException { @@ -45,7 +56,8 @@ public static EsIndex readFrom(StreamInput in) throws IOException { assert indices != null; indexNameWithModes = indices.stream().collect(toMap(e -> e, e -> IndexMode.STANDARD)); } - return new EsIndex(name, mapping, indexNameWithModes); + // partially unmapped fields shouldn't pass the coordinator node anyway, since they are only used by the Analyzer. + return new EsIndex(name, mapping, indexNameWithModes, Set.of()); } @Override @@ -57,6 +69,11 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeGenericValue(indexNameWithModes.keySet()); } + // partially unmapped fields shouldn't pass the coordinator node anyway, since they are only used by the Analyzer. + } + + public boolean isPartiallyUnmappedField(String fieldName) { + return partiallyUnmappedFields.contains(fieldName); } public Set concreteIndices() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 36150083daec0..5fcf7d35b4760 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.optimizer.rules.logical.AddDefaultTopN; import org.elasticsearch.xpack.esql.optimizer.rules.logical.BooleanFunctionEqualsElimination; import org.elasticsearch.xpack.esql.optimizer.rules.logical.BooleanSimplification; import org.elasticsearch.xpack.esql.optimizer.rules.logical.CombineBinaryComparisons; @@ -28,11 +27,12 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.PropagateEvalFoldables; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PropagateInlineEvals; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PropagateNullable; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.PropgateUnmappedFields; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneColumns; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneEmptyPlans; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneFilters; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneLiteralsInOrderBy; -import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneOrderByBeforeStats; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneRedundantOrderBy; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneRedundantSortClauses; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownAndCombineFilters; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownAndCombineLimits; @@ -116,10 +116,9 @@ protected List> batches() { 
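The EsIndex change just above is a standard record-evolution pattern: add a component with a safe default via a delegating constructor, and keep it off the wire since only the coordinator-side Analyzer consumes it. A simplified, self-contained model of the same shape (the indexNameWithModes component is omitted and mapping values are reduced to Object; the real record maps field names to EsField):

    import java.util.Map;
    import java.util.Set;

    // Simplified model: partiallyUnmappedFields is analyzer-only, defaults to an
    // empty set in the pre-existing constructors, and is never serialized.
    record EsIndexModel(String name, Map<String, Object> mapping, Set<String> partiallyUnmappedFields) {
        EsIndexModel(String name, Map<String, Object> mapping) {
            this(name, mapping, Set.of());
        }

        boolean isPartiallyUnmappedField(String fieldName) {
            return partiallyUnmappedFields.contains(fieldName);
        }
    }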
protected static List> rules() { var skip = new Batch<>("Skip Compute", new SkipQueryOnLimitZero()); - var defaultTopN = new Batch<>("Add default TopN", new AddDefaultTopN()); var label = new Batch<>("Set as Optimized", Limiter.ONCE, new SetAsOptimized()); - return asList(substitutions(), operators(), skip, cleanup(), defaultTopN, label); + return asList(substitutions(), operators(), skip, cleanup(), label); } protected static Batch substitutions() { @@ -189,12 +188,12 @@ protected static Batch operators() { new PushDownRegexExtract(), new PushDownEnrich(), new PushDownAndCombineOrderBy(), - new PruneOrderByBeforeStats(), + new PruneRedundantOrderBy(), new PruneRedundantSortClauses() ); } protected static Batch cleanup() { - return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN(), new ReplaceRowAsLocalRelation()); + return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN(), new ReplaceRowAsLocalRelation(), new PropgateUnmappedFields()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java index 94248ce2ecd0a..c474c48d6d96b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java @@ -27,6 +27,9 @@ public Failures verify(LogicalPlan plan) { PlanConsistencyChecker.checkPlan(p, dependencyFailures); if (failures.hasFailures() == false) { + if (p instanceof PostOptimizationVerificationAware pova) { + pova.postOptimizationVerification(failures); + } p.forEachExpression(ex -> { if (ex instanceof PostOptimizationVerificationAware va) { va.postOptimizationVerification(failures); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java deleted file mode 100644 index ef091686a4b38..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.optimizer.rules.logical; - -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; -import org.elasticsearch.xpack.esql.plan.logical.EsRelation; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.OrderBy; -import org.elasticsearch.xpack.esql.plan.logical.TopN; -import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; - -/** - * This adds an explicit TopN node to a plan that only has an OrderBy right before Lucene. - * To date, the only known use case that "needs" this is a query of the form - * from test - * | sort emp_no - * | mv_expand first_name - * | rename first_name AS x - * | where x LIKE "*a*" - * | limit 15 - *
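Two related wiring changes land here: AddDefaultTopN leaves the rule list (superseded by PruneRedundantOrderBy further down), and LogicalVerifier now runs the post-optimization hook on plan nodes themselves, where previously only the expressions inside them were checked. A minimal sketch of that verifier extension, with assumed stand-in types:

    final class VerifierSketch {
        interface Failures {
            void add(String failure);
        }

        // Assumed stand-in for the real marker interface.
        interface PostOptimizationVerificationAware {
            void postOptimizationVerification(Failures failures);
        }

        static void check(Object planNode, Failures failures) {
            // New: the plan node itself may veto the optimized plan...
            if (planNode instanceof PostOptimizationVerificationAware pova) {
                pova.postOptimizationVerification(failures);
            }
            // ...in addition to the pre-existing walk over its expressions.
        }
    }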
- * or - *
- * from test - * | sort emp_no - * | mv_expand first_name - * | sort first_name - * | limit 15 - *
- * {@link PushDownAndCombineLimits} will copy the "limit 15" after "sort emp_no" if there is no filter on the expanded values - * OR if there is no sort between "limit" and "mv_expand". - * But, since this type of query has such a filter, the "sort emp_no" will have no limit when it reaches the current rule. - */ -public final class AddDefaultTopN extends OptimizerRules.ParameterizedOptimizerRule { - public AddDefaultTopN() { - super(OptimizerRules.TransformDirection.DOWN); - } - - @Override - protected LogicalPlan rule(LogicalPlan plan, LogicalOptimizerContext context) { - if (plan instanceof UnaryPlan unary && unary.child() instanceof OrderBy order && order.child() instanceof EsRelation relation) { - var limit = new Literal(plan.source(), context.configuration().resultTruncationMaxSize(), DataType.INTEGER); - return unary.replaceChild(new TopN(plan.source(), relation, order.order(), limit)); - } - return plan; - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropgateUnmappedFields.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropgateUnmappedFields.java new file mode 100644 index 0000000000000..570b5b7e82be6 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropgateUnmappedFields.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules.logical; + +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.type.PotentiallyUnmappedKeywordEsField; +import org.elasticsearch.xpack.esql.expression.NamedExpressions; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.rule.Rule; + +import java.util.ArrayList; + +/** + * Merges unmapped fields into the output of the ES relation. This marking is necessary for the block loaders to force loading from _source + * if the field is unmapped. + */ +public class PropgateUnmappedFields extends Rule { + @Override + public LogicalPlan apply(LogicalPlan logicalPlan) { + if (logicalPlan instanceof EsRelation) { + return logicalPlan; + } + var unmappedFields = new AttributeSet(); + logicalPlan.forEachExpressionDown(FieldAttribute.class, fa -> { + if (fa.field() instanceof PotentiallyUnmappedKeywordEsField) { + unmappedFields.add(fa); + } + }); + return unmappedFields.isEmpty() + ? logicalPlan + : logicalPlan.transformUp( + EsRelation.class, + er -> er.withAttributes(NamedExpressions.mergeOutputAttributes(new ArrayList<>(unmappedFields), er.output())) + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneOrderByBeforeStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneOrderByBeforeStats.java deleted file mode 100644 index 24fb8971487d5..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneOrderByBeforeStats.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.optimizer.rules.logical; - -import org.elasticsearch.xpack.esql.plan.logical.Aggregate; -import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import org.elasticsearch.xpack.esql.plan.logical.Eval; -import org.elasticsearch.xpack.esql.plan.logical.Filter; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.OrderBy; -import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; -import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; - -public final class PruneOrderByBeforeStats extends OptimizerRules.OptimizerRule { - - @Override - protected LogicalPlan rule(Aggregate agg) { - OrderBy order = findPullableOrderBy(agg.child()); - - LogicalPlan p = agg; - if (order != null) { - p = agg.transformDown(OrderBy.class, o -> o == order ? order.child() : o); - } - return p; - } - - private static OrderBy findPullableOrderBy(LogicalPlan plan) { - OrderBy pullable = null; - if (plan instanceof OrderBy o) { - pullable = o; - } else if (plan instanceof Eval - || plan instanceof Filter - || plan instanceof Project - || plan instanceof RegexExtract - || plan instanceof Enrich) { - pullable = findPullableOrderBy(((UnaryPlan) plan).child()); - } - return pullable; - } - -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneRedundantOrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneRedundantOrderBy.java new file mode 100644 index 0000000000000..2495f72864d1c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneRedundantOrderBy.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules.logical; + +import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; +import org.elasticsearch.xpack.esql.plan.logical.SortAgnostic; +import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; + +import java.util.ArrayDeque; +import java.util.Collections; +import java.util.Deque; +import java.util.IdentityHashMap; +import java.util.Set; + +/** + * SORT cannot be executed without a LIMIT, as ES|QL doesn't support unbounded sort (yet). + *
+ * The planner tries to push down LIMIT and transform all the unbounded sorts into a TopN. + * In some cases it's not possible though, eg. + *
+ * from test | sort x | lookup join lookup on x | sort y + *
+ * from test | sort x | mv_expand x | sort y + *
+ * "sort y" will become a TopN due to the addition of the default Limit, but "sort x" will remain unbounded, + * so the query could not be executed. + *
+ * In most cases though, following commands can make the previous SORTs redundant, + * because it will re-sort previously sorted results (eg. if there is another SORT) + * or because the order will be scrambled by another command (eg. a STATS) + *
+ * This rule finds and prunes redundant SORTs, attempting to make the plan executable. + */ +public class PruneRedundantOrderBy extends OptimizerRules.OptimizerRule { + + @Override + protected LogicalPlan rule(LogicalPlan plan) { + if (plan instanceof OrderBy || plan instanceof TopN || plan instanceof Aggregate) { + Set redundant = findRedundantSort(((UnaryPlan) plan).child()); + if (redundant.isEmpty()) { + return plan; + } + return plan.transformDown(p -> redundant.contains(p) ? ((UnaryPlan) p).child() : p); + } else { + return plan; + } + } + + /** + * breadth-first recursion to find redundant SORTs in the children tree. + * Returns an identity set (we need to compare and prune the exact instances) + */ + private Set findRedundantSort(LogicalPlan plan) { + Set result = Collections.newSetFromMap(new IdentityHashMap<>()); + + Deque toCheck = new ArrayDeque<>(); + toCheck.push(plan); + + while (true) { + if (toCheck.isEmpty()) { + return result; + } + LogicalPlan p = toCheck.pop(); + if (p instanceof OrderBy ob) { + result.add(ob); + toCheck.push(ob.child()); + } else if (p instanceof SortAgnostic) { + for (LogicalPlan child : p.children()) { + toCheck.push(child); + } + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java index e41e500aad110..d36fae54f5162 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.PotentiallyUnmappedKeywordEsField; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; @@ -70,7 +71,10 @@ else if (plan instanceof Project project) { for (NamedExpression projection : projections) { // Do not use the attribute name, this can deviate from the field name for union types. - if (projection instanceof FieldAttribute f && stats.exists(f.fieldName()) == false && joinAttributes.contains(f) == false) { + if (projection instanceof FieldAttribute f + && stats.exists(f.fieldName()) == false + && joinAttributes.contains(f) == false + && f.field() instanceof PotentiallyUnmappedKeywordEsField == false) { // TODO: Should do a searchStats lookup for join attributes instead of just ignoring them here // See TransportSearchShardsAction DataType dt = f.dataType(); @@ -105,7 +109,9 @@ else if (plan instanceof Project project) { // Do not use the attribute name, this can deviate from the field name for union types. // Also skip fields from lookup indices because we do not have stats for these. // TODO: We do have stats for lookup indices in case they are being used in the FROM clause; this can be refined. - f -> stats.exists(f.fieldName()) || lookupFields.contains(f) ? f : Literal.of(f, null) + f -> f.field() instanceof PotentiallyUnmappedKeywordEsField || (stats.exists(f.fieldName()) || lookupFields.contains(f)) + ? 
f + : Literal.of(f, null) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 1d050bd91e66c..c25e325c5fb7a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -16,7 +16,7 @@ null 'sort' 'stats' 'where' -null +'lookup' null null null @@ -120,6 +120,7 @@ null null null null +'join' 'USING' null null @@ -130,6 +131,9 @@ null null null null +null +null +null token symbolic names: null @@ -149,14 +153,14 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP DEV_INLINESTATS +DEV_INSIST DEV_LOOKUP DEV_METRICS -DEV_JOIN DEV_JOIN_FULL DEV_JOIN_LEFT DEV_JOIN_RIGHT -DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -253,6 +257,7 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +JOIN USING JOIN_LINE_COMMENT JOIN_MULTILINE_COMMENT @@ -263,6 +268,9 @@ METRICS_WS CLOSING_METRICS_LINE_COMMENT CLOSING_METRICS_MULTILINE_COMMENT CLOSING_METRICS_WS +INSIST_WS +INSIST_LINE_COMMENT +INSIST_MULTILINE_COMMENT rule names: DISSECT @@ -281,14 +289,14 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP DEV_INLINESTATS +DEV_INSIST DEV_LOOKUP DEV_METRICS -DEV_JOIN DEV_JOIN_FULL DEV_JOIN_LEFT DEV_JOIN_RIGHT -DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -456,7 +464,7 @@ LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS JOIN_PIPE -JOIN_JOIN +JOIN JOIN_AS JOIN_ON USING @@ -483,6 +491,11 @@ CLOSING_METRICS_QUOTED_IDENTIFIER CLOSING_METRICS_UNQUOTED_IDENTIFIER CLOSING_METRICS_BY CLOSING_METRICS_PIPE +INSIST_PIPE +INSIST_IDENTIFIER +INSIST_WS +INSIST_LINE_COMMENT +INSIST_MULTILINE_COMMENT channel names: DEFAULT_TOKEN_CHANNEL @@ -505,6 +518,7 @@ LOOKUP_FIELD_MODE JOIN_MODE METRICS_MODE CLOSING_METRICS_MODE +INSIST_MODE atn: -[4, 0, 130, 1627, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 
104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 664, 8, 24, 11, 24, 12, 24, 665, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 674, 8, 25, 10, 25, 12, 25, 677, 9, 25, 1, 25, 3, 25, 680, 8, 25, 1, 25, 3, 25, 683, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 692, 8, 26, 10, 26, 12, 26, 695, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 703, 8, 27, 
11, 27, 12, 27, 704, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 724, 8, 33, 1, 33, 4, 33, 727, 8, 33, 11, 33, 12, 33, 728, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 738, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 745, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 750, 8, 39, 10, 39, 12, 39, 753, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 761, 8, 39, 10, 39, 12, 39, 764, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 771, 8, 39, 1, 39, 3, 39, 774, 8, 39, 3, 39, 776, 8, 39, 1, 40, 4, 40, 779, 8, 40, 11, 40, 12, 40, 780, 1, 41, 4, 41, 784, 8, 41, 11, 41, 12, 41, 785, 1, 41, 1, 41, 5, 41, 790, 8, 41, 10, 41, 12, 41, 793, 9, 41, 1, 41, 1, 41, 4, 41, 797, 8, 41, 11, 41, 12, 41, 798, 1, 41, 4, 41, 802, 8, 41, 11, 41, 12, 41, 803, 1, 41, 1, 41, 5, 41, 808, 8, 41, 10, 41, 12, 41, 811, 9, 41, 3, 41, 813, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 819, 8, 41, 11, 41, 12, 41, 820, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 957, 8, 81, 1, 81, 5, 81, 960, 8, 81, 10, 81, 12, 81, 963, 9, 81, 1, 81, 1, 81, 4, 81, 967, 8, 81, 11, 81, 12, 81, 968, 3, 81, 971, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 985, 8, 84, 10, 84, 12, 84, 988, 9, 84, 1, 84, 1, 84, 3, 84, 992, 8, 84, 1, 84, 4, 84, 995, 8, 84, 11, 84, 12, 84, 996, 3, 84, 999, 8, 84, 1, 85, 1, 85, 4, 85, 1003, 8, 85, 11, 85, 12, 85, 1004, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1082, 8, 102, 1, 103, 4, 103, 1085, 8, 103, 11, 103, 12, 103, 1086, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1136, 8, 114, 1, 115, 1, 115, 3, 115, 1140, 8, 115, 1, 115, 5, 115, 1143, 8, 115, 10, 115, 12, 115, 1146, 9, 115, 1, 115, 1, 115, 3, 115, 1150, 8, 115, 1, 115, 4, 115, 1153, 8, 115, 11, 115, 
12, 115, 1154, 3, 115, 1157, 8, 115, 1, 116, 1, 116, 4, 116, 1161, 8, 116, 11, 116, 12, 116, 1162, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1248, 8, 136, 11, 136, 12, 136, 1249, 1, 136, 1, 136, 3, 136, 1254, 8, 136, 1, 136, 4, 136, 1257, 8, 136, 11, 136, 12, 136, 1258, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1404, 8, 169, 11, 169, 12, 169, 1405, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 
1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 217, 2, 693, 762, 0, 218, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 70, 176, 0, 178, 71, 180, 72, 182, 73, 184, 74, 186, 0, 188, 75, 190, 76, 192, 77, 194, 78, 196, 0, 198, 0, 200, 79, 202, 80, 204, 81, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 82, 220, 0, 222, 83, 224, 0, 226, 0, 228, 84, 230, 85, 232, 86, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 0, 248, 87, 250, 88, 252, 89, 254, 90, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 95, 284, 96, 286, 0, 288, 97, 290, 0, 292, 98, 294, 99, 296, 100, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 0, 316, 101, 318, 102, 320, 103, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 0, 334, 104, 336, 105, 338, 106, 340, 0, 342, 107, 344, 108, 346, 109, 348, 110, 350, 0, 352, 0, 354, 111, 356, 112, 358, 113, 360, 114, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 115, 378, 116, 380, 117, 382, 0, 384, 0, 386, 0, 388, 0, 390, 118, 392, 119, 394, 120, 396, 0, 398, 0, 400, 0, 402, 0, 404, 121, 406, 0, 408, 0, 410, 0, 412, 0, 414, 0, 416, 122, 418, 123, 420, 124, 422, 0, 424, 0, 426, 0, 428, 125, 430, 126, 432, 127, 434, 0, 436, 0, 438, 128, 440, 129, 442, 130, 444, 0, 446, 0, 448, 0, 450, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1654, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 
0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 1, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 2, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 3, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 242, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 4, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 5, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 8, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 9, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 10, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 11, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 12, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 13, 416, 1, 0, 0, 0, 13, 418, 1, 0, 0, 0, 13, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 
1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 14, 428, 1, 0, 0, 0, 14, 430, 1, 0, 0, 0, 14, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 15, 446, 1, 0, 0, 0, 15, 448, 1, 0, 0, 0, 15, 450, 1, 0, 0, 0, 16, 452, 1, 0, 0, 0, 18, 462, 1, 0, 0, 0, 20, 469, 1, 0, 0, 0, 22, 478, 1, 0, 0, 0, 24, 485, 1, 0, 0, 0, 26, 495, 1, 0, 0, 0, 28, 502, 1, 0, 0, 0, 30, 509, 1, 0, 0, 0, 32, 516, 1, 0, 0, 0, 34, 524, 1, 0, 0, 0, 36, 536, 1, 0, 0, 0, 38, 545, 1, 0, 0, 0, 40, 551, 1, 0, 0, 0, 42, 558, 1, 0, 0, 0, 44, 565, 1, 0, 0, 0, 46, 573, 1, 0, 0, 0, 48, 581, 1, 0, 0, 0, 50, 596, 1, 0, 0, 0, 52, 608, 1, 0, 0, 0, 54, 619, 1, 0, 0, 0, 56, 627, 1, 0, 0, 0, 58, 635, 1, 0, 0, 0, 60, 643, 1, 0, 0, 0, 62, 652, 1, 0, 0, 0, 64, 663, 1, 0, 0, 0, 66, 669, 1, 0, 0, 0, 68, 686, 1, 0, 0, 0, 70, 702, 1, 0, 0, 0, 72, 708, 1, 0, 0, 0, 74, 712, 1, 0, 0, 0, 76, 714, 1, 0, 0, 0, 78, 716, 1, 0, 0, 0, 80, 719, 1, 0, 0, 0, 82, 721, 1, 0, 0, 0, 84, 730, 1, 0, 0, 0, 86, 732, 1, 0, 0, 0, 88, 737, 1, 0, 0, 0, 90, 739, 1, 0, 0, 0, 92, 744, 1, 0, 0, 0, 94, 775, 1, 0, 0, 0, 96, 778, 1, 0, 0, 0, 98, 824, 1, 0, 0, 0, 100, 826, 1, 0, 0, 0, 102, 829, 1, 0, 0, 0, 104, 833, 1, 0, 0, 0, 106, 837, 1, 0, 0, 0, 108, 839, 1, 0, 0, 0, 110, 842, 1, 0, 0, 0, 112, 844, 1, 0, 0, 0, 114, 846, 1, 0, 0, 0, 116, 851, 1, 0, 0, 0, 118, 853, 1, 0, 0, 0, 120, 859, 1, 0, 0, 0, 122, 865, 1, 0, 0, 0, 124, 868, 1, 0, 0, 0, 126, 871, 1, 0, 0, 0, 128, 876, 1, 0, 0, 0, 130, 881, 1, 0, 0, 0, 132, 883, 1, 0, 0, 0, 134, 887, 1, 0, 0, 0, 136, 892, 1, 0, 0, 0, 138, 898, 1, 0, 0, 0, 140, 901, 1, 0, 0, 0, 142, 903, 1, 0, 0, 0, 144, 909, 1, 0, 0, 0, 146, 911, 1, 0, 0, 0, 148, 916, 1, 0, 0, 0, 150, 919, 1, 0, 0, 0, 152, 922, 1, 0, 0, 0, 154, 925, 1, 0, 0, 0, 156, 927, 1, 0, 0, 0, 158, 930, 1, 0, 0, 0, 160, 932, 1, 0, 0, 0, 162, 935, 1, 0, 0, 0, 164, 937, 1, 0, 0, 0, 166, 939, 1, 0, 0, 0, 168, 941, 1, 0, 0, 0, 170, 943, 1, 0, 0, 0, 172, 945, 1, 0, 0, 0, 174, 947, 1, 0, 0, 0, 176, 949, 1, 0, 0, 0, 178, 970, 1, 0, 0, 0, 180, 972, 1, 0, 0, 0, 182, 977, 1, 0, 0, 0, 184, 998, 1, 0, 0, 0, 186, 1000, 1, 0, 0, 0, 188, 1008, 1, 0, 0, 0, 190, 1010, 1, 0, 0, 0, 192, 1014, 1, 0, 0, 0, 194, 1018, 1, 0, 0, 0, 196, 1022, 1, 0, 0, 0, 198, 1027, 1, 0, 0, 0, 200, 1032, 1, 0, 0, 0, 202, 1036, 1, 0, 0, 0, 204, 1040, 1, 0, 0, 0, 206, 1044, 1, 0, 0, 0, 208, 1049, 1, 0, 0, 0, 210, 1053, 1, 0, 0, 0, 212, 1057, 1, 0, 0, 0, 214, 1061, 1, 0, 0, 0, 216, 1065, 1, 0, 0, 0, 218, 1069, 1, 0, 0, 0, 220, 1081, 1, 0, 0, 0, 222, 1084, 1, 0, 0, 0, 224, 1088, 1, 0, 0, 0, 226, 1092, 1, 0, 0, 0, 228, 1096, 1, 0, 0, 0, 230, 1100, 1, 0, 0, 0, 232, 1104, 1, 0, 0, 0, 234, 1108, 1, 0, 0, 0, 236, 1113, 1, 0, 0, 0, 238, 1117, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1126, 1, 0, 0, 0, 244, 1135, 1, 0, 0, 0, 246, 1156, 1, 0, 0, 0, 248, 1160, 1, 0, 0, 0, 250, 1164, 1, 0, 0, 0, 252, 1168, 1, 0, 0, 0, 254, 1172, 1, 0, 0, 0, 256, 1176, 1, 0, 0, 0, 258, 1181, 1, 0, 0, 0, 260, 1185, 1, 0, 0, 0, 262, 1189, 1, 0, 0, 0, 264, 1193, 1, 0, 0, 0, 266, 1198, 1, 0, 0, 0, 268, 1203, 1, 0, 0, 0, 270, 1206, 1, 0, 0, 0, 272, 1210, 1, 0, 0, 0, 274, 1214, 1, 0, 0, 0, 276, 1218, 1, 0, 0, 0, 278, 1222, 1, 0, 0, 0, 280, 1227, 1, 0, 0, 0, 282, 1232, 1, 0, 0, 0, 284, 1237, 1, 0, 0, 0, 286, 1244, 1, 0, 0, 0, 288, 1253, 1, 0, 0, 0, 290, 1260, 1, 0, 0, 0, 292, 1264, 1, 0, 0, 0, 294, 1268, 1, 0, 0, 0, 296, 1272, 1, 0, 0, 0, 298, 1276, 1, 0, 0, 0, 300, 1282, 1, 0, 0, 0, 302, 1286, 1, 0, 0, 0, 304, 1290, 1, 0, 0, 0, 306, 1294, 1, 0, 0, 0, 308, 1298, 1, 0, 0, 0, 310, 1302, 1, 
0, 0, 0, 312, 1306, 1, 0, 0, 0, 314, 1311, 1, 0, 0, 0, 316, 1316, 1, 0, 0, 0, 318, 1320, 1, 0, 0, 0, 320, 1324, 1, 0, 0, 0, 322, 1328, 1, 0, 0, 0, 324, 1333, 1, 0, 0, 0, 326, 1337, 1, 0, 0, 0, 328, 1342, 1, 0, 0, 0, 330, 1347, 1, 0, 0, 0, 332, 1351, 1, 0, 0, 0, 334, 1355, 1, 0, 0, 0, 336, 1359, 1, 0, 0, 0, 338, 1363, 1, 0, 0, 0, 340, 1367, 1, 0, 0, 0, 342, 1372, 1, 0, 0, 0, 344, 1377, 1, 0, 0, 0, 346, 1381, 1, 0, 0, 0, 348, 1385, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1394, 1, 0, 0, 0, 354, 1403, 1, 0, 0, 0, 356, 1407, 1, 0, 0, 0, 358, 1411, 1, 0, 0, 0, 360, 1415, 1, 0, 0, 0, 362, 1419, 1, 0, 0, 0, 364, 1424, 1, 0, 0, 0, 366, 1428, 1, 0, 0, 0, 368, 1432, 1, 0, 0, 0, 370, 1436, 1, 0, 0, 0, 372, 1441, 1, 0, 0, 0, 374, 1445, 1, 0, 0, 0, 376, 1449, 1, 0, 0, 0, 378, 1453, 1, 0, 0, 0, 380, 1457, 1, 0, 0, 0, 382, 1461, 1, 0, 0, 0, 384, 1467, 1, 0, 0, 0, 386, 1471, 1, 0, 0, 0, 388, 1475, 1, 0, 0, 0, 390, 1479, 1, 0, 0, 0, 392, 1483, 1, 0, 0, 0, 394, 1487, 1, 0, 0, 0, 396, 1491, 1, 0, 0, 0, 398, 1496, 1, 0, 0, 0, 400, 1500, 1, 0, 0, 0, 402, 1504, 1, 0, 0, 0, 404, 1510, 1, 0, 0, 0, 406, 1519, 1, 0, 0, 0, 408, 1523, 1, 0, 0, 0, 410, 1527, 1, 0, 0, 0, 412, 1531, 1, 0, 0, 0, 414, 1535, 1, 0, 0, 0, 416, 1539, 1, 0, 0, 0, 418, 1543, 1, 0, 0, 0, 420, 1547, 1, 0, 0, 0, 422, 1551, 1, 0, 0, 0, 424, 1556, 1, 0, 0, 0, 426, 1562, 1, 0, 0, 0, 428, 1568, 1, 0, 0, 0, 430, 1572, 1, 0, 0, 0, 432, 1576, 1, 0, 0, 0, 434, 1580, 1, 0, 0, 0, 436, 1586, 1, 0, 0, 0, 438, 1592, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 1600, 1, 0, 0, 0, 444, 1604, 1, 0, 0, 0, 446, 1610, 1, 0, 0, 0, 448, 1616, 1, 0, 0, 0, 450, 1622, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 1, 0, 0, 454, 455, 7, 2, 0, 0, 455, 456, 7, 2, 0, 0, 456, 457, 7, 3, 0, 0, 457, 458, 7, 4, 0, 0, 458, 459, 7, 5, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 6, 0, 0, 0, 461, 17, 1, 0, 0, 0, 462, 463, 7, 0, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 7, 0, 0, 465, 466, 7, 8, 0, 0, 466, 467, 1, 0, 0, 0, 467, 468, 6, 1, 1, 0, 468, 19, 1, 0, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 9, 0, 0, 471, 472, 7, 6, 0, 0, 472, 473, 7, 1, 0, 0, 473, 474, 7, 4, 0, 0, 474, 475, 7, 10, 0, 0, 475, 476, 1, 0, 0, 0, 476, 477, 6, 2, 2, 0, 477, 21, 1, 0, 0, 0, 478, 479, 7, 3, 0, 0, 479, 480, 7, 11, 0, 0, 480, 481, 7, 12, 0, 0, 481, 482, 7, 13, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 3, 0, 0, 484, 23, 1, 0, 0, 0, 485, 486, 7, 3, 0, 0, 486, 487, 7, 14, 0, 0, 487, 488, 7, 8, 0, 0, 488, 489, 7, 13, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 1, 0, 0, 491, 492, 7, 9, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 4, 3, 0, 494, 25, 1, 0, 0, 0, 495, 496, 7, 15, 0, 0, 496, 497, 7, 6, 0, 0, 497, 498, 7, 7, 0, 0, 498, 499, 7, 16, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 5, 4, 0, 501, 27, 1, 0, 0, 0, 502, 503, 7, 17, 0, 0, 503, 504, 7, 6, 0, 0, 504, 505, 7, 7, 0, 0, 505, 506, 7, 18, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 6, 0, 0, 508, 29, 1, 0, 0, 0, 509, 510, 7, 18, 0, 0, 510, 511, 7, 3, 0, 0, 511, 512, 7, 3, 0, 0, 512, 513, 7, 8, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 7, 1, 0, 515, 31, 1, 0, 0, 0, 516, 517, 7, 13, 0, 0, 517, 518, 7, 1, 0, 0, 518, 519, 7, 16, 0, 0, 519, 520, 7, 1, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 8, 0, 0, 523, 33, 1, 0, 0, 0, 524, 525, 7, 16, 0, 0, 525, 526, 7, 11, 0, 0, 526, 527, 5, 95, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 14, 0, 0, 529, 530, 7, 8, 0, 0, 530, 531, 7, 12, 0, 0, 531, 532, 7, 9, 0, 0, 532, 533, 7, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 9, 5, 0, 535, 35, 1, 0, 0, 0, 536, 537, 7, 6, 0, 0, 537, 538, 7, 3, 0, 0, 538, 539, 7, 9, 0, 0, 539, 540, 7, 
12, 0, 0, 540, 541, 7, 16, 0, 0, 541, 542, 7, 3, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 10, 6, 0, 544, 37, 1, 0, 0, 0, 545, 546, 7, 6, 0, 0, 546, 547, 7, 7, 0, 0, 547, 548, 7, 19, 0, 0, 548, 549, 1, 0, 0, 0, 549, 550, 6, 11, 0, 0, 550, 39, 1, 0, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 7, 10, 0, 0, 553, 554, 7, 7, 0, 0, 554, 555, 7, 19, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 6, 12, 7, 0, 557, 41, 1, 0, 0, 0, 558, 559, 7, 2, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 6, 0, 0, 561, 562, 7, 5, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 13, 0, 0, 564, 43, 1, 0, 0, 0, 565, 566, 7, 2, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 12, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 2, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 14, 0, 0, 572, 45, 1, 0, 0, 0, 573, 574, 7, 19, 0, 0, 574, 575, 7, 10, 0, 0, 575, 576, 7, 3, 0, 0, 576, 577, 7, 6, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 15, 0, 0, 580, 47, 1, 0, 0, 0, 581, 582, 4, 16, 0, 0, 582, 583, 7, 1, 0, 0, 583, 584, 7, 9, 0, 0, 584, 585, 7, 13, 0, 0, 585, 586, 7, 1, 0, 0, 586, 587, 7, 9, 0, 0, 587, 588, 7, 3, 0, 0, 588, 589, 7, 2, 0, 0, 589, 590, 7, 5, 0, 0, 590, 591, 7, 12, 0, 0, 591, 592, 7, 5, 0, 0, 592, 593, 7, 2, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 16, 0, 0, 595, 49, 1, 0, 0, 0, 596, 597, 4, 17, 1, 0, 597, 598, 7, 13, 0, 0, 598, 599, 7, 7, 0, 0, 599, 600, 7, 7, 0, 0, 600, 601, 7, 18, 0, 0, 601, 602, 7, 20, 0, 0, 602, 603, 7, 8, 0, 0, 603, 604, 5, 95, 0, 0, 604, 605, 5, 128020, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 6, 17, 8, 0, 607, 51, 1, 0, 0, 0, 608, 609, 4, 18, 2, 0, 609, 610, 7, 16, 0, 0, 610, 611, 7, 3, 0, 0, 611, 612, 7, 5, 0, 0, 612, 613, 7, 6, 0, 0, 613, 614, 7, 1, 0, 0, 614, 615, 7, 4, 0, 0, 615, 616, 7, 2, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 18, 9, 0, 618, 53, 1, 0, 0, 0, 619, 620, 4, 19, 3, 0, 620, 621, 7, 21, 0, 0, 621, 622, 7, 7, 0, 0, 622, 623, 7, 1, 0, 0, 623, 624, 7, 9, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 19, 10, 0, 626, 55, 1, 0, 0, 0, 627, 628, 4, 20, 4, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 20, 0, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 13, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 20, 10, 0, 634, 57, 1, 0, 0, 0, 635, 636, 4, 21, 5, 0, 636, 637, 7, 13, 0, 0, 637, 638, 7, 3, 0, 0, 638, 639, 7, 15, 0, 0, 639, 640, 7, 5, 0, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 21, 10, 0, 642, 59, 1, 0, 0, 0, 643, 644, 4, 22, 6, 0, 644, 645, 7, 6, 0, 0, 645, 646, 7, 1, 0, 0, 646, 647, 7, 17, 0, 0, 647, 648, 7, 10, 0, 0, 648, 649, 7, 5, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 22, 10, 0, 651, 61, 1, 0, 0, 0, 652, 653, 4, 23, 7, 0, 653, 654, 7, 13, 0, 0, 654, 655, 7, 7, 0, 0, 655, 656, 7, 7, 0, 0, 656, 657, 7, 18, 0, 0, 657, 658, 7, 20, 0, 0, 658, 659, 7, 8, 0, 0, 659, 660, 1, 0, 0, 0, 660, 661, 6, 23, 10, 0, 661, 63, 1, 0, 0, 0, 662, 664, 8, 22, 0, 0, 663, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 668, 6, 24, 0, 0, 668, 65, 1, 0, 0, 0, 669, 670, 5, 47, 0, 0, 670, 671, 5, 47, 0, 0, 671, 675, 1, 0, 0, 0, 672, 674, 8, 23, 0, 0, 673, 672, 1, 0, 0, 0, 674, 677, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 679, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 678, 680, 5, 13, 0, 0, 679, 678, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 10, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 685, 6, 25, 11, 0, 685, 67, 1, 0, 0, 0, 686, 687, 5, 47, 0, 0, 687, 688, 5, 42, 0, 0, 688, 693, 1, 0, 0, 0, 689, 692, 3, 68, 26, 0, 690, 692, 9, 0, 0, 0, 691, 689, 1, 0, 0, 0, 691, 690, 1, 0, 0, 0, 692, 695, 1, 0, 0, 0, 693, 694, 1, 
0, 0, 0, 693, 691, 1, 0, 0, 0, 694, 696, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 696, 697, 5, 42, 0, 0, 697, 698, 5, 47, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 6, 26, 11, 0, 700, 69, 1, 0, 0, 0, 701, 703, 7, 24, 0, 0, 702, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 6, 27, 11, 0, 707, 71, 1, 0, 0, 0, 708, 709, 5, 124, 0, 0, 709, 710, 1, 0, 0, 0, 710, 711, 6, 28, 12, 0, 711, 73, 1, 0, 0, 0, 712, 713, 7, 25, 0, 0, 713, 75, 1, 0, 0, 0, 714, 715, 7, 26, 0, 0, 715, 77, 1, 0, 0, 0, 716, 717, 5, 92, 0, 0, 717, 718, 7, 27, 0, 0, 718, 79, 1, 0, 0, 0, 719, 720, 8, 28, 0, 0, 720, 81, 1, 0, 0, 0, 721, 723, 7, 3, 0, 0, 722, 724, 7, 29, 0, 0, 723, 722, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 726, 1, 0, 0, 0, 725, 727, 3, 74, 29, 0, 726, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 83, 1, 0, 0, 0, 730, 731, 5, 64, 0, 0, 731, 85, 1, 0, 0, 0, 732, 733, 5, 96, 0, 0, 733, 87, 1, 0, 0, 0, 734, 738, 8, 30, 0, 0, 735, 736, 5, 96, 0, 0, 736, 738, 5, 96, 0, 0, 737, 734, 1, 0, 0, 0, 737, 735, 1, 0, 0, 0, 738, 89, 1, 0, 0, 0, 739, 740, 5, 95, 0, 0, 740, 91, 1, 0, 0, 0, 741, 745, 3, 76, 30, 0, 742, 745, 3, 74, 29, 0, 743, 745, 3, 90, 37, 0, 744, 741, 1, 0, 0, 0, 744, 742, 1, 0, 0, 0, 744, 743, 1, 0, 0, 0, 745, 93, 1, 0, 0, 0, 746, 751, 5, 34, 0, 0, 747, 750, 3, 78, 31, 0, 748, 750, 3, 80, 32, 0, 749, 747, 1, 0, 0, 0, 749, 748, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 776, 5, 34, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 762, 1, 0, 0, 0, 759, 761, 8, 23, 0, 0, 760, 759, 1, 0, 0, 0, 761, 764, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 763, 765, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 765, 766, 5, 34, 0, 0, 766, 767, 5, 34, 0, 0, 767, 768, 5, 34, 0, 0, 768, 770, 1, 0, 0, 0, 769, 771, 5, 34, 0, 0, 770, 769, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 773, 1, 0, 0, 0, 772, 774, 5, 34, 0, 0, 773, 772, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 776, 1, 0, 0, 0, 775, 746, 1, 0, 0, 0, 775, 755, 1, 0, 0, 0, 776, 95, 1, 0, 0, 0, 777, 779, 3, 74, 29, 0, 778, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 97, 1, 0, 0, 0, 782, 784, 3, 74, 29, 0, 783, 782, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 791, 3, 116, 50, 0, 788, 790, 3, 74, 29, 0, 789, 788, 1, 0, 0, 0, 790, 793, 1, 0, 0, 0, 791, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 825, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 794, 796, 3, 116, 50, 0, 795, 797, 3, 74, 29, 0, 796, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 825, 1, 0, 0, 0, 800, 802, 3, 74, 29, 0, 801, 800, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 812, 1, 0, 0, 0, 805, 809, 3, 116, 50, 0, 806, 808, 3, 74, 29, 0, 807, 806, 1, 0, 0, 0, 808, 811, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 813, 1, 0, 0, 0, 811, 809, 1, 0, 0, 0, 812, 805, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 3, 82, 33, 0, 815, 825, 1, 0, 0, 0, 816, 818, 3, 116, 50, 0, 817, 819, 3, 74, 29, 0, 818, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 818, 1, 0, 0, 0, 820, 821, 1, 0, 0, 0, 821, 822, 1, 0, 0, 0, 822, 823, 3, 82, 33, 0, 823, 825, 1, 0, 0, 0, 824, 783, 1, 0, 0, 0, 824, 794, 1, 0, 0, 0, 824, 801, 1, 0, 0, 0, 824, 816, 1, 0, 0, 0, 825, 99, 1, 0, 0, 0, 826, 827, 7, 31, 0, 0, 827, 828, 7, 32, 0, 0, 828, 
101, 1, 0, 0, 0, 829, 830, 7, 12, 0, 0, 830, 831, 7, 9, 0, 0, 831, 832, 7, 0, 0, 0, 832, 103, 1, 0, 0, 0, 833, 834, 7, 12, 0, 0, 834, 835, 7, 2, 0, 0, 835, 836, 7, 4, 0, 0, 836, 105, 1, 0, 0, 0, 837, 838, 5, 61, 0, 0, 838, 107, 1, 0, 0, 0, 839, 840, 5, 58, 0, 0, 840, 841, 5, 58, 0, 0, 841, 109, 1, 0, 0, 0, 842, 843, 5, 58, 0, 0, 843, 111, 1, 0, 0, 0, 844, 845, 5, 44, 0, 0, 845, 113, 1, 0, 0, 0, 846, 847, 7, 0, 0, 0, 847, 848, 7, 3, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 4, 0, 0, 850, 115, 1, 0, 0, 0, 851, 852, 5, 46, 0, 0, 852, 117, 1, 0, 0, 0, 853, 854, 7, 15, 0, 0, 854, 855, 7, 12, 0, 0, 855, 856, 7, 13, 0, 0, 856, 857, 7, 2, 0, 0, 857, 858, 7, 3, 0, 0, 858, 119, 1, 0, 0, 0, 859, 860, 7, 15, 0, 0, 860, 861, 7, 1, 0, 0, 861, 862, 7, 6, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 5, 0, 0, 864, 121, 1, 0, 0, 0, 865, 866, 7, 1, 0, 0, 866, 867, 7, 9, 0, 0, 867, 123, 1, 0, 0, 0, 868, 869, 7, 1, 0, 0, 869, 870, 7, 2, 0, 0, 870, 125, 1, 0, 0, 0, 871, 872, 7, 13, 0, 0, 872, 873, 7, 12, 0, 0, 873, 874, 7, 2, 0, 0, 874, 875, 7, 5, 0, 0, 875, 127, 1, 0, 0, 0, 876, 877, 7, 13, 0, 0, 877, 878, 7, 1, 0, 0, 878, 879, 7, 18, 0, 0, 879, 880, 7, 3, 0, 0, 880, 129, 1, 0, 0, 0, 881, 882, 5, 40, 0, 0, 882, 131, 1, 0, 0, 0, 883, 884, 7, 9, 0, 0, 884, 885, 7, 7, 0, 0, 885, 886, 7, 5, 0, 0, 886, 133, 1, 0, 0, 0, 887, 888, 7, 9, 0, 0, 888, 889, 7, 20, 0, 0, 889, 890, 7, 13, 0, 0, 890, 891, 7, 13, 0, 0, 891, 135, 1, 0, 0, 0, 892, 893, 7, 9, 0, 0, 893, 894, 7, 20, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 13, 0, 0, 896, 897, 7, 2, 0, 0, 897, 137, 1, 0, 0, 0, 898, 899, 7, 7, 0, 0, 899, 900, 7, 6, 0, 0, 900, 139, 1, 0, 0, 0, 901, 902, 5, 63, 0, 0, 902, 141, 1, 0, 0, 0, 903, 904, 7, 6, 0, 0, 904, 905, 7, 13, 0, 0, 905, 906, 7, 1, 0, 0, 906, 907, 7, 18, 0, 0, 907, 908, 7, 3, 0, 0, 908, 143, 1, 0, 0, 0, 909, 910, 5, 41, 0, 0, 910, 145, 1, 0, 0, 0, 911, 912, 7, 5, 0, 0, 912, 913, 7, 6, 0, 0, 913, 914, 7, 20, 0, 0, 914, 915, 7, 3, 0, 0, 915, 147, 1, 0, 0, 0, 916, 917, 5, 61, 0, 0, 917, 918, 5, 61, 0, 0, 918, 149, 1, 0, 0, 0, 919, 920, 5, 61, 0, 0, 920, 921, 5, 126, 0, 0, 921, 151, 1, 0, 0, 0, 922, 923, 5, 33, 0, 0, 923, 924, 5, 61, 0, 0, 924, 153, 1, 0, 0, 0, 925, 926, 5, 60, 0, 0, 926, 155, 1, 0, 0, 0, 927, 928, 5, 60, 0, 0, 928, 929, 5, 61, 0, 0, 929, 157, 1, 0, 0, 0, 930, 931, 5, 62, 0, 0, 931, 159, 1, 0, 0, 0, 932, 933, 5, 62, 0, 0, 933, 934, 5, 61, 0, 0, 934, 161, 1, 0, 0, 0, 935, 936, 5, 43, 0, 0, 936, 163, 1, 0, 0, 0, 937, 938, 5, 45, 0, 0, 938, 165, 1, 0, 0, 0, 939, 940, 5, 42, 0, 0, 940, 167, 1, 0, 0, 0, 941, 942, 5, 47, 0, 0, 942, 169, 1, 0, 0, 0, 943, 944, 5, 37, 0, 0, 944, 171, 1, 0, 0, 0, 945, 946, 5, 123, 0, 0, 946, 173, 1, 0, 0, 0, 947, 948, 5, 125, 0, 0, 948, 175, 1, 0, 0, 0, 949, 950, 3, 46, 15, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 80, 13, 0, 952, 177, 1, 0, 0, 0, 953, 956, 3, 140, 62, 0, 954, 957, 3, 76, 30, 0, 955, 957, 3, 90, 37, 0, 956, 954, 1, 0, 0, 0, 956, 955, 1, 0, 0, 0, 957, 961, 1, 0, 0, 0, 958, 960, 3, 92, 38, 0, 959, 958, 1, 0, 0, 0, 960, 963, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 971, 1, 0, 0, 0, 963, 961, 1, 0, 0, 0, 964, 966, 3, 140, 62, 0, 965, 967, 3, 74, 29, 0, 966, 965, 1, 0, 0, 0, 967, 968, 1, 0, 0, 0, 968, 966, 1, 0, 0, 0, 968, 969, 1, 0, 0, 0, 969, 971, 1, 0, 0, 0, 970, 953, 1, 0, 0, 0, 970, 964, 1, 0, 0, 0, 971, 179, 1, 0, 0, 0, 972, 973, 5, 91, 0, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 82, 0, 0, 975, 976, 6, 82, 0, 0, 976, 181, 1, 0, 0, 0, 977, 978, 5, 93, 0, 0, 978, 979, 1, 0, 0, 0, 979, 980, 6, 83, 12, 0, 980, 981, 6, 83, 12, 0, 981, 183, 1, 0, 0, 0, 982, 
986, 3, 76, 30, 0, 983, 985, 3, 92, 38, 0, 984, 983, 1, 0, 0, 0, 985, 988, 1, 0, 0, 0, 986, 984, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 999, 1, 0, 0, 0, 988, 986, 1, 0, 0, 0, 989, 992, 3, 90, 37, 0, 990, 992, 3, 84, 34, 0, 991, 989, 1, 0, 0, 0, 991, 990, 1, 0, 0, 0, 992, 994, 1, 0, 0, 0, 993, 995, 3, 92, 38, 0, 994, 993, 1, 0, 0, 0, 995, 996, 1, 0, 0, 0, 996, 994, 1, 0, 0, 0, 996, 997, 1, 0, 0, 0, 997, 999, 1, 0, 0, 0, 998, 982, 1, 0, 0, 0, 998, 991, 1, 0, 0, 0, 999, 185, 1, 0, 0, 0, 1000, 1002, 3, 86, 35, 0, 1001, 1003, 3, 88, 36, 0, 1002, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1002, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 3, 86, 35, 0, 1007, 187, 1, 0, 0, 0, 1008, 1009, 3, 186, 85, 0, 1009, 189, 1, 0, 0, 0, 1010, 1011, 3, 66, 25, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 87, 11, 0, 1013, 191, 1, 0, 0, 0, 1014, 1015, 3, 68, 26, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 88, 11, 0, 1017, 193, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 89, 11, 0, 1021, 195, 1, 0, 0, 0, 1022, 1023, 3, 180, 82, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 90, 14, 0, 1025, 1026, 6, 90, 15, 0, 1026, 197, 1, 0, 0, 0, 1027, 1028, 3, 72, 28, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1030, 6, 91, 16, 0, 1030, 1031, 6, 91, 12, 0, 1031, 199, 1, 0, 0, 0, 1032, 1033, 3, 70, 27, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 92, 11, 0, 1035, 201, 1, 0, 0, 0, 1036, 1037, 3, 66, 25, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 93, 11, 0, 1039, 203, 1, 0, 0, 0, 1040, 1041, 3, 68, 26, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 94, 11, 0, 1043, 205, 1, 0, 0, 0, 1044, 1045, 3, 72, 28, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 95, 16, 0, 1047, 1048, 6, 95, 12, 0, 1048, 207, 1, 0, 0, 0, 1049, 1050, 3, 180, 82, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 96, 14, 0, 1052, 209, 1, 0, 0, 0, 1053, 1054, 3, 182, 83, 0, 1054, 1055, 1, 0, 0, 0, 1055, 1056, 6, 97, 17, 0, 1056, 211, 1, 0, 0, 0, 1057, 1058, 3, 110, 47, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 6, 98, 18, 0, 1060, 213, 1, 0, 0, 0, 1061, 1062, 3, 112, 48, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 99, 19, 0, 1064, 215, 1, 0, 0, 0, 1065, 1066, 3, 106, 45, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 100, 20, 0, 1068, 217, 1, 0, 0, 0, 1069, 1070, 7, 16, 0, 0, 1070, 1071, 7, 3, 0, 0, 1071, 1072, 7, 5, 0, 0, 1072, 1073, 7, 12, 0, 0, 1073, 1074, 7, 0, 0, 0, 1074, 1075, 7, 12, 0, 0, 1075, 1076, 7, 5, 0, 0, 1076, 1077, 7, 12, 0, 0, 1077, 219, 1, 0, 0, 0, 1078, 1082, 8, 33, 0, 0, 1079, 1080, 5, 47, 0, 0, 1080, 1082, 8, 34, 0, 0, 1081, 1078, 1, 0, 0, 0, 1081, 1079, 1, 0, 0, 0, 1082, 221, 1, 0, 0, 0, 1083, 1085, 3, 220, 102, 0, 1084, 1083, 1, 0, 0, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1084, 1, 0, 0, 0, 1086, 1087, 1, 0, 0, 0, 1087, 223, 1, 0, 0, 0, 1088, 1089, 3, 222, 103, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 104, 21, 0, 1091, 225, 1, 0, 0, 0, 1092, 1093, 3, 94, 39, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 105, 22, 0, 1095, 227, 1, 0, 0, 0, 1096, 1097, 3, 66, 25, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 106, 11, 0, 1099, 229, 1, 0, 0, 0, 1100, 1101, 3, 68, 26, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 107, 11, 0, 1103, 231, 1, 0, 0, 0, 1104, 1105, 3, 70, 27, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 108, 11, 0, 1107, 233, 1, 0, 0, 0, 1108, 1109, 3, 72, 28, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 109, 16, 0, 1111, 1112, 6, 109, 12, 0, 1112, 235, 1, 0, 0, 0, 1113, 1114, 3, 116, 50, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 110, 23, 0, 1116, 237, 1, 0, 0, 0, 1117, 1118, 3, 112, 48, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 111, 19, 0, 
1120, 239, 1, 0, 0, 0, 1121, 1122, 4, 112, 8, 0, 1122, 1123, 3, 140, 62, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 112, 24, 0, 1125, 241, 1, 0, 0, 0, 1126, 1127, 4, 113, 9, 0, 1127, 1128, 3, 178, 81, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 113, 25, 0, 1130, 243, 1, 0, 0, 0, 1131, 1136, 3, 76, 30, 0, 1132, 1136, 3, 74, 29, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 166, 75, 0, 1135, 1131, 1, 0, 0, 0, 1135, 1132, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 245, 1, 0, 0, 0, 1137, 1140, 3, 76, 30, 0, 1138, 1140, 3, 166, 75, 0, 1139, 1137, 1, 0, 0, 0, 1139, 1138, 1, 0, 0, 0, 1140, 1144, 1, 0, 0, 0, 1141, 1143, 3, 244, 114, 0, 1142, 1141, 1, 0, 0, 0, 1143, 1146, 1, 0, 0, 0, 1144, 1142, 1, 0, 0, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1157, 1, 0, 0, 0, 1146, 1144, 1, 0, 0, 0, 1147, 1150, 3, 90, 37, 0, 1148, 1150, 3, 84, 34, 0, 1149, 1147, 1, 0, 0, 0, 1149, 1148, 1, 0, 0, 0, 1150, 1152, 1, 0, 0, 0, 1151, 1153, 3, 244, 114, 0, 1152, 1151, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1152, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1157, 1, 0, 0, 0, 1156, 1139, 1, 0, 0, 0, 1156, 1149, 1, 0, 0, 0, 1157, 247, 1, 0, 0, 0, 1158, 1161, 3, 246, 115, 0, 1159, 1161, 3, 186, 85, 0, 1160, 1158, 1, 0, 0, 0, 1160, 1159, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 249, 1, 0, 0, 0, 1164, 1165, 3, 66, 25, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 117, 11, 0, 1167, 251, 1, 0, 0, 0, 1168, 1169, 3, 68, 26, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 118, 11, 0, 1171, 253, 1, 0, 0, 0, 1172, 1173, 3, 70, 27, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 119, 11, 0, 1175, 255, 1, 0, 0, 0, 1176, 1177, 3, 72, 28, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 120, 16, 0, 1179, 1180, 6, 120, 12, 0, 1180, 257, 1, 0, 0, 0, 1181, 1182, 3, 106, 45, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 121, 20, 0, 1184, 259, 1, 0, 0, 0, 1185, 1186, 3, 112, 48, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 122, 19, 0, 1188, 261, 1, 0, 0, 0, 1189, 1190, 3, 116, 50, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 123, 23, 0, 1192, 263, 1, 0, 0, 0, 1193, 1194, 4, 124, 10, 0, 1194, 1195, 3, 140, 62, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 124, 24, 0, 1197, 265, 1, 0, 0, 0, 1198, 1199, 4, 125, 11, 0, 1199, 1200, 3, 178, 81, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 125, 25, 0, 1202, 267, 1, 0, 0, 0, 1203, 1204, 7, 12, 0, 0, 1204, 1205, 7, 2, 0, 0, 1205, 269, 1, 0, 0, 0, 1206, 1207, 3, 248, 116, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 127, 26, 0, 1209, 271, 1, 0, 0, 0, 1210, 1211, 3, 66, 25, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 128, 11, 0, 1213, 273, 1, 0, 0, 0, 1214, 1215, 3, 68, 26, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 129, 11, 0, 1217, 275, 1, 0, 0, 0, 1218, 1219, 3, 70, 27, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 130, 11, 0, 1221, 277, 1, 0, 0, 0, 1222, 1223, 3, 72, 28, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 131, 16, 0, 1225, 1226, 6, 131, 12, 0, 1226, 279, 1, 0, 0, 0, 1227, 1228, 3, 180, 82, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1230, 6, 132, 14, 0, 1230, 1231, 6, 132, 27, 0, 1231, 281, 1, 0, 0, 0, 1232, 1233, 7, 7, 0, 0, 1233, 1234, 7, 9, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 133, 28, 0, 1236, 283, 1, 0, 0, 0, 1237, 1238, 7, 19, 0, 0, 1238, 1239, 7, 1, 0, 0, 1239, 1240, 7, 5, 0, 0, 1240, 1241, 7, 10, 0, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1243, 6, 134, 28, 0, 1243, 285, 1, 0, 0, 0, 1244, 1245, 8, 35, 0, 0, 1245, 287, 1, 0, 0, 0, 1246, 1248, 3, 286, 135, 0, 1247, 1246, 1, 0, 0, 0, 1248, 1249, 1, 0, 0, 0, 1249, 1247, 1, 0, 0, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 1, 0, 0, 0, 1251, 
1252, 3, 110, 47, 0, 1252, 1254, 1, 0, 0, 0, 1253, 1247, 1, 0, 0, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1256, 1, 0, 0, 0, 1255, 1257, 3, 286, 135, 0, 1256, 1255, 1, 0, 0, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1256, 1, 0, 0, 0, 1258, 1259, 1, 0, 0, 0, 1259, 289, 1, 0, 0, 0, 1260, 1261, 3, 288, 136, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 137, 29, 0, 1263, 291, 1, 0, 0, 0, 1264, 1265, 3, 66, 25, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 138, 11, 0, 1267, 293, 1, 0, 0, 0, 1268, 1269, 3, 68, 26, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 139, 11, 0, 1271, 295, 1, 0, 0, 0, 1272, 1273, 3, 70, 27, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 140, 11, 0, 1275, 297, 1, 0, 0, 0, 1276, 1277, 3, 72, 28, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 141, 16, 0, 1279, 1280, 6, 141, 12, 0, 1280, 1281, 6, 141, 12, 0, 1281, 299, 1, 0, 0, 0, 1282, 1283, 3, 106, 45, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 142, 20, 0, 1285, 301, 1, 0, 0, 0, 1286, 1287, 3, 112, 48, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 143, 19, 0, 1289, 303, 1, 0, 0, 0, 1290, 1291, 3, 116, 50, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 144, 23, 0, 1293, 305, 1, 0, 0, 0, 1294, 1295, 3, 284, 134, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 145, 30, 0, 1297, 307, 1, 0, 0, 0, 1298, 1299, 3, 248, 116, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 146, 26, 0, 1301, 309, 1, 0, 0, 0, 1302, 1303, 3, 188, 86, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 147, 31, 0, 1305, 311, 1, 0, 0, 0, 1306, 1307, 4, 148, 12, 0, 1307, 1308, 3, 140, 62, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1310, 6, 148, 24, 0, 1310, 313, 1, 0, 0, 0, 1311, 1312, 4, 149, 13, 0, 1312, 1313, 3, 178, 81, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 149, 25, 0, 1315, 315, 1, 0, 0, 0, 1316, 1317, 3, 66, 25, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 150, 11, 0, 1319, 317, 1, 0, 0, 0, 1320, 1321, 3, 68, 26, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 151, 11, 0, 1323, 319, 1, 0, 0, 0, 1324, 1325, 3, 70, 27, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 152, 11, 0, 1327, 321, 1, 0, 0, 0, 1328, 1329, 3, 72, 28, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 153, 16, 0, 1331, 1332, 6, 153, 12, 0, 1332, 323, 1, 0, 0, 0, 1333, 1334, 3, 116, 50, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 154, 23, 0, 1336, 325, 1, 0, 0, 0, 1337, 1338, 4, 155, 14, 0, 1338, 1339, 3, 140, 62, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1341, 6, 155, 24, 0, 1341, 327, 1, 0, 0, 0, 1342, 1343, 4, 156, 15, 0, 1343, 1344, 3, 178, 81, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 156, 25, 0, 1346, 329, 1, 0, 0, 0, 1347, 1348, 3, 188, 86, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 157, 31, 0, 1350, 331, 1, 0, 0, 0, 1351, 1352, 3, 184, 84, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 158, 32, 0, 1354, 333, 1, 0, 0, 0, 1355, 1356, 3, 66, 25, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 159, 11, 0, 1358, 335, 1, 0, 0, 0, 1359, 1360, 3, 68, 26, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 160, 11, 0, 1362, 337, 1, 0, 0, 0, 1363, 1364, 3, 70, 27, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 161, 11, 0, 1366, 339, 1, 0, 0, 0, 1367, 1368, 3, 72, 28, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 162, 16, 0, 1370, 1371, 6, 162, 12, 0, 1371, 341, 1, 0, 0, 0, 1372, 1373, 7, 1, 0, 0, 1373, 1374, 7, 9, 0, 0, 1374, 1375, 7, 15, 0, 0, 1375, 1376, 7, 7, 0, 0, 1376, 343, 1, 0, 0, 0, 1377, 1378, 3, 66, 25, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 164, 11, 0, 1380, 345, 1, 0, 0, 0, 1381, 1382, 3, 68, 26, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 165, 11, 0, 1384, 347, 1, 0, 0, 0, 1385, 1386, 3, 70, 27, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 166, 11, 0, 1388, 349, 1, 0, 0, 0, 1389, 1390, 3, 182, 83, 0, 1390, 
1391, 1, 0, 0, 0, 1391, 1392, 6, 167, 17, 0, 1392, 1393, 6, 167, 12, 0, 1393, 351, 1, 0, 0, 0, 1394, 1395, 3, 110, 47, 0, 1395, 1396, 1, 0, 0, 0, 1396, 1397, 6, 168, 18, 0, 1397, 353, 1, 0, 0, 0, 1398, 1404, 3, 84, 34, 0, 1399, 1404, 3, 74, 29, 0, 1400, 1404, 3, 116, 50, 0, 1401, 1404, 3, 76, 30, 0, 1402, 1404, 3, 90, 37, 0, 1403, 1398, 1, 0, 0, 0, 1403, 1399, 1, 0, 0, 0, 1403, 1400, 1, 0, 0, 0, 1403, 1401, 1, 0, 0, 0, 1403, 1402, 1, 0, 0, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1403, 1, 0, 0, 0, 1405, 1406, 1, 0, 0, 0, 1406, 355, 1, 0, 0, 0, 1407, 1408, 3, 66, 25, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 170, 11, 0, 1410, 357, 1, 0, 0, 0, 1411, 1412, 3, 68, 26, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 171, 11, 0, 1414, 359, 1, 0, 0, 0, 1415, 1416, 3, 70, 27, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 172, 11, 0, 1418, 361, 1, 0, 0, 0, 1419, 1420, 3, 72, 28, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 173, 16, 0, 1422, 1423, 6, 173, 12, 0, 1423, 363, 1, 0, 0, 0, 1424, 1425, 3, 110, 47, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 174, 18, 0, 1427, 365, 1, 0, 0, 0, 1428, 1429, 3, 112, 48, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 175, 19, 0, 1431, 367, 1, 0, 0, 0, 1432, 1433, 3, 116, 50, 0, 1433, 1434, 1, 0, 0, 0, 1434, 1435, 6, 176, 23, 0, 1435, 369, 1, 0, 0, 0, 1436, 1437, 3, 282, 133, 0, 1437, 1438, 1, 0, 0, 0, 1438, 1439, 6, 177, 33, 0, 1439, 1440, 6, 177, 34, 0, 1440, 371, 1, 0, 0, 0, 1441, 1442, 3, 222, 103, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 178, 21, 0, 1444, 373, 1, 0, 0, 0, 1445, 1446, 3, 94, 39, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 179, 22, 0, 1448, 375, 1, 0, 0, 0, 1449, 1450, 3, 66, 25, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 180, 11, 0, 1452, 377, 1, 0, 0, 0, 1453, 1454, 3, 68, 26, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 181, 11, 0, 1456, 379, 1, 0, 0, 0, 1457, 1458, 3, 70, 27, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 182, 11, 0, 1460, 381, 1, 0, 0, 0, 1461, 1462, 3, 72, 28, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 183, 16, 0, 1464, 1465, 6, 183, 12, 0, 1465, 1466, 6, 183, 12, 0, 1466, 383, 1, 0, 0, 0, 1467, 1468, 3, 112, 48, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 184, 19, 0, 1470, 385, 1, 0, 0, 0, 1471, 1472, 3, 116, 50, 0, 1472, 1473, 1, 0, 0, 0, 1473, 1474, 6, 185, 23, 0, 1474, 387, 1, 0, 0, 0, 1475, 1476, 3, 248, 116, 0, 1476, 1477, 1, 0, 0, 0, 1477, 1478, 6, 186, 26, 0, 1478, 389, 1, 0, 0, 0, 1479, 1480, 3, 66, 25, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 187, 11, 0, 1482, 391, 1, 0, 0, 0, 1483, 1484, 3, 68, 26, 0, 1484, 1485, 1, 0, 0, 0, 1485, 1486, 6, 188, 11, 0, 1486, 393, 1, 0, 0, 0, 1487, 1488, 3, 70, 27, 0, 1488, 1489, 1, 0, 0, 0, 1489, 1490, 6, 189, 11, 0, 1490, 395, 1, 0, 0, 0, 1491, 1492, 3, 72, 28, 0, 1492, 1493, 1, 0, 0, 0, 1493, 1494, 6, 190, 16, 0, 1494, 1495, 6, 190, 12, 0, 1495, 397, 1, 0, 0, 0, 1496, 1497, 3, 54, 19, 0, 1497, 1498, 1, 0, 0, 0, 1498, 1499, 6, 191, 35, 0, 1499, 399, 1, 0, 0, 0, 1500, 1501, 3, 268, 126, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 36, 0, 1503, 401, 1, 0, 0, 0, 1504, 1505, 3, 282, 133, 0, 1505, 1506, 1, 0, 0, 0, 1506, 1507, 6, 193, 33, 0, 1507, 1508, 6, 193, 12, 0, 1508, 1509, 6, 193, 0, 0, 1509, 403, 1, 0, 0, 0, 1510, 1511, 7, 20, 0, 0, 1511, 1512, 7, 2, 0, 0, 1512, 1513, 7, 1, 0, 0, 1513, 1514, 7, 9, 0, 0, 1514, 1515, 7, 17, 0, 0, 1515, 1516, 1, 0, 0, 0, 1516, 1517, 6, 194, 12, 0, 1517, 1518, 6, 194, 0, 0, 1518, 405, 1, 0, 0, 0, 1519, 1520, 3, 222, 103, 0, 1520, 1521, 1, 0, 0, 0, 1521, 1522, 6, 195, 21, 0, 1522, 407, 1, 0, 0, 0, 1523, 1524, 3, 94, 39, 0, 1524, 1525, 1, 0, 0, 0, 1525, 1526, 6, 196, 22, 0, 1526, 
409, 1, 0, 0, 0, 1527, 1528, 3, 110, 47, 0, 1528, 1529, 1, 0, 0, 0, 1529, 1530, 6, 197, 18, 0, 1530, 411, 1, 0, 0, 0, 1531, 1532, 3, 184, 84, 0, 1532, 1533, 1, 0, 0, 0, 1533, 1534, 6, 198, 32, 0, 1534, 413, 1, 0, 0, 0, 1535, 1536, 3, 188, 86, 0, 1536, 1537, 1, 0, 0, 0, 1537, 1538, 6, 199, 31, 0, 1538, 415, 1, 0, 0, 0, 1539, 1540, 3, 66, 25, 0, 1540, 1541, 1, 0, 0, 0, 1541, 1542, 6, 200, 11, 0, 1542, 417, 1, 0, 0, 0, 1543, 1544, 3, 68, 26, 0, 1544, 1545, 1, 0, 0, 0, 1545, 1546, 6, 201, 11, 0, 1546, 419, 1, 0, 0, 0, 1547, 1548, 3, 70, 27, 0, 1548, 1549, 1, 0, 0, 0, 1549, 1550, 6, 202, 11, 0, 1550, 421, 1, 0, 0, 0, 1551, 1552, 3, 72, 28, 0, 1552, 1553, 1, 0, 0, 0, 1553, 1554, 6, 203, 16, 0, 1554, 1555, 6, 203, 12, 0, 1555, 423, 1, 0, 0, 0, 1556, 1557, 3, 222, 103, 0, 1557, 1558, 1, 0, 0, 0, 1558, 1559, 6, 204, 21, 0, 1559, 1560, 6, 204, 12, 0, 1560, 1561, 6, 204, 37, 0, 1561, 425, 1, 0, 0, 0, 1562, 1563, 3, 94, 39, 0, 1563, 1564, 1, 0, 0, 0, 1564, 1565, 6, 205, 22, 0, 1565, 1566, 6, 205, 12, 0, 1566, 1567, 6, 205, 37, 0, 1567, 427, 1, 0, 0, 0, 1568, 1569, 3, 66, 25, 0, 1569, 1570, 1, 0, 0, 0, 1570, 1571, 6, 206, 11, 0, 1571, 429, 1, 0, 0, 0, 1572, 1573, 3, 68, 26, 0, 1573, 1574, 1, 0, 0, 0, 1574, 1575, 6, 207, 11, 0, 1575, 431, 1, 0, 0, 0, 1576, 1577, 3, 70, 27, 0, 1577, 1578, 1, 0, 0, 0, 1578, 1579, 6, 208, 11, 0, 1579, 433, 1, 0, 0, 0, 1580, 1581, 3, 110, 47, 0, 1581, 1582, 1, 0, 0, 0, 1582, 1583, 6, 209, 18, 0, 1583, 1584, 6, 209, 12, 0, 1584, 1585, 6, 209, 9, 0, 1585, 435, 1, 0, 0, 0, 1586, 1587, 3, 112, 48, 0, 1587, 1588, 1, 0, 0, 0, 1588, 1589, 6, 210, 19, 0, 1589, 1590, 6, 210, 12, 0, 1590, 1591, 6, 210, 9, 0, 1591, 437, 1, 0, 0, 0, 1592, 1593, 3, 66, 25, 0, 1593, 1594, 1, 0, 0, 0, 1594, 1595, 6, 211, 11, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 68, 26, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 11, 0, 1599, 441, 1, 0, 0, 0, 1600, 1601, 3, 70, 27, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 6, 213, 11, 0, 1603, 443, 1, 0, 0, 0, 1604, 1605, 3, 188, 86, 0, 1605, 1606, 1, 0, 0, 0, 1606, 1607, 6, 214, 12, 0, 1607, 1608, 6, 214, 0, 0, 1608, 1609, 6, 214, 31, 0, 1609, 445, 1, 0, 0, 0, 1610, 1611, 3, 184, 84, 0, 1611, 1612, 1, 0, 0, 0, 1612, 1613, 6, 215, 12, 0, 1613, 1614, 6, 215, 0, 0, 1614, 1615, 6, 215, 32, 0, 1615, 447, 1, 0, 0, 0, 1616, 1617, 3, 100, 42, 0, 1617, 1618, 1, 0, 0, 0, 1618, 1619, 6, 216, 12, 0, 1619, 1620, 6, 216, 0, 0, 1620, 1621, 6, 216, 38, 0, 1621, 449, 1, 0, 0, 0, 1622, 1623, 3, 72, 28, 0, 1623, 1624, 1, 0, 0, 0, 1624, 1625, 6, 217, 16, 0, 1625, 1626, 6, 217, 12, 0, 1626, 451, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 665, 675, 679, 682, 691, 693, 704, 723, 728, 737, 744, 749, 751, 762, 770, 773, 775, 780, 785, 791, 798, 803, 809, 812, 820, 824, 956, 961, 968, 970, 986, 991, 996, 998, 1004, 1081, 1086, 1135, 1139, 1144, 1149, 1154, 1156, 1160, 1162, 1249, 1253, 1258, 1403, 1405, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 20, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 134, 1655, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 
7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 2, 218, 7, 218, 2, 219, 7, 219, 2, 220, 7, 220, 2, 221, 7, 221, 2, 222, 7, 222, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 
6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 678, 8, 24, 11, 24, 12, 24, 679, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 688, 8, 25, 10, 25, 12, 25, 691, 9, 25, 1, 25, 3, 25, 694, 8, 25, 1, 25, 3, 25, 697, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 706, 8, 26, 10, 26, 12, 26, 709, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 717, 8, 27, 11, 27, 12, 27, 718, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 738, 8, 33, 1, 33, 4, 33, 741, 8, 33, 11, 33, 12, 33, 742, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 752, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 759, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 764, 8, 39, 10, 39, 12, 39, 767, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 775, 8, 39, 10, 39, 12, 39, 778, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 785, 8, 39, 1, 39, 3, 39, 788, 8, 39, 3, 39, 790, 8, 39, 1, 40, 4, 40, 793, 8, 40, 11, 40, 12, 40, 794, 1, 41, 4, 41, 798, 8, 41, 11, 41, 12, 41, 799, 1, 41, 1, 41, 5, 41, 804, 8, 41, 10, 41, 12, 41, 807, 9, 41, 1, 41, 1, 41, 4, 41, 811, 8, 41, 11, 41, 12, 41, 812, 1, 41, 4, 41, 816, 8, 41, 11, 41, 12, 41, 817, 1, 41, 1, 41, 5, 41, 822, 8, 41, 10, 41, 12, 41, 825, 9, 41, 3, 41, 827, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 833, 8, 41, 11, 41, 12, 41, 834, 1, 41, 1, 41, 3, 41, 839, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 971, 8, 81, 1, 81, 5, 81, 974, 8, 81, 10, 81, 12, 81, 977, 9, 81, 1, 81, 1, 81, 4, 81, 981, 8, 81, 11, 81, 12, 81, 982, 3, 81, 985, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 
83, 1, 83, 1, 84, 1, 84, 5, 84, 999, 8, 84, 10, 84, 12, 84, 1002, 9, 84, 1, 84, 1, 84, 3, 84, 1006, 8, 84, 1, 84, 4, 84, 1009, 8, 84, 11, 84, 12, 84, 1010, 3, 84, 1013, 8, 84, 1, 85, 1, 85, 4, 85, 1017, 8, 85, 11, 85, 12, 85, 1018, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1096, 8, 102, 1, 103, 4, 103, 1099, 8, 103, 11, 103, 12, 103, 1100, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1148, 8, 114, 1, 115, 1, 115, 3, 115, 1152, 8, 115, 1, 115, 5, 115, 1155, 8, 115, 10, 115, 12, 115, 1158, 9, 115, 1, 115, 1, 115, 3, 115, 1162, 8, 115, 1, 115, 4, 115, 1165, 8, 115, 11, 115, 12, 115, 1166, 3, 115, 1169, 8, 115, 1, 116, 1, 116, 4, 116, 1173, 8, 116, 11, 116, 12, 116, 1174, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1258, 8, 136, 11, 136, 12, 136, 1259, 1, 136, 1, 136, 3, 136, 1264, 8, 136, 1, 136, 4, 136, 1267, 8, 136, 11, 136, 12, 136, 1268, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1410, 8, 169, 11, 169, 12, 169, 1411, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 
1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 217, 1, 218, 1, 218, 1, 218, 1, 218, 1, 218, 1, 219, 1, 219, 1, 219, 1, 219, 1, 220, 1, 220, 1, 220, 1, 220, 1, 221, 1, 221, 1, 221, 1, 221, 1, 222, 1, 222, 1, 222, 1, 222, 2, 707, 776, 0, 223, 17, 1, 19, 2, 21, 3, 23, 4, 25, 5, 27, 6, 29, 7, 31, 8, 33, 9, 35, 10, 37, 11, 39, 12, 41, 13, 43, 14, 45, 15, 47, 16, 49, 17, 51, 18, 53, 19, 55, 20, 57, 21, 59, 22, 61, 23, 63, 24, 65, 25, 67, 26, 69, 27, 71, 28, 73, 29, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 0, 87, 0, 89, 0, 91, 0, 93, 0, 95, 30, 97, 31, 99, 32, 101, 33, 103, 34, 105, 35, 107, 36, 109, 37, 111, 38, 113, 39, 115, 40, 117, 41, 119, 42, 121, 43, 123, 44, 125, 45, 127, 46, 129, 47, 131, 48, 133, 49, 135, 50, 137, 51, 139, 52, 141, 53, 143, 54, 145, 55, 147, 56, 149, 57, 151, 58, 153, 59, 155, 60, 157, 61, 159, 62, 161, 63, 163, 64, 165, 65, 167, 66, 169, 67, 171, 68, 173, 69, 175, 70, 177, 0, 179, 71, 181, 72, 183, 73, 185, 74, 187, 0, 189, 75, 191, 76, 193, 77, 195, 78, 197, 0, 199, 0, 201, 79, 203, 80, 205, 81, 207, 0, 209, 0, 211, 0, 213, 0, 215, 0, 217, 0, 219, 82, 221, 0, 223, 83, 225, 0, 227, 0, 229, 84, 231, 85, 233, 86, 235, 0, 237, 0, 239, 0, 241, 0, 243, 0, 245, 0, 247, 0, 249, 87, 251, 88, 253, 89, 255, 90, 257, 0, 259, 0, 261, 0, 263, 0, 265, 0, 267, 0, 269, 91, 271, 0, 273, 92, 275, 93, 277, 94, 279, 0, 281, 0, 283, 95, 285, 96, 287, 0, 289, 97, 291, 0, 293, 98, 295, 99, 297, 100, 299, 0, 301, 0, 303, 0, 305, 0, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 101, 319, 102, 321, 103, 323, 0, 325, 0, 327, 0, 329, 0, 331, 0, 333, 0, 335, 104, 337, 105, 339, 106, 341, 0, 343, 107, 345, 108, 347, 109, 349, 110, 351, 0, 353, 0, 355, 111, 357, 112, 359, 113, 361, 114, 363, 0, 365, 0, 367, 0, 369, 0, 371, 0, 373, 0, 375, 0, 377, 115, 379, 116, 381, 117, 383, 0, 385, 0, 387, 0, 389, 0, 391, 118, 393, 119, 395, 120, 397, 0, 399, 121, 401, 0, 403, 0, 405, 122, 407, 0, 409, 0, 411, 0, 413, 0, 415, 0, 417, 
123, 419, 124, 421, 125, 423, 0, 425, 0, 427, 0, 429, 126, 431, 127, 433, 128, 435, 0, 437, 0, 439, 129, 441, 130, 443, 131, 445, 0, 447, 0, 449, 0, 451, 0, 453, 0, 455, 0, 457, 132, 459, 133, 461, 134, 17, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 2, 0, 74, 74, 106, 106, 1681, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 1, 73, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 171, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 1, 181, 1, 0, 0, 0, 1, 183, 1, 0, 0, 0, 1, 185, 1, 0, 0, 0, 1, 189, 1, 0, 0, 0, 1, 191, 1, 0, 0, 0, 1, 193, 1, 0, 0, 0, 1, 195, 1, 0, 0, 0, 2, 197, 1, 0, 0, 0, 2, 199, 1, 0, 0, 0, 2, 201, 1, 0, 0, 0, 2, 203, 1, 0, 0, 0, 2, 205, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 3, 219, 1, 0, 0, 0, 3, 223, 1, 0, 0, 0, 3, 225, 1, 0, 0, 0, 3, 227, 1, 0, 0, 0, 3, 229, 1, 0, 0, 0, 3, 231, 1, 0, 0, 0, 3, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 4, 241, 1, 0, 0, 0, 4, 243, 1, 0, 0, 0, 4, 249, 1, 0, 0, 0, 4, 251, 1, 0, 0, 0, 4, 253, 1, 0, 0, 0, 4, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 5, 263, 1, 0, 0, 0, 5, 265, 1, 0, 0, 0, 5, 267, 1, 0, 0, 
0, 5, 269, 1, 0, 0, 0, 5, 271, 1, 0, 0, 0, 5, 273, 1, 0, 0, 0, 5, 275, 1, 0, 0, 0, 5, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 6, 283, 1, 0, 0, 0, 6, 285, 1, 0, 0, 0, 6, 289, 1, 0, 0, 0, 6, 291, 1, 0, 0, 0, 6, 293, 1, 0, 0, 0, 6, 295, 1, 0, 0, 0, 6, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 7, 307, 1, 0, 0, 0, 7, 309, 1, 0, 0, 0, 7, 311, 1, 0, 0, 0, 7, 313, 1, 0, 0, 0, 7, 315, 1, 0, 0, 0, 7, 317, 1, 0, 0, 0, 7, 319, 1, 0, 0, 0, 7, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 8, 325, 1, 0, 0, 0, 8, 327, 1, 0, 0, 0, 8, 329, 1, 0, 0, 0, 8, 331, 1, 0, 0, 0, 8, 333, 1, 0, 0, 0, 8, 335, 1, 0, 0, 0, 8, 337, 1, 0, 0, 0, 8, 339, 1, 0, 0, 0, 9, 341, 1, 0, 0, 0, 9, 343, 1, 0, 0, 0, 9, 345, 1, 0, 0, 0, 9, 347, 1, 0, 0, 0, 9, 349, 1, 0, 0, 0, 10, 351, 1, 0, 0, 0, 10, 353, 1, 0, 0, 0, 10, 355, 1, 0, 0, 0, 10, 357, 1, 0, 0, 0, 10, 359, 1, 0, 0, 0, 10, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 11, 367, 1, 0, 0, 0, 11, 369, 1, 0, 0, 0, 11, 371, 1, 0, 0, 0, 11, 373, 1, 0, 0, 0, 11, 375, 1, 0, 0, 0, 11, 377, 1, 0, 0, 0, 11, 379, 1, 0, 0, 0, 11, 381, 1, 0, 0, 0, 12, 383, 1, 0, 0, 0, 12, 385, 1, 0, 0, 0, 12, 387, 1, 0, 0, 0, 12, 389, 1, 0, 0, 0, 12, 391, 1, 0, 0, 0, 12, 393, 1, 0, 0, 0, 12, 395, 1, 0, 0, 0, 13, 397, 1, 0, 0, 0, 13, 399, 1, 0, 0, 0, 13, 401, 1, 0, 0, 0, 13, 403, 1, 0, 0, 0, 13, 405, 1, 0, 0, 0, 13, 407, 1, 0, 0, 0, 13, 409, 1, 0, 0, 0, 13, 411, 1, 0, 0, 0, 13, 413, 1, 0, 0, 0, 13, 415, 1, 0, 0, 0, 13, 417, 1, 0, 0, 0, 13, 419, 1, 0, 0, 0, 13, 421, 1, 0, 0, 0, 14, 423, 1, 0, 0, 0, 14, 425, 1, 0, 0, 0, 14, 427, 1, 0, 0, 0, 14, 429, 1, 0, 0, 0, 14, 431, 1, 0, 0, 0, 14, 433, 1, 0, 0, 0, 15, 435, 1, 0, 0, 0, 15, 437, 1, 0, 0, 0, 15, 439, 1, 0, 0, 0, 15, 441, 1, 0, 0, 0, 15, 443, 1, 0, 0, 0, 15, 445, 1, 0, 0, 0, 15, 447, 1, 0, 0, 0, 15, 449, 1, 0, 0, 0, 15, 451, 1, 0, 0, 0, 16, 453, 1, 0, 0, 0, 16, 455, 1, 0, 0, 0, 16, 457, 1, 0, 0, 0, 16, 459, 1, 0, 0, 0, 16, 461, 1, 0, 0, 0, 17, 463, 1, 0, 0, 0, 19, 473, 1, 0, 0, 0, 21, 480, 1, 0, 0, 0, 23, 489, 1, 0, 0, 0, 25, 496, 1, 0, 0, 0, 27, 506, 1, 0, 0, 0, 29, 513, 1, 0, 0, 0, 31, 520, 1, 0, 0, 0, 33, 527, 1, 0, 0, 0, 35, 535, 1, 0, 0, 0, 37, 547, 1, 0, 0, 0, 39, 556, 1, 0, 0, 0, 41, 562, 1, 0, 0, 0, 43, 569, 1, 0, 0, 0, 45, 576, 1, 0, 0, 0, 47, 584, 1, 0, 0, 0, 49, 592, 1, 0, 0, 0, 51, 601, 1, 0, 0, 0, 53, 616, 1, 0, 0, 0, 55, 628, 1, 0, 0, 0, 57, 640, 1, 0, 0, 0, 59, 651, 1, 0, 0, 0, 61, 659, 1, 0, 0, 0, 63, 667, 1, 0, 0, 0, 65, 677, 1, 0, 0, 0, 67, 683, 1, 0, 0, 0, 69, 700, 1, 0, 0, 0, 71, 716, 1, 0, 0, 0, 73, 722, 1, 0, 0, 0, 75, 726, 1, 0, 0, 0, 77, 728, 1, 0, 0, 0, 79, 730, 1, 0, 0, 0, 81, 733, 1, 0, 0, 0, 83, 735, 1, 0, 0, 0, 85, 744, 1, 0, 0, 0, 87, 746, 1, 0, 0, 0, 89, 751, 1, 0, 0, 0, 91, 753, 1, 0, 0, 0, 93, 758, 1, 0, 0, 0, 95, 789, 1, 0, 0, 0, 97, 792, 1, 0, 0, 0, 99, 838, 1, 0, 0, 0, 101, 840, 1, 0, 0, 0, 103, 843, 1, 0, 0, 0, 105, 847, 1, 0, 0, 0, 107, 851, 1, 0, 0, 0, 109, 853, 1, 0, 0, 0, 111, 856, 1, 0, 0, 0, 113, 858, 1, 0, 0, 0, 115, 860, 1, 0, 0, 0, 117, 865, 1, 0, 0, 0, 119, 867, 1, 0, 0, 0, 121, 873, 1, 0, 0, 0, 123, 879, 1, 0, 0, 0, 125, 882, 1, 0, 0, 0, 127, 885, 1, 0, 0, 0, 129, 890, 1, 0, 0, 0, 131, 895, 1, 0, 0, 0, 133, 897, 1, 0, 0, 0, 135, 901, 1, 0, 0, 0, 137, 906, 1, 0, 0, 0, 139, 912, 1, 0, 0, 0, 141, 915, 1, 0, 0, 0, 143, 917, 1, 0, 0, 0, 145, 923, 1, 0, 0, 0, 147, 925, 1, 0, 0, 0, 149, 930, 1, 0, 0, 0, 151, 933, 1, 0, 0, 0, 153, 936, 1, 0, 0, 0, 155, 939, 1, 0, 0, 0, 157, 941, 1, 0, 0, 0, 159, 944, 1, 0, 0, 0, 161, 946, 1, 0, 0, 0, 163, 949, 
1, 0, 0, 0, 165, 951, 1, 0, 0, 0, 167, 953, 1, 0, 0, 0, 169, 955, 1, 0, 0, 0, 171, 957, 1, 0, 0, 0, 173, 959, 1, 0, 0, 0, 175, 961, 1, 0, 0, 0, 177, 963, 1, 0, 0, 0, 179, 984, 1, 0, 0, 0, 181, 986, 1, 0, 0, 0, 183, 991, 1, 0, 0, 0, 185, 1012, 1, 0, 0, 0, 187, 1014, 1, 0, 0, 0, 189, 1022, 1, 0, 0, 0, 191, 1024, 1, 0, 0, 0, 193, 1028, 1, 0, 0, 0, 195, 1032, 1, 0, 0, 0, 197, 1036, 1, 0, 0, 0, 199, 1041, 1, 0, 0, 0, 201, 1046, 1, 0, 0, 0, 203, 1050, 1, 0, 0, 0, 205, 1054, 1, 0, 0, 0, 207, 1058, 1, 0, 0, 0, 209, 1063, 1, 0, 0, 0, 211, 1067, 1, 0, 0, 0, 213, 1071, 1, 0, 0, 0, 215, 1075, 1, 0, 0, 0, 217, 1079, 1, 0, 0, 0, 219, 1083, 1, 0, 0, 0, 221, 1095, 1, 0, 0, 0, 223, 1098, 1, 0, 0, 0, 225, 1102, 1, 0, 0, 0, 227, 1106, 1, 0, 0, 0, 229, 1110, 1, 0, 0, 0, 231, 1114, 1, 0, 0, 0, 233, 1118, 1, 0, 0, 0, 235, 1122, 1, 0, 0, 0, 237, 1127, 1, 0, 0, 0, 239, 1131, 1, 0, 0, 0, 241, 1135, 1, 0, 0, 0, 243, 1139, 1, 0, 0, 0, 245, 1147, 1, 0, 0, 0, 247, 1168, 1, 0, 0, 0, 249, 1172, 1, 0, 0, 0, 251, 1176, 1, 0, 0, 0, 253, 1180, 1, 0, 0, 0, 255, 1184, 1, 0, 0, 0, 257, 1188, 1, 0, 0, 0, 259, 1193, 1, 0, 0, 0, 261, 1197, 1, 0, 0, 0, 263, 1201, 1, 0, 0, 0, 265, 1205, 1, 0, 0, 0, 267, 1209, 1, 0, 0, 0, 269, 1213, 1, 0, 0, 0, 271, 1216, 1, 0, 0, 0, 273, 1220, 1, 0, 0, 0, 275, 1224, 1, 0, 0, 0, 277, 1228, 1, 0, 0, 0, 279, 1232, 1, 0, 0, 0, 281, 1237, 1, 0, 0, 0, 283, 1242, 1, 0, 0, 0, 285, 1247, 1, 0, 0, 0, 287, 1254, 1, 0, 0, 0, 289, 1263, 1, 0, 0, 0, 291, 1270, 1, 0, 0, 0, 293, 1274, 1, 0, 0, 0, 295, 1278, 1, 0, 0, 0, 297, 1282, 1, 0, 0, 0, 299, 1286, 1, 0, 0, 0, 301, 1292, 1, 0, 0, 0, 303, 1296, 1, 0, 0, 0, 305, 1300, 1, 0, 0, 0, 307, 1304, 1, 0, 0, 0, 309, 1308, 1, 0, 0, 0, 311, 1312, 1, 0, 0, 0, 313, 1316, 1, 0, 0, 0, 315, 1320, 1, 0, 0, 0, 317, 1324, 1, 0, 0, 0, 319, 1328, 1, 0, 0, 0, 321, 1332, 1, 0, 0, 0, 323, 1336, 1, 0, 0, 0, 325, 1341, 1, 0, 0, 0, 327, 1345, 1, 0, 0, 0, 329, 1349, 1, 0, 0, 0, 331, 1353, 1, 0, 0, 0, 333, 1357, 1, 0, 0, 0, 335, 1361, 1, 0, 0, 0, 337, 1365, 1, 0, 0, 0, 339, 1369, 1, 0, 0, 0, 341, 1373, 1, 0, 0, 0, 343, 1378, 1, 0, 0, 0, 345, 1383, 1, 0, 0, 0, 347, 1387, 1, 0, 0, 0, 349, 1391, 1, 0, 0, 0, 351, 1395, 1, 0, 0, 0, 353, 1400, 1, 0, 0, 0, 355, 1409, 1, 0, 0, 0, 357, 1413, 1, 0, 0, 0, 359, 1417, 1, 0, 0, 0, 361, 1421, 1, 0, 0, 0, 363, 1425, 1, 0, 0, 0, 365, 1430, 1, 0, 0, 0, 367, 1434, 1, 0, 0, 0, 369, 1438, 1, 0, 0, 0, 371, 1442, 1, 0, 0, 0, 373, 1447, 1, 0, 0, 0, 375, 1451, 1, 0, 0, 0, 377, 1455, 1, 0, 0, 0, 379, 1459, 1, 0, 0, 0, 381, 1463, 1, 0, 0, 0, 383, 1467, 1, 0, 0, 0, 385, 1473, 1, 0, 0, 0, 387, 1477, 1, 0, 0, 0, 389, 1481, 1, 0, 0, 0, 391, 1485, 1, 0, 0, 0, 393, 1489, 1, 0, 0, 0, 395, 1493, 1, 0, 0, 0, 397, 1497, 1, 0, 0, 0, 399, 1502, 1, 0, 0, 0, 401, 1507, 1, 0, 0, 0, 403, 1511, 1, 0, 0, 0, 405, 1517, 1, 0, 0, 0, 407, 1526, 1, 0, 0, 0, 409, 1530, 1, 0, 0, 0, 411, 1534, 1, 0, 0, 0, 413, 1538, 1, 0, 0, 0, 415, 1542, 1, 0, 0, 0, 417, 1546, 1, 0, 0, 0, 419, 1550, 1, 0, 0, 0, 421, 1554, 1, 0, 0, 0, 423, 1558, 1, 0, 0, 0, 425, 1563, 1, 0, 0, 0, 427, 1569, 1, 0, 0, 0, 429, 1575, 1, 0, 0, 0, 431, 1579, 1, 0, 0, 0, 433, 1583, 1, 0, 0, 0, 435, 1587, 1, 0, 0, 0, 437, 1593, 1, 0, 0, 0, 439, 1599, 1, 0, 0, 0, 441, 1603, 1, 0, 0, 0, 443, 1607, 1, 0, 0, 0, 445, 1611, 1, 0, 0, 0, 447, 1617, 1, 0, 0, 0, 449, 1623, 1, 0, 0, 0, 451, 1629, 1, 0, 0, 0, 453, 1634, 1, 0, 0, 0, 455, 1639, 1, 0, 0, 0, 457, 1643, 1, 0, 0, 0, 459, 1647, 1, 0, 0, 0, 461, 1651, 1, 0, 0, 0, 463, 464, 7, 0, 0, 0, 464, 465, 7, 1, 0, 0, 465, 466, 7, 2, 0, 0, 466, 467, 7, 2, 0, 0, 467, 468, 7, 3, 0, 0, 468, 469, 7, 4, 
0, 0, 469, 470, 7, 5, 0, 0, 470, 471, 1, 0, 0, 0, 471, 472, 6, 0, 0, 0, 472, 18, 1, 0, 0, 0, 473, 474, 7, 0, 0, 0, 474, 475, 7, 6, 0, 0, 475, 476, 7, 7, 0, 0, 476, 477, 7, 8, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 6, 1, 1, 0, 479, 20, 1, 0, 0, 0, 480, 481, 7, 3, 0, 0, 481, 482, 7, 9, 0, 0, 482, 483, 7, 6, 0, 0, 483, 484, 7, 1, 0, 0, 484, 485, 7, 4, 0, 0, 485, 486, 7, 10, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 2, 2, 0, 488, 22, 1, 0, 0, 0, 489, 490, 7, 3, 0, 0, 490, 491, 7, 11, 0, 0, 491, 492, 7, 12, 0, 0, 492, 493, 7, 13, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 6, 3, 0, 0, 495, 24, 1, 0, 0, 0, 496, 497, 7, 3, 0, 0, 497, 498, 7, 14, 0, 0, 498, 499, 7, 8, 0, 0, 499, 500, 7, 13, 0, 0, 500, 501, 7, 12, 0, 0, 501, 502, 7, 1, 0, 0, 502, 503, 7, 9, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 4, 3, 0, 505, 26, 1, 0, 0, 0, 506, 507, 7, 15, 0, 0, 507, 508, 7, 6, 0, 0, 508, 509, 7, 7, 0, 0, 509, 510, 7, 16, 0, 0, 510, 511, 1, 0, 0, 0, 511, 512, 6, 5, 4, 0, 512, 28, 1, 0, 0, 0, 513, 514, 7, 17, 0, 0, 514, 515, 7, 6, 0, 0, 515, 516, 7, 7, 0, 0, 516, 517, 7, 18, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 6, 6, 0, 0, 519, 30, 1, 0, 0, 0, 520, 521, 7, 18, 0, 0, 521, 522, 7, 3, 0, 0, 522, 523, 7, 3, 0, 0, 523, 524, 7, 8, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, 6, 7, 1, 0, 526, 32, 1, 0, 0, 0, 527, 528, 7, 13, 0, 0, 528, 529, 7, 1, 0, 0, 529, 530, 7, 16, 0, 0, 530, 531, 7, 1, 0, 0, 531, 532, 7, 5, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 8, 0, 0, 534, 34, 1, 0, 0, 0, 535, 536, 7, 16, 0, 0, 536, 537, 7, 11, 0, 0, 537, 538, 5, 95, 0, 0, 538, 539, 7, 3, 0, 0, 539, 540, 7, 14, 0, 0, 540, 541, 7, 8, 0, 0, 541, 542, 7, 12, 0, 0, 542, 543, 7, 9, 0, 0, 543, 544, 7, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 6, 9, 5, 0, 546, 36, 1, 0, 0, 0, 547, 548, 7, 6, 0, 0, 548, 549, 7, 3, 0, 0, 549, 550, 7, 9, 0, 0, 550, 551, 7, 12, 0, 0, 551, 552, 7, 16, 0, 0, 552, 553, 7, 3, 0, 0, 553, 554, 1, 0, 0, 0, 554, 555, 6, 10, 6, 0, 555, 38, 1, 0, 0, 0, 556, 557, 7, 6, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 19, 0, 0, 559, 560, 1, 0, 0, 0, 560, 561, 6, 11, 0, 0, 561, 40, 1, 0, 0, 0, 562, 563, 7, 2, 0, 0, 563, 564, 7, 10, 0, 0, 564, 565, 7, 7, 0, 0, 565, 566, 7, 19, 0, 0, 566, 567, 1, 0, 0, 0, 567, 568, 6, 12, 7, 0, 568, 42, 1, 0, 0, 0, 569, 570, 7, 2, 0, 0, 570, 571, 7, 7, 0, 0, 571, 572, 7, 6, 0, 0, 572, 573, 7, 5, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 6, 13, 0, 0, 575, 44, 1, 0, 0, 0, 576, 577, 7, 2, 0, 0, 577, 578, 7, 5, 0, 0, 578, 579, 7, 12, 0, 0, 579, 580, 7, 5, 0, 0, 580, 581, 7, 2, 0, 0, 581, 582, 1, 0, 0, 0, 582, 583, 6, 14, 0, 0, 583, 46, 1, 0, 0, 0, 584, 585, 7, 19, 0, 0, 585, 586, 7, 10, 0, 0, 586, 587, 7, 3, 0, 0, 587, 588, 7, 6, 0, 0, 588, 589, 7, 3, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 6, 15, 0, 0, 591, 48, 1, 0, 0, 0, 592, 593, 7, 13, 0, 0, 593, 594, 7, 7, 0, 0, 594, 595, 7, 7, 0, 0, 595, 596, 7, 18, 0, 0, 596, 597, 7, 20, 0, 0, 597, 598, 7, 8, 0, 0, 598, 599, 1, 0, 0, 0, 599, 600, 6, 16, 8, 0, 600, 50, 1, 0, 0, 0, 601, 602, 4, 17, 0, 0, 602, 603, 7, 1, 0, 0, 603, 604, 7, 9, 0, 0, 604, 605, 7, 13, 0, 0, 605, 606, 7, 1, 0, 0, 606, 607, 7, 9, 0, 0, 607, 608, 7, 3, 0, 0, 608, 609, 7, 2, 0, 0, 609, 610, 7, 5, 0, 0, 610, 611, 7, 12, 0, 0, 611, 612, 7, 5, 0, 0, 612, 613, 7, 2, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 6, 17, 0, 0, 615, 52, 1, 0, 0, 0, 616, 617, 4, 18, 1, 0, 617, 618, 7, 1, 0, 0, 618, 619, 7, 9, 0, 0, 619, 620, 7, 2, 0, 0, 620, 621, 7, 1, 0, 0, 621, 622, 7, 2, 0, 0, 622, 623, 7, 5, 0, 0, 623, 624, 5, 95, 0, 0, 624, 625, 5, 128020, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 6, 18, 1, 0, 627, 54, 1, 0, 0, 0, 628, 629, 4, 19, 2, 
0, 629, 630, 7, 13, 0, 0, 630, 631, 7, 7, 0, 0, 631, 632, 7, 7, 0, 0, 632, 633, 7, 18, 0, 0, 633, 634, 7, 20, 0, 0, 634, 635, 7, 8, 0, 0, 635, 636, 5, 95, 0, 0, 636, 637, 5, 128020, 0, 0, 637, 638, 1, 0, 0, 0, 638, 639, 6, 19, 9, 0, 639, 56, 1, 0, 0, 0, 640, 641, 4, 20, 3, 0, 641, 642, 7, 16, 0, 0, 642, 643, 7, 3, 0, 0, 643, 644, 7, 5, 0, 0, 644, 645, 7, 6, 0, 0, 645, 646, 7, 1, 0, 0, 646, 647, 7, 4, 0, 0, 647, 648, 7, 2, 0, 0, 648, 649, 1, 0, 0, 0, 649, 650, 6, 20, 10, 0, 650, 58, 1, 0, 0, 0, 651, 652, 4, 21, 4, 0, 652, 653, 7, 15, 0, 0, 653, 654, 7, 20, 0, 0, 654, 655, 7, 13, 0, 0, 655, 656, 7, 13, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 6, 21, 8, 0, 658, 60, 1, 0, 0, 0, 659, 660, 4, 22, 5, 0, 660, 661, 7, 13, 0, 0, 661, 662, 7, 3, 0, 0, 662, 663, 7, 15, 0, 0, 663, 664, 7, 5, 0, 0, 664, 665, 1, 0, 0, 0, 665, 666, 6, 22, 8, 0, 666, 62, 1, 0, 0, 0, 667, 668, 4, 23, 6, 0, 668, 669, 7, 6, 0, 0, 669, 670, 7, 1, 0, 0, 670, 671, 7, 17, 0, 0, 671, 672, 7, 10, 0, 0, 672, 673, 7, 5, 0, 0, 673, 674, 1, 0, 0, 0, 674, 675, 6, 23, 8, 0, 675, 64, 1, 0, 0, 0, 676, 678, 8, 21, 0, 0, 677, 676, 1, 0, 0, 0, 678, 679, 1, 0, 0, 0, 679, 677, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 681, 1, 0, 0, 0, 681, 682, 6, 24, 0, 0, 682, 66, 1, 0, 0, 0, 683, 684, 5, 47, 0, 0, 684, 685, 5, 47, 0, 0, 685, 689, 1, 0, 0, 0, 686, 688, 8, 22, 0, 0, 687, 686, 1, 0, 0, 0, 688, 691, 1, 0, 0, 0, 689, 687, 1, 0, 0, 0, 689, 690, 1, 0, 0, 0, 690, 693, 1, 0, 0, 0, 691, 689, 1, 0, 0, 0, 692, 694, 5, 13, 0, 0, 693, 692, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 696, 1, 0, 0, 0, 695, 697, 5, 10, 0, 0, 696, 695, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 699, 6, 25, 11, 0, 699, 68, 1, 0, 0, 0, 700, 701, 5, 47, 0, 0, 701, 702, 5, 42, 0, 0, 702, 707, 1, 0, 0, 0, 703, 706, 3, 69, 26, 0, 704, 706, 9, 0, 0, 0, 705, 703, 1, 0, 0, 0, 705, 704, 1, 0, 0, 0, 706, 709, 1, 0, 0, 0, 707, 708, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 710, 1, 0, 0, 0, 709, 707, 1, 0, 0, 0, 710, 711, 5, 42, 0, 0, 711, 712, 5, 47, 0, 0, 712, 713, 1, 0, 0, 0, 713, 714, 6, 26, 11, 0, 714, 70, 1, 0, 0, 0, 715, 717, 7, 23, 0, 0, 716, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 716, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 720, 1, 0, 0, 0, 720, 721, 6, 27, 11, 0, 721, 72, 1, 0, 0, 0, 722, 723, 5, 124, 0, 0, 723, 724, 1, 0, 0, 0, 724, 725, 6, 28, 12, 0, 725, 74, 1, 0, 0, 0, 726, 727, 7, 24, 0, 0, 727, 76, 1, 0, 0, 0, 728, 729, 7, 25, 0, 0, 729, 78, 1, 0, 0, 0, 730, 731, 5, 92, 0, 0, 731, 732, 7, 26, 0, 0, 732, 80, 1, 0, 0, 0, 733, 734, 8, 27, 0, 0, 734, 82, 1, 0, 0, 0, 735, 737, 7, 3, 0, 0, 736, 738, 7, 28, 0, 0, 737, 736, 1, 0, 0, 0, 737, 738, 1, 0, 0, 0, 738, 740, 1, 0, 0, 0, 739, 741, 3, 75, 29, 0, 740, 739, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 740, 1, 0, 0, 0, 742, 743, 1, 0, 0, 0, 743, 84, 1, 0, 0, 0, 744, 745, 5, 64, 0, 0, 745, 86, 1, 0, 0, 0, 746, 747, 5, 96, 0, 0, 747, 88, 1, 0, 0, 0, 748, 752, 8, 29, 0, 0, 749, 750, 5, 96, 0, 0, 750, 752, 5, 96, 0, 0, 751, 748, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 752, 90, 1, 0, 0, 0, 753, 754, 5, 95, 0, 0, 754, 92, 1, 0, 0, 0, 755, 759, 3, 77, 30, 0, 756, 759, 3, 75, 29, 0, 757, 759, 3, 91, 37, 0, 758, 755, 1, 0, 0, 0, 758, 756, 1, 0, 0, 0, 758, 757, 1, 0, 0, 0, 759, 94, 1, 0, 0, 0, 760, 765, 5, 34, 0, 0, 761, 764, 3, 79, 31, 0, 762, 764, 3, 81, 32, 0, 763, 761, 1, 0, 0, 0, 763, 762, 1, 0, 0, 0, 764, 767, 1, 0, 0, 0, 765, 763, 1, 0, 0, 0, 765, 766, 1, 0, 0, 0, 766, 768, 1, 0, 0, 0, 767, 765, 1, 0, 0, 0, 768, 790, 5, 34, 0, 0, 769, 770, 5, 34, 0, 0, 770, 771, 5, 34, 0, 0, 771, 772, 5, 34, 0, 0, 772, 776, 1, 0, 0, 0, 773, 775, 8, 
22, 0, 0, 774, 773, 1, 0, 0, 0, 775, 778, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 776, 774, 1, 0, 0, 0, 777, 779, 1, 0, 0, 0, 778, 776, 1, 0, 0, 0, 779, 780, 5, 34, 0, 0, 780, 781, 5, 34, 0, 0, 781, 782, 5, 34, 0, 0, 782, 784, 1, 0, 0, 0, 783, 785, 5, 34, 0, 0, 784, 783, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 787, 1, 0, 0, 0, 786, 788, 5, 34, 0, 0, 787, 786, 1, 0, 0, 0, 787, 788, 1, 0, 0, 0, 788, 790, 1, 0, 0, 0, 789, 760, 1, 0, 0, 0, 789, 769, 1, 0, 0, 0, 790, 96, 1, 0, 0, 0, 791, 793, 3, 75, 29, 0, 792, 791, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 792, 1, 0, 0, 0, 794, 795, 1, 0, 0, 0, 795, 98, 1, 0, 0, 0, 796, 798, 3, 75, 29, 0, 797, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 805, 3, 117, 50, 0, 802, 804, 3, 75, 29, 0, 803, 802, 1, 0, 0, 0, 804, 807, 1, 0, 0, 0, 805, 803, 1, 0, 0, 0, 805, 806, 1, 0, 0, 0, 806, 839, 1, 0, 0, 0, 807, 805, 1, 0, 0, 0, 808, 810, 3, 117, 50, 0, 809, 811, 3, 75, 29, 0, 810, 809, 1, 0, 0, 0, 811, 812, 1, 0, 0, 0, 812, 810, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 839, 1, 0, 0, 0, 814, 816, 3, 75, 29, 0, 815, 814, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 815, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 826, 1, 0, 0, 0, 819, 823, 3, 117, 50, 0, 820, 822, 3, 75, 29, 0, 821, 820, 1, 0, 0, 0, 822, 825, 1, 0, 0, 0, 823, 821, 1, 0, 0, 0, 823, 824, 1, 0, 0, 0, 824, 827, 1, 0, 0, 0, 825, 823, 1, 0, 0, 0, 826, 819, 1, 0, 0, 0, 826, 827, 1, 0, 0, 0, 827, 828, 1, 0, 0, 0, 828, 829, 3, 83, 33, 0, 829, 839, 1, 0, 0, 0, 830, 832, 3, 117, 50, 0, 831, 833, 3, 75, 29, 0, 832, 831, 1, 0, 0, 0, 833, 834, 1, 0, 0, 0, 834, 832, 1, 0, 0, 0, 834, 835, 1, 0, 0, 0, 835, 836, 1, 0, 0, 0, 836, 837, 3, 83, 33, 0, 837, 839, 1, 0, 0, 0, 838, 797, 1, 0, 0, 0, 838, 808, 1, 0, 0, 0, 838, 815, 1, 0, 0, 0, 838, 830, 1, 0, 0, 0, 839, 100, 1, 0, 0, 0, 840, 841, 7, 30, 0, 0, 841, 842, 7, 31, 0, 0, 842, 102, 1, 0, 0, 0, 843, 844, 7, 12, 0, 0, 844, 845, 7, 9, 0, 0, 845, 846, 7, 0, 0, 0, 846, 104, 1, 0, 0, 0, 847, 848, 7, 12, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 4, 0, 0, 850, 106, 1, 0, 0, 0, 851, 852, 5, 61, 0, 0, 852, 108, 1, 0, 0, 0, 853, 854, 5, 58, 0, 0, 854, 855, 5, 58, 0, 0, 855, 110, 1, 0, 0, 0, 856, 857, 5, 58, 0, 0, 857, 112, 1, 0, 0, 0, 858, 859, 5, 44, 0, 0, 859, 114, 1, 0, 0, 0, 860, 861, 7, 0, 0, 0, 861, 862, 7, 3, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 4, 0, 0, 864, 116, 1, 0, 0, 0, 865, 866, 5, 46, 0, 0, 866, 118, 1, 0, 0, 0, 867, 868, 7, 15, 0, 0, 868, 869, 7, 12, 0, 0, 869, 870, 7, 13, 0, 0, 870, 871, 7, 2, 0, 0, 871, 872, 7, 3, 0, 0, 872, 120, 1, 0, 0, 0, 873, 874, 7, 15, 0, 0, 874, 875, 7, 1, 0, 0, 875, 876, 7, 6, 0, 0, 876, 877, 7, 2, 0, 0, 877, 878, 7, 5, 0, 0, 878, 122, 1, 0, 0, 0, 879, 880, 7, 1, 0, 0, 880, 881, 7, 9, 0, 0, 881, 124, 1, 0, 0, 0, 882, 883, 7, 1, 0, 0, 883, 884, 7, 2, 0, 0, 884, 126, 1, 0, 0, 0, 885, 886, 7, 13, 0, 0, 886, 887, 7, 12, 0, 0, 887, 888, 7, 2, 0, 0, 888, 889, 7, 5, 0, 0, 889, 128, 1, 0, 0, 0, 890, 891, 7, 13, 0, 0, 891, 892, 7, 1, 0, 0, 892, 893, 7, 18, 0, 0, 893, 894, 7, 3, 0, 0, 894, 130, 1, 0, 0, 0, 895, 896, 5, 40, 0, 0, 896, 132, 1, 0, 0, 0, 897, 898, 7, 9, 0, 0, 898, 899, 7, 7, 0, 0, 899, 900, 7, 5, 0, 0, 900, 134, 1, 0, 0, 0, 901, 902, 7, 9, 0, 0, 902, 903, 7, 20, 0, 0, 903, 904, 7, 13, 0, 0, 904, 905, 7, 13, 0, 0, 905, 136, 1, 0, 0, 0, 906, 907, 7, 9, 0, 0, 907, 908, 7, 20, 0, 0, 908, 909, 7, 13, 0, 0, 909, 910, 7, 13, 0, 0, 910, 911, 7, 2, 0, 0, 911, 138, 1, 0, 0, 0, 912, 913, 7, 7, 0, 0, 913, 914, 7, 6, 0, 0, 914, 140, 1, 0, 0, 0, 915, 916, 5, 63, 0, 0, 916, 142, 1, 0, 0, 0, 917, 918, 
7, 6, 0, 0, 918, 919, 7, 13, 0, 0, 919, 920, 7, 1, 0, 0, 920, 921, 7, 18, 0, 0, 921, 922, 7, 3, 0, 0, 922, 144, 1, 0, 0, 0, 923, 924, 5, 41, 0, 0, 924, 146, 1, 0, 0, 0, 925, 926, 7, 5, 0, 0, 926, 927, 7, 6, 0, 0, 927, 928, 7, 20, 0, 0, 928, 929, 7, 3, 0, 0, 929, 148, 1, 0, 0, 0, 930, 931, 5, 61, 0, 0, 931, 932, 5, 61, 0, 0, 932, 150, 1, 0, 0, 0, 933, 934, 5, 61, 0, 0, 934, 935, 5, 126, 0, 0, 935, 152, 1, 0, 0, 0, 936, 937, 5, 33, 0, 0, 937, 938, 5, 61, 0, 0, 938, 154, 1, 0, 0, 0, 939, 940, 5, 60, 0, 0, 940, 156, 1, 0, 0, 0, 941, 942, 5, 60, 0, 0, 942, 943, 5, 61, 0, 0, 943, 158, 1, 0, 0, 0, 944, 945, 5, 62, 0, 0, 945, 160, 1, 0, 0, 0, 946, 947, 5, 62, 0, 0, 947, 948, 5, 61, 0, 0, 948, 162, 1, 0, 0, 0, 949, 950, 5, 43, 0, 0, 950, 164, 1, 0, 0, 0, 951, 952, 5, 45, 0, 0, 952, 166, 1, 0, 0, 0, 953, 954, 5, 42, 0, 0, 954, 168, 1, 0, 0, 0, 955, 956, 5, 47, 0, 0, 956, 170, 1, 0, 0, 0, 957, 958, 5, 37, 0, 0, 958, 172, 1, 0, 0, 0, 959, 960, 5, 123, 0, 0, 960, 174, 1, 0, 0, 0, 961, 962, 5, 125, 0, 0, 962, 176, 1, 0, 0, 0, 963, 964, 3, 47, 15, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 80, 13, 0, 966, 178, 1, 0, 0, 0, 967, 970, 3, 141, 62, 0, 968, 971, 3, 77, 30, 0, 969, 971, 3, 91, 37, 0, 970, 968, 1, 0, 0, 0, 970, 969, 1, 0, 0, 0, 971, 975, 1, 0, 0, 0, 972, 974, 3, 93, 38, 0, 973, 972, 1, 0, 0, 0, 974, 977, 1, 0, 0, 0, 975, 973, 1, 0, 0, 0, 975, 976, 1, 0, 0, 0, 976, 985, 1, 0, 0, 0, 977, 975, 1, 0, 0, 0, 978, 980, 3, 141, 62, 0, 979, 981, 3, 75, 29, 0, 980, 979, 1, 0, 0, 0, 981, 982, 1, 0, 0, 0, 982, 980, 1, 0, 0, 0, 982, 983, 1, 0, 0, 0, 983, 985, 1, 0, 0, 0, 984, 967, 1, 0, 0, 0, 984, 978, 1, 0, 0, 0, 985, 180, 1, 0, 0, 0, 986, 987, 5, 91, 0, 0, 987, 988, 1, 0, 0, 0, 988, 989, 6, 82, 0, 0, 989, 990, 6, 82, 0, 0, 990, 182, 1, 0, 0, 0, 991, 992, 5, 93, 0, 0, 992, 993, 1, 0, 0, 0, 993, 994, 6, 83, 12, 0, 994, 995, 6, 83, 12, 0, 995, 184, 1, 0, 0, 0, 996, 1000, 3, 77, 30, 0, 997, 999, 3, 93, 38, 0, 998, 997, 1, 0, 0, 0, 999, 1002, 1, 0, 0, 0, 1000, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 1013, 1, 0, 0, 0, 1002, 1000, 1, 0, 0, 0, 1003, 1006, 3, 91, 37, 0, 1004, 1006, 3, 85, 34, 0, 1005, 1003, 1, 0, 0, 0, 1005, 1004, 1, 0, 0, 0, 1006, 1008, 1, 0, 0, 0, 1007, 1009, 3, 93, 38, 0, 1008, 1007, 1, 0, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1008, 1, 0, 0, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1013, 1, 0, 0, 0, 1012, 996, 1, 0, 0, 0, 1012, 1005, 1, 0, 0, 0, 1013, 186, 1, 0, 0, 0, 1014, 1016, 3, 87, 35, 0, 1015, 1017, 3, 89, 36, 0, 1016, 1015, 1, 0, 0, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1016, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 3, 87, 35, 0, 1021, 188, 1, 0, 0, 0, 1022, 1023, 3, 187, 85, 0, 1023, 190, 1, 0, 0, 0, 1024, 1025, 3, 67, 25, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 6, 87, 11, 0, 1027, 192, 1, 0, 0, 0, 1028, 1029, 3, 69, 26, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 6, 88, 11, 0, 1031, 194, 1, 0, 0, 0, 1032, 1033, 3, 71, 27, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 89, 11, 0, 1035, 196, 1, 0, 0, 0, 1036, 1037, 3, 181, 82, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 90, 14, 0, 1039, 1040, 6, 90, 15, 0, 1040, 198, 1, 0, 0, 0, 1041, 1042, 3, 73, 28, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1044, 6, 91, 16, 0, 1044, 1045, 6, 91, 12, 0, 1045, 200, 1, 0, 0, 0, 1046, 1047, 3, 71, 27, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 92, 11, 0, 1049, 202, 1, 0, 0, 0, 1050, 1051, 3, 67, 25, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 93, 11, 0, 1053, 204, 1, 0, 0, 0, 1054, 1055, 3, 69, 26, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 94, 11, 0, 1057, 206, 1, 0, 0, 0, 1058, 1059, 3, 73, 28, 0, 1059, 1060, 1, 
0, 0, 0, 1060, 1061, 6, 95, 16, 0, 1061, 1062, 6, 95, 12, 0, 1062, 208, 1, 0, 0, 0, 1063, 1064, 3, 181, 82, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 96, 14, 0, 1066, 210, 1, 0, 0, 0, 1067, 1068, 3, 183, 83, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 97, 17, 0, 1070, 212, 1, 0, 0, 0, 1071, 1072, 3, 111, 47, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1074, 6, 98, 18, 0, 1074, 214, 1, 0, 0, 0, 1075, 1076, 3, 113, 48, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 99, 19, 0, 1078, 216, 1, 0, 0, 0, 1079, 1080, 3, 107, 45, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 100, 20, 0, 1082, 218, 1, 0, 0, 0, 1083, 1084, 7, 16, 0, 0, 1084, 1085, 7, 3, 0, 0, 1085, 1086, 7, 5, 0, 0, 1086, 1087, 7, 12, 0, 0, 1087, 1088, 7, 0, 0, 0, 1088, 1089, 7, 12, 0, 0, 1089, 1090, 7, 5, 0, 0, 1090, 1091, 7, 12, 0, 0, 1091, 220, 1, 0, 0, 0, 1092, 1096, 8, 32, 0, 0, 1093, 1094, 5, 47, 0, 0, 1094, 1096, 8, 33, 0, 0, 1095, 1092, 1, 0, 0, 0, 1095, 1093, 1, 0, 0, 0, 1096, 222, 1, 0, 0, 0, 1097, 1099, 3, 221, 102, 0, 1098, 1097, 1, 0, 0, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1098, 1, 0, 0, 0, 1100, 1101, 1, 0, 0, 0, 1101, 224, 1, 0, 0, 0, 1102, 1103, 3, 223, 103, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 104, 21, 0, 1105, 226, 1, 0, 0, 0, 1106, 1107, 3, 95, 39, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 105, 22, 0, 1109, 228, 1, 0, 0, 0, 1110, 1111, 3, 67, 25, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 106, 11, 0, 1113, 230, 1, 0, 0, 0, 1114, 1115, 3, 69, 26, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 107, 11, 0, 1117, 232, 1, 0, 0, 0, 1118, 1119, 3, 71, 27, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 108, 11, 0, 1121, 234, 1, 0, 0, 0, 1122, 1123, 3, 73, 28, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 109, 16, 0, 1125, 1126, 6, 109, 12, 0, 1126, 236, 1, 0, 0, 0, 1127, 1128, 3, 117, 50, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 110, 23, 0, 1130, 238, 1, 0, 0, 0, 1131, 1132, 3, 113, 48, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 111, 19, 0, 1134, 240, 1, 0, 0, 0, 1135, 1136, 3, 141, 62, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 112, 24, 0, 1138, 242, 1, 0, 0, 0, 1139, 1140, 3, 179, 81, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 113, 25, 0, 1142, 244, 1, 0, 0, 0, 1143, 1148, 3, 77, 30, 0, 1144, 1148, 3, 75, 29, 0, 1145, 1148, 3, 91, 37, 0, 1146, 1148, 3, 167, 75, 0, 1147, 1143, 1, 0, 0, 0, 1147, 1144, 1, 0, 0, 0, 1147, 1145, 1, 0, 0, 0, 1147, 1146, 1, 0, 0, 0, 1148, 246, 1, 0, 0, 0, 1149, 1152, 3, 77, 30, 0, 1150, 1152, 3, 167, 75, 0, 1151, 1149, 1, 0, 0, 0, 1151, 1150, 1, 0, 0, 0, 1152, 1156, 1, 0, 0, 0, 1153, 1155, 3, 245, 114, 0, 1154, 1153, 1, 0, 0, 0, 1155, 1158, 1, 0, 0, 0, 1156, 1154, 1, 0, 0, 0, 1156, 1157, 1, 0, 0, 0, 1157, 1169, 1, 0, 0, 0, 1158, 1156, 1, 0, 0, 0, 1159, 1162, 3, 91, 37, 0, 1160, 1162, 3, 85, 34, 0, 1161, 1159, 1, 0, 0, 0, 1161, 1160, 1, 0, 0, 0, 1162, 1164, 1, 0, 0, 0, 1163, 1165, 3, 245, 114, 0, 1164, 1163, 1, 0, 0, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1169, 1, 0, 0, 0, 1168, 1151, 1, 0, 0, 0, 1168, 1161, 1, 0, 0, 0, 1169, 248, 1, 0, 0, 0, 1170, 1173, 3, 247, 115, 0, 1171, 1173, 3, 187, 85, 0, 1172, 1170, 1, 0, 0, 0, 1172, 1171, 1, 0, 0, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1172, 1, 0, 0, 0, 1174, 1175, 1, 0, 0, 0, 1175, 250, 1, 0, 0, 0, 1176, 1177, 3, 67, 25, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 117, 11, 0, 1179, 252, 1, 0, 0, 0, 1180, 1181, 3, 69, 26, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 118, 11, 0, 1183, 254, 1, 0, 0, 0, 1184, 1185, 3, 71, 27, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 119, 11, 0, 1187, 256, 1, 0, 0, 0, 1188, 1189, 3, 73, 28, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 
6, 120, 16, 0, 1191, 1192, 6, 120, 12, 0, 1192, 258, 1, 0, 0, 0, 1193, 1194, 3, 107, 45, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 121, 20, 0, 1196, 260, 1, 0, 0, 0, 1197, 1198, 3, 113, 48, 0, 1198, 1199, 1, 0, 0, 0, 1199, 1200, 6, 122, 19, 0, 1200, 262, 1, 0, 0, 0, 1201, 1202, 3, 117, 50, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 6, 123, 23, 0, 1204, 264, 1, 0, 0, 0, 1205, 1206, 3, 141, 62, 0, 1206, 1207, 1, 0, 0, 0, 1207, 1208, 6, 124, 24, 0, 1208, 266, 1, 0, 0, 0, 1209, 1210, 3, 179, 81, 0, 1210, 1211, 1, 0, 0, 0, 1211, 1212, 6, 125, 25, 0, 1212, 268, 1, 0, 0, 0, 1213, 1214, 7, 12, 0, 0, 1214, 1215, 7, 2, 0, 0, 1215, 270, 1, 0, 0, 0, 1216, 1217, 3, 249, 116, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 127, 26, 0, 1219, 272, 1, 0, 0, 0, 1220, 1221, 3, 67, 25, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 128, 11, 0, 1223, 274, 1, 0, 0, 0, 1224, 1225, 3, 69, 26, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 129, 11, 0, 1227, 276, 1, 0, 0, 0, 1228, 1229, 3, 71, 27, 0, 1229, 1230, 1, 0, 0, 0, 1230, 1231, 6, 130, 11, 0, 1231, 278, 1, 0, 0, 0, 1232, 1233, 3, 73, 28, 0, 1233, 1234, 1, 0, 0, 0, 1234, 1235, 6, 131, 16, 0, 1235, 1236, 6, 131, 12, 0, 1236, 280, 1, 0, 0, 0, 1237, 1238, 3, 181, 82, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 6, 132, 14, 0, 1240, 1241, 6, 132, 27, 0, 1241, 282, 1, 0, 0, 0, 1242, 1243, 7, 7, 0, 0, 1243, 1244, 7, 9, 0, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1246, 6, 133, 28, 0, 1246, 284, 1, 0, 0, 0, 1247, 1248, 7, 19, 0, 0, 1248, 1249, 7, 1, 0, 0, 1249, 1250, 7, 5, 0, 0, 1250, 1251, 7, 10, 0, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 134, 28, 0, 1253, 286, 1, 0, 0, 0, 1254, 1255, 8, 34, 0, 0, 1255, 288, 1, 0, 0, 0, 1256, 1258, 3, 287, 135, 0, 1257, 1256, 1, 0, 0, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1257, 1, 0, 0, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 1, 0, 0, 0, 1261, 1262, 3, 111, 47, 0, 1262, 1264, 1, 0, 0, 0, 1263, 1257, 1, 0, 0, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1266, 1, 0, 0, 0, 1265, 1267, 3, 287, 135, 0, 1266, 1265, 1, 0, 0, 0, 1267, 1268, 1, 0, 0, 0, 1268, 1266, 1, 0, 0, 0, 1268, 1269, 1, 0, 0, 0, 1269, 290, 1, 0, 0, 0, 1270, 1271, 3, 289, 136, 0, 1271, 1272, 1, 0, 0, 0, 1272, 1273, 6, 137, 29, 0, 1273, 292, 1, 0, 0, 0, 1274, 1275, 3, 67, 25, 0, 1275, 1276, 1, 0, 0, 0, 1276, 1277, 6, 138, 11, 0, 1277, 294, 1, 0, 0, 0, 1278, 1279, 3, 69, 26, 0, 1279, 1280, 1, 0, 0, 0, 1280, 1281, 6, 139, 11, 0, 1281, 296, 1, 0, 0, 0, 1282, 1283, 3, 71, 27, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 140, 11, 0, 1285, 298, 1, 0, 0, 0, 1286, 1287, 3, 73, 28, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 141, 16, 0, 1289, 1290, 6, 141, 12, 0, 1290, 1291, 6, 141, 12, 0, 1291, 300, 1, 0, 0, 0, 1292, 1293, 3, 107, 45, 0, 1293, 1294, 1, 0, 0, 0, 1294, 1295, 6, 142, 20, 0, 1295, 302, 1, 0, 0, 0, 1296, 1297, 3, 113, 48, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 143, 19, 0, 1299, 304, 1, 0, 0, 0, 1300, 1301, 3, 117, 50, 0, 1301, 1302, 1, 0, 0, 0, 1302, 1303, 6, 144, 23, 0, 1303, 306, 1, 0, 0, 0, 1304, 1305, 3, 285, 134, 0, 1305, 1306, 1, 0, 0, 0, 1306, 1307, 6, 145, 30, 0, 1307, 308, 1, 0, 0, 0, 1308, 1309, 3, 249, 116, 0, 1309, 1310, 1, 0, 0, 0, 1310, 1311, 6, 146, 26, 0, 1311, 310, 1, 0, 0, 0, 1312, 1313, 3, 189, 86, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 147, 31, 0, 1315, 312, 1, 0, 0, 0, 1316, 1317, 3, 141, 62, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 148, 24, 0, 1319, 314, 1, 0, 0, 0, 1320, 1321, 3, 179, 81, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 149, 25, 0, 1323, 316, 1, 0, 0, 0, 1324, 1325, 3, 67, 25, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 150, 11, 0, 1327, 318, 1, 0, 0, 0, 1328, 1329, 3, 69, 26, 0, 
1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 151, 11, 0, 1331, 320, 1, 0, 0, 0, 1332, 1333, 3, 71, 27, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 152, 11, 0, 1335, 322, 1, 0, 0, 0, 1336, 1337, 3, 73, 28, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 153, 16, 0, 1339, 1340, 6, 153, 12, 0, 1340, 324, 1, 0, 0, 0, 1341, 1342, 3, 117, 50, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 154, 23, 0, 1344, 326, 1, 0, 0, 0, 1345, 1346, 3, 141, 62, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 155, 24, 0, 1348, 328, 1, 0, 0, 0, 1349, 1350, 3, 179, 81, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 156, 25, 0, 1352, 330, 1, 0, 0, 0, 1353, 1354, 3, 189, 86, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 157, 31, 0, 1356, 332, 1, 0, 0, 0, 1357, 1358, 3, 185, 84, 0, 1358, 1359, 1, 0, 0, 0, 1359, 1360, 6, 158, 32, 0, 1360, 334, 1, 0, 0, 0, 1361, 1362, 3, 67, 25, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1364, 6, 159, 11, 0, 1364, 336, 1, 0, 0, 0, 1365, 1366, 3, 69, 26, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 160, 11, 0, 1368, 338, 1, 0, 0, 0, 1369, 1370, 3, 71, 27, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 161, 11, 0, 1372, 340, 1, 0, 0, 0, 1373, 1374, 3, 73, 28, 0, 1374, 1375, 1, 0, 0, 0, 1375, 1376, 6, 162, 16, 0, 1376, 1377, 6, 162, 12, 0, 1377, 342, 1, 0, 0, 0, 1378, 1379, 7, 1, 0, 0, 1379, 1380, 7, 9, 0, 0, 1380, 1381, 7, 15, 0, 0, 1381, 1382, 7, 7, 0, 0, 1382, 344, 1, 0, 0, 0, 1383, 1384, 3, 67, 25, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 164, 11, 0, 1386, 346, 1, 0, 0, 0, 1387, 1388, 3, 69, 26, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 165, 11, 0, 1390, 348, 1, 0, 0, 0, 1391, 1392, 3, 71, 27, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 166, 11, 0, 1394, 350, 1, 0, 0, 0, 1395, 1396, 3, 183, 83, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 167, 17, 0, 1398, 1399, 6, 167, 12, 0, 1399, 352, 1, 0, 0, 0, 1400, 1401, 3, 111, 47, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 168, 18, 0, 1403, 354, 1, 0, 0, 0, 1404, 1410, 3, 85, 34, 0, 1405, 1410, 3, 75, 29, 0, 1406, 1410, 3, 117, 50, 0, 1407, 1410, 3, 77, 30, 0, 1408, 1410, 3, 91, 37, 0, 1409, 1404, 1, 0, 0, 0, 1409, 1405, 1, 0, 0, 0, 1409, 1406, 1, 0, 0, 0, 1409, 1407, 1, 0, 0, 0, 1409, 1408, 1, 0, 0, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1409, 1, 0, 0, 0, 1411, 1412, 1, 0, 0, 0, 1412, 356, 1, 0, 0, 0, 1413, 1414, 3, 67, 25, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 170, 11, 0, 1416, 358, 1, 0, 0, 0, 1417, 1418, 3, 69, 26, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 171, 11, 0, 1420, 360, 1, 0, 0, 0, 1421, 1422, 3, 71, 27, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1424, 6, 172, 11, 0, 1424, 362, 1, 0, 0, 0, 1425, 1426, 3, 73, 28, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 173, 16, 0, 1428, 1429, 6, 173, 12, 0, 1429, 364, 1, 0, 0, 0, 1430, 1431, 3, 111, 47, 0, 1431, 1432, 1, 0, 0, 0, 1432, 1433, 6, 174, 18, 0, 1433, 366, 1, 0, 0, 0, 1434, 1435, 3, 113, 48, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 175, 19, 0, 1437, 368, 1, 0, 0, 0, 1438, 1439, 3, 117, 50, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 176, 23, 0, 1441, 370, 1, 0, 0, 0, 1442, 1443, 3, 283, 133, 0, 1443, 1444, 1, 0, 0, 0, 1444, 1445, 6, 177, 33, 0, 1445, 1446, 6, 177, 34, 0, 1446, 372, 1, 0, 0, 0, 1447, 1448, 3, 223, 103, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 178, 21, 0, 1450, 374, 1, 0, 0, 0, 1451, 1452, 3, 95, 39, 0, 1452, 1453, 1, 0, 0, 0, 1453, 1454, 6, 179, 22, 0, 1454, 376, 1, 0, 0, 0, 1455, 1456, 3, 67, 25, 0, 1456, 1457, 1, 0, 0, 0, 1457, 1458, 6, 180, 11, 0, 1458, 378, 1, 0, 0, 0, 1459, 1460, 3, 69, 26, 0, 1460, 1461, 1, 0, 0, 0, 1461, 1462, 6, 181, 11, 0, 1462, 380, 1, 0, 0, 0, 1463, 1464, 3, 71, 27, 0, 1464, 1465, 1, 0, 0, 0, 1465, 1466, 6, 
182, 11, 0, 1466, 382, 1, 0, 0, 0, 1467, 1468, 3, 73, 28, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 183, 16, 0, 1470, 1471, 6, 183, 12, 0, 1471, 1472, 6, 183, 12, 0, 1472, 384, 1, 0, 0, 0, 1473, 1474, 3, 113, 48, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 184, 19, 0, 1476, 386, 1, 0, 0, 0, 1477, 1478, 3, 117, 50, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 185, 23, 0, 1480, 388, 1, 0, 0, 0, 1481, 1482, 3, 249, 116, 0, 1482, 1483, 1, 0, 0, 0, 1483, 1484, 6, 186, 26, 0, 1484, 390, 1, 0, 0, 0, 1485, 1486, 3, 67, 25, 0, 1486, 1487, 1, 0, 0, 0, 1487, 1488, 6, 187, 11, 0, 1488, 392, 1, 0, 0, 0, 1489, 1490, 3, 69, 26, 0, 1490, 1491, 1, 0, 0, 0, 1491, 1492, 6, 188, 11, 0, 1492, 394, 1, 0, 0, 0, 1493, 1494, 3, 71, 27, 0, 1494, 1495, 1, 0, 0, 0, 1495, 1496, 6, 189, 11, 0, 1496, 396, 1, 0, 0, 0, 1497, 1498, 3, 73, 28, 0, 1498, 1499, 1, 0, 0, 0, 1499, 1500, 6, 190, 16, 0, 1500, 1501, 6, 190, 12, 0, 1501, 398, 1, 0, 0, 0, 1502, 1503, 7, 35, 0, 0, 1503, 1504, 7, 7, 0, 0, 1504, 1505, 7, 1, 0, 0, 1505, 1506, 7, 9, 0, 0, 1506, 400, 1, 0, 0, 0, 1507, 1508, 3, 269, 126, 0, 1508, 1509, 1, 0, 0, 0, 1509, 1510, 6, 192, 35, 0, 1510, 402, 1, 0, 0, 0, 1511, 1512, 3, 283, 133, 0, 1512, 1513, 1, 0, 0, 0, 1513, 1514, 6, 193, 33, 0, 1514, 1515, 6, 193, 12, 0, 1515, 1516, 6, 193, 0, 0, 1516, 404, 1, 0, 0, 0, 1517, 1518, 7, 20, 0, 0, 1518, 1519, 7, 2, 0, 0, 1519, 1520, 7, 1, 0, 0, 1520, 1521, 7, 9, 0, 0, 1521, 1522, 7, 17, 0, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 194, 12, 0, 1524, 1525, 6, 194, 0, 0, 1525, 406, 1, 0, 0, 0, 1526, 1527, 3, 223, 103, 0, 1527, 1528, 1, 0, 0, 0, 1528, 1529, 6, 195, 21, 0, 1529, 408, 1, 0, 0, 0, 1530, 1531, 3, 95, 39, 0, 1531, 1532, 1, 0, 0, 0, 1532, 1533, 6, 196, 22, 0, 1533, 410, 1, 0, 0, 0, 1534, 1535, 3, 111, 47, 0, 1535, 1536, 1, 0, 0, 0, 1536, 1537, 6, 197, 18, 0, 1537, 412, 1, 0, 0, 0, 1538, 1539, 3, 185, 84, 0, 1539, 1540, 1, 0, 0, 0, 1540, 1541, 6, 198, 32, 0, 1541, 414, 1, 0, 0, 0, 1542, 1543, 3, 189, 86, 0, 1543, 1544, 1, 0, 0, 0, 1544, 1545, 6, 199, 31, 0, 1545, 416, 1, 0, 0, 0, 1546, 1547, 3, 67, 25, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 200, 11, 0, 1549, 418, 1, 0, 0, 0, 1550, 1551, 3, 69, 26, 0, 1551, 1552, 1, 0, 0, 0, 1552, 1553, 6, 201, 11, 0, 1553, 420, 1, 0, 0, 0, 1554, 1555, 3, 71, 27, 0, 1555, 1556, 1, 0, 0, 0, 1556, 1557, 6, 202, 11, 0, 1557, 422, 1, 0, 0, 0, 1558, 1559, 3, 73, 28, 0, 1559, 1560, 1, 0, 0, 0, 1560, 1561, 6, 203, 16, 0, 1561, 1562, 6, 203, 12, 0, 1562, 424, 1, 0, 0, 0, 1563, 1564, 3, 223, 103, 0, 1564, 1565, 1, 0, 0, 0, 1565, 1566, 6, 204, 21, 0, 1566, 1567, 6, 204, 12, 0, 1567, 1568, 6, 204, 36, 0, 1568, 426, 1, 0, 0, 0, 1569, 1570, 3, 95, 39, 0, 1570, 1571, 1, 0, 0, 0, 1571, 1572, 6, 205, 22, 0, 1572, 1573, 6, 205, 12, 0, 1573, 1574, 6, 205, 36, 0, 1574, 428, 1, 0, 0, 0, 1575, 1576, 3, 67, 25, 0, 1576, 1577, 1, 0, 0, 0, 1577, 1578, 6, 206, 11, 0, 1578, 430, 1, 0, 0, 0, 1579, 1580, 3, 69, 26, 0, 1580, 1581, 1, 0, 0, 0, 1581, 1582, 6, 207, 11, 0, 1582, 432, 1, 0, 0, 0, 1583, 1584, 3, 71, 27, 0, 1584, 1585, 1, 0, 0, 0, 1585, 1586, 6, 208, 11, 0, 1586, 434, 1, 0, 0, 0, 1587, 1588, 3, 111, 47, 0, 1588, 1589, 1, 0, 0, 0, 1589, 1590, 6, 209, 18, 0, 1590, 1591, 6, 209, 12, 0, 1591, 1592, 6, 209, 10, 0, 1592, 436, 1, 0, 0, 0, 1593, 1594, 3, 113, 48, 0, 1594, 1595, 1, 0, 0, 0, 1595, 1596, 6, 210, 19, 0, 1596, 1597, 6, 210, 12, 0, 1597, 1598, 6, 210, 10, 0, 1598, 438, 1, 0, 0, 0, 1599, 1600, 3, 67, 25, 0, 1600, 1601, 1, 0, 0, 0, 1601, 1602, 6, 211, 11, 0, 1602, 440, 1, 0, 0, 0, 1603, 1604, 3, 69, 26, 0, 1604, 1605, 1, 0, 0, 0, 1605, 1606, 6, 212, 11, 0, 1606, 
442, 1, 0, 0, 0, 1607, 1608, 3, 71, 27, 0, 1608, 1609, 1, 0, 0, 0, 1609, 1610, 6, 213, 11, 0, 1610, 444, 1, 0, 0, 0, 1611, 1612, 3, 189, 86, 0, 1612, 1613, 1, 0, 0, 0, 1613, 1614, 6, 214, 12, 0, 1614, 1615, 6, 214, 0, 0, 1615, 1616, 6, 214, 31, 0, 1616, 446, 1, 0, 0, 0, 1617, 1618, 3, 185, 84, 0, 1618, 1619, 1, 0, 0, 0, 1619, 1620, 6, 215, 12, 0, 1620, 1621, 6, 215, 0, 0, 1621, 1622, 6, 215, 32, 0, 1622, 448, 1, 0, 0, 0, 1623, 1624, 3, 101, 42, 0, 1624, 1625, 1, 0, 0, 0, 1625, 1626, 6, 216, 12, 0, 1626, 1627, 6, 216, 0, 0, 1627, 1628, 6, 216, 37, 0, 1628, 450, 1, 0, 0, 0, 1629, 1630, 3, 73, 28, 0, 1630, 1631, 1, 0, 0, 0, 1631, 1632, 6, 217, 16, 0, 1632, 1633, 6, 217, 12, 0, 1633, 452, 1, 0, 0, 0, 1634, 1635, 3, 73, 28, 0, 1635, 1636, 1, 0, 0, 0, 1636, 1637, 6, 218, 16, 0, 1637, 1638, 6, 218, 12, 0, 1638, 454, 1, 0, 0, 0, 1639, 1640, 3, 185, 84, 0, 1640, 1641, 1, 0, 0, 0, 1641, 1642, 6, 219, 32, 0, 1642, 456, 1, 0, 0, 0, 1643, 1644, 3, 71, 27, 0, 1644, 1645, 1, 0, 0, 0, 1645, 1646, 6, 220, 11, 0, 1646, 458, 1, 0, 0, 0, 1647, 1648, 3, 67, 25, 0, 1648, 1649, 1, 0, 0, 0, 1649, 1650, 6, 221, 11, 0, 1650, 460, 1, 0, 0, 0, 1651, 1652, 3, 69, 26, 0, 1652, 1653, 1, 0, 0, 0, 1653, 1654, 6, 222, 11, 0, 1654, 462, 1, 0, 0, 0, 67, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 679, 689, 693, 696, 705, 707, 718, 737, 742, 751, 758, 763, 765, 776, 784, 787, 789, 794, 799, 805, 812, 817, 823, 826, 834, 838, 970, 975, 982, 984, 1000, 1005, 1010, 1012, 1018, 1095, 1100, 1147, 1151, 1156, 1161, 1166, 1168, 1172, 1174, 1259, 1263, 1268, 1409, 1411, 38, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 13, 0, 5, 11, 0, 5, 14, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0]
\ No newline at end of file
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
index 28358a0f614e6..8a40fd33ba64d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
@@ -27,8 +27,8 @@ public class EsqlBaseLexer extends LexerConfig {
   public static final int
     DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8,
     LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15,
-    WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20,
-    DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24,
+    WHERE=16, JOIN_LOOKUP=17, DEV_INLINESTATS=18, DEV_INSIST=19, DEV_LOOKUP=20,
+    DEV_METRICS=21, DEV_JOIN_FULL=22, DEV_JOIN_LEFT=23, DEV_JOIN_RIGHT=24,
     UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29,
     QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34,
     ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42,
@@ -48,14 +48,16 @@ public class EsqlBaseLexer extends LexerConfig {
     SHOW_MULTILINE_COMMENT=109, SHOW_WS=110, SETTING=111, SETTING_LINE_COMMENT=112,
     SETTTING_MULTILINE_COMMENT=113, SETTING_WS=114, LOOKUP_LINE_COMMENT=115,
     LOOKUP_MULTILINE_COMMENT=116, LOOKUP_WS=117, LOOKUP_FIELD_LINE_COMMENT=118,
-    LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, USING=121, JOIN_LINE_COMMENT=122,
-    JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, METRICS_MULTILINE_COMMENT=126,
-    METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, CLOSING_METRICS_MULTILINE_COMMENT=129,
-    CLOSING_METRICS_WS=130;
+    LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, JOIN=121, USING=122,
+    JOIN_LINE_COMMENT=123, JOIN_MULTILINE_COMMENT=124, JOIN_WS=125, METRICS_LINE_COMMENT=126,
+    METRICS_MULTILINE_COMMENT=127, METRICS_WS=128, CLOSING_METRICS_LINE_COMMENT=129,
+    CLOSING_METRICS_MULTILINE_COMMENT=130, CLOSING_METRICS_WS=131, INSIST_WS=132,
+    INSIST_LINE_COMMENT=133, INSIST_MULTILINE_COMMENT=134;
   public static final int
     EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5,
     ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10,
-    LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, JOIN_MODE=13, METRICS_MODE=14, CLOSING_METRICS_MODE=15;
+    LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, JOIN_MODE=13, METRICS_MODE=14, CLOSING_METRICS_MODE=15,
+    INSIST_MODE=16;
   public static String[] channelNames = {
     "DEFAULT_TOKEN_CHANNEL", "HIDDEN"
   };
@@ -64,22 +66,22 @@ public class EsqlBaseLexer extends LexerConfig {
     "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE",
     "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE",
     "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "JOIN_MODE", "METRICS_MODE",
-    "CLOSING_METRICS_MODE"
+    "CLOSING_METRICS_MODE", "INSIST_MODE"
   };

   private static String[] makeRuleNames() {
     return new String[] {
       "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP",
       "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE",
-      "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL",
-      "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", "UNKNOWN_CMD",
-      "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER",
-      "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE",
-      "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING",
-      "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP",
-      "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST",
-      "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE",
-      "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK",
+      "JOIN_LOOKUP", "DEV_INLINESTATS", "DEV_INSIST", "DEV_LOOKUP", "DEV_METRICS",
+      "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD", "LINE_COMMENT",
+      "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE",
+      "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK",
+      "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL",
+      "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON",
+      "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE",
+      "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ",
+      "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK",
       "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM",
       "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID",
       "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS",
@@ -108,14 +110,16 @@ private static String[] makeRuleNames() {
       "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT",
       "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA",
       "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT",
-      "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN_JOIN",
+      "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN",
       "JOIN_AS", "JOIN_ON", "USING", "JOIN_UNQUOTED_SOURCE", "JOIN_QUOTED_SOURCE",
       "JOIN_COLON", "JOIN_UNQUOTED_IDENTIFER", "JOIN_QUOTED_IDENTIFIER", "JOIN_LINE_COMMENT",
       "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_PIPE", "METRICS_UNQUOTED_SOURCE",
       "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT",
       "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", "CLOSING_METRICS_LINE_COMMENT",
       "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER",
-      "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE"
+      "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE",
+      "INSIST_PIPE", "INSIST_IDENTIFIER", "INSIST_WS", "INSIST_LINE_COMMENT",
+      "INSIST_MULTILINE_COMMENT"
     };
   }
   public static final String[] ruleNames = makeRuleNames();
@@ -124,17 +128,17 @@ private static String[] makeLiteralNames() {
     return new String[] {
       null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'",
       "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'",
-      "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null,
-      null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'",
-      "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'",
-      "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'",
-      "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'",
-      "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'{'", "'}'",
-      null, null, "']'", null, null, null, null, null, null, null, null, "'metadata'",
-      null, null, null, null, null, null, null, null, "'as'", null, null, null,
-      "'on'", "'with'", null, null, null, null, null, null, null, null, null,
-      null, "'info'", null, null, null, null, null, null, null, null, null,
-      null, null, null, null, "'USING'"
+      "'sort'", "'stats'", "'where'", "'lookup'", null, null, null, null, null,
+      null, null, null, null, null, null, null, "'|'", null, null, null, "'by'",
+      "'and'", "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'",
+      "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'",
+      "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'",
+      "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'",
+      "'{'", "'}'", null, null, "']'", null, null, null, null, null, null,
+      null, null, "'metadata'", null, null, null, null, null, null, null, null,
+      "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null,
+      null, null, null, null, null, "'info'", null, null, null, null, null,
+      null, null, null, null, null, null, null, null, "'join'", "'USING'"
     };
   }
   private static final String[] _LITERAL_NAMES = makeLiteralNames();
@@ -142,13 +146,13 @@ private static String[] makeSymbolicNames() {
     return new String[] {
       null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK",
       "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS",
-      "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN",
-      "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP",
-      "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING",
-      "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP",
-      "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST",
-      "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE",
-      "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK",
+      "WHERE", "JOIN_LOOKUP", "DEV_INLINESTATS", "DEV_INSIST", "DEV_LOOKUP",
+      "DEV_METRICS", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD",
+      "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL",
+      "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON",
+      "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE",
+      "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ",
+      "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK",
       "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM",
       "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER",
       "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS",
@@ -163,9 +167,10 @@ private static String[] makeSymbolicNames() {
       "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT",
       "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS",
       "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS",
-      "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT",
-      "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT",
-      "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS"
+      "JOIN", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS",
+      "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT",
+      "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS", "INSIST_WS",
+      "INSIST_LINE_COMMENT", "INSIST_MULTILINE_COMMENT"
     };
   }
   private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
@@ -230,38 +235,20 @@ public EsqlBaseLexer(CharStream input) {
   @Override
   public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
     switch (ruleIndex) {
-    case 16:
-      return DEV_INLINESTATS_sempred((RuleContext)_localctx, predIndex);
     case 17:
-      return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex);
+      return DEV_INLINESTATS_sempred((RuleContext)_localctx, predIndex);
     case 18:
-      return DEV_METRICS_sempred((RuleContext)_localctx, predIndex);
+      return DEV_INSIST_sempred((RuleContext)_localctx, predIndex);
     case 19:
-      return DEV_JOIN_sempred((RuleContext)_localctx, predIndex);
+      return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex);
     case 20:
-      return DEV_JOIN_FULL_sempred((RuleContext)_localctx, predIndex);
+      return DEV_METRICS_sempred((RuleContext)_localctx, predIndex);
     case 21:
-      return DEV_JOIN_LEFT_sempred((RuleContext)_localctx, predIndex);
+      return DEV_JOIN_FULL_sempred((RuleContext)_localctx, predIndex);
     case 22:
-      return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex);
+      return DEV_JOIN_LEFT_sempred((RuleContext)_localctx, predIndex);
     case 23:
-      return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex);
-    case 112:
-      return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 113:
-      return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 124:
-      return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 125:
-      return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 148:
-      return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 149:
-      return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 155:
-      return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 156:
-      return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex);
+      return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex);
     }
     return true;
   }
@@ -272,21 +259,21 @@ private boolean DEV_INLINESTATS_sempred(RuleContext _localctx, int predIndex) {
     }
     return true;
   }
-  private boolean DEV_LOOKUP_sempred(RuleContext _localctx, int predIndex) {
+  private boolean DEV_INSIST_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 1:
      return this.isDevVersion();
    }
     return true;
   }
-  private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) {
+  private boolean DEV_LOOKUP_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 2:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean DEV_JOIN_sempred(RuleContext _localctx, int predIndex) {
+  private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 3:
       return this.isDevVersion();
@@ -314,226 +301,165 @@ private boolean DEV_JOIN_RIGHT_sempred(RuleContext _localctx, int predIndex) {
     }
     return true;
   }
-  private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 7:
-      return this.isDevVersion();
-    }
-    return true;
-  }
-  private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 8:
-      return this.isDevVersion();
-    }
-    return true;
-  }
-  private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 9:
-      return this.isDevVersion();
-    }
-    return true;
-  }
-  private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 10:
-      return this.isDevVersion();
-    }
-    return true;
-  }
-  private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 11:
-      return this.isDevVersion();
-    }
-    return true;
-  }
-  private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 12:
-      return this.isDevVersion();
-    }
-    return true;
-  }
-  private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 13:
-      return this.isDevVersion();
-    }
-    return true;
-  }
-  private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 14:
-      return this.isDevVersion();
-    }
-    return true;
-  }
-  private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
-    switch (predIndex) {
-    case 15:
-      return this.isDevVersion();
-    }
-    return true;
-  }
   public static final String _serializedATN =
-    "\u0004\u0000\u0082\u065b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+
+    "\u0004\u0000\u0086\u0677\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+
     "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+
     "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+
     "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+
-    "\uffff\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+
-    "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+
-    "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+
-    "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+
-    "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+
-    "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007"+
-    "\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007"+
-    "\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007"+
-    "\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007"+
-    "\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007"+
-    "\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007"+
-    "\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007"+
-    "\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007"+
-    ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+
-    "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+
-    "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+
-    ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+
-    "@\u0002A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007"+
-    "E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007"+
-    "J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007"+
-    "O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007"+
-    "T\u0002U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007"+
-    "Y\u0002Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007"+
-    "^\u0002_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007"+
-    "c\u0002d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007"+
-    "h\u0002i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007"+
-    "m\u0002n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007"+
-    "r\u0002s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007"+
-    "w\u0002x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007"+
-    "|\u0002}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007"+
-    "\u0080\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007"+
-    "\u0083\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007"+
-    "\u0086\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007"+
-    "\u0089\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007"+
-    "\u008c\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007"+
-    "\u008f\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007"+
-    "\u0092\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007"+
-    "\u0095\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007"+
-    "\u0098\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007"+
-    "\u009b\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007"+
-    "\u009e\u0002\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007"+
-    "\u00a1\u0002\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007"+
-    "\u00a4\u0002\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007"+
-    "\u00a7\u0002\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007"+
-    "\u00aa\u0002\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007"+
-    "\u00ad\u0002\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007"+
-    "\u00b0\u0002\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007"+
-    "\u00b3\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007"+
-
"\u00b6\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007"+ - "\u00b9\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007"+ - "\u00bc\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007"+ - "\u00bf\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007"+ - "\u00c2\u0002\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007"+ - "\u00c5\u0002\u00c6\u0007\u00c6\u0002\u00c7\u0007\u00c7\u0002\u00c8\u0007"+ - "\u00c8\u0002\u00c9\u0007\u00c9\u0002\u00ca\u0007\u00ca\u0002\u00cb\u0007"+ - "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ - "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ - "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ - "\u00d4\u0002\u00d5\u0007\u00d5\u0002\u00d6\u0007\u00d6\u0002\u00d7\u0007"+ - "\u00d7\u0002\u00d8\u0007\u00d8\u0002\u00d9\u0007\u00d9\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ - "\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004\u0018\u0298\b\u0018\u000b"+ - 
"\u0018\f\u0018\u0299\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0005\u0019\u02a2\b\u0019\n\u0019\f\u0019\u02a5\t\u0019"+ - "\u0001\u0019\u0003\u0019\u02a8\b\u0019\u0001\u0019\u0003\u0019\u02ab\b"+ - "\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0005\u001a\u02b4\b\u001a\n\u001a\f\u001a\u02b7\t\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ - "\u0004\u001b\u02bf\b\u001b\u000b\u001b\f\u001b\u02c0\u0001\u001b\u0001"+ - "\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d\u0001"+ - "\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - " \u0001 \u0001!\u0001!\u0003!\u02d4\b!\u0001!\u0004!\u02d7\b!\u000b!\f"+ - "!\u02d8\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003$\u02e2"+ - "\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003&\u02e9\b&\u0001\'\u0001\'"+ - "\u0001\'\u0005\'\u02ee\b\'\n\'\f\'\u02f1\t\'\u0001\'\u0001\'\u0001\'\u0001"+ - "\'\u0001\'\u0001\'\u0005\'\u02f9\b\'\n\'\f\'\u02fc\t\'\u0001\'\u0001\'"+ - "\u0001\'\u0001\'\u0001\'\u0003\'\u0303\b\'\u0001\'\u0003\'\u0306\b\'\u0003"+ - "\'\u0308\b\'\u0001(\u0004(\u030b\b(\u000b(\f(\u030c\u0001)\u0004)\u0310"+ - "\b)\u000b)\f)\u0311\u0001)\u0001)\u0005)\u0316\b)\n)\f)\u0319\t)\u0001"+ - ")\u0001)\u0004)\u031d\b)\u000b)\f)\u031e\u0001)\u0004)\u0322\b)\u000b"+ - ")\f)\u0323\u0001)\u0001)\u0005)\u0328\b)\n)\f)\u032b\t)\u0003)\u032d\b"+ - ")\u0001)\u0001)\u0001)\u0001)\u0004)\u0333\b)\u000b)\f)\u0334\u0001)\u0001"+ - ")\u0003)\u0339\b)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ - "/\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - "3\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u0001"+ - "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u0001"+ - "7\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001"+ - "<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ - "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001"+ - "H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001"+ - "M\u0001M\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ - "Q\u0001Q\u0001Q\u0003Q\u03bd\bQ\u0001Q\u0005Q\u03c0\bQ\nQ\fQ\u03c3\tQ"+ - "\u0001Q\u0001Q\u0004Q\u03c7\bQ\u000bQ\fQ\u03c8\u0003Q\u03cb\bQ\u0001R"+ - "\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001"+ - "T\u0001T\u0005T\u03d9\bT\nT\fT\u03dc\tT\u0001T\u0001T\u0003T\u03e0\bT"+ - "\u0001T\u0004T\u03e3\bT\u000bT\fT\u03e4\u0003T\u03e7\bT\u0001U\u0001U"+ - "\u0004U\u03eb\bU\u000bU\fU\u03ec\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ - "W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001"+ - "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ - "[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001"+ - "^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001"+ - "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ - "c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001"+ - "e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0003"+ - 
"f\u043a\bf\u0001g\u0004g\u043d\bg\u000bg\fg\u043e\u0001h\u0001h\u0001"+ - "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ - "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ - "m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ - "o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001"+ - "q\u0001r\u0001r\u0001r\u0001r\u0003r\u0470\br\u0001s\u0001s\u0003s\u0474"+ - "\bs\u0001s\u0005s\u0477\bs\ns\fs\u047a\ts\u0001s\u0001s\u0003s\u047e\b"+ - "s\u0001s\u0004s\u0481\bs\u000bs\fs\u0482\u0003s\u0485\bs\u0001t\u0001"+ - "t\u0004t\u0489\bt\u000bt\ft\u048a\u0001u\u0001u\u0001u\u0001u\u0001v\u0001"+ - "v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ - "x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ - "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002"+ + "\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002"+ + "\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002"+ + "\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002"+ + "\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e"+ + "\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011"+ + "\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014"+ + "\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017"+ + "\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a"+ + "\u0002\u001b\u0007\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d"+ + "\u0002\u001e\u0007\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!"+ + "\u0007!\u0002\"\u0007\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002"+ + "&\u0007&\u0002\'\u0007\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002"+ + "+\u0007+\u0002,\u0007,\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u0002"+ + "0\u00070\u00021\u00071\u00022\u00072\u00023\u00073\u00024\u00074\u0002"+ + "5\u00075\u00026\u00076\u00027\u00077\u00028\u00078\u00029\u00079\u0002"+ + ":\u0007:\u0002;\u0007;\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002"+ + "?\u0007?\u0002@\u0007@\u0002A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002"+ + "D\u0007D\u0002E\u0007E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002"+ + "I\u0007I\u0002J\u0007J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002"+ + "N\u0007N\u0002O\u0007O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002"+ + "S\u0007S\u0002T\u0007T\u0002U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002"+ + "X\u0007X\u0002Y\u0007Y\u0002Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002"+ + "]\u0007]\u0002^\u0007^\u0002_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002"+ + "b\u0007b\u0002c\u0007c\u0002d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002"+ + "g\u0007g\u0002h\u0007h\u0002i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002"+ + "l\u0007l\u0002m\u0007m\u0002n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002"+ + "q\u0007q\u0002r\u0007r\u0002s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002"+ + "v\u0007v\u0002w\u0007w\u0002x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002"+ + "{\u0007{\u0002|\u0007|\u0002}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f"+ + "\u0002\u0080\u0007\u0080\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082"+ + "\u0002\u0083\u0007\u0083\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085"+ + "\u0002\u0086\u0007\u0086\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088"+ + "\u0002\u0089\u0007\u0089\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b"+ + 
"\u0002\u008c\u0007\u008c\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e"+ + "\u0002\u008f\u0007\u008f\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091"+ + "\u0002\u0092\u0007\u0092\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094"+ + "\u0002\u0095\u0007\u0095\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097"+ + "\u0002\u0098\u0007\u0098\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a"+ + "\u0002\u009b\u0007\u009b\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d"+ + "\u0002\u009e\u0007\u009e\u0002\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0"+ + "\u0002\u00a1\u0007\u00a1\u0002\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3"+ + "\u0002\u00a4\u0007\u00a4\u0002\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6"+ + "\u0002\u00a7\u0007\u00a7\u0002\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9"+ + "\u0002\u00aa\u0007\u00aa\u0002\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac"+ + "\u0002\u00ad\u0007\u00ad\u0002\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af"+ + "\u0002\u00b0\u0007\u00b0\u0002\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2"+ + "\u0002\u00b3\u0007\u00b3\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5"+ + "\u0002\u00b6\u0007\u00b6\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8"+ + "\u0002\u00b9\u0007\u00b9\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb"+ + "\u0002\u00bc\u0007\u00bc\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be"+ + "\u0002\u00bf\u0007\u00bf\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1"+ + "\u0002\u00c2\u0007\u00c2\u0002\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4"+ + "\u0002\u00c5\u0007\u00c5\u0002\u00c6\u0007\u00c6\u0002\u00c7\u0007\u00c7"+ + "\u0002\u00c8\u0007\u00c8\u0002\u00c9\u0007\u00c9\u0002\u00ca\u0007\u00ca"+ + "\u0002\u00cb\u0007\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd"+ + "\u0002\u00ce\u0007\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0"+ + "\u0002\u00d1\u0007\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3"+ + "\u0002\u00d4\u0007\u00d4\u0002\u00d5\u0007\u00d5\u0002\u00d6\u0007\u00d6"+ + "\u0002\u00d7\u0007\u00d7\u0002\u00d8\u0007\u00d8\u0002\u00d9\u0007\u00d9"+ + "\u0002\u00da\u0007\u00da\u0002\u00db\u0007\u00db\u0002\u00dc\u0007\u00dc"+ + "\u0002\u00dd\u0007\u00dd\u0002\u00de\u0007\u00de\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + 
"\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ + "\u0004\u0018\u02a6\b\u0018\u000b\u0018\f\u0018\u02a7\u0001\u0018\u0001"+ + "\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u02b0"+ + "\b\u0019\n\u0019\f\u0019\u02b3\t\u0019\u0001\u0019\u0003\u0019\u02b6\b"+ + "\u0019\u0001\u0019\u0003\u0019\u02b9\b\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u02c2"+ + "\b\u001a\n\u001a\f\u001a\u02c5\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02cd\b\u001b\u000b\u001b"+ + "\f\u001b\u02ce\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0003!\u02e2\b!\u0001"+ + "!\u0004!\u02e5\b!\u000b!\f!\u02e6\u0001\"\u0001\"\u0001#\u0001#\u0001"+ + "$\u0001$\u0001$\u0003$\u02f0\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003"+ + "&\u02f7\b&\u0001\'\u0001\'\u0001\'\u0005\'\u02fc\b\'\n\'\f\'\u02ff\t\'"+ + "\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0005\'\u0307\b\'\n\'"+ + "\f\'\u030a\t\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u0311\b"+ + "\'\u0001\'\u0003\'\u0314\b\'\u0003\'\u0316\b\'\u0001(\u0004(\u0319\b("+ + "\u000b(\f(\u031a\u0001)\u0004)\u031e\b)\u000b)\f)\u031f\u0001)\u0001)"+ + "\u0005)\u0324\b)\n)\f)\u0327\t)\u0001)\u0001)\u0004)\u032b\b)\u000b)\f"+ + ")\u032c\u0001)\u0004)\u0330\b)\u000b)\f)\u0331\u0001)\u0001)\u0005)\u0336"+ + "\b)\n)\f)\u0339\t)\u0003)\u033b\b)\u0001)\u0001)\u0001)\u0001)\u0004)"+ + "\u0341\b)\u000b)\f)\u0342\u0001)\u0001)\u0003)\u0347\b)\u0001*\u0001*"+ + "\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u0001"+ + "1\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u0001"+ + "3\u00013\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u0001"+ + "5\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u0001"+ + "8\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ + ";\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001"+ + 
"B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001"+ + "F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001"+ + "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001"+ + "O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0003Q\u03cb"+ + "\bQ\u0001Q\u0005Q\u03ce\bQ\nQ\fQ\u03d1\tQ\u0001Q\u0001Q\u0004Q\u03d5\b"+ + "Q\u000bQ\fQ\u03d6\u0003Q\u03d9\bQ\u0001R\u0001R\u0001R\u0001R\u0001R\u0001"+ + "S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0005T\u03e7\bT\nT\fT\u03ea"+ + "\tT\u0001T\u0001T\u0003T\u03ee\bT\u0001T\u0004T\u03f1\bT\u000bT\fT\u03f2"+ + "\u0003T\u03f5\bT\u0001U\u0001U\u0004U\u03f9\bU\u000bU\fU\u03fa\u0001U"+ + "\u0001U\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001"+ + "X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ + "Z\u0001[\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001"+ + "]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ + "_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001"+ + "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ + "d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001"+ + "e\u0001e\u0001f\u0001f\u0001f\u0003f\u0448\bf\u0001g\u0004g\u044b\bg\u000b"+ + "g\fg\u044c\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001"+ + "j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001"+ + "l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ + "n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001"+ + "q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0003r\u047c\br\u0001s\u0001"+ + "s\u0003s\u0480\bs\u0001s\u0005s\u0483\bs\ns\fs\u0486\ts\u0001s\u0001s"+ + "\u0003s\u048a\bs\u0001s\u0004s\u048d\bs\u000bs\fs\u048e\u0003s\u0491\b"+ + "s\u0001t\u0001t\u0004t\u0495\bt\u000bt\ft\u0496\u0001u\u0001u\u0001u\u0001"+ + "u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001"+ + "x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ + "z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001"+ "}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001"+ "\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001"+ "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001"+ @@ -541,9 +467,9 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ "\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04e0\b\u0088\u000b\u0088\f"+ - "\u0088\u04e1\u0001\u0088\u0001\u0088\u0003\u0088\u04e6\b\u0088\u0001\u0088"+ - "\u0004\u0088\u04e9\b\u0088\u000b\u0088\f\u0088\u04ea\u0001\u0089\u0001"+ + "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04ea\b\u0088\u000b\u0088\f"+ + "\u0088\u04eb\u0001\u0088\u0001\u0088\u0003\u0088\u04f0\b\u0088\u0001\u0088"+ + "\u0004\u0088\u04f3\b\u0088\u000b\u0088\f\u0088\u04f4\u0001\u0089\u0001"+ "\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ "\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001"+ "\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ @@ -552,894 +478,911 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx 
"\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ "\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ "\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ - "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ - "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001"+ - "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ - "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ - "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ - "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ - "\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001"+ - "\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001"+ - "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ - "\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001"+ - "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ - "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004\u00a9\u057c\b\u00a9\u000b"+ - "\u00a9\f\u00a9\u057d\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ - "\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001"+ - "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001"+ - "\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ - "\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001"+ - "\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001"+ - "\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001"+ - "\u00ca\u0001\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001"+ - "\u00cb\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001"+ - 
"\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001"+ - "\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001"+ - "\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001"+ - "\u00d0\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001"+ - "\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001"+ - "\u00d2\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001"+ - "\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001"+ - "\u00d5\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001"+ - "\u00d6\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001"+ - "\u00d7\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001"+ - "\u00d8\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0002"+ - "\u02b5\u02fa\u0000\u00da\u0010\u0001\u0012\u0002\u0014\u0003\u0016\u0004"+ - "\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e"+ - ",\u000f.\u00100\u00112\u00124\u00136\u00148\u0015:\u0016<\u0017>\u0018"+ - "@\u0019B\u001aD\u001bF\u001cH\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000"+ - "T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u001e`\u001fb d!f\"h#j$l%n&p\'r"+ - "(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6"+ - "\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2"+ - "@\u00a4A\u00a6B\u00a8C\u00aaD\u00acE\u00aeF\u00b0\u0000\u00b2G\u00b4H"+ - "\u00b6I\u00b8J\u00ba\u0000\u00bcK\u00beL\u00c0M\u00c2N\u00c4\u0000\u00c6"+ - "\u0000\u00c8O\u00caP\u00ccQ\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4"+ - "\u0000\u00d6\u0000\u00d8\u0000\u00daR\u00dc\u0000\u00deS\u00e0\u0000\u00e2"+ - "\u0000\u00e4T\u00e6U\u00e8V\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0"+ - "\u0000\u00f2\u0000\u00f4\u0000\u00f6\u0000\u00f8W\u00faX\u00fcY\u00fe"+ - "Z\u0100\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108\u0000\u010a\u0000"+ - "\u010c[\u010e\u0000\u0110\\\u0112]\u0114^\u0116\u0000\u0118\u0000\u011a"+ - "_\u011c`\u011e\u0000\u0120a\u0122\u0000\u0124b\u0126c\u0128d\u012a\u0000"+ - "\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134\u0000\u0136\u0000"+ - "\u0138\u0000\u013a\u0000\u013ce\u013ef\u0140g\u0142\u0000\u0144\u0000"+ - "\u0146\u0000\u0148\u0000\u014a\u0000\u014c\u0000\u014eh\u0150i\u0152j"+ - "\u0154\u0000\u0156k\u0158l\u015am\u015cn\u015e\u0000\u0160\u0000\u0162"+ - "o\u0164p\u0166q\u0168r\u016a\u0000\u016c\u0000\u016e\u0000\u0170\u0000"+ - "\u0172\u0000\u0174\u0000\u0176\u0000\u0178s\u017at\u017cu\u017e\u0000"+ - "\u0180\u0000\u0182\u0000\u0184\u0000\u0186v\u0188w\u018ax\u018c\u0000"+ - "\u018e\u0000\u0190\u0000\u0192\u0000\u0194y\u0196\u0000\u0198\u0000\u019a"+ - "\u0000\u019c\u0000\u019e\u0000\u01a0z\u01a2{\u01a4|\u01a6\u0000\u01a8"+ - "\u0000\u01aa\u0000\u01ac}\u01ae~\u01b0\u007f\u01b2\u0000\u01b4\u0000\u01b6"+ - "\u0080\u01b8\u0081\u01ba\u0082\u01bc\u0000\u01be\u0000\u01c0\u0000\u01c2"+ - "\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b"+ - "\f\r\u000e\u000f$\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002"+ - "\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000"+ - "OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002"+ - "\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002\u0000"+ - "MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002"+ - "\u0000JJjj\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ - "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004"+ - 
"\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002"+ - "\u0000YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b"+ - "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0676\u0000\u0010\u0001\u0000\u0000"+ - "\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000"+ - "\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000"+ - "\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000"+ - "\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000"+ - "\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000"+ - "&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001"+ - "\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000"+ - "\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u0000"+ - "4\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00008\u0001"+ - "\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000"+ - "\u0000\u0000>\u0001\u0000\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000"+ - "B\u0001\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001"+ - "\u0000\u0000\u0000\u0001H\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000"+ - "\u0000\u0001`\u0001\u0000\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001"+ - "d\u0001\u0000\u0000\u0000\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001"+ - "\u0000\u0000\u0000\u0001j\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000"+ - "\u0000\u0001n\u0001\u0000\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001"+ - "r\u0001\u0000\u0000\u0000\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001"+ - "\u0000\u0000\u0000\u0001x\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000"+ - "\u0000\u0001|\u0001\u0000\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001"+ - "\u0080\u0001\u0000\u0000\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001"+ - "\u0084\u0001\u0000\u0000\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001"+ - "\u0088\u0001\u0000\u0000\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001"+ - "\u008c\u0001\u0000\u0000\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001"+ - "\u0090\u0001\u0000\u0000\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001"+ - "\u0094\u0001\u0000\u0000\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001"+ - "\u0098\u0001\u0000\u0000\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001"+ - "\u009c\u0001\u0000\u0000\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001"+ - "\u00a0\u0001\u0000\u0000\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001"+ - "\u00a4\u0001\u0000\u0000\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001"+ - "\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001"+ - "\u00ac\u0001\u0000\u0000\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001"+ - "\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001"+ - "\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6\u0001\u0000\u0000\u0000\u0001"+ - "\u00b8\u0001\u0000\u0000\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001"+ - "\u00be\u0001\u0000\u0000\u0000\u0001\u00c0\u0001\u0000\u0000\u0000\u0001"+ - "\u00c2\u0001\u0000\u0000\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002"+ - "\u00c6\u0001\u0000\u0000\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0002"+ - "\u00ca\u0001\u0000\u0000\u0000\u0002\u00cc\u0001\u0000\u0000\u0000\u0003"+ - "\u00ce\u0001\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003"+ - "\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003"+ - "\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0003"+ - 
"\u00da\u0001\u0000\u0000\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003"+ - "\u00e0\u0001\u0000\u0000\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003"+ - "\u00e4\u0001\u0000\u0000\u0000\u0003\u00e6\u0001\u0000\u0000\u0000\u0003"+ - "\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004"+ - "\u00ec\u0001\u0000\u0000\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004"+ - "\u00f0\u0001\u0000\u0000\u0000\u0004\u00f2\u0001\u0000\u0000\u0000\u0004"+ - "\u00f8\u0001\u0000\u0000\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0004"+ - "\u00fc\u0001\u0000\u0000\u0000\u0004\u00fe\u0001\u0000\u0000\u0000\u0005"+ - "\u0100\u0001\u0000\u0000\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005"+ - "\u0104\u0001\u0000\u0000\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005"+ - "\u0108\u0001\u0000\u0000\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005"+ - "\u010c\u0001\u0000\u0000\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005"+ - "\u0110\u0001\u0000\u0000\u0000\u0005\u0112\u0001\u0000\u0000\u0000\u0005"+ - "\u0114\u0001\u0000\u0000\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006"+ - "\u0118\u0001\u0000\u0000\u0000\u0006\u011a\u0001\u0000\u0000\u0000\u0006"+ - "\u011c\u0001\u0000\u0000\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006"+ - "\u0122\u0001\u0000\u0000\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0006"+ - "\u0126\u0001\u0000\u0000\u0000\u0006\u0128\u0001\u0000\u0000\u0000\u0007"+ - "\u012a\u0001\u0000\u0000\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007"+ - "\u012e\u0001\u0000\u0000\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007"+ - "\u0132\u0001\u0000\u0000\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007"+ - "\u0136\u0001\u0000\u0000\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007"+ - "\u013a\u0001\u0000\u0000\u0000\u0007\u013c\u0001\u0000\u0000\u0000\u0007"+ - "\u013e\u0001\u0000\u0000\u0000\u0007\u0140\u0001\u0000\u0000\u0000\b\u0142"+ - "\u0001\u0000\u0000\u0000\b\u0144\u0001\u0000\u0000\u0000\b\u0146\u0001"+ - "\u0000\u0000\u0000\b\u0148\u0001\u0000\u0000\u0000\b\u014a\u0001\u0000"+ - "\u0000\u0000\b\u014c\u0001\u0000\u0000\u0000\b\u014e\u0001\u0000\u0000"+ - "\u0000\b\u0150\u0001\u0000\u0000\u0000\b\u0152\u0001\u0000\u0000\u0000"+ - "\t\u0154\u0001\u0000\u0000\u0000\t\u0156\u0001\u0000\u0000\u0000\t\u0158"+ - "\u0001\u0000\u0000\u0000\t\u015a\u0001\u0000\u0000\u0000\t\u015c\u0001"+ - "\u0000\u0000\u0000\n\u015e\u0001\u0000\u0000\u0000\n\u0160\u0001\u0000"+ - "\u0000\u0000\n\u0162\u0001\u0000\u0000\u0000\n\u0164\u0001\u0000\u0000"+ - "\u0000\n\u0166\u0001\u0000\u0000\u0000\n\u0168\u0001\u0000\u0000\u0000"+ - "\u000b\u016a\u0001\u0000\u0000\u0000\u000b\u016c\u0001\u0000\u0000\u0000"+ - "\u000b\u016e\u0001\u0000\u0000\u0000\u000b\u0170\u0001\u0000\u0000\u0000"+ - "\u000b\u0172\u0001\u0000\u0000\u0000\u000b\u0174\u0001\u0000\u0000\u0000"+ - "\u000b\u0176\u0001\u0000\u0000\u0000\u000b\u0178\u0001\u0000\u0000\u0000"+ - "\u000b\u017a\u0001\u0000\u0000\u0000\u000b\u017c\u0001\u0000\u0000\u0000"+ - "\f\u017e\u0001\u0000\u0000\u0000\f\u0180\u0001\u0000\u0000\u0000\f\u0182"+ - "\u0001\u0000\u0000\u0000\f\u0184\u0001\u0000\u0000\u0000\f\u0186\u0001"+ - "\u0000\u0000\u0000\f\u0188\u0001\u0000\u0000\u0000\f\u018a\u0001\u0000"+ - "\u0000\u0000\r\u018c\u0001\u0000\u0000\u0000\r\u018e\u0001\u0000\u0000"+ - "\u0000\r\u0190\u0001\u0000\u0000\u0000\r\u0192\u0001\u0000\u0000\u0000"+ - "\r\u0194\u0001\u0000\u0000\u0000\r\u0196\u0001\u0000\u0000\u0000\r\u0198"+ - "\u0001\u0000\u0000\u0000\r\u019a\u0001\u0000\u0000\u0000\r\u019c\u0001"+ - 
"\u0000\u0000\u0000\r\u019e\u0001\u0000\u0000\u0000\r\u01a0\u0001\u0000"+ - "\u0000\u0000\r\u01a2\u0001\u0000\u0000\u0000\r\u01a4\u0001\u0000\u0000"+ - "\u0000\u000e\u01a6\u0001\u0000\u0000\u0000\u000e\u01a8\u0001\u0000\u0000"+ - "\u0000\u000e\u01aa\u0001\u0000\u0000\u0000\u000e\u01ac\u0001\u0000\u0000"+ - "\u0000\u000e\u01ae\u0001\u0000\u0000\u0000\u000e\u01b0\u0001\u0000\u0000"+ - "\u0000\u000f\u01b2\u0001\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000"+ - "\u0000\u000f\u01b6\u0001\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000"+ - "\u0000\u000f\u01ba\u0001\u0000\u0000\u0000\u000f\u01bc\u0001\u0000\u0000"+ - "\u0000\u000f\u01be\u0001\u0000\u0000\u0000\u000f\u01c0\u0001\u0000\u0000"+ - "\u0000\u000f\u01c2\u0001\u0000\u0000\u0000\u0010\u01c4\u0001\u0000\u0000"+ - "\u0000\u0012\u01ce\u0001\u0000\u0000\u0000\u0014\u01d5\u0001\u0000\u0000"+ - "\u0000\u0016\u01de\u0001\u0000\u0000\u0000\u0018\u01e5\u0001\u0000\u0000"+ - "\u0000\u001a\u01ef\u0001\u0000\u0000\u0000\u001c\u01f6\u0001\u0000\u0000"+ - "\u0000\u001e\u01fd\u0001\u0000\u0000\u0000 \u0204\u0001\u0000\u0000\u0000"+ - "\"\u020c\u0001\u0000\u0000\u0000$\u0218\u0001\u0000\u0000\u0000&\u0221"+ - "\u0001\u0000\u0000\u0000(\u0227\u0001\u0000\u0000\u0000*\u022e\u0001\u0000"+ - "\u0000\u0000,\u0235\u0001\u0000\u0000\u0000.\u023d\u0001\u0000\u0000\u0000"+ - "0\u0245\u0001\u0000\u0000\u00002\u0254\u0001\u0000\u0000\u00004\u0260"+ - "\u0001\u0000\u0000\u00006\u026b\u0001\u0000\u0000\u00008\u0273\u0001\u0000"+ - "\u0000\u0000:\u027b\u0001\u0000\u0000\u0000<\u0283\u0001\u0000\u0000\u0000"+ - ">\u028c\u0001\u0000\u0000\u0000@\u0297\u0001\u0000\u0000\u0000B\u029d"+ - "\u0001\u0000\u0000\u0000D\u02ae\u0001\u0000\u0000\u0000F\u02be\u0001\u0000"+ - "\u0000\u0000H\u02c4\u0001\u0000\u0000\u0000J\u02c8\u0001\u0000\u0000\u0000"+ - "L\u02ca\u0001\u0000\u0000\u0000N\u02cc\u0001\u0000\u0000\u0000P\u02cf"+ - "\u0001\u0000\u0000\u0000R\u02d1\u0001\u0000\u0000\u0000T\u02da\u0001\u0000"+ - "\u0000\u0000V\u02dc\u0001\u0000\u0000\u0000X\u02e1\u0001\u0000\u0000\u0000"+ - "Z\u02e3\u0001\u0000\u0000\u0000\\\u02e8\u0001\u0000\u0000\u0000^\u0307"+ - "\u0001\u0000\u0000\u0000`\u030a\u0001\u0000\u0000\u0000b\u0338\u0001\u0000"+ - "\u0000\u0000d\u033a\u0001\u0000\u0000\u0000f\u033d\u0001\u0000\u0000\u0000"+ - "h\u0341\u0001\u0000\u0000\u0000j\u0345\u0001\u0000\u0000\u0000l\u0347"+ - "\u0001\u0000\u0000\u0000n\u034a\u0001\u0000\u0000\u0000p\u034c\u0001\u0000"+ - "\u0000\u0000r\u034e\u0001\u0000\u0000\u0000t\u0353\u0001\u0000\u0000\u0000"+ - "v\u0355\u0001\u0000\u0000\u0000x\u035b\u0001\u0000\u0000\u0000z\u0361"+ - "\u0001\u0000\u0000\u0000|\u0364\u0001\u0000\u0000\u0000~\u0367\u0001\u0000"+ - "\u0000\u0000\u0080\u036c\u0001\u0000\u0000\u0000\u0082\u0371\u0001\u0000"+ - "\u0000\u0000\u0084\u0373\u0001\u0000\u0000\u0000\u0086\u0377\u0001\u0000"+ - "\u0000\u0000\u0088\u037c\u0001\u0000\u0000\u0000\u008a\u0382\u0001\u0000"+ - "\u0000\u0000\u008c\u0385\u0001\u0000\u0000\u0000\u008e\u0387\u0001\u0000"+ - "\u0000\u0000\u0090\u038d\u0001\u0000\u0000\u0000\u0092\u038f\u0001\u0000"+ - "\u0000\u0000\u0094\u0394\u0001\u0000\u0000\u0000\u0096\u0397\u0001\u0000"+ - "\u0000\u0000\u0098\u039a\u0001\u0000\u0000\u0000\u009a\u039d\u0001\u0000"+ - "\u0000\u0000\u009c\u039f\u0001\u0000\u0000\u0000\u009e\u03a2\u0001\u0000"+ - "\u0000\u0000\u00a0\u03a4\u0001\u0000\u0000\u0000\u00a2\u03a7\u0001\u0000"+ - "\u0000\u0000\u00a4\u03a9\u0001\u0000\u0000\u0000\u00a6\u03ab\u0001\u0000"+ - "\u0000\u0000\u00a8\u03ad\u0001\u0000\u0000\u0000\u00aa\u03af\u0001\u0000"+ - 
"\u0000\u0000\u00ac\u03b1\u0001\u0000\u0000\u0000\u00ae\u03b3\u0001\u0000"+ - "\u0000\u0000\u00b0\u03b5\u0001\u0000\u0000\u0000\u00b2\u03ca\u0001\u0000"+ - "\u0000\u0000\u00b4\u03cc\u0001\u0000\u0000\u0000\u00b6\u03d1\u0001\u0000"+ - "\u0000\u0000\u00b8\u03e6\u0001\u0000\u0000\u0000\u00ba\u03e8\u0001\u0000"+ - "\u0000\u0000\u00bc\u03f0\u0001\u0000\u0000\u0000\u00be\u03f2\u0001\u0000"+ - "\u0000\u0000\u00c0\u03f6\u0001\u0000\u0000\u0000\u00c2\u03fa\u0001\u0000"+ - "\u0000\u0000\u00c4\u03fe\u0001\u0000\u0000\u0000\u00c6\u0403\u0001\u0000"+ - "\u0000\u0000\u00c8\u0408\u0001\u0000\u0000\u0000\u00ca\u040c\u0001\u0000"+ - "\u0000\u0000\u00cc\u0410\u0001\u0000\u0000\u0000\u00ce\u0414\u0001\u0000"+ - "\u0000\u0000\u00d0\u0419\u0001\u0000\u0000\u0000\u00d2\u041d\u0001\u0000"+ - "\u0000\u0000\u00d4\u0421\u0001\u0000\u0000\u0000\u00d6\u0425\u0001\u0000"+ - "\u0000\u0000\u00d8\u0429\u0001\u0000\u0000\u0000\u00da\u042d\u0001\u0000"+ - "\u0000\u0000\u00dc\u0439\u0001\u0000\u0000\u0000\u00de\u043c\u0001\u0000"+ - "\u0000\u0000\u00e0\u0440\u0001\u0000\u0000\u0000\u00e2\u0444\u0001\u0000"+ - "\u0000\u0000\u00e4\u0448\u0001\u0000\u0000\u0000\u00e6\u044c\u0001\u0000"+ - "\u0000\u0000\u00e8\u0450\u0001\u0000\u0000\u0000\u00ea\u0454\u0001\u0000"+ - "\u0000\u0000\u00ec\u0459\u0001\u0000\u0000\u0000\u00ee\u045d\u0001\u0000"+ - "\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2\u0466\u0001\u0000"+ - "\u0000\u0000\u00f4\u046f\u0001\u0000\u0000\u0000\u00f6\u0484\u0001\u0000"+ - "\u0000\u0000\u00f8\u0488\u0001\u0000\u0000\u0000\u00fa\u048c\u0001\u0000"+ - "\u0000\u0000\u00fc\u0490\u0001\u0000\u0000\u0000\u00fe\u0494\u0001\u0000"+ - "\u0000\u0000\u0100\u0498\u0001\u0000\u0000\u0000\u0102\u049d\u0001\u0000"+ - "\u0000\u0000\u0104\u04a1\u0001\u0000\u0000\u0000\u0106\u04a5\u0001\u0000"+ - "\u0000\u0000\u0108\u04a9\u0001\u0000\u0000\u0000\u010a\u04ae\u0001\u0000"+ - "\u0000\u0000\u010c\u04b3\u0001\u0000\u0000\u0000\u010e\u04b6\u0001\u0000"+ - "\u0000\u0000\u0110\u04ba\u0001\u0000\u0000\u0000\u0112\u04be\u0001\u0000"+ - "\u0000\u0000\u0114\u04c2\u0001\u0000\u0000\u0000\u0116\u04c6\u0001\u0000"+ - "\u0000\u0000\u0118\u04cb\u0001\u0000\u0000\u0000\u011a\u04d0\u0001\u0000"+ - "\u0000\u0000\u011c\u04d5\u0001\u0000\u0000\u0000\u011e\u04dc\u0001\u0000"+ - "\u0000\u0000\u0120\u04e5\u0001\u0000\u0000\u0000\u0122\u04ec\u0001\u0000"+ - "\u0000\u0000\u0124\u04f0\u0001\u0000\u0000\u0000\u0126\u04f4\u0001\u0000"+ - "\u0000\u0000\u0128\u04f8\u0001\u0000\u0000\u0000\u012a\u04fc\u0001\u0000"+ - "\u0000\u0000\u012c\u0502\u0001\u0000\u0000\u0000\u012e\u0506\u0001\u0000"+ - "\u0000\u0000\u0130\u050a\u0001\u0000\u0000\u0000\u0132\u050e\u0001\u0000"+ - "\u0000\u0000\u0134\u0512\u0001\u0000\u0000\u0000\u0136\u0516\u0001\u0000"+ - "\u0000\u0000\u0138\u051a\u0001\u0000\u0000\u0000\u013a\u051f\u0001\u0000"+ - "\u0000\u0000\u013c\u0524\u0001\u0000\u0000\u0000\u013e\u0528\u0001\u0000"+ - "\u0000\u0000\u0140\u052c\u0001\u0000\u0000\u0000\u0142\u0530\u0001\u0000"+ - "\u0000\u0000\u0144\u0535\u0001\u0000\u0000\u0000\u0146\u0539\u0001\u0000"+ - "\u0000\u0000\u0148\u053e\u0001\u0000\u0000\u0000\u014a\u0543\u0001\u0000"+ - "\u0000\u0000\u014c\u0547\u0001\u0000\u0000\u0000\u014e\u054b\u0001\u0000"+ - "\u0000\u0000\u0150\u054f\u0001\u0000\u0000\u0000\u0152\u0553\u0001\u0000"+ - "\u0000\u0000\u0154\u0557\u0001\u0000\u0000\u0000\u0156\u055c\u0001\u0000"+ - "\u0000\u0000\u0158\u0561\u0001\u0000\u0000\u0000\u015a\u0565\u0001\u0000"+ - "\u0000\u0000\u015c\u0569\u0001\u0000\u0000\u0000\u015e\u056d\u0001\u0000"+ - 
"\u0000\u0000\u0160\u0572\u0001\u0000\u0000\u0000\u0162\u057b\u0001\u0000"+ - "\u0000\u0000\u0164\u057f\u0001\u0000\u0000\u0000\u0166\u0583\u0001\u0000"+ - "\u0000\u0000\u0168\u0587\u0001\u0000\u0000\u0000\u016a\u058b\u0001\u0000"+ - "\u0000\u0000\u016c\u0590\u0001\u0000\u0000\u0000\u016e\u0594\u0001\u0000"+ - "\u0000\u0000\u0170\u0598\u0001\u0000\u0000\u0000\u0172\u059c\u0001\u0000"+ - "\u0000\u0000\u0174\u05a1\u0001\u0000\u0000\u0000\u0176\u05a5\u0001\u0000"+ - "\u0000\u0000\u0178\u05a9\u0001\u0000\u0000\u0000\u017a\u05ad\u0001\u0000"+ - "\u0000\u0000\u017c\u05b1\u0001\u0000\u0000\u0000\u017e\u05b5\u0001\u0000"+ - "\u0000\u0000\u0180\u05bb\u0001\u0000\u0000\u0000\u0182\u05bf\u0001\u0000"+ - "\u0000\u0000\u0184\u05c3\u0001\u0000\u0000\u0000\u0186\u05c7\u0001\u0000"+ - "\u0000\u0000\u0188\u05cb\u0001\u0000\u0000\u0000\u018a\u05cf\u0001\u0000"+ - "\u0000\u0000\u018c\u05d3\u0001\u0000\u0000\u0000\u018e\u05d8\u0001\u0000"+ - "\u0000\u0000\u0190\u05dc\u0001\u0000\u0000\u0000\u0192\u05e0\u0001\u0000"+ - "\u0000\u0000\u0194\u05e6\u0001\u0000\u0000\u0000\u0196\u05ef\u0001\u0000"+ - "\u0000\u0000\u0198\u05f3\u0001\u0000\u0000\u0000\u019a\u05f7\u0001\u0000"+ - "\u0000\u0000\u019c\u05fb\u0001\u0000\u0000\u0000\u019e\u05ff\u0001\u0000"+ - "\u0000\u0000\u01a0\u0603\u0001\u0000\u0000\u0000\u01a2\u0607\u0001\u0000"+ - "\u0000\u0000\u01a4\u060b\u0001\u0000\u0000\u0000\u01a6\u060f\u0001\u0000"+ - "\u0000\u0000\u01a8\u0614\u0001\u0000\u0000\u0000\u01aa\u061a\u0001\u0000"+ - "\u0000\u0000\u01ac\u0620\u0001\u0000\u0000\u0000\u01ae\u0624\u0001\u0000"+ - "\u0000\u0000\u01b0\u0628\u0001\u0000\u0000\u0000\u01b2\u062c\u0001\u0000"+ - "\u0000\u0000\u01b4\u0632\u0001\u0000\u0000\u0000\u01b6\u0638\u0001\u0000"+ - "\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba\u0640\u0001\u0000"+ - "\u0000\u0000\u01bc\u0644\u0001\u0000\u0000\u0000\u01be\u064a\u0001\u0000"+ - "\u0000\u0000\u01c0\u0650\u0001\u0000\u0000\u0000\u01c2\u0656\u0001\u0000"+ - "\u0000\u0000\u01c4\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0001"+ - "\u0000\u0000\u01c6\u01c7\u0007\u0002\u0000\u0000\u01c7\u01c8\u0007\u0002"+ - "\u0000\u0000\u01c8\u01c9\u0007\u0003\u0000\u0000\u01c9\u01ca\u0007\u0004"+ - "\u0000\u0000\u01ca\u01cb\u0007\u0005\u0000\u0000\u01cb\u01cc\u0001\u0000"+ - "\u0000\u0000\u01cc\u01cd\u0006\u0000\u0000\u0000\u01cd\u0011\u0001\u0000"+ - "\u0000\u0000\u01ce\u01cf\u0007\u0000\u0000\u0000\u01cf\u01d0\u0007\u0006"+ - "\u0000\u0000\u01d0\u01d1\u0007\u0007\u0000\u0000\u01d1\u01d2\u0007\b\u0000"+ - "\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u01d4\u0006\u0001\u0001"+ - "\u0000\u01d4\u0013\u0001\u0000\u0000\u0000\u01d5\u01d6\u0007\u0003\u0000"+ - "\u0000\u01d6\u01d7\u0007\t\u0000\u0000\u01d7\u01d8\u0007\u0006\u0000\u0000"+ - "\u01d8\u01d9\u0007\u0001\u0000\u0000\u01d9\u01da\u0007\u0004\u0000\u0000"+ - "\u01da\u01db\u0007\n\u0000\u0000\u01db\u01dc\u0001\u0000\u0000\u0000\u01dc"+ - "\u01dd\u0006\u0002\u0002\u0000\u01dd\u0015\u0001\u0000\u0000\u0000\u01de"+ - "\u01df\u0007\u0003\u0000\u0000\u01df\u01e0\u0007\u000b\u0000\u0000\u01e0"+ - "\u01e1\u0007\f\u0000\u0000\u01e1\u01e2\u0007\r\u0000\u0000\u01e2\u01e3"+ - "\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006\u0003\u0000\u0000\u01e4\u0017"+ - "\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0003\u0000\u0000\u01e6\u01e7"+ - "\u0007\u000e\u0000\u0000\u01e7\u01e8\u0007\b\u0000\u0000\u01e8\u01e9\u0007"+ - "\r\u0000\u0000\u01e9\u01ea\u0007\f\u0000\u0000\u01ea\u01eb\u0007\u0001"+ - "\u0000\u0000\u01eb\u01ec\u0007\t\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000"+ - 
"\u0000\u01ed\u01ee\u0006\u0004\u0003\u0000\u01ee\u0019\u0001\u0000\u0000"+ - "\u0000\u01ef\u01f0\u0007\u000f\u0000\u0000\u01f0\u01f1\u0007\u0006\u0000"+ - "\u0000\u01f1\u01f2\u0007\u0007\u0000\u0000\u01f2\u01f3\u0007\u0010\u0000"+ - "\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f5\u0006\u0005\u0004"+ - "\u0000\u01f5\u001b\u0001\u0000\u0000\u0000\u01f6\u01f7\u0007\u0011\u0000"+ - "\u0000\u01f7\u01f8\u0007\u0006\u0000\u0000\u01f8\u01f9\u0007\u0007\u0000"+ - "\u0000\u01f9\u01fa\u0007\u0012\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000"+ - "\u0000\u01fb\u01fc\u0006\u0006\u0000\u0000\u01fc\u001d\u0001\u0000\u0000"+ - "\u0000\u01fd\u01fe\u0007\u0012\u0000\u0000\u01fe\u01ff\u0007\u0003\u0000"+ - "\u0000\u01ff\u0200\u0007\u0003\u0000\u0000\u0200\u0201\u0007\b\u0000\u0000"+ - "\u0201\u0202\u0001\u0000\u0000\u0000\u0202\u0203\u0006\u0007\u0001\u0000"+ - "\u0203\u001f\u0001\u0000\u0000\u0000\u0204\u0205\u0007\r\u0000\u0000\u0205"+ - "\u0206\u0007\u0001\u0000\u0000\u0206\u0207\u0007\u0010\u0000\u0000\u0207"+ - "\u0208\u0007\u0001\u0000\u0000\u0208\u0209\u0007\u0005\u0000\u0000\u0209"+ - "\u020a\u0001\u0000\u0000\u0000\u020a\u020b\u0006\b\u0000\u0000\u020b!"+ - "\u0001\u0000\u0000\u0000\u020c\u020d\u0007\u0010\u0000\u0000\u020d\u020e"+ - "\u0007\u000b\u0000\u0000\u020e\u020f\u0005_\u0000\u0000\u020f\u0210\u0007"+ - "\u0003\u0000\u0000\u0210\u0211\u0007\u000e\u0000\u0000\u0211\u0212\u0007"+ - "\b\u0000\u0000\u0212\u0213\u0007\f\u0000\u0000\u0213\u0214\u0007\t\u0000"+ - "\u0000\u0214\u0215\u0007\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000"+ - "\u0000\u0216\u0217\u0006\t\u0005\u0000\u0217#\u0001\u0000\u0000\u0000"+ - "\u0218\u0219\u0007\u0006\u0000\u0000\u0219\u021a\u0007\u0003\u0000\u0000"+ - "\u021a\u021b\u0007\t\u0000\u0000\u021b\u021c\u0007\f\u0000\u0000\u021c"+ - "\u021d\u0007\u0010\u0000\u0000\u021d\u021e\u0007\u0003\u0000\u0000\u021e"+ - "\u021f\u0001\u0000\u0000\u0000\u021f\u0220\u0006\n\u0006\u0000\u0220%"+ - "\u0001\u0000\u0000\u0000\u0221\u0222\u0007\u0006\u0000\u0000\u0222\u0223"+ - "\u0007\u0007\u0000\u0000\u0223\u0224\u0007\u0013\u0000\u0000\u0224\u0225"+ - "\u0001\u0000\u0000\u0000\u0225\u0226\u0006\u000b\u0000\u0000\u0226\'\u0001"+ - "\u0000\u0000\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0007"+ - "\n\u0000\u0000\u0229\u022a\u0007\u0007\u0000\u0000\u022a\u022b\u0007\u0013"+ - "\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c\u022d\u0006\f\u0007"+ - "\u0000\u022d)\u0001\u0000\u0000\u0000\u022e\u022f\u0007\u0002\u0000\u0000"+ - "\u022f\u0230\u0007\u0007\u0000\u0000\u0230\u0231\u0007\u0006\u0000\u0000"+ - "\u0231\u0232\u0007\u0005\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000"+ - "\u0233\u0234\u0006\r\u0000\u0000\u0234+\u0001\u0000\u0000\u0000\u0235"+ - "\u0236\u0007\u0002\u0000\u0000\u0236\u0237\u0007\u0005\u0000\u0000\u0237"+ - "\u0238\u0007\f\u0000\u0000\u0238\u0239\u0007\u0005\u0000\u0000\u0239\u023a"+ - "\u0007\u0002\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000\u023b\u023c"+ - "\u0006\u000e\u0000\u0000\u023c-\u0001\u0000\u0000\u0000\u023d\u023e\u0007"+ - "\u0013\u0000\u0000\u023e\u023f\u0007\n\u0000\u0000\u023f\u0240\u0007\u0003"+ - "\u0000\u0000\u0240\u0241\u0007\u0006\u0000\u0000\u0241\u0242\u0007\u0003"+ - "\u0000\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0244\u0006\u000f"+ - "\u0000\u0000\u0244/\u0001\u0000\u0000\u0000\u0245\u0246\u0004\u0010\u0000"+ - "\u0000\u0246\u0247\u0007\u0001\u0000\u0000\u0247\u0248\u0007\t\u0000\u0000"+ - "\u0248\u0249\u0007\r\u0000\u0000\u0249\u024a\u0007\u0001\u0000\u0000\u024a"+ - 
"\u024b\u0007\t\u0000\u0000\u024b\u024c\u0007\u0003\u0000\u0000\u024c\u024d"+ - "\u0007\u0002\u0000\u0000\u024d\u024e\u0007\u0005\u0000\u0000\u024e\u024f"+ - "\u0007\f\u0000\u0000\u024f\u0250\u0007\u0005\u0000\u0000\u0250\u0251\u0007"+ - "\u0002\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0253\u0006"+ - "\u0010\u0000\u0000\u02531\u0001\u0000\u0000\u0000\u0254\u0255\u0004\u0011"+ - "\u0001\u0000\u0255\u0256\u0007\r\u0000\u0000\u0256\u0257\u0007\u0007\u0000"+ - "\u0000\u0257\u0258\u0007\u0007\u0000\u0000\u0258\u0259\u0007\u0012\u0000"+ - "\u0000\u0259\u025a\u0007\u0014\u0000\u0000\u025a\u025b\u0007\b\u0000\u0000"+ - "\u025b\u025c\u0005_\u0000\u0000\u025c\u025d\u0005\u8001\uf414\u0000\u0000"+ - "\u025d\u025e\u0001\u0000\u0000\u0000\u025e\u025f\u0006\u0011\b\u0000\u025f"+ - "3\u0001\u0000\u0000\u0000\u0260\u0261\u0004\u0012\u0002\u0000\u0261\u0262"+ - "\u0007\u0010\u0000\u0000\u0262\u0263\u0007\u0003\u0000\u0000\u0263\u0264"+ - "\u0007\u0005\u0000\u0000\u0264\u0265\u0007\u0006\u0000\u0000\u0265\u0266"+ - "\u0007\u0001\u0000\u0000\u0266\u0267\u0007\u0004\u0000\u0000\u0267\u0268"+ - "\u0007\u0002\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a"+ - "\u0006\u0012\t\u0000\u026a5\u0001\u0000\u0000\u0000\u026b\u026c\u0004"+ - "\u0013\u0003\u0000\u026c\u026d\u0007\u0015\u0000\u0000\u026d\u026e\u0007"+ - "\u0007\u0000\u0000\u026e\u026f\u0007\u0001\u0000\u0000\u026f\u0270\u0007"+ - "\t\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u0272\u0006\u0013"+ - "\n\u0000\u02727\u0001\u0000\u0000\u0000\u0273\u0274\u0004\u0014\u0004"+ - "\u0000\u0274\u0275\u0007\u000f\u0000\u0000\u0275\u0276\u0007\u0014\u0000"+ - "\u0000\u0276\u0277\u0007\r\u0000\u0000\u0277\u0278\u0007\r\u0000\u0000"+ - "\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0006\u0014\n\u0000\u027a"+ - "9\u0001\u0000\u0000\u0000\u027b\u027c\u0004\u0015\u0005\u0000\u027c\u027d"+ - "\u0007\r\u0000\u0000\u027d\u027e\u0007\u0003\u0000\u0000\u027e\u027f\u0007"+ - "\u000f\u0000\u0000\u027f\u0280\u0007\u0005\u0000\u0000\u0280\u0281\u0001"+ - "\u0000\u0000\u0000\u0281\u0282\u0006\u0015\n\u0000\u0282;\u0001\u0000"+ - "\u0000\u0000\u0283\u0284\u0004\u0016\u0006\u0000\u0284\u0285\u0007\u0006"+ - "\u0000\u0000\u0285\u0286\u0007\u0001\u0000\u0000\u0286\u0287\u0007\u0011"+ - "\u0000\u0000\u0287\u0288\u0007\n\u0000\u0000\u0288\u0289\u0007\u0005\u0000"+ - "\u0000\u0289\u028a\u0001\u0000\u0000\u0000\u028a\u028b\u0006\u0016\n\u0000"+ - "\u028b=\u0001\u0000\u0000\u0000\u028c\u028d\u0004\u0017\u0007\u0000\u028d"+ - "\u028e\u0007\r\u0000\u0000\u028e\u028f\u0007\u0007\u0000\u0000\u028f\u0290"+ - "\u0007\u0007\u0000\u0000\u0290\u0291\u0007\u0012\u0000\u0000\u0291\u0292"+ - "\u0007\u0014\u0000\u0000\u0292\u0293\u0007\b\u0000\u0000\u0293\u0294\u0001"+ - "\u0000\u0000\u0000\u0294\u0295\u0006\u0017\n\u0000\u0295?\u0001\u0000"+ - "\u0000\u0000\u0296\u0298\b\u0016\u0000\u0000\u0297\u0296\u0001\u0000\u0000"+ - "\u0000\u0298\u0299\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000\u0000"+ - "\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029b\u0001\u0000\u0000"+ - "\u0000\u029b\u029c\u0006\u0018\u0000\u0000\u029cA\u0001\u0000\u0000\u0000"+ - "\u029d\u029e\u0005/\u0000\u0000\u029e\u029f\u0005/\u0000\u0000\u029f\u02a3"+ - "\u0001\u0000\u0000\u0000\u02a0\u02a2\b\u0017\u0000\u0000\u02a1\u02a0\u0001"+ - "\u0000\u0000\u0000\u02a2\u02a5\u0001\u0000\u0000\u0000\u02a3\u02a1\u0001"+ - "\u0000\u0000\u0000\u02a3\u02a4\u0001\u0000\u0000\u0000\u02a4\u02a7\u0001"+ - "\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a6\u02a8\u0005"+ - 
"\r\u0000\u0000\u02a7\u02a6\u0001\u0000\u0000\u0000\u02a7\u02a8\u0001\u0000"+ - "\u0000\u0000\u02a8\u02aa\u0001\u0000\u0000\u0000\u02a9\u02ab\u0005\n\u0000"+ - "\u0000\u02aa\u02a9\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000"+ - "\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac\u02ad\u0006\u0019\u000b"+ - "\u0000\u02adC\u0001\u0000\u0000\u0000\u02ae\u02af\u0005/\u0000\u0000\u02af"+ - "\u02b0\u0005*\u0000\u0000\u02b0\u02b5\u0001\u0000\u0000\u0000\u02b1\u02b4"+ - "\u0003D\u001a\u0000\u02b2\u02b4\t\u0000\u0000\u0000\u02b3\u02b1\u0001"+ - "\u0000\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000\u0000\u02b4\u02b7\u0001"+ - "\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b5\u02b3\u0001"+ - "\u0000\u0000\u0000\u02b6\u02b8\u0001\u0000\u0000\u0000\u02b7\u02b5\u0001"+ - "\u0000\u0000\u0000\u02b8\u02b9\u0005*\u0000\u0000\u02b9\u02ba\u0005/\u0000"+ - "\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb\u02bc\u0006\u001a\u000b"+ - "\u0000\u02bcE\u0001\u0000\u0000\u0000\u02bd\u02bf\u0007\u0018\u0000\u0000"+ - "\u02be\u02bd\u0001\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000"+ - "\u02c0\u02be\u0001\u0000\u0000\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000"+ - "\u02c1\u02c2\u0001\u0000\u0000\u0000\u02c2\u02c3\u0006\u001b\u000b\u0000"+ - "\u02c3G\u0001\u0000\u0000\u0000\u02c4\u02c5\u0005|\u0000\u0000\u02c5\u02c6"+ - "\u0001\u0000\u0000\u0000\u02c6\u02c7\u0006\u001c\f\u0000\u02c7I\u0001"+ - "\u0000\u0000\u0000\u02c8\u02c9\u0007\u0019\u0000\u0000\u02c9K\u0001\u0000"+ - "\u0000\u0000\u02ca\u02cb\u0007\u001a\u0000\u0000\u02cbM\u0001\u0000\u0000"+ - "\u0000\u02cc\u02cd\u0005\\\u0000\u0000\u02cd\u02ce\u0007\u001b\u0000\u0000"+ - "\u02ceO\u0001\u0000\u0000\u0000\u02cf\u02d0\b\u001c\u0000\u0000\u02d0"+ - "Q\u0001\u0000\u0000\u0000\u02d1\u02d3\u0007\u0003\u0000\u0000\u02d2\u02d4"+ - "\u0007\u001d\u0000\u0000\u02d3\u02d2\u0001\u0000\u0000\u0000\u02d3\u02d4"+ - "\u0001\u0000\u0000\u0000\u02d4\u02d6\u0001\u0000\u0000\u0000\u02d5\u02d7"+ - "\u0003J\u001d\u0000\u02d6\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001"+ - "\u0000\u0000\u0000\u02d8\u02d6\u0001\u0000\u0000\u0000\u02d8\u02d9\u0001"+ - "\u0000\u0000\u0000\u02d9S\u0001\u0000\u0000\u0000\u02da\u02db\u0005@\u0000"+ - "\u0000\u02dbU\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005`\u0000\u0000\u02dd"+ - "W\u0001\u0000\u0000\u0000\u02de\u02e2\b\u001e\u0000\u0000\u02df\u02e0"+ - "\u0005`\u0000\u0000\u02e0\u02e2\u0005`\u0000\u0000\u02e1\u02de\u0001\u0000"+ - "\u0000\u0000\u02e1\u02df\u0001\u0000\u0000\u0000\u02e2Y\u0001\u0000\u0000"+ - "\u0000\u02e3\u02e4\u0005_\u0000\u0000\u02e4[\u0001\u0000\u0000\u0000\u02e5"+ - "\u02e9\u0003L\u001e\u0000\u02e6\u02e9\u0003J\u001d\u0000\u02e7\u02e9\u0003"+ - "Z%\u0000\u02e8\u02e5\u0001\u0000\u0000\u0000\u02e8\u02e6\u0001\u0000\u0000"+ - "\u0000\u02e8\u02e7\u0001\u0000\u0000\u0000\u02e9]\u0001\u0000\u0000\u0000"+ - "\u02ea\u02ef\u0005\"\u0000\u0000\u02eb\u02ee\u0003N\u001f\u0000\u02ec"+ - "\u02ee\u0003P \u0000\u02ed\u02eb\u0001\u0000\u0000\u0000\u02ed\u02ec\u0001"+ - "\u0000\u0000\u0000\u02ee\u02f1\u0001\u0000\u0000\u0000\u02ef\u02ed\u0001"+ - "\u0000\u0000\u0000\u02ef\u02f0\u0001\u0000\u0000\u0000\u02f0\u02f2\u0001"+ - "\u0000\u0000\u0000\u02f1\u02ef\u0001\u0000\u0000\u0000\u02f2\u0308\u0005"+ - "\"\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5\u0005\"\u0000"+ - "\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02fa\u0001\u0000\u0000\u0000"+ - "\u02f7\u02f9\b\u0017\u0000\u0000\u02f8\u02f7\u0001\u0000\u0000\u0000\u02f9"+ - "\u02fc\u0001\u0000\u0000\u0000\u02fa\u02fb\u0001\u0000\u0000\u0000\u02fa"+ - 
"\u02f8\u0001\u0000\u0000\u0000\u02fb\u02fd\u0001\u0000\u0000\u0000\u02fc"+ - "\u02fa\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005\"\u0000\u0000\u02fe\u02ff"+ - "\u0005\"\u0000\u0000\u02ff\u0300\u0005\"\u0000\u0000\u0300\u0302\u0001"+ - "\u0000\u0000\u0000\u0301\u0303\u0005\"\u0000\u0000\u0302\u0301\u0001\u0000"+ - "\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303\u0305\u0001\u0000"+ - "\u0000\u0000\u0304\u0306\u0005\"\u0000\u0000\u0305\u0304\u0001\u0000\u0000"+ - "\u0000\u0305\u0306\u0001\u0000\u0000\u0000\u0306\u0308\u0001\u0000\u0000"+ - "\u0000\u0307\u02ea\u0001\u0000\u0000\u0000\u0307\u02f3\u0001\u0000\u0000"+ - "\u0000\u0308_\u0001\u0000\u0000\u0000\u0309\u030b\u0003J\u001d\u0000\u030a"+ - "\u0309\u0001\u0000\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c"+ - "\u030a\u0001\u0000\u0000\u0000\u030c\u030d\u0001\u0000\u0000\u0000\u030d"+ - "a\u0001\u0000\u0000\u0000\u030e\u0310\u0003J\u001d\u0000\u030f\u030e\u0001"+ - "\u0000\u0000\u0000\u0310\u0311\u0001\u0000\u0000\u0000\u0311\u030f\u0001"+ - "\u0000\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000\u0312\u0313\u0001"+ - "\u0000\u0000\u0000\u0313\u0317\u0003t2\u0000\u0314\u0316\u0003J\u001d"+ - "\u0000\u0315\u0314\u0001\u0000\u0000\u0000\u0316\u0319\u0001\u0000\u0000"+ - "\u0000\u0317\u0315\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000"+ - "\u0000\u0318\u0339\u0001\u0000\u0000\u0000\u0319\u0317\u0001\u0000\u0000"+ - "\u0000\u031a\u031c\u0003t2\u0000\u031b\u031d\u0003J\u001d\u0000\u031c"+ - "\u031b\u0001\u0000\u0000\u0000\u031d\u031e\u0001\u0000\u0000\u0000\u031e"+ - "\u031c\u0001\u0000\u0000\u0000\u031e\u031f\u0001\u0000\u0000\u0000\u031f"+ - "\u0339\u0001\u0000\u0000\u0000\u0320\u0322\u0003J\u001d\u0000\u0321\u0320"+ - "\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000\u0000\u0000\u0323\u0321"+ - "\u0001\u0000\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u032c"+ - "\u0001\u0000\u0000\u0000\u0325\u0329\u0003t2\u0000\u0326\u0328\u0003J"+ - "\u001d\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u032b\u0001\u0000"+ - "\u0000\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000"+ - "\u0000\u0000\u032a\u032d\u0001\u0000\u0000\u0000\u032b\u0329\u0001\u0000"+ - "\u0000\u0000\u032c\u0325\u0001\u0000\u0000\u0000\u032c\u032d\u0001\u0000"+ - "\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e\u032f\u0003R!\u0000"+ - "\u032f\u0339\u0001\u0000\u0000\u0000\u0330\u0332\u0003t2\u0000\u0331\u0333"+ - "\u0003J\u001d\u0000\u0332\u0331\u0001\u0000\u0000\u0000\u0333\u0334\u0001"+ - "\u0000\u0000\u0000\u0334\u0332\u0001\u0000\u0000\u0000\u0334\u0335\u0001"+ - "\u0000\u0000\u0000\u0335\u0336\u0001\u0000\u0000\u0000\u0336\u0337\u0003"+ - "R!\u0000\u0337\u0339\u0001\u0000\u0000\u0000\u0338\u030f\u0001\u0000\u0000"+ - "\u0000\u0338\u031a\u0001\u0000\u0000\u0000\u0338\u0321\u0001\u0000\u0000"+ - "\u0000\u0338\u0330\u0001\u0000\u0000\u0000\u0339c\u0001\u0000\u0000\u0000"+ - "\u033a\u033b\u0007\u001f\u0000\u0000\u033b\u033c\u0007 \u0000\u0000\u033c"+ - "e\u0001\u0000\u0000\u0000\u033d\u033e\u0007\f\u0000\u0000\u033e\u033f"+ - "\u0007\t\u0000\u0000\u033f\u0340\u0007\u0000\u0000\u0000\u0340g\u0001"+ - "\u0000\u0000\u0000\u0341\u0342\u0007\f\u0000\u0000\u0342\u0343\u0007\u0002"+ - "\u0000\u0000\u0343\u0344\u0007\u0004\u0000\u0000\u0344i\u0001\u0000\u0000"+ - "\u0000\u0345\u0346\u0005=\u0000\u0000\u0346k\u0001\u0000\u0000\u0000\u0347"+ - "\u0348\u0005:\u0000\u0000\u0348\u0349\u0005:\u0000\u0000\u0349m\u0001"+ - "\u0000\u0000\u0000\u034a\u034b\u0005:\u0000\u0000\u034bo\u0001\u0000\u0000"+ - 
"\u0000\u034c\u034d\u0005,\u0000\u0000\u034dq\u0001\u0000\u0000\u0000\u034e"+ - "\u034f\u0007\u0000\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350"+ - "\u0351\u0007\u0002\u0000\u0000\u0351\u0352\u0007\u0004\u0000\u0000\u0352"+ - "s\u0001\u0000\u0000\u0000\u0353\u0354\u0005.\u0000\u0000\u0354u\u0001"+ - "\u0000\u0000\u0000\u0355\u0356\u0007\u000f\u0000\u0000\u0356\u0357\u0007"+ - "\f\u0000\u0000\u0357\u0358\u0007\r\u0000\u0000\u0358\u0359\u0007\u0002"+ - "\u0000\u0000\u0359\u035a\u0007\u0003\u0000\u0000\u035aw\u0001\u0000\u0000"+ - "\u0000\u035b\u035c\u0007\u000f\u0000\u0000\u035c\u035d\u0007\u0001\u0000"+ - "\u0000\u035d\u035e\u0007\u0006\u0000\u0000\u035e\u035f\u0007\u0002\u0000"+ - "\u0000\u035f\u0360\u0007\u0005\u0000\u0000\u0360y\u0001\u0000\u0000\u0000"+ - "\u0361\u0362\u0007\u0001\u0000\u0000\u0362\u0363\u0007\t\u0000\u0000\u0363"+ - "{\u0001\u0000\u0000\u0000\u0364\u0365\u0007\u0001\u0000\u0000\u0365\u0366"+ - "\u0007\u0002\u0000\u0000\u0366}\u0001\u0000\u0000\u0000\u0367\u0368\u0007"+ - "\r\u0000\u0000\u0368\u0369\u0007\f\u0000\u0000\u0369\u036a\u0007\u0002"+ - "\u0000\u0000\u036a\u036b\u0007\u0005\u0000\u0000\u036b\u007f\u0001\u0000"+ - "\u0000\u0000\u036c\u036d\u0007\r\u0000\u0000\u036d\u036e\u0007\u0001\u0000"+ - "\u0000\u036e\u036f\u0007\u0012\u0000\u0000\u036f\u0370\u0007\u0003\u0000"+ - "\u0000\u0370\u0081\u0001\u0000\u0000\u0000\u0371\u0372\u0005(\u0000\u0000"+ - "\u0372\u0083\u0001\u0000\u0000\u0000\u0373\u0374\u0007\t\u0000\u0000\u0374"+ - "\u0375\u0007\u0007\u0000\u0000\u0375\u0376\u0007\u0005\u0000\u0000\u0376"+ - "\u0085\u0001\u0000\u0000\u0000\u0377\u0378\u0007\t\u0000\u0000\u0378\u0379"+ - "\u0007\u0014\u0000\u0000\u0379\u037a\u0007\r\u0000\u0000\u037a\u037b\u0007"+ - "\r\u0000\u0000\u037b\u0087\u0001\u0000\u0000\u0000\u037c\u037d\u0007\t"+ - "\u0000\u0000\u037d\u037e\u0007\u0014\u0000\u0000\u037e\u037f\u0007\r\u0000"+ - "\u0000\u037f\u0380\u0007\r\u0000\u0000\u0380\u0381\u0007\u0002\u0000\u0000"+ - "\u0381\u0089\u0001\u0000\u0000\u0000\u0382\u0383\u0007\u0007\u0000\u0000"+ - "\u0383\u0384\u0007\u0006\u0000\u0000\u0384\u008b\u0001\u0000\u0000\u0000"+ - "\u0385\u0386\u0005?\u0000\u0000\u0386\u008d\u0001\u0000\u0000\u0000\u0387"+ - "\u0388\u0007\u0006\u0000\u0000\u0388\u0389\u0007\r\u0000\u0000\u0389\u038a"+ - "\u0007\u0001\u0000\u0000\u038a\u038b\u0007\u0012\u0000\u0000\u038b\u038c"+ - "\u0007\u0003\u0000\u0000\u038c\u008f\u0001\u0000\u0000\u0000\u038d\u038e"+ - "\u0005)\u0000\u0000\u038e\u0091\u0001\u0000\u0000\u0000\u038f\u0390\u0007"+ - "\u0005\u0000\u0000\u0390\u0391\u0007\u0006\u0000\u0000\u0391\u0392\u0007"+ - "\u0014\u0000\u0000\u0392\u0393\u0007\u0003\u0000\u0000\u0393\u0093\u0001"+ - "\u0000\u0000\u0000\u0394\u0395\u0005=\u0000\u0000\u0395\u0396\u0005=\u0000"+ - "\u0000\u0396\u0095\u0001\u0000\u0000\u0000\u0397\u0398\u0005=\u0000\u0000"+ - "\u0398\u0399\u0005~\u0000\u0000\u0399\u0097\u0001\u0000\u0000\u0000\u039a"+ - "\u039b\u0005!\u0000\u0000\u039b\u039c\u0005=\u0000\u0000\u039c\u0099\u0001"+ - "\u0000\u0000\u0000\u039d\u039e\u0005<\u0000\u0000\u039e\u009b\u0001\u0000"+ - "\u0000\u0000\u039f\u03a0\u0005<\u0000\u0000\u03a0\u03a1\u0005=\u0000\u0000"+ - "\u03a1\u009d\u0001\u0000\u0000\u0000\u03a2\u03a3\u0005>\u0000\u0000\u03a3"+ - "\u009f\u0001\u0000\u0000\u0000\u03a4\u03a5\u0005>\u0000\u0000\u03a5\u03a6"+ - "\u0005=\u0000\u0000\u03a6\u00a1\u0001\u0000\u0000\u0000\u03a7\u03a8\u0005"+ - "+\u0000\u0000\u03a8\u00a3\u0001\u0000\u0000\u0000\u03a9\u03aa\u0005-\u0000"+ - "\u0000\u03aa\u00a5\u0001\u0000\u0000\u0000\u03ab\u03ac\u0005*\u0000\u0000"+ - 
"\u03ac\u00a7\u0001\u0000\u0000\u0000\u03ad\u03ae\u0005/\u0000\u0000\u03ae"+ - "\u00a9\u0001\u0000\u0000\u0000\u03af\u03b0\u0005%\u0000\u0000\u03b0\u00ab"+ - "\u0001\u0000\u0000\u0000\u03b1\u03b2\u0005{\u0000\u0000\u03b2\u00ad\u0001"+ - "\u0000\u0000\u0000\u03b3\u03b4\u0005}\u0000\u0000\u03b4\u00af\u0001\u0000"+ - "\u0000\u0000\u03b5\u03b6\u0003.\u000f\u0000\u03b6\u03b7\u0001\u0000\u0000"+ - "\u0000\u03b7\u03b8\u0006P\r\u0000\u03b8\u00b1\u0001\u0000\u0000\u0000"+ - "\u03b9\u03bc\u0003\u008c>\u0000\u03ba\u03bd\u0003L\u001e\u0000\u03bb\u03bd"+ - "\u0003Z%\u0000\u03bc\u03ba\u0001\u0000\u0000\u0000\u03bc\u03bb\u0001\u0000"+ - "\u0000\u0000\u03bd\u03c1\u0001\u0000\u0000\u0000\u03be\u03c0\u0003\\&"+ - "\u0000\u03bf\u03be\u0001\u0000\u0000\u0000\u03c0\u03c3\u0001\u0000\u0000"+ - "\u0000\u03c1\u03bf\u0001\u0000\u0000\u0000\u03c1\u03c2\u0001\u0000\u0000"+ - "\u0000\u03c2\u03cb\u0001\u0000\u0000\u0000\u03c3\u03c1\u0001\u0000\u0000"+ - "\u0000\u03c4\u03c6\u0003\u008c>\u0000\u03c5\u03c7\u0003J\u001d\u0000\u03c6"+ - "\u03c5\u0001\u0000\u0000\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8"+ - "\u03c6\u0001\u0000\u0000\u0000\u03c8\u03c9\u0001\u0000\u0000\u0000\u03c9"+ - "\u03cb\u0001\u0000\u0000\u0000\u03ca\u03b9\u0001\u0000\u0000\u0000\u03ca"+ - "\u03c4\u0001\u0000\u0000\u0000\u03cb\u00b3\u0001\u0000\u0000\u0000\u03cc"+ - "\u03cd\u0005[\u0000\u0000\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce\u03cf"+ - "\u0006R\u0000\u0000\u03cf\u03d0\u0006R\u0000\u0000\u03d0\u00b5\u0001\u0000"+ - "\u0000\u0000\u03d1\u03d2\u0005]\u0000\u0000\u03d2\u03d3\u0001\u0000\u0000"+ - "\u0000\u03d3\u03d4\u0006S\f\u0000\u03d4\u03d5\u0006S\f\u0000\u03d5\u00b7"+ - "\u0001\u0000\u0000\u0000\u03d6\u03da\u0003L\u001e\u0000\u03d7\u03d9\u0003"+ - "\\&\u0000\u03d8\u03d7\u0001\u0000\u0000\u0000\u03d9\u03dc\u0001\u0000"+ - "\u0000\u0000\u03da\u03d8\u0001\u0000\u0000\u0000\u03da\u03db\u0001\u0000"+ - "\u0000\u0000\u03db\u03e7\u0001\u0000\u0000\u0000\u03dc\u03da\u0001\u0000"+ - "\u0000\u0000\u03dd\u03e0\u0003Z%\u0000\u03de\u03e0\u0003T\"\u0000\u03df"+ - "\u03dd\u0001\u0000\u0000\u0000\u03df\u03de\u0001\u0000\u0000\u0000\u03e0"+ - "\u03e2\u0001\u0000\u0000\u0000\u03e1\u03e3\u0003\\&\u0000\u03e2\u03e1"+ - "\u0001\u0000\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e2"+ - "\u0001\u0000\u0000\u0000\u03e4\u03e5\u0001\u0000\u0000\u0000\u03e5\u03e7"+ - "\u0001\u0000\u0000\u0000\u03e6\u03d6\u0001\u0000\u0000\u0000\u03e6\u03df"+ - "\u0001\u0000\u0000\u0000\u03e7\u00b9\u0001\u0000\u0000\u0000\u03e8\u03ea"+ - "\u0003V#\u0000\u03e9\u03eb\u0003X$\u0000\u03ea\u03e9\u0001\u0000\u0000"+ - "\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ea\u0001\u0000\u0000"+ - "\u0000\u03ec\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0001\u0000\u0000"+ - "\u0000\u03ee\u03ef\u0003V#\u0000\u03ef\u00bb\u0001\u0000\u0000\u0000\u03f0"+ - "\u03f1\u0003\u00baU\u0000\u03f1\u00bd\u0001\u0000\u0000\u0000\u03f2\u03f3"+ - "\u0003B\u0019\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006"+ - "W\u000b\u0000\u03f5\u00bf\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003D\u001a"+ - "\u0000\u03f7\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006X\u000b\u0000"+ - "\u03f9\u00c1\u0001\u0000\u0000\u0000\u03fa\u03fb\u0003F\u001b\u0000\u03fb"+ - "\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0006Y\u000b\u0000\u03fd\u00c3"+ - "\u0001\u0000\u0000\u0000\u03fe\u03ff\u0003\u00b4R\u0000\u03ff\u0400\u0001"+ - "\u0000\u0000\u0000\u0400\u0401\u0006Z\u000e\u0000\u0401\u0402\u0006Z\u000f"+ - "\u0000\u0402\u00c5\u0001\u0000\u0000\u0000\u0403\u0404\u0003H\u001c\u0000"+ - 
"\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0406\u0006[\u0010\u0000\u0406"+ - "\u0407\u0006[\f\u0000\u0407\u00c7\u0001\u0000\u0000\u0000\u0408\u0409"+ - "\u0003F\u001b\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006"+ - "\\\u000b\u0000\u040b\u00c9\u0001\u0000\u0000\u0000\u040c\u040d\u0003B"+ - "\u0019\u0000\u040d\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006]\u000b"+ - "\u0000\u040f\u00cb\u0001\u0000\u0000\u0000\u0410\u0411\u0003D\u001a\u0000"+ - "\u0411\u0412\u0001\u0000\u0000\u0000\u0412\u0413\u0006^\u000b\u0000\u0413"+ - "\u00cd\u0001\u0000\u0000\u0000\u0414\u0415\u0003H\u001c\u0000\u0415\u0416"+ - "\u0001\u0000\u0000\u0000\u0416\u0417\u0006_\u0010\u0000\u0417\u0418\u0006"+ - "_\f\u0000\u0418\u00cf\u0001\u0000\u0000\u0000\u0419\u041a\u0003\u00b4"+ - "R\u0000\u041a\u041b\u0001\u0000\u0000\u0000\u041b\u041c\u0006`\u000e\u0000"+ - "\u041c\u00d1\u0001\u0000\u0000\u0000\u041d\u041e\u0003\u00b6S\u0000\u041e"+ - "\u041f\u0001\u0000\u0000\u0000\u041f\u0420\u0006a\u0011\u0000\u0420\u00d3"+ - "\u0001\u0000\u0000\u0000\u0421\u0422\u0003n/\u0000\u0422\u0423\u0001\u0000"+ - "\u0000\u0000\u0423\u0424\u0006b\u0012\u0000\u0424\u00d5\u0001\u0000\u0000"+ - "\u0000\u0425\u0426\u0003p0\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427"+ - "\u0428\u0006c\u0013\u0000\u0428\u00d7\u0001\u0000\u0000\u0000\u0429\u042a"+ - "\u0003j-\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006d"+ - "\u0014\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042e\u0007\u0010"+ - "\u0000\u0000\u042e\u042f\u0007\u0003\u0000\u0000\u042f\u0430\u0007\u0005"+ - "\u0000\u0000\u0430\u0431\u0007\f\u0000\u0000\u0431\u0432\u0007\u0000\u0000"+ - "\u0000\u0432\u0433\u0007\f\u0000\u0000\u0433\u0434\u0007\u0005\u0000\u0000"+ - "\u0434\u0435\u0007\f\u0000\u0000\u0435\u00db\u0001\u0000\u0000\u0000\u0436"+ - "\u043a\b!\u0000\u0000\u0437\u0438\u0005/\u0000\u0000\u0438\u043a\b\"\u0000"+ - "\u0000\u0439\u0436\u0001\u0000\u0000\u0000\u0439\u0437\u0001\u0000\u0000"+ - "\u0000\u043a\u00dd\u0001\u0000\u0000\u0000\u043b\u043d\u0003\u00dcf\u0000"+ - "\u043c\u043b\u0001\u0000\u0000\u0000\u043d\u043e\u0001\u0000\u0000\u0000"+ - "\u043e\u043c\u0001\u0000\u0000\u0000\u043e\u043f\u0001\u0000\u0000\u0000"+ - "\u043f\u00df\u0001\u0000\u0000\u0000\u0440\u0441\u0003\u00deg\u0000\u0441"+ - "\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006h\u0015\u0000\u0443\u00e1"+ - "\u0001\u0000\u0000\u0000\u0444\u0445\u0003^\'\u0000\u0445\u0446\u0001"+ - "\u0000\u0000\u0000\u0446\u0447\u0006i\u0016\u0000\u0447\u00e3\u0001\u0000"+ - "\u0000\u0000\u0448\u0449\u0003B\u0019\u0000\u0449\u044a\u0001\u0000\u0000"+ - "\u0000\u044a\u044b\u0006j\u000b\u0000\u044b\u00e5\u0001\u0000\u0000\u0000"+ - "\u044c\u044d\u0003D\u001a\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e"+ - "\u044f\u0006k\u000b\u0000\u044f\u00e7\u0001\u0000\u0000\u0000\u0450\u0451"+ - "\u0003F\u001b\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006"+ - "l\u000b\u0000\u0453\u00e9\u0001\u0000\u0000\u0000\u0454\u0455\u0003H\u001c"+ - "\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457\u0006m\u0010\u0000"+ - "\u0457\u0458\u0006m\f\u0000\u0458\u00eb\u0001\u0000\u0000\u0000\u0459"+ - "\u045a\u0003t2\u0000\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006"+ - "n\u0017\u0000\u045c\u00ed\u0001\u0000\u0000\u0000\u045d\u045e\u0003p0"+ - "\u0000\u045e\u045f\u0001\u0000\u0000\u0000\u045f\u0460\u0006o\u0013\u0000"+ - "\u0460\u00ef\u0001\u0000\u0000\u0000\u0461\u0462\u0004p\b\u0000\u0462"+ - "\u0463\u0003\u008c>\u0000\u0463\u0464\u0001\u0000\u0000\u0000\u0464\u0465"+ - 
"\u0006p\u0018\u0000\u0465\u00f1\u0001\u0000\u0000\u0000\u0466\u0467\u0004"+ - "q\t\u0000\u0467\u0468\u0003\u00b2Q\u0000\u0468\u0469\u0001\u0000\u0000"+ - "\u0000\u0469\u046a\u0006q\u0019\u0000\u046a\u00f3\u0001\u0000\u0000\u0000"+ - "\u046b\u0470\u0003L\u001e\u0000\u046c\u0470\u0003J\u001d\u0000\u046d\u0470"+ - "\u0003Z%\u0000\u046e\u0470\u0003\u00a6K\u0000\u046f\u046b\u0001\u0000"+ - "\u0000\u0000\u046f\u046c\u0001\u0000\u0000\u0000\u046f\u046d\u0001\u0000"+ - "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u00f5\u0001\u0000"+ - "\u0000\u0000\u0471\u0474\u0003L\u001e\u0000\u0472\u0474\u0003\u00a6K\u0000"+ - "\u0473\u0471\u0001\u0000\u0000\u0000\u0473\u0472\u0001\u0000\u0000\u0000"+ - "\u0474\u0478\u0001\u0000\u0000\u0000\u0475\u0477\u0003\u00f4r\u0000\u0476"+ - "\u0475\u0001\u0000\u0000\u0000\u0477\u047a\u0001\u0000\u0000\u0000\u0478"+ - "\u0476\u0001\u0000\u0000\u0000\u0478\u0479\u0001\u0000\u0000\u0000\u0479"+ - "\u0485\u0001\u0000\u0000\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047b"+ - "\u047e\u0003Z%\u0000\u047c\u047e\u0003T\"\u0000\u047d\u047b\u0001\u0000"+ - "\u0000\u0000\u047d\u047c\u0001\u0000\u0000\u0000\u047e\u0480\u0001\u0000"+ - "\u0000\u0000\u047f\u0481\u0003\u00f4r\u0000\u0480\u047f\u0001\u0000\u0000"+ - "\u0000\u0481\u0482\u0001\u0000\u0000\u0000\u0482\u0480\u0001\u0000\u0000"+ - "\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0485\u0001\u0000\u0000"+ - "\u0000\u0484\u0473\u0001\u0000\u0000\u0000\u0484\u047d\u0001\u0000\u0000"+ - "\u0000\u0485\u00f7\u0001\u0000\u0000\u0000\u0486\u0489\u0003\u00f6s\u0000"+ - "\u0487\u0489\u0003\u00baU\u0000\u0488\u0486\u0001\u0000\u0000\u0000\u0488"+ - "\u0487\u0001\u0000\u0000\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a"+ - "\u0488\u0001\u0000\u0000\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b"+ - "\u00f9\u0001\u0000\u0000\u0000\u048c\u048d\u0003B\u0019\u0000\u048d\u048e"+ - "\u0001\u0000\u0000\u0000\u048e\u048f\u0006u\u000b\u0000\u048f\u00fb\u0001"+ - "\u0000\u0000\u0000\u0490\u0491\u0003D\u001a\u0000\u0491\u0492\u0001\u0000"+ - "\u0000\u0000\u0492\u0493\u0006v\u000b\u0000\u0493\u00fd\u0001\u0000\u0000"+ - "\u0000\u0494\u0495\u0003F\u001b\u0000\u0495\u0496\u0001\u0000\u0000\u0000"+ - "\u0496\u0497\u0006w\u000b\u0000\u0497\u00ff\u0001\u0000\u0000\u0000\u0498"+ - "\u0499\u0003H\u001c\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b"+ - "\u0006x\u0010\u0000\u049b\u049c\u0006x\f\u0000\u049c\u0101\u0001\u0000"+ - "\u0000\u0000\u049d\u049e\u0003j-\u0000\u049e\u049f\u0001\u0000\u0000\u0000"+ - "\u049f\u04a0\u0006y\u0014\u0000\u04a0\u0103\u0001\u0000\u0000\u0000\u04a1"+ - "\u04a2\u0003p0\u0000\u04a2\u04a3\u0001\u0000\u0000\u0000\u04a3\u04a4\u0006"+ - "z\u0013\u0000\u04a4\u0105\u0001\u0000\u0000\u0000\u04a5\u04a6\u0003t2"+ - "\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000\u04a7\u04a8\u0006{\u0017\u0000"+ - "\u04a8\u0107\u0001\u0000\u0000\u0000\u04a9\u04aa\u0004|\n\u0000\u04aa"+ - "\u04ab\u0003\u008c>\u0000\u04ab\u04ac\u0001\u0000\u0000\u0000\u04ac\u04ad"+ - "\u0006|\u0018\u0000\u04ad\u0109\u0001\u0000\u0000\u0000\u04ae\u04af\u0004"+ - "}\u000b\u0000\u04af\u04b0\u0003\u00b2Q\u0000\u04b0\u04b1\u0001\u0000\u0000"+ - "\u0000\u04b1\u04b2\u0006}\u0019\u0000\u04b2\u010b\u0001\u0000\u0000\u0000"+ - "\u04b3\u04b4\u0007\f\u0000\u0000\u04b4\u04b5\u0007\u0002\u0000\u0000\u04b5"+ - "\u010d\u0001\u0000\u0000\u0000\u04b6\u04b7\u0003\u00f8t\u0000\u04b7\u04b8"+ - "\u0001\u0000\u0000\u0000\u04b8\u04b9\u0006\u007f\u001a\u0000\u04b9\u010f"+ - "\u0001\u0000\u0000\u0000\u04ba\u04bb\u0003B\u0019\u0000\u04bb\u04bc\u0001"+ - 
"\u0000\u0000\u0000\u04bc\u04bd\u0006\u0080\u000b\u0000\u04bd\u0111\u0001"+ - "\u0000\u0000\u0000\u04be\u04bf\u0003D\u001a\u0000\u04bf\u04c0\u0001\u0000"+ - "\u0000\u0000\u04c0\u04c1\u0006\u0081\u000b\u0000\u04c1\u0113\u0001\u0000"+ - "\u0000\u0000\u04c2\u04c3\u0003F\u001b\u0000\u04c3\u04c4\u0001\u0000\u0000"+ - "\u0000\u04c4\u04c5\u0006\u0082\u000b\u0000\u04c5\u0115\u0001\u0000\u0000"+ - "\u0000\u04c6\u04c7\u0003H\u001c\u0000\u04c7\u04c8\u0001\u0000\u0000\u0000"+ - "\u04c8\u04c9\u0006\u0083\u0010\u0000\u04c9\u04ca\u0006\u0083\f\u0000\u04ca"+ - "\u0117\u0001\u0000\u0000\u0000\u04cb\u04cc\u0003\u00b4R\u0000\u04cc\u04cd"+ - "\u0001\u0000\u0000\u0000\u04cd\u04ce\u0006\u0084\u000e\u0000\u04ce\u04cf"+ - "\u0006\u0084\u001b\u0000\u04cf\u0119\u0001\u0000\u0000\u0000\u04d0\u04d1"+ - "\u0007\u0007\u0000\u0000\u04d1\u04d2\u0007\t\u0000\u0000\u04d2\u04d3\u0001"+ - "\u0000\u0000\u0000\u04d3\u04d4\u0006\u0085\u001c\u0000\u04d4\u011b\u0001"+ - "\u0000\u0000\u0000\u04d5\u04d6\u0007\u0013\u0000\u0000\u04d6\u04d7\u0007"+ - "\u0001\u0000\u0000\u04d7\u04d8\u0007\u0005\u0000\u0000\u04d8\u04d9\u0007"+ - "\n\u0000\u0000\u04d9\u04da\u0001\u0000\u0000\u0000\u04da\u04db\u0006\u0086"+ - "\u001c\u0000\u04db\u011d\u0001\u0000\u0000\u0000\u04dc\u04dd\b#\u0000"+ - "\u0000\u04dd\u011f\u0001\u0000\u0000\u0000\u04de\u04e0\u0003\u011e\u0087"+ - "\u0000\u04df\u04de\u0001\u0000\u0000\u0000\u04e0\u04e1\u0001\u0000\u0000"+ - "\u0000\u04e1\u04df\u0001\u0000\u0000\u0000\u04e1\u04e2\u0001\u0000\u0000"+ - "\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000\u04e3\u04e4\u0003n/\u0000\u04e4"+ - "\u04e6\u0001\u0000\u0000\u0000\u04e5\u04df\u0001\u0000\u0000\u0000\u04e5"+ - "\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e8\u0001\u0000\u0000\u0000\u04e7"+ - "\u04e9\u0003\u011e\u0087\u0000\u04e8\u04e7\u0001\u0000\u0000\u0000\u04e9"+ - "\u04ea\u0001\u0000\u0000\u0000\u04ea\u04e8\u0001\u0000\u0000\u0000\u04ea"+ - "\u04eb\u0001\u0000\u0000\u0000\u04eb\u0121\u0001\u0000\u0000\u0000\u04ec"+ - "\u04ed\u0003\u0120\u0088\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee"+ - "\u04ef\u0006\u0089\u001d\u0000\u04ef\u0123\u0001\u0000\u0000\u0000\u04f0"+ - "\u04f1\u0003B\u0019\u0000\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3"+ - "\u0006\u008a\u000b\u0000\u04f3\u0125\u0001\u0000\u0000\u0000\u04f4\u04f5"+ - "\u0003D\u001a\u0000\u04f5\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006"+ - "\u008b\u000b\u0000\u04f7\u0127\u0001\u0000\u0000\u0000\u04f8\u04f9\u0003"+ - "F\u001b\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u008c"+ - "\u000b\u0000\u04fb\u0129\u0001\u0000\u0000\u0000\u04fc\u04fd\u0003H\u001c"+ - "\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff\u0006\u008d\u0010"+ - "\u0000\u04ff\u0500\u0006\u008d\f\u0000\u0500\u0501\u0006\u008d\f\u0000"+ - "\u0501\u012b\u0001\u0000\u0000\u0000\u0502\u0503\u0003j-\u0000\u0503\u0504"+ - "\u0001\u0000\u0000\u0000\u0504\u0505\u0006\u008e\u0014\u0000\u0505\u012d"+ - "\u0001\u0000\u0000\u0000\u0506\u0507\u0003p0\u0000\u0507\u0508\u0001\u0000"+ - "\u0000\u0000\u0508\u0509\u0006\u008f\u0013\u0000\u0509\u012f\u0001\u0000"+ - "\u0000\u0000\u050a\u050b\u0003t2\u0000\u050b\u050c\u0001\u0000\u0000\u0000"+ - "\u050c\u050d\u0006\u0090\u0017\u0000\u050d\u0131\u0001\u0000\u0000\u0000"+ - "\u050e\u050f\u0003\u011c\u0086\u0000\u050f\u0510\u0001\u0000\u0000\u0000"+ - "\u0510\u0511\u0006\u0091\u001e\u0000\u0511\u0133\u0001\u0000\u0000\u0000"+ - "\u0512\u0513\u0003\u00f8t\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514"+ - "\u0515\u0006\u0092\u001a\u0000\u0515\u0135\u0001\u0000\u0000\u0000\u0516"+ - 
"\u0517\u0003\u00bcV\u0000\u0517\u0518\u0001\u0000\u0000\u0000\u0518\u0519"+ - "\u0006\u0093\u001f\u0000\u0519\u0137\u0001\u0000\u0000\u0000\u051a\u051b"+ - "\u0004\u0094\f\u0000\u051b\u051c\u0003\u008c>\u0000\u051c\u051d\u0001"+ - "\u0000\u0000\u0000\u051d\u051e\u0006\u0094\u0018\u0000\u051e\u0139\u0001"+ - "\u0000\u0000\u0000\u051f\u0520\u0004\u0095\r\u0000\u0520\u0521\u0003\u00b2"+ - "Q\u0000\u0521\u0522\u0001\u0000\u0000\u0000\u0522\u0523\u0006\u0095\u0019"+ - "\u0000\u0523\u013b\u0001\u0000\u0000\u0000\u0524\u0525\u0003B\u0019\u0000"+ - "\u0525\u0526\u0001\u0000\u0000\u0000\u0526\u0527\u0006\u0096\u000b\u0000"+ - "\u0527\u013d\u0001\u0000\u0000\u0000\u0528\u0529\u0003D\u001a\u0000\u0529"+ - "\u052a\u0001\u0000\u0000\u0000\u052a\u052b\u0006\u0097\u000b\u0000\u052b"+ - "\u013f\u0001\u0000\u0000\u0000\u052c\u052d\u0003F\u001b\u0000\u052d\u052e"+ - "\u0001\u0000\u0000\u0000\u052e\u052f\u0006\u0098\u000b\u0000\u052f\u0141"+ - "\u0001\u0000\u0000\u0000\u0530\u0531\u0003H\u001c\u0000\u0531\u0532\u0001"+ - "\u0000\u0000\u0000\u0532\u0533\u0006\u0099\u0010\u0000\u0533\u0534\u0006"+ - "\u0099\f\u0000\u0534\u0143\u0001\u0000\u0000\u0000\u0535\u0536\u0003t"+ - "2\u0000\u0536\u0537\u0001\u0000\u0000\u0000\u0537\u0538\u0006\u009a\u0017"+ - "\u0000\u0538\u0145\u0001\u0000\u0000\u0000\u0539\u053a\u0004\u009b\u000e"+ - "\u0000\u053a\u053b\u0003\u008c>\u0000\u053b\u053c\u0001\u0000\u0000\u0000"+ - "\u053c\u053d\u0006\u009b\u0018\u0000\u053d\u0147\u0001\u0000\u0000\u0000"+ - "\u053e\u053f\u0004\u009c\u000f\u0000\u053f\u0540\u0003\u00b2Q\u0000\u0540"+ - "\u0541\u0001\u0000\u0000\u0000\u0541\u0542\u0006\u009c\u0019\u0000\u0542"+ - "\u0149\u0001\u0000\u0000\u0000\u0543\u0544\u0003\u00bcV\u0000\u0544\u0545"+ - "\u0001\u0000\u0000\u0000\u0545\u0546\u0006\u009d\u001f\u0000\u0546\u014b"+ - "\u0001\u0000\u0000\u0000\u0547\u0548\u0003\u00b8T\u0000\u0548\u0549\u0001"+ - "\u0000\u0000\u0000\u0549\u054a\u0006\u009e \u0000\u054a\u014d\u0001\u0000"+ - "\u0000\u0000\u054b\u054c\u0003B\u0019\u0000\u054c\u054d\u0001\u0000\u0000"+ - "\u0000\u054d\u054e\u0006\u009f\u000b\u0000\u054e\u014f\u0001\u0000\u0000"+ - "\u0000\u054f\u0550\u0003D\u001a\u0000\u0550\u0551\u0001\u0000\u0000\u0000"+ - "\u0551\u0552\u0006\u00a0\u000b\u0000\u0552\u0151\u0001\u0000\u0000\u0000"+ - "\u0553\u0554\u0003F\u001b\u0000\u0554\u0555\u0001\u0000\u0000\u0000\u0555"+ - "\u0556\u0006\u00a1\u000b\u0000\u0556\u0153\u0001\u0000\u0000\u0000\u0557"+ - "\u0558\u0003H\u001c\u0000\u0558\u0559\u0001\u0000\u0000\u0000\u0559\u055a"+ - "\u0006\u00a2\u0010\u0000\u055a\u055b\u0006\u00a2\f\u0000\u055b\u0155\u0001"+ - "\u0000\u0000\u0000\u055c\u055d\u0007\u0001\u0000\u0000\u055d\u055e\u0007"+ - "\t\u0000\u0000\u055e\u055f\u0007\u000f\u0000\u0000\u055f\u0560\u0007\u0007"+ - "\u0000\u0000\u0560\u0157\u0001\u0000\u0000\u0000\u0561\u0562\u0003B\u0019"+ - "\u0000\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006\u00a4\u000b"+ - "\u0000\u0564\u0159\u0001\u0000\u0000\u0000\u0565\u0566\u0003D\u001a\u0000"+ - "\u0566\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00a5\u000b\u0000"+ - "\u0568\u015b\u0001\u0000\u0000\u0000\u0569\u056a\u0003F\u001b\u0000\u056a"+ - "\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00a6\u000b\u0000\u056c"+ - "\u015d\u0001\u0000\u0000\u0000\u056d\u056e\u0003\u00b6S\u0000\u056e\u056f"+ - "\u0001\u0000\u0000\u0000\u056f\u0570\u0006\u00a7\u0011\u0000\u0570\u0571"+ - "\u0006\u00a7\f\u0000\u0571\u015f\u0001\u0000\u0000\u0000\u0572\u0573\u0003"+ - "n/\u0000\u0573\u0574\u0001\u0000\u0000\u0000\u0574\u0575\u0006\u00a8\u0012"+ - 
"\u0000\u0575\u0161\u0001\u0000\u0000\u0000\u0576\u057c\u0003T\"\u0000"+ - "\u0577\u057c\u0003J\u001d\u0000\u0578\u057c\u0003t2\u0000\u0579\u057c"+ - "\u0003L\u001e\u0000\u057a\u057c\u0003Z%\u0000\u057b\u0576\u0001\u0000"+ - "\u0000\u0000\u057b\u0577\u0001\u0000\u0000\u0000\u057b\u0578\u0001\u0000"+ - "\u0000\u0000\u057b\u0579\u0001\u0000\u0000\u0000\u057b\u057a\u0001\u0000"+ - "\u0000\u0000\u057c\u057d\u0001\u0000\u0000\u0000\u057d\u057b\u0001\u0000"+ - "\u0000\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u0163\u0001\u0000"+ - "\u0000\u0000\u057f\u0580\u0003B\u0019\u0000\u0580\u0581\u0001\u0000\u0000"+ - "\u0000\u0581\u0582\u0006\u00aa\u000b\u0000\u0582\u0165\u0001\u0000\u0000"+ - "\u0000\u0583\u0584\u0003D\u001a\u0000\u0584\u0585\u0001\u0000\u0000\u0000"+ - "\u0585\u0586\u0006\u00ab\u000b\u0000\u0586\u0167\u0001\u0000\u0000\u0000"+ - "\u0587\u0588\u0003F\u001b\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589"+ - "\u058a\u0006\u00ac\u000b\u0000\u058a\u0169\u0001\u0000\u0000\u0000\u058b"+ - "\u058c\u0003H\u001c\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e"+ - "\u0006\u00ad\u0010\u0000\u058e\u058f\u0006\u00ad\f\u0000\u058f\u016b\u0001"+ - "\u0000\u0000\u0000\u0590\u0591\u0003n/\u0000\u0591\u0592\u0001\u0000\u0000"+ - "\u0000\u0592\u0593\u0006\u00ae\u0012\u0000\u0593\u016d\u0001\u0000\u0000"+ - "\u0000\u0594\u0595\u0003p0\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596"+ - "\u0597\u0006\u00af\u0013\u0000\u0597\u016f\u0001\u0000\u0000\u0000\u0598"+ - "\u0599\u0003t2\u0000\u0599\u059a\u0001\u0000\u0000\u0000\u059a\u059b\u0006"+ - "\u00b0\u0017\u0000\u059b\u0171\u0001\u0000\u0000\u0000\u059c\u059d\u0003"+ - "\u011a\u0085\u0000\u059d\u059e\u0001\u0000\u0000\u0000\u059e\u059f\u0006"+ - "\u00b1!\u0000\u059f\u05a0\u0006\u00b1\"\u0000\u05a0\u0173\u0001\u0000"+ - "\u0000\u0000\u05a1\u05a2\u0003\u00deg\u0000\u05a2\u05a3\u0001\u0000\u0000"+ - "\u0000\u05a3\u05a4\u0006\u00b2\u0015\u0000\u05a4\u0175\u0001\u0000\u0000"+ - "\u0000\u05a5\u05a6\u0003^\'\u0000\u05a6\u05a7\u0001\u0000\u0000\u0000"+ - "\u05a7\u05a8\u0006\u00b3\u0016\u0000\u05a8\u0177\u0001\u0000\u0000\u0000"+ - "\u05a9\u05aa\u0003B\u0019\u0000\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab"+ - "\u05ac\u0006\u00b4\u000b\u0000\u05ac\u0179\u0001\u0000\u0000\u0000\u05ad"+ - "\u05ae\u0003D\u001a\u0000\u05ae\u05af\u0001\u0000\u0000\u0000\u05af\u05b0"+ - "\u0006\u00b5\u000b\u0000\u05b0\u017b\u0001\u0000\u0000\u0000\u05b1\u05b2"+ - "\u0003F\u001b\u0000\u05b2\u05b3\u0001\u0000\u0000\u0000\u05b3\u05b4\u0006"+ - "\u00b6\u000b\u0000\u05b4\u017d\u0001\u0000\u0000\u0000\u05b5\u05b6\u0003"+ - "H\u001c\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7\u05b8\u0006\u00b7"+ - "\u0010\u0000\u05b8\u05b9\u0006\u00b7\f\u0000\u05b9\u05ba\u0006\u00b7\f"+ - "\u0000\u05ba\u017f\u0001\u0000\u0000\u0000\u05bb\u05bc\u0003p0\u0000\u05bc"+ - "\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be\u0006\u00b8\u0013\u0000\u05be"+ - "\u0181\u0001\u0000\u0000\u0000\u05bf\u05c0\u0003t2\u0000\u05c0\u05c1\u0001"+ - "\u0000\u0000\u0000\u05c1\u05c2\u0006\u00b9\u0017\u0000\u05c2\u0183\u0001"+ - "\u0000\u0000\u0000\u05c3\u05c4\u0003\u00f8t\u0000\u05c4\u05c5\u0001\u0000"+ - "\u0000\u0000\u05c5\u05c6\u0006\u00ba\u001a\u0000\u05c6\u0185\u0001\u0000"+ - "\u0000\u0000\u05c7\u05c8\u0003B\u0019\u0000\u05c8\u05c9\u0001\u0000\u0000"+ - "\u0000\u05c9\u05ca\u0006\u00bb\u000b\u0000\u05ca\u0187\u0001\u0000\u0000"+ - "\u0000\u05cb\u05cc\u0003D\u001a\u0000\u05cc\u05cd\u0001\u0000\u0000\u0000"+ - "\u05cd\u05ce\u0006\u00bc\u000b\u0000\u05ce\u0189\u0001\u0000\u0000\u0000"+ - 
"\u05cf\u05d0\u0003F\u001b\u0000\u05d0\u05d1\u0001\u0000\u0000\u0000\u05d1"+ - "\u05d2\u0006\u00bd\u000b\u0000\u05d2\u018b\u0001\u0000\u0000\u0000\u05d3"+ - "\u05d4\u0003H\u001c\u0000\u05d4\u05d5\u0001\u0000\u0000\u0000\u05d5\u05d6"+ - "\u0006\u00be\u0010\u0000\u05d6\u05d7\u0006\u00be\f\u0000\u05d7\u018d\u0001"+ - "\u0000\u0000\u0000\u05d8\u05d9\u00036\u0013\u0000\u05d9\u05da\u0001\u0000"+ - "\u0000\u0000\u05da\u05db\u0006\u00bf#\u0000\u05db\u018f\u0001\u0000\u0000"+ - "\u0000\u05dc\u05dd\u0003\u010c~\u0000\u05dd\u05de\u0001\u0000\u0000\u0000"+ - "\u05de\u05df\u0006\u00c0$\u0000\u05df\u0191\u0001\u0000\u0000\u0000\u05e0"+ - "\u05e1\u0003\u011a\u0085\u0000\u05e1\u05e2\u0001\u0000\u0000\u0000\u05e2"+ - "\u05e3\u0006\u00c1!\u0000\u05e3\u05e4\u0006\u00c1\f\u0000\u05e4\u05e5"+ - "\u0006\u00c1\u0000\u0000\u05e5\u0193\u0001\u0000\u0000\u0000\u05e6\u05e7"+ - "\u0007\u0014\u0000\u0000\u05e7\u05e8\u0007\u0002\u0000\u0000\u05e8\u05e9"+ - "\u0007\u0001\u0000\u0000\u05e9\u05ea\u0007\t\u0000\u0000\u05ea\u05eb\u0007"+ - "\u0011\u0000\u0000\u05eb\u05ec\u0001\u0000\u0000\u0000\u05ec\u05ed\u0006"+ - "\u00c2\f\u0000\u05ed\u05ee\u0006\u00c2\u0000\u0000\u05ee\u0195\u0001\u0000"+ - "\u0000\u0000\u05ef\u05f0\u0003\u00deg\u0000\u05f0\u05f1\u0001\u0000\u0000"+ - "\u0000\u05f1\u05f2\u0006\u00c3\u0015\u0000\u05f2\u0197\u0001\u0000\u0000"+ - "\u0000\u05f3\u05f4\u0003^\'\u0000\u05f4\u05f5\u0001\u0000\u0000\u0000"+ - "\u05f5\u05f6\u0006\u00c4\u0016\u0000\u05f6\u0199\u0001\u0000\u0000\u0000"+ - "\u05f7\u05f8\u0003n/\u0000\u05f8\u05f9\u0001\u0000\u0000\u0000\u05f9\u05fa"+ - "\u0006\u00c5\u0012\u0000\u05fa\u019b\u0001\u0000\u0000\u0000\u05fb\u05fc"+ - "\u0003\u00b8T\u0000\u05fc\u05fd\u0001\u0000\u0000\u0000\u05fd\u05fe\u0006"+ - "\u00c6 \u0000\u05fe\u019d\u0001\u0000\u0000\u0000\u05ff\u0600\u0003\u00bc"+ - "V\u0000\u0600\u0601\u0001\u0000\u0000\u0000\u0601\u0602\u0006\u00c7\u001f"+ - "\u0000\u0602\u019f\u0001\u0000\u0000\u0000\u0603\u0604\u0003B\u0019\u0000"+ - "\u0604\u0605\u0001\u0000\u0000\u0000\u0605\u0606\u0006\u00c8\u000b\u0000"+ - "\u0606\u01a1\u0001\u0000\u0000\u0000\u0607\u0608\u0003D\u001a\u0000\u0608"+ - "\u0609\u0001\u0000\u0000\u0000\u0609\u060a\u0006\u00c9\u000b\u0000\u060a"+ - "\u01a3\u0001\u0000\u0000\u0000\u060b\u060c\u0003F\u001b\u0000\u060c\u060d"+ - "\u0001\u0000\u0000\u0000\u060d\u060e\u0006\u00ca\u000b\u0000\u060e\u01a5"+ - "\u0001\u0000\u0000\u0000\u060f\u0610\u0003H\u001c\u0000\u0610\u0611\u0001"+ - "\u0000\u0000\u0000\u0611\u0612\u0006\u00cb\u0010\u0000\u0612\u0613\u0006"+ - "\u00cb\f\u0000\u0613\u01a7\u0001\u0000\u0000\u0000\u0614\u0615\u0003\u00de"+ - "g\u0000\u0615\u0616\u0001\u0000\u0000\u0000\u0616\u0617\u0006\u00cc\u0015"+ - "\u0000\u0617\u0618\u0006\u00cc\f\u0000\u0618\u0619\u0006\u00cc%\u0000"+ - "\u0619\u01a9\u0001\u0000\u0000\u0000\u061a\u061b\u0003^\'\u0000\u061b"+ - "\u061c\u0001\u0000\u0000\u0000\u061c\u061d\u0006\u00cd\u0016\u0000\u061d"+ - "\u061e\u0006\u00cd\f\u0000\u061e\u061f\u0006\u00cd%\u0000\u061f\u01ab"+ - "\u0001\u0000\u0000\u0000\u0620\u0621\u0003B\u0019\u0000\u0621\u0622\u0001"+ - "\u0000\u0000\u0000\u0622\u0623\u0006\u00ce\u000b\u0000\u0623\u01ad\u0001"+ - "\u0000\u0000\u0000\u0624\u0625\u0003D\u001a\u0000\u0625\u0626\u0001\u0000"+ - "\u0000\u0000\u0626\u0627\u0006\u00cf\u000b\u0000\u0627\u01af\u0001\u0000"+ - "\u0000\u0000\u0628\u0629\u0003F\u001b\u0000\u0629\u062a\u0001\u0000\u0000"+ - "\u0000\u062a\u062b\u0006\u00d0\u000b\u0000\u062b\u01b1\u0001\u0000\u0000"+ - "\u0000\u062c\u062d\u0003n/\u0000\u062d\u062e\u0001\u0000\u0000\u0000\u062e"+ - 
"\u062f\u0006\u00d1\u0012\u0000\u062f\u0630\u0006\u00d1\f\u0000\u0630\u0631"+ - "\u0006\u00d1\t\u0000\u0631\u01b3\u0001\u0000\u0000\u0000\u0632\u0633\u0003"+ - "p0\u0000\u0633\u0634\u0001\u0000\u0000\u0000\u0634\u0635\u0006\u00d2\u0013"+ - "\u0000\u0635\u0636\u0006\u00d2\f\u0000\u0636\u0637\u0006\u00d2\t\u0000"+ - "\u0637\u01b5\u0001\u0000\u0000\u0000\u0638\u0639\u0003B\u0019\u0000\u0639"+ - "\u063a\u0001\u0000\u0000\u0000\u063a\u063b\u0006\u00d3\u000b\u0000\u063b"+ - "\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003D\u001a\u0000\u063d\u063e"+ - "\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4\u000b\u0000\u063f\u01b9"+ - "\u0001\u0000\u0000\u0000\u0640\u0641\u0003F\u001b\u0000\u0641\u0642\u0001"+ - "\u0000\u0000\u0000\u0642\u0643\u0006\u00d5\u000b\u0000\u0643\u01bb\u0001"+ - "\u0000\u0000\u0000\u0644\u0645\u0003\u00bcV\u0000\u0645\u0646\u0001\u0000"+ - "\u0000\u0000\u0646\u0647\u0006\u00d6\f\u0000\u0647\u0648\u0006\u00d6\u0000"+ - "\u0000\u0648\u0649\u0006\u00d6\u001f\u0000\u0649\u01bd\u0001\u0000\u0000"+ - "\u0000\u064a\u064b\u0003\u00b8T\u0000\u064b\u064c\u0001\u0000\u0000\u0000"+ - "\u064c\u064d\u0006\u00d7\f\u0000\u064d\u064e\u0006\u00d7\u0000\u0000\u064e"+ - "\u064f\u0006\u00d7 \u0000\u064f\u01bf\u0001\u0000\u0000\u0000\u0650\u0651"+ - "\u0003d*\u0000\u0651\u0652\u0001\u0000\u0000\u0000\u0652\u0653\u0006\u00d8"+ - "\f\u0000\u0653\u0654\u0006\u00d8\u0000\u0000\u0654\u0655\u0006\u00d8&"+ - "\u0000\u0655\u01c1\u0001\u0000\u0000\u0000\u0656\u0657\u0003H\u001c\u0000"+ - "\u0657\u0658\u0001\u0000\u0000\u0000\u0658\u0659\u0006\u00d9\u0010\u0000"+ - "\u0659\u065a\u0006\u00d9\f\u0000\u065a\u01c3\u0001\u0000\u0000\u0000B"+ + "\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001"+ + "\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001"+ + "\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001"+ + "\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ + "\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001"+ + "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ + "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ + "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001"+ + "\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001"+ + "\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001"+ + "\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001"+ + "\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001"+ + "\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004"+ + "\u00a9\u0582\b\u00a9\u000b\u00a9\f\u00a9\u0583\u0001\u00aa\u0001\u00aa"+ + "\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab"+ + "\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad"+ + "\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae"+ + "\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1"+ + "\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2"+ + "\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4"+ + "\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ + 
"\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7"+ + "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8"+ + "\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9"+ + "\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb"+ + "\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be"+ + "\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ + "\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2"+ + "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c5"+ + "\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6\u0001\u00c6\u0001\u00c6"+ + "\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8"+ + "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9\u0001\u00c9"+ + "\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00cb"+ + "\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001\u00cc"+ + "\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd"+ + "\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce"+ + "\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf"+ + "\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001\u00d1"+ + "\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2"+ + "\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3"+ + "\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d4"+ + "\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d6\u0001\u00d6"+ + "\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d7\u0001\u00d7"+ + "\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d8\u0001\u00d8"+ + "\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d9\u0001\u00d9"+ + "\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00da\u0001\u00da\u0001\u00da"+ + "\u0001\u00da\u0001\u00da\u0001\u00db\u0001\u00db\u0001\u00db\u0001\u00db"+ + "\u0001\u00dc\u0001\u00dc\u0001\u00dc\u0001\u00dc\u0001\u00dd\u0001\u00dd"+ + "\u0001\u00dd\u0001\u00dd\u0001\u00de\u0001\u00de\u0001\u00de\u0001\u00de"+ + "\u0002\u02c3\u0308\u0000\u00df\u0011\u0001\u0013\u0002\u0015\u0003\u0017"+ + "\u0004\u0019\u0005\u001b\u0006\u001d\u0007\u001f\b!\t#\n%\u000b\'\f)\r"+ + "+\u000e-\u000f/\u00101\u00113\u00125\u00137\u00149\u0015;\u0016=\u0017"+ + "?\u0018A\u0019C\u001aE\u001bG\u001cI\u001dK\u0000M\u0000O\u0000Q\u0000"+ + "S\u0000U\u0000W\u0000Y\u0000[\u0000]\u0000_\u001ea\u001fc e!g\"i#k$m%"+ + "o&q\'s(u)w*y+{,}-\u007f.\u0081/\u00830\u00851\u00872\u00893\u008b4\u008d"+ + "5\u008f6\u00917\u00938\u00959\u0097:\u0099;\u009b<\u009d=\u009f>\u00a1"+ + "?\u00a3@\u00a5A\u00a7B\u00a9C\u00abD\u00adE\u00afF\u00b1\u0000\u00b3G"+ + "\u00b5H\u00b7I\u00b9J\u00bb\u0000\u00bdK\u00bfL\u00c1M\u00c3N\u00c5\u0000"+ + "\u00c7\u0000\u00c9O\u00cbP\u00cdQ\u00cf\u0000\u00d1\u0000\u00d3\u0000"+ + "\u00d5\u0000\u00d7\u0000\u00d9\u0000\u00dbR\u00dd\u0000\u00dfS\u00e1\u0000"+ + "\u00e3\u0000\u00e5T\u00e7U\u00e9V\u00eb\u0000\u00ed\u0000\u00ef\u0000"+ + "\u00f1\u0000\u00f3\u0000\u00f5\u0000\u00f7\u0000\u00f9W\u00fbX\u00fdY"+ + 
"\u00ffZ\u0101\u0000\u0103\u0000\u0105\u0000\u0107\u0000\u0109\u0000\u010b"+ + "\u0000\u010d[\u010f\u0000\u0111\\\u0113]\u0115^\u0117\u0000\u0119\u0000"+ + "\u011b_\u011d`\u011f\u0000\u0121a\u0123\u0000\u0125b\u0127c\u0129d\u012b"+ + "\u0000\u012d\u0000\u012f\u0000\u0131\u0000\u0133\u0000\u0135\u0000\u0137"+ + "\u0000\u0139\u0000\u013b\u0000\u013de\u013ff\u0141g\u0143\u0000\u0145"+ + "\u0000\u0147\u0000\u0149\u0000\u014b\u0000\u014d\u0000\u014fh\u0151i\u0153"+ + "j\u0155\u0000\u0157k\u0159l\u015bm\u015dn\u015f\u0000\u0161\u0000\u0163"+ + "o\u0165p\u0167q\u0169r\u016b\u0000\u016d\u0000\u016f\u0000\u0171\u0000"+ + "\u0173\u0000\u0175\u0000\u0177\u0000\u0179s\u017bt\u017du\u017f\u0000"+ + "\u0181\u0000\u0183\u0000\u0185\u0000\u0187v\u0189w\u018bx\u018d\u0000"+ + "\u018fy\u0191\u0000\u0193\u0000\u0195z\u0197\u0000\u0199\u0000\u019b\u0000"+ + "\u019d\u0000\u019f\u0000\u01a1{\u01a3|\u01a5}\u01a7\u0000\u01a9\u0000"+ + "\u01ab\u0000\u01ad~\u01af\u007f\u01b1\u0080\u01b3\u0000\u01b5\u0000\u01b7"+ + "\u0081\u01b9\u0082\u01bb\u0083\u01bd\u0000\u01bf\u0000\u01c1\u0000\u01c3"+ + "\u0000\u01c5\u0000\u01c7\u0000\u01c9\u0084\u01cb\u0085\u01cd\u0086\u0011"+ "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ - "\u000f\u0299\u02a3\u02a7\u02aa\u02b3\u02b5\u02c0\u02d3\u02d8\u02e1\u02e8"+ - "\u02ed\u02ef\u02fa\u0302\u0305\u0307\u030c\u0311\u0317\u031e\u0323\u0329"+ - "\u032c\u0334\u0338\u03bc\u03c1\u03c8\u03ca\u03da\u03df\u03e4\u03e6\u03ec"+ - "\u0439\u043e\u046f\u0473\u0478\u047d\u0482\u0484\u0488\u048a\u04e1\u04e5"+ - "\u04ea\u057b\u057d\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000"+ - "\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005"+ - "\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000\u0001"+ - "\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007H\u0000\u0005\u0000\u0000"+ - "\u0007\u001d\u0000\u0007I\u0000\u0007&\u0000\u0007\'\u0000\u0007$\u0000"+ - "\u0007S\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007G\u0000"+ - "\u0007W\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007a\u0000\u0007`\u0000"+ - "\u0007K\u0000\u0007J\u0000\u0007_\u0000\u0005\f\u0000\u0007\u0014\u0000"+ - "\u0007[\u0000\u0005\u000f\u0000\u0007!\u0000"; + "\u000f\u0010$\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000"+ + "EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002"+ + "\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000"+ + "AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002"+ + "\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0006\u0000"+ + "\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u0000"+ + "09\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\""+ + "\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b"+ + "\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r"+ + " \"#,,//::<<>?\\\\||\u0002\u0000JJjj\u0691\u0000\u0011\u0001\u0000\u0000"+ + "\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000"+ + "\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000"+ + "\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001\u0000\u0000"+ + "\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000\u0000\u0000"+ + "\u0000#\u0001\u0000\u0000\u0000\u0000%\u0001\u0000\u0000\u0000\u0000\'"+ + "\u0001\u0000\u0000\u0000\u0000)\u0001\u0000\u0000\u0000\u0000+\u0001\u0000"+ + "\u0000\u0000\u0000-\u0001\u0000\u0000\u0000\u0000/\u0001\u0000\u0000\u0000"+ + 
"\u00001\u0001\u0000\u0000\u0000\u00003\u0001\u0000\u0000\u0000\u00005"+ + "\u0001\u0000\u0000\u0000\u00007\u0001\u0000\u0000\u0000\u00009\u0001\u0000"+ + "\u0000\u0000\u0000;\u0001\u0000\u0000\u0000\u0000=\u0001\u0000\u0000\u0000"+ + "\u0000?\u0001\u0000\u0000\u0000\u0000A\u0001\u0000\u0000\u0000\u0000C"+ + "\u0001\u0000\u0000\u0000\u0000E\u0001\u0000\u0000\u0000\u0000G\u0001\u0000"+ + "\u0000\u0000\u0001I\u0001\u0000\u0000\u0000\u0001_\u0001\u0000\u0000\u0000"+ + "\u0001a\u0001\u0000\u0000\u0000\u0001c\u0001\u0000\u0000\u0000\u0001e"+ + "\u0001\u0000\u0000\u0000\u0001g\u0001\u0000\u0000\u0000\u0001i\u0001\u0000"+ + "\u0000\u0000\u0001k\u0001\u0000\u0000\u0000\u0001m\u0001\u0000\u0000\u0000"+ + "\u0001o\u0001\u0000\u0000\u0000\u0001q\u0001\u0000\u0000\u0000\u0001s"+ + "\u0001\u0000\u0000\u0000\u0001u\u0001\u0000\u0000\u0000\u0001w\u0001\u0000"+ + "\u0000\u0000\u0001y\u0001\u0000\u0000\u0000\u0001{\u0001\u0000\u0000\u0000"+ + "\u0001}\u0001\u0000\u0000\u0000\u0001\u007f\u0001\u0000\u0000\u0000\u0001"+ + "\u0081\u0001\u0000\u0000\u0000\u0001\u0083\u0001\u0000\u0000\u0000\u0001"+ + "\u0085\u0001\u0000\u0000\u0000\u0001\u0087\u0001\u0000\u0000\u0000\u0001"+ + "\u0089\u0001\u0000\u0000\u0000\u0001\u008b\u0001\u0000\u0000\u0000\u0001"+ + "\u008d\u0001\u0000\u0000\u0000\u0001\u008f\u0001\u0000\u0000\u0000\u0001"+ + "\u0091\u0001\u0000\u0000\u0000\u0001\u0093\u0001\u0000\u0000\u0000\u0001"+ + "\u0095\u0001\u0000\u0000\u0000\u0001\u0097\u0001\u0000\u0000\u0000\u0001"+ + "\u0099\u0001\u0000\u0000\u0000\u0001\u009b\u0001\u0000\u0000\u0000\u0001"+ + "\u009d\u0001\u0000\u0000\u0000\u0001\u009f\u0001\u0000\u0000\u0000\u0001"+ + "\u00a1\u0001\u0000\u0000\u0000\u0001\u00a3\u0001\u0000\u0000\u0000\u0001"+ + "\u00a5\u0001\u0000\u0000\u0000\u0001\u00a7\u0001\u0000\u0000\u0000\u0001"+ + "\u00a9\u0001\u0000\u0000\u0000\u0001\u00ab\u0001\u0000\u0000\u0000\u0001"+ + "\u00ad\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ + "\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0001"+ + "\u00b5\u0001\u0000\u0000\u0000\u0001\u00b7\u0001\u0000\u0000\u0000\u0001"+ + "\u00b9\u0001\u0000\u0000\u0000\u0001\u00bd\u0001\u0000\u0000\u0000\u0001"+ + "\u00bf\u0001\u0000\u0000\u0000\u0001\u00c1\u0001\u0000\u0000\u0000\u0001"+ + "\u00c3\u0001\u0000\u0000\u0000\u0002\u00c5\u0001\u0000\u0000\u0000\u0002"+ + "\u00c7\u0001\u0000\u0000\u0000\u0002\u00c9\u0001\u0000\u0000\u0000\u0002"+ + "\u00cb\u0001\u0000\u0000\u0000\u0002\u00cd\u0001\u0000\u0000\u0000\u0003"+ + "\u00cf\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ + "\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003"+ + "\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0003"+ + "\u00db\u0001\u0000\u0000\u0000\u0003\u00df\u0001\u0000\u0000\u0000\u0003"+ + "\u00e1\u0001\u0000\u0000\u0000\u0003\u00e3\u0001\u0000\u0000\u0000\u0003"+ + "\u00e5\u0001\u0000\u0000\u0000\u0003\u00e7\u0001\u0000\u0000\u0000\u0003"+ + "\u00e9\u0001\u0000\u0000\u0000\u0004\u00eb\u0001\u0000\u0000\u0000\u0004"+ + "\u00ed\u0001\u0000\u0000\u0000\u0004\u00ef\u0001\u0000\u0000\u0000\u0004"+ + "\u00f1\u0001\u0000\u0000\u0000\u0004\u00f3\u0001\u0000\u0000\u0000\u0004"+ + "\u00f9\u0001\u0000\u0000\u0000\u0004\u00fb\u0001\u0000\u0000\u0000\u0004"+ + "\u00fd\u0001\u0000\u0000\u0000\u0004\u00ff\u0001\u0000\u0000\u0000\u0005"+ + "\u0101\u0001\u0000\u0000\u0000\u0005\u0103\u0001\u0000\u0000\u0000\u0005"+ + "\u0105\u0001\u0000\u0000\u0000\u0005\u0107\u0001\u0000\u0000\u0000\u0005"+ + 
"\u0109\u0001\u0000\u0000\u0000\u0005\u010b\u0001\u0000\u0000\u0000\u0005"+ + "\u010d\u0001\u0000\u0000\u0000\u0005\u010f\u0001\u0000\u0000\u0000\u0005"+ + "\u0111\u0001\u0000\u0000\u0000\u0005\u0113\u0001\u0000\u0000\u0000\u0005"+ + "\u0115\u0001\u0000\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0006"+ + "\u0119\u0001\u0000\u0000\u0000\u0006\u011b\u0001\u0000\u0000\u0000\u0006"+ + "\u011d\u0001\u0000\u0000\u0000\u0006\u0121\u0001\u0000\u0000\u0000\u0006"+ + "\u0123\u0001\u0000\u0000\u0000\u0006\u0125\u0001\u0000\u0000\u0000\u0006"+ + "\u0127\u0001\u0000\u0000\u0000\u0006\u0129\u0001\u0000\u0000\u0000\u0007"+ + "\u012b\u0001\u0000\u0000\u0000\u0007\u012d\u0001\u0000\u0000\u0000\u0007"+ + "\u012f\u0001\u0000\u0000\u0000\u0007\u0131\u0001\u0000\u0000\u0000\u0007"+ + "\u0133\u0001\u0000\u0000\u0000\u0007\u0135\u0001\u0000\u0000\u0000\u0007"+ + "\u0137\u0001\u0000\u0000\u0000\u0007\u0139\u0001\u0000\u0000\u0000\u0007"+ + "\u013b\u0001\u0000\u0000\u0000\u0007\u013d\u0001\u0000\u0000\u0000\u0007"+ + "\u013f\u0001\u0000\u0000\u0000\u0007\u0141\u0001\u0000\u0000\u0000\b\u0143"+ + "\u0001\u0000\u0000\u0000\b\u0145\u0001\u0000\u0000\u0000\b\u0147\u0001"+ + "\u0000\u0000\u0000\b\u0149\u0001\u0000\u0000\u0000\b\u014b\u0001\u0000"+ + "\u0000\u0000\b\u014d\u0001\u0000\u0000\u0000\b\u014f\u0001\u0000\u0000"+ + "\u0000\b\u0151\u0001\u0000\u0000\u0000\b\u0153\u0001\u0000\u0000\u0000"+ + "\t\u0155\u0001\u0000\u0000\u0000\t\u0157\u0001\u0000\u0000\u0000\t\u0159"+ + "\u0001\u0000\u0000\u0000\t\u015b\u0001\u0000\u0000\u0000\t\u015d\u0001"+ + "\u0000\u0000\u0000\n\u015f\u0001\u0000\u0000\u0000\n\u0161\u0001\u0000"+ + "\u0000\u0000\n\u0163\u0001\u0000\u0000\u0000\n\u0165\u0001\u0000\u0000"+ + "\u0000\n\u0167\u0001\u0000\u0000\u0000\n\u0169\u0001\u0000\u0000\u0000"+ + "\u000b\u016b\u0001\u0000\u0000\u0000\u000b\u016d\u0001\u0000\u0000\u0000"+ + "\u000b\u016f\u0001\u0000\u0000\u0000\u000b\u0171\u0001\u0000\u0000\u0000"+ + "\u000b\u0173\u0001\u0000\u0000\u0000\u000b\u0175\u0001\u0000\u0000\u0000"+ + "\u000b\u0177\u0001\u0000\u0000\u0000\u000b\u0179\u0001\u0000\u0000\u0000"+ + "\u000b\u017b\u0001\u0000\u0000\u0000\u000b\u017d\u0001\u0000\u0000\u0000"+ + "\f\u017f\u0001\u0000\u0000\u0000\f\u0181\u0001\u0000\u0000\u0000\f\u0183"+ + "\u0001\u0000\u0000\u0000\f\u0185\u0001\u0000\u0000\u0000\f\u0187\u0001"+ + "\u0000\u0000\u0000\f\u0189\u0001\u0000\u0000\u0000\f\u018b\u0001\u0000"+ + "\u0000\u0000\r\u018d\u0001\u0000\u0000\u0000\r\u018f\u0001\u0000\u0000"+ + "\u0000\r\u0191\u0001\u0000\u0000\u0000\r\u0193\u0001\u0000\u0000\u0000"+ + "\r\u0195\u0001\u0000\u0000\u0000\r\u0197\u0001\u0000\u0000\u0000\r\u0199"+ + "\u0001\u0000\u0000\u0000\r\u019b\u0001\u0000\u0000\u0000\r\u019d\u0001"+ + "\u0000\u0000\u0000\r\u019f\u0001\u0000\u0000\u0000\r\u01a1\u0001\u0000"+ + "\u0000\u0000\r\u01a3\u0001\u0000\u0000\u0000\r\u01a5\u0001\u0000\u0000"+ + "\u0000\u000e\u01a7\u0001\u0000\u0000\u0000\u000e\u01a9\u0001\u0000\u0000"+ + "\u0000\u000e\u01ab\u0001\u0000\u0000\u0000\u000e\u01ad\u0001\u0000\u0000"+ + "\u0000\u000e\u01af\u0001\u0000\u0000\u0000\u000e\u01b1\u0001\u0000\u0000"+ + "\u0000\u000f\u01b3\u0001\u0000\u0000\u0000\u000f\u01b5\u0001\u0000\u0000"+ + "\u0000\u000f\u01b7\u0001\u0000\u0000\u0000\u000f\u01b9\u0001\u0000\u0000"+ + "\u0000\u000f\u01bb\u0001\u0000\u0000\u0000\u000f\u01bd\u0001\u0000\u0000"+ + "\u0000\u000f\u01bf\u0001\u0000\u0000\u0000\u000f\u01c1\u0001\u0000\u0000"+ + "\u0000\u000f\u01c3\u0001\u0000\u0000\u0000\u0010\u01c5\u0001\u0000\u0000"+ + 
"\u0000\u0010\u01c7\u0001\u0000\u0000\u0000\u0010\u01c9\u0001\u0000\u0000"+ + "\u0000\u0010\u01cb\u0001\u0000\u0000\u0000\u0010\u01cd\u0001\u0000\u0000"+ + "\u0000\u0011\u01cf\u0001\u0000\u0000\u0000\u0013\u01d9\u0001\u0000\u0000"+ + "\u0000\u0015\u01e0\u0001\u0000\u0000\u0000\u0017\u01e9\u0001\u0000\u0000"+ + "\u0000\u0019\u01f0\u0001\u0000\u0000\u0000\u001b\u01fa\u0001\u0000\u0000"+ + "\u0000\u001d\u0201\u0001\u0000\u0000\u0000\u001f\u0208\u0001\u0000\u0000"+ + "\u0000!\u020f\u0001\u0000\u0000\u0000#\u0217\u0001\u0000\u0000\u0000%"+ + "\u0223\u0001\u0000\u0000\u0000\'\u022c\u0001\u0000\u0000\u0000)\u0232"+ + "\u0001\u0000\u0000\u0000+\u0239\u0001\u0000\u0000\u0000-\u0240\u0001\u0000"+ + "\u0000\u0000/\u0248\u0001\u0000\u0000\u00001\u0250\u0001\u0000\u0000\u0000"+ + "3\u0259\u0001\u0000\u0000\u00005\u0268\u0001\u0000\u0000\u00007\u0274"+ + "\u0001\u0000\u0000\u00009\u0280\u0001\u0000\u0000\u0000;\u028b\u0001\u0000"+ + "\u0000\u0000=\u0293\u0001\u0000\u0000\u0000?\u029b\u0001\u0000\u0000\u0000"+ + "A\u02a5\u0001\u0000\u0000\u0000C\u02ab\u0001\u0000\u0000\u0000E\u02bc"+ + "\u0001\u0000\u0000\u0000G\u02cc\u0001\u0000\u0000\u0000I\u02d2\u0001\u0000"+ + "\u0000\u0000K\u02d6\u0001\u0000\u0000\u0000M\u02d8\u0001\u0000\u0000\u0000"+ + "O\u02da\u0001\u0000\u0000\u0000Q\u02dd\u0001\u0000\u0000\u0000S\u02df"+ + "\u0001\u0000\u0000\u0000U\u02e8\u0001\u0000\u0000\u0000W\u02ea\u0001\u0000"+ + "\u0000\u0000Y\u02ef\u0001\u0000\u0000\u0000[\u02f1\u0001\u0000\u0000\u0000"+ + "]\u02f6\u0001\u0000\u0000\u0000_\u0315\u0001\u0000\u0000\u0000a\u0318"+ + "\u0001\u0000\u0000\u0000c\u0346\u0001\u0000\u0000\u0000e\u0348\u0001\u0000"+ + "\u0000\u0000g\u034b\u0001\u0000\u0000\u0000i\u034f\u0001\u0000\u0000\u0000"+ + "k\u0353\u0001\u0000\u0000\u0000m\u0355\u0001\u0000\u0000\u0000o\u0358"+ + "\u0001\u0000\u0000\u0000q\u035a\u0001\u0000\u0000\u0000s\u035c\u0001\u0000"+ + "\u0000\u0000u\u0361\u0001\u0000\u0000\u0000w\u0363\u0001\u0000\u0000\u0000"+ + "y\u0369\u0001\u0000\u0000\u0000{\u036f\u0001\u0000\u0000\u0000}\u0372"+ + "\u0001\u0000\u0000\u0000\u007f\u0375\u0001\u0000\u0000\u0000\u0081\u037a"+ + "\u0001\u0000\u0000\u0000\u0083\u037f\u0001\u0000\u0000\u0000\u0085\u0381"+ + "\u0001\u0000\u0000\u0000\u0087\u0385\u0001\u0000\u0000\u0000\u0089\u038a"+ + "\u0001\u0000\u0000\u0000\u008b\u0390\u0001\u0000\u0000\u0000\u008d\u0393"+ + "\u0001\u0000\u0000\u0000\u008f\u0395\u0001\u0000\u0000\u0000\u0091\u039b"+ + "\u0001\u0000\u0000\u0000\u0093\u039d\u0001\u0000\u0000\u0000\u0095\u03a2"+ + "\u0001\u0000\u0000\u0000\u0097\u03a5\u0001\u0000\u0000\u0000\u0099\u03a8"+ + "\u0001\u0000\u0000\u0000\u009b\u03ab\u0001\u0000\u0000\u0000\u009d\u03ad"+ + "\u0001\u0000\u0000\u0000\u009f\u03b0\u0001\u0000\u0000\u0000\u00a1\u03b2"+ + "\u0001\u0000\u0000\u0000\u00a3\u03b5\u0001\u0000\u0000\u0000\u00a5\u03b7"+ + "\u0001\u0000\u0000\u0000\u00a7\u03b9\u0001\u0000\u0000\u0000\u00a9\u03bb"+ + "\u0001\u0000\u0000\u0000\u00ab\u03bd\u0001\u0000\u0000\u0000\u00ad\u03bf"+ + "\u0001\u0000\u0000\u0000\u00af\u03c1\u0001\u0000\u0000\u0000\u00b1\u03c3"+ + "\u0001\u0000\u0000\u0000\u00b3\u03d8\u0001\u0000\u0000\u0000\u00b5\u03da"+ + "\u0001\u0000\u0000\u0000\u00b7\u03df\u0001\u0000\u0000\u0000\u00b9\u03f4"+ + "\u0001\u0000\u0000\u0000\u00bb\u03f6\u0001\u0000\u0000\u0000\u00bd\u03fe"+ + "\u0001\u0000\u0000\u0000\u00bf\u0400\u0001\u0000\u0000\u0000\u00c1\u0404"+ + "\u0001\u0000\u0000\u0000\u00c3\u0408\u0001\u0000\u0000\u0000\u00c5\u040c"+ + "\u0001\u0000\u0000\u0000\u00c7\u0411\u0001\u0000\u0000\u0000\u00c9\u0416"+ + 
"\u0001\u0000\u0000\u0000\u00cb\u041a\u0001\u0000\u0000\u0000\u00cd\u041e"+ + "\u0001\u0000\u0000\u0000\u00cf\u0422\u0001\u0000\u0000\u0000\u00d1\u0427"+ + "\u0001\u0000\u0000\u0000\u00d3\u042b\u0001\u0000\u0000\u0000\u00d5\u042f"+ + "\u0001\u0000\u0000\u0000\u00d7\u0433\u0001\u0000\u0000\u0000\u00d9\u0437"+ + "\u0001\u0000\u0000\u0000\u00db\u043b\u0001\u0000\u0000\u0000\u00dd\u0447"+ + "\u0001\u0000\u0000\u0000\u00df\u044a\u0001\u0000\u0000\u0000\u00e1\u044e"+ + "\u0001\u0000\u0000\u0000\u00e3\u0452\u0001\u0000\u0000\u0000\u00e5\u0456"+ + "\u0001\u0000\u0000\u0000\u00e7\u045a\u0001\u0000\u0000\u0000\u00e9\u045e"+ + "\u0001\u0000\u0000\u0000\u00eb\u0462\u0001\u0000\u0000\u0000\u00ed\u0467"+ + "\u0001\u0000\u0000\u0000\u00ef\u046b\u0001\u0000\u0000\u0000\u00f1\u046f"+ + "\u0001\u0000\u0000\u0000\u00f3\u0473\u0001\u0000\u0000\u0000\u00f5\u047b"+ + "\u0001\u0000\u0000\u0000\u00f7\u0490\u0001\u0000\u0000\u0000\u00f9\u0494"+ + "\u0001\u0000\u0000\u0000\u00fb\u0498\u0001\u0000\u0000\u0000\u00fd\u049c"+ + "\u0001\u0000\u0000\u0000\u00ff\u04a0\u0001\u0000\u0000\u0000\u0101\u04a4"+ + "\u0001\u0000\u0000\u0000\u0103\u04a9\u0001\u0000\u0000\u0000\u0105\u04ad"+ + "\u0001\u0000\u0000\u0000\u0107\u04b1\u0001\u0000\u0000\u0000\u0109\u04b5"+ + "\u0001\u0000\u0000\u0000\u010b\u04b9\u0001\u0000\u0000\u0000\u010d\u04bd"+ + "\u0001\u0000\u0000\u0000\u010f\u04c0\u0001\u0000\u0000\u0000\u0111\u04c4"+ + "\u0001\u0000\u0000\u0000\u0113\u04c8\u0001\u0000\u0000\u0000\u0115\u04cc"+ + "\u0001\u0000\u0000\u0000\u0117\u04d0\u0001\u0000\u0000\u0000\u0119\u04d5"+ + "\u0001\u0000\u0000\u0000\u011b\u04da\u0001\u0000\u0000\u0000\u011d\u04df"+ + "\u0001\u0000\u0000\u0000\u011f\u04e6\u0001\u0000\u0000\u0000\u0121\u04ef"+ + "\u0001\u0000\u0000\u0000\u0123\u04f6\u0001\u0000\u0000\u0000\u0125\u04fa"+ + "\u0001\u0000\u0000\u0000\u0127\u04fe\u0001\u0000\u0000\u0000\u0129\u0502"+ + "\u0001\u0000\u0000\u0000\u012b\u0506\u0001\u0000\u0000\u0000\u012d\u050c"+ + "\u0001\u0000\u0000\u0000\u012f\u0510\u0001\u0000\u0000\u0000\u0131\u0514"+ + "\u0001\u0000\u0000\u0000\u0133\u0518\u0001\u0000\u0000\u0000\u0135\u051c"+ + "\u0001\u0000\u0000\u0000\u0137\u0520\u0001\u0000\u0000\u0000\u0139\u0524"+ + "\u0001\u0000\u0000\u0000\u013b\u0528\u0001\u0000\u0000\u0000\u013d\u052c"+ + "\u0001\u0000\u0000\u0000\u013f\u0530\u0001\u0000\u0000\u0000\u0141\u0534"+ + "\u0001\u0000\u0000\u0000\u0143\u0538\u0001\u0000\u0000\u0000\u0145\u053d"+ + "\u0001\u0000\u0000\u0000\u0147\u0541\u0001\u0000\u0000\u0000\u0149\u0545"+ + "\u0001\u0000\u0000\u0000\u014b\u0549\u0001\u0000\u0000\u0000\u014d\u054d"+ + "\u0001\u0000\u0000\u0000\u014f\u0551\u0001\u0000\u0000\u0000\u0151\u0555"+ + "\u0001\u0000\u0000\u0000\u0153\u0559\u0001\u0000\u0000\u0000\u0155\u055d"+ + "\u0001\u0000\u0000\u0000\u0157\u0562\u0001\u0000\u0000\u0000\u0159\u0567"+ + "\u0001\u0000\u0000\u0000\u015b\u056b\u0001\u0000\u0000\u0000\u015d\u056f"+ + "\u0001\u0000\u0000\u0000\u015f\u0573\u0001\u0000\u0000\u0000\u0161\u0578"+ + "\u0001\u0000\u0000\u0000\u0163\u0581\u0001\u0000\u0000\u0000\u0165\u0585"+ + "\u0001\u0000\u0000\u0000\u0167\u0589\u0001\u0000\u0000\u0000\u0169\u058d"+ + "\u0001\u0000\u0000\u0000\u016b\u0591\u0001\u0000\u0000\u0000\u016d\u0596"+ + "\u0001\u0000\u0000\u0000\u016f\u059a\u0001\u0000\u0000\u0000\u0171\u059e"+ + "\u0001\u0000\u0000\u0000\u0173\u05a2\u0001\u0000\u0000\u0000\u0175\u05a7"+ + "\u0001\u0000\u0000\u0000\u0177\u05ab\u0001\u0000\u0000\u0000\u0179\u05af"+ + "\u0001\u0000\u0000\u0000\u017b\u05b3\u0001\u0000\u0000\u0000\u017d\u05b7"+ + 
"\u0001\u0000\u0000\u0000\u017f\u05bb\u0001\u0000\u0000\u0000\u0181\u05c1"+ + "\u0001\u0000\u0000\u0000\u0183\u05c5\u0001\u0000\u0000\u0000\u0185\u05c9"+ + "\u0001\u0000\u0000\u0000\u0187\u05cd\u0001\u0000\u0000\u0000\u0189\u05d1"+ + "\u0001\u0000\u0000\u0000\u018b\u05d5\u0001\u0000\u0000\u0000\u018d\u05d9"+ + "\u0001\u0000\u0000\u0000\u018f\u05de\u0001\u0000\u0000\u0000\u0191\u05e3"+ + "\u0001\u0000\u0000\u0000\u0193\u05e7\u0001\u0000\u0000\u0000\u0195\u05ed"+ + "\u0001\u0000\u0000\u0000\u0197\u05f6\u0001\u0000\u0000\u0000\u0199\u05fa"+ + "\u0001\u0000\u0000\u0000\u019b\u05fe\u0001\u0000\u0000\u0000\u019d\u0602"+ + "\u0001\u0000\u0000\u0000\u019f\u0606\u0001\u0000\u0000\u0000\u01a1\u060a"+ + "\u0001\u0000\u0000\u0000\u01a3\u060e\u0001\u0000\u0000\u0000\u01a5\u0612"+ + "\u0001\u0000\u0000\u0000\u01a7\u0616\u0001\u0000\u0000\u0000\u01a9\u061b"+ + "\u0001\u0000\u0000\u0000\u01ab\u0621\u0001\u0000\u0000\u0000\u01ad\u0627"+ + "\u0001\u0000\u0000\u0000\u01af\u062b\u0001\u0000\u0000\u0000\u01b1\u062f"+ + "\u0001\u0000\u0000\u0000\u01b3\u0633\u0001\u0000\u0000\u0000\u01b5\u0639"+ + "\u0001\u0000\u0000\u0000\u01b7\u063f\u0001\u0000\u0000\u0000\u01b9\u0643"+ + "\u0001\u0000\u0000\u0000\u01bb\u0647\u0001\u0000\u0000\u0000\u01bd\u064b"+ + "\u0001\u0000\u0000\u0000\u01bf\u0651\u0001\u0000\u0000\u0000\u01c1\u0657"+ + "\u0001\u0000\u0000\u0000\u01c3\u065d\u0001\u0000\u0000\u0000\u01c5\u0662"+ + "\u0001\u0000\u0000\u0000\u01c7\u0667\u0001\u0000\u0000\u0000\u01c9\u066b"+ + "\u0001\u0000\u0000\u0000\u01cb\u066f\u0001\u0000\u0000\u0000\u01cd\u0673"+ + "\u0001\u0000\u0000\u0000\u01cf\u01d0\u0007\u0000\u0000\u0000\u01d0\u01d1"+ + "\u0007\u0001\u0000\u0000\u01d1\u01d2\u0007\u0002\u0000\u0000\u01d2\u01d3"+ + "\u0007\u0002\u0000\u0000\u01d3\u01d4\u0007\u0003\u0000\u0000\u01d4\u01d5"+ + "\u0007\u0004\u0000\u0000\u01d5\u01d6\u0007\u0005\u0000\u0000\u01d6\u01d7"+ + "\u0001\u0000\u0000\u0000\u01d7\u01d8\u0006\u0000\u0000\u0000\u01d8\u0012"+ + "\u0001\u0000\u0000\u0000\u01d9\u01da\u0007\u0000\u0000\u0000\u01da\u01db"+ + "\u0007\u0006\u0000\u0000\u01db\u01dc\u0007\u0007\u0000\u0000\u01dc\u01dd"+ + "\u0007\b\u0000\u0000\u01dd\u01de\u0001\u0000\u0000\u0000\u01de\u01df\u0006"+ + "\u0001\u0001\u0000\u01df\u0014\u0001\u0000\u0000\u0000\u01e0\u01e1\u0007"+ + "\u0003\u0000\u0000\u01e1\u01e2\u0007\t\u0000\u0000\u01e2\u01e3\u0007\u0006"+ + "\u0000\u0000\u01e3\u01e4\u0007\u0001\u0000\u0000\u01e4\u01e5\u0007\u0004"+ + "\u0000\u0000\u01e5\u01e6\u0007\n\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000"+ + "\u0000\u01e7\u01e8\u0006\u0002\u0002\u0000\u01e8\u0016\u0001\u0000\u0000"+ + "\u0000\u01e9\u01ea\u0007\u0003\u0000\u0000\u01ea\u01eb\u0007\u000b\u0000"+ + "\u0000\u01eb\u01ec\u0007\f\u0000\u0000\u01ec\u01ed\u0007\r\u0000\u0000"+ + "\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01ef\u0006\u0003\u0000\u0000"+ + "\u01ef\u0018\u0001\u0000\u0000\u0000\u01f0\u01f1\u0007\u0003\u0000\u0000"+ + "\u01f1\u01f2\u0007\u000e\u0000\u0000\u01f2\u01f3\u0007\b\u0000\u0000\u01f3"+ + "\u01f4\u0007\r\u0000\u0000\u01f4\u01f5\u0007\f\u0000\u0000\u01f5\u01f6"+ + "\u0007\u0001\u0000\u0000\u01f6\u01f7\u0007\t\u0000\u0000\u01f7\u01f8\u0001"+ + "\u0000\u0000\u0000\u01f8\u01f9\u0006\u0004\u0003\u0000\u01f9\u001a\u0001"+ + "\u0000\u0000\u0000\u01fa\u01fb\u0007\u000f\u0000\u0000\u01fb\u01fc\u0007"+ + "\u0006\u0000\u0000\u01fc\u01fd\u0007\u0007\u0000\u0000\u01fd\u01fe\u0007"+ + "\u0010\u0000\u0000\u01fe\u01ff\u0001\u0000\u0000\u0000\u01ff\u0200\u0006"+ + "\u0005\u0004\u0000\u0200\u001c\u0001\u0000\u0000\u0000\u0201\u0202\u0007"+ + 
"\u0011\u0000\u0000\u0202\u0203\u0007\u0006\u0000\u0000\u0203\u0204\u0007"+ + "\u0007\u0000\u0000\u0204\u0205\u0007\u0012\u0000\u0000\u0205\u0206\u0001"+ + "\u0000\u0000\u0000\u0206\u0207\u0006\u0006\u0000\u0000\u0207\u001e\u0001"+ + "\u0000\u0000\u0000\u0208\u0209\u0007\u0012\u0000\u0000\u0209\u020a\u0007"+ + "\u0003\u0000\u0000\u020a\u020b\u0007\u0003\u0000\u0000\u020b\u020c\u0007"+ + "\b\u0000\u0000\u020c\u020d\u0001\u0000\u0000\u0000\u020d\u020e\u0006\u0007"+ + "\u0001\u0000\u020e \u0001\u0000\u0000\u0000\u020f\u0210\u0007\r\u0000"+ + "\u0000\u0210\u0211\u0007\u0001\u0000\u0000\u0211\u0212\u0007\u0010\u0000"+ + "\u0000\u0212\u0213\u0007\u0001\u0000\u0000\u0213\u0214\u0007\u0005\u0000"+ + "\u0000\u0214\u0215\u0001\u0000\u0000\u0000\u0215\u0216\u0006\b\u0000\u0000"+ + "\u0216\"\u0001\u0000\u0000\u0000\u0217\u0218\u0007\u0010\u0000\u0000\u0218"+ + "\u0219\u0007\u000b\u0000\u0000\u0219\u021a\u0005_\u0000\u0000\u021a\u021b"+ + "\u0007\u0003\u0000\u0000\u021b\u021c\u0007\u000e\u0000\u0000\u021c\u021d"+ + "\u0007\b\u0000\u0000\u021d\u021e\u0007\f\u0000\u0000\u021e\u021f\u0007"+ + "\t\u0000\u0000\u021f\u0220\u0007\u0000\u0000\u0000\u0220\u0221\u0001\u0000"+ + "\u0000\u0000\u0221\u0222\u0006\t\u0005\u0000\u0222$\u0001\u0000\u0000"+ + "\u0000\u0223\u0224\u0007\u0006\u0000\u0000\u0224\u0225\u0007\u0003\u0000"+ + "\u0000\u0225\u0226\u0007\t\u0000\u0000\u0226\u0227\u0007\f\u0000\u0000"+ + "\u0227\u0228\u0007\u0010\u0000\u0000\u0228\u0229\u0007\u0003\u0000\u0000"+ + "\u0229\u022a\u0001\u0000\u0000\u0000\u022a\u022b\u0006\n\u0006\u0000\u022b"+ + "&\u0001\u0000\u0000\u0000\u022c\u022d\u0007\u0006\u0000\u0000\u022d\u022e"+ + "\u0007\u0007\u0000\u0000\u022e\u022f\u0007\u0013\u0000\u0000\u022f\u0230"+ + "\u0001\u0000\u0000\u0000\u0230\u0231\u0006\u000b\u0000\u0000\u0231(\u0001"+ + "\u0000\u0000\u0000\u0232\u0233\u0007\u0002\u0000\u0000\u0233\u0234\u0007"+ + "\n\u0000\u0000\u0234\u0235\u0007\u0007\u0000\u0000\u0235\u0236\u0007\u0013"+ + "\u0000\u0000\u0236\u0237\u0001\u0000\u0000\u0000\u0237\u0238\u0006\f\u0007"+ + "\u0000\u0238*\u0001\u0000\u0000\u0000\u0239\u023a\u0007\u0002\u0000\u0000"+ + "\u023a\u023b\u0007\u0007\u0000\u0000\u023b\u023c\u0007\u0006\u0000\u0000"+ + "\u023c\u023d\u0007\u0005\u0000\u0000\u023d\u023e\u0001\u0000\u0000\u0000"+ + "\u023e\u023f\u0006\r\u0000\u0000\u023f,\u0001\u0000\u0000\u0000\u0240"+ + "\u0241\u0007\u0002\u0000\u0000\u0241\u0242\u0007\u0005\u0000\u0000\u0242"+ + "\u0243\u0007\f\u0000\u0000\u0243\u0244\u0007\u0005\u0000\u0000\u0244\u0245"+ + "\u0007\u0002\u0000\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0247"+ + "\u0006\u000e\u0000\u0000\u0247.\u0001\u0000\u0000\u0000\u0248\u0249\u0007"+ + "\u0013\u0000\u0000\u0249\u024a\u0007\n\u0000\u0000\u024a\u024b\u0007\u0003"+ + "\u0000\u0000\u024b\u024c\u0007\u0006\u0000\u0000\u024c\u024d\u0007\u0003"+ + "\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e\u024f\u0006\u000f"+ + "\u0000\u0000\u024f0\u0001\u0000\u0000\u0000\u0250\u0251\u0007\r\u0000"+ + "\u0000\u0251\u0252\u0007\u0007\u0000\u0000\u0252\u0253\u0007\u0007\u0000"+ + "\u0000\u0253\u0254\u0007\u0012\u0000\u0000\u0254\u0255\u0007\u0014\u0000"+ + "\u0000\u0255\u0256\u0007\b\u0000\u0000\u0256\u0257\u0001\u0000\u0000\u0000"+ + "\u0257\u0258\u0006\u0010\b\u0000\u02582\u0001\u0000\u0000\u0000\u0259"+ + "\u025a\u0004\u0011\u0000\u0000\u025a\u025b\u0007\u0001\u0000\u0000\u025b"+ + "\u025c\u0007\t\u0000\u0000\u025c\u025d\u0007\r\u0000\u0000\u025d\u025e"+ + "\u0007\u0001\u0000\u0000\u025e\u025f\u0007\t\u0000\u0000\u025f\u0260\u0007"+ + 
"\u0003\u0000\u0000\u0260\u0261\u0007\u0002\u0000\u0000\u0261\u0262\u0007"+ + "\u0005\u0000\u0000\u0262\u0263\u0007\f\u0000\u0000\u0263\u0264\u0007\u0005"+ + "\u0000\u0000\u0264\u0265\u0007\u0002\u0000\u0000\u0265\u0266\u0001\u0000"+ + "\u0000\u0000\u0266\u0267\u0006\u0011\u0000\u0000\u02674\u0001\u0000\u0000"+ + "\u0000\u0268\u0269\u0004\u0012\u0001\u0000\u0269\u026a\u0007\u0001\u0000"+ + "\u0000\u026a\u026b\u0007\t\u0000\u0000\u026b\u026c\u0007\u0002\u0000\u0000"+ + "\u026c\u026d\u0007\u0001\u0000\u0000\u026d\u026e\u0007\u0002\u0000\u0000"+ + "\u026e\u026f\u0007\u0005\u0000\u0000\u026f\u0270\u0005_\u0000\u0000\u0270"+ + "\u0271\u0005\u8001\uf414\u0000\u0000\u0271\u0272\u0001\u0000\u0000\u0000"+ + "\u0272\u0273\u0006\u0012\u0001\u0000\u02736\u0001\u0000\u0000\u0000\u0274"+ + "\u0275\u0004\u0013\u0002\u0000\u0275\u0276\u0007\r\u0000\u0000\u0276\u0277"+ + "\u0007\u0007\u0000\u0000\u0277\u0278\u0007\u0007\u0000\u0000\u0278\u0279"+ + "\u0007\u0012\u0000\u0000\u0279\u027a\u0007\u0014\u0000\u0000\u027a\u027b"+ + "\u0007\b\u0000\u0000\u027b\u027c\u0005_\u0000\u0000\u027c\u027d\u0005"+ + "\u8001\uf414\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u027f"+ + "\u0006\u0013\t\u0000\u027f8\u0001\u0000\u0000\u0000\u0280\u0281\u0004"+ + "\u0014\u0003\u0000\u0281\u0282\u0007\u0010\u0000\u0000\u0282\u0283\u0007"+ + "\u0003\u0000\u0000\u0283\u0284\u0007\u0005\u0000\u0000\u0284\u0285\u0007"+ + "\u0006\u0000\u0000\u0285\u0286\u0007\u0001\u0000\u0000\u0286\u0287\u0007"+ + "\u0004\u0000\u0000\u0287\u0288\u0007\u0002\u0000\u0000\u0288\u0289\u0001"+ + "\u0000\u0000\u0000\u0289\u028a\u0006\u0014\n\u0000\u028a:\u0001\u0000"+ + "\u0000\u0000\u028b\u028c\u0004\u0015\u0004\u0000\u028c\u028d\u0007\u000f"+ + "\u0000\u0000\u028d\u028e\u0007\u0014\u0000\u0000\u028e\u028f\u0007\r\u0000"+ + "\u0000\u028f\u0290\u0007\r\u0000\u0000\u0290\u0291\u0001\u0000\u0000\u0000"+ + "\u0291\u0292\u0006\u0015\b\u0000\u0292<\u0001\u0000\u0000\u0000\u0293"+ + "\u0294\u0004\u0016\u0005\u0000\u0294\u0295\u0007\r\u0000\u0000\u0295\u0296"+ + "\u0007\u0003\u0000\u0000\u0296\u0297\u0007\u000f\u0000\u0000\u0297\u0298"+ + "\u0007\u0005\u0000\u0000\u0298\u0299\u0001\u0000\u0000\u0000\u0299\u029a"+ + "\u0006\u0016\b\u0000\u029a>\u0001\u0000\u0000\u0000\u029b\u029c\u0004"+ + "\u0017\u0006\u0000\u029c\u029d\u0007\u0006\u0000\u0000\u029d\u029e\u0007"+ + "\u0001\u0000\u0000\u029e\u029f\u0007\u0011\u0000\u0000\u029f\u02a0\u0007"+ + "\n\u0000\u0000\u02a0\u02a1\u0007\u0005\u0000\u0000\u02a1\u02a2\u0001\u0000"+ + "\u0000\u0000\u02a2\u02a3\u0006\u0017\b\u0000\u02a3@\u0001\u0000\u0000"+ + "\u0000\u02a4\u02a6\b\u0015\u0000\u0000\u02a5\u02a4\u0001\u0000\u0000\u0000"+ + "\u02a6\u02a7\u0001\u0000\u0000\u0000\u02a7\u02a5\u0001\u0000\u0000\u0000"+ + "\u02a7\u02a8\u0001\u0000\u0000\u0000\u02a8\u02a9\u0001\u0000\u0000\u0000"+ + "\u02a9\u02aa\u0006\u0018\u0000\u0000\u02aaB\u0001\u0000\u0000\u0000\u02ab"+ + "\u02ac\u0005/\u0000\u0000\u02ac\u02ad\u0005/\u0000\u0000\u02ad\u02b1\u0001"+ + "\u0000\u0000\u0000\u02ae\u02b0\b\u0016\u0000\u0000\u02af\u02ae\u0001\u0000"+ + "\u0000\u0000\u02b0\u02b3\u0001\u0000\u0000\u0000\u02b1\u02af\u0001\u0000"+ + "\u0000\u0000\u02b1\u02b2\u0001\u0000\u0000\u0000\u02b2\u02b5\u0001\u0000"+ + "\u0000\u0000\u02b3\u02b1\u0001\u0000\u0000\u0000\u02b4\u02b6\u0005\r\u0000"+ + "\u0000\u02b5\u02b4\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000"+ + "\u0000\u02b6\u02b8\u0001\u0000\u0000\u0000\u02b7\u02b9\u0005\n\u0000\u0000"+ + "\u02b8\u02b7\u0001\u0000\u0000\u0000\u02b8\u02b9\u0001\u0000\u0000\u0000"+ + 
"\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba\u02bb\u0006\u0019\u000b\u0000"+ + "\u02bbD\u0001\u0000\u0000\u0000\u02bc\u02bd\u0005/\u0000\u0000\u02bd\u02be"+ + "\u0005*\u0000\u0000\u02be\u02c3\u0001\u0000\u0000\u0000\u02bf\u02c2\u0003"+ + "E\u001a\u0000\u02c0\u02c2\t\u0000\u0000\u0000\u02c1\u02bf\u0001\u0000"+ + "\u0000\u0000\u02c1\u02c0\u0001\u0000\u0000\u0000\u02c2\u02c5\u0001\u0000"+ + "\u0000\u0000\u02c3\u02c4\u0001\u0000\u0000\u0000\u02c3\u02c1\u0001\u0000"+ + "\u0000\u0000\u02c4\u02c6\u0001\u0000\u0000\u0000\u02c5\u02c3\u0001\u0000"+ + "\u0000\u0000\u02c6\u02c7\u0005*\u0000\u0000\u02c7\u02c8\u0005/\u0000\u0000"+ + "\u02c8\u02c9\u0001\u0000\u0000\u0000\u02c9\u02ca\u0006\u001a\u000b\u0000"+ + "\u02caF\u0001\u0000\u0000\u0000\u02cb\u02cd\u0007\u0017\u0000\u0000\u02cc"+ + "\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000\u0000\u0000\u02ce"+ + "\u02cc\u0001\u0000\u0000\u0000\u02ce\u02cf\u0001\u0000\u0000\u0000\u02cf"+ + "\u02d0\u0001\u0000\u0000\u0000\u02d0\u02d1\u0006\u001b\u000b\u0000\u02d1"+ + "H\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005|\u0000\u0000\u02d3\u02d4\u0001"+ + "\u0000\u0000\u0000\u02d4\u02d5\u0006\u001c\f\u0000\u02d5J\u0001\u0000"+ + "\u0000\u0000\u02d6\u02d7\u0007\u0018\u0000\u0000\u02d7L\u0001\u0000\u0000"+ + "\u0000\u02d8\u02d9\u0007\u0019\u0000\u0000\u02d9N\u0001\u0000\u0000\u0000"+ + "\u02da\u02db\u0005\\\u0000\u0000\u02db\u02dc\u0007\u001a\u0000\u0000\u02dc"+ + "P\u0001\u0000\u0000\u0000\u02dd\u02de\b\u001b\u0000\u0000\u02deR\u0001"+ + "\u0000\u0000\u0000\u02df\u02e1\u0007\u0003\u0000\u0000\u02e0\u02e2\u0007"+ + "\u001c\u0000\u0000\u02e1\u02e0\u0001\u0000\u0000\u0000\u02e1\u02e2\u0001"+ + "\u0000\u0000\u0000\u02e2\u02e4\u0001\u0000\u0000\u0000\u02e3\u02e5\u0003"+ + "K\u001d\u0000\u02e4\u02e3\u0001\u0000\u0000\u0000\u02e5\u02e6\u0001\u0000"+ + "\u0000\u0000\u02e6\u02e4\u0001\u0000\u0000\u0000\u02e6\u02e7\u0001\u0000"+ + "\u0000\u0000\u02e7T\u0001\u0000\u0000\u0000\u02e8\u02e9\u0005@\u0000\u0000"+ + "\u02e9V\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005`\u0000\u0000\u02ebX"+ + "\u0001\u0000\u0000\u0000\u02ec\u02f0\b\u001d\u0000\u0000\u02ed\u02ee\u0005"+ + "`\u0000\u0000\u02ee\u02f0\u0005`\u0000\u0000\u02ef\u02ec\u0001\u0000\u0000"+ + "\u0000\u02ef\u02ed\u0001\u0000\u0000\u0000\u02f0Z\u0001\u0000\u0000\u0000"+ + "\u02f1\u02f2\u0005_\u0000\u0000\u02f2\\\u0001\u0000\u0000\u0000\u02f3"+ + "\u02f7\u0003M\u001e\u0000\u02f4\u02f7\u0003K\u001d\u0000\u02f5\u02f7\u0003"+ + "[%\u0000\u02f6\u02f3\u0001\u0000\u0000\u0000\u02f6\u02f4\u0001\u0000\u0000"+ + "\u0000\u02f6\u02f5\u0001\u0000\u0000\u0000\u02f7^\u0001\u0000\u0000\u0000"+ + "\u02f8\u02fd\u0005\"\u0000\u0000\u02f9\u02fc\u0003O\u001f\u0000\u02fa"+ + "\u02fc\u0003Q \u0000\u02fb\u02f9\u0001\u0000\u0000\u0000\u02fb\u02fa\u0001"+ + "\u0000\u0000\u0000\u02fc\u02ff\u0001\u0000\u0000\u0000\u02fd\u02fb\u0001"+ + "\u0000\u0000\u0000\u02fd\u02fe\u0001\u0000\u0000\u0000\u02fe\u0300\u0001"+ + "\u0000\u0000\u0000\u02ff\u02fd\u0001\u0000\u0000\u0000\u0300\u0316\u0005"+ + "\"\u0000\u0000\u0301\u0302\u0005\"\u0000\u0000\u0302\u0303\u0005\"\u0000"+ + "\u0000\u0303\u0304\u0005\"\u0000\u0000\u0304\u0308\u0001\u0000\u0000\u0000"+ + "\u0305\u0307\b\u0016\u0000\u0000\u0306\u0305\u0001\u0000\u0000\u0000\u0307"+ + "\u030a\u0001\u0000\u0000\u0000\u0308\u0309\u0001\u0000\u0000\u0000\u0308"+ + "\u0306\u0001\u0000\u0000\u0000\u0309\u030b\u0001\u0000\u0000\u0000\u030a"+ + "\u0308\u0001\u0000\u0000\u0000\u030b\u030c\u0005\"\u0000\u0000\u030c\u030d"+ + "\u0005\"\u0000\u0000\u030d\u030e\u0005\"\u0000\u0000\u030e\u0310\u0001"+ + 
"\u0000\u0000\u0000\u030f\u0311\u0005\"\u0000\u0000\u0310\u030f\u0001\u0000"+ + "\u0000\u0000\u0310\u0311\u0001\u0000\u0000\u0000\u0311\u0313\u0001\u0000"+ + "\u0000\u0000\u0312\u0314\u0005\"\u0000\u0000\u0313\u0312\u0001\u0000\u0000"+ + "\u0000\u0313\u0314\u0001\u0000\u0000\u0000\u0314\u0316\u0001\u0000\u0000"+ + "\u0000\u0315\u02f8\u0001\u0000\u0000\u0000\u0315\u0301\u0001\u0000\u0000"+ + "\u0000\u0316`\u0001\u0000\u0000\u0000\u0317\u0319\u0003K\u001d\u0000\u0318"+ + "\u0317\u0001\u0000\u0000\u0000\u0319\u031a\u0001\u0000\u0000\u0000\u031a"+ + "\u0318\u0001\u0000\u0000\u0000\u031a\u031b\u0001\u0000\u0000\u0000\u031b"+ + "b\u0001\u0000\u0000\u0000\u031c\u031e\u0003K\u001d\u0000\u031d\u031c\u0001"+ + "\u0000\u0000\u0000\u031e\u031f\u0001\u0000\u0000\u0000\u031f\u031d\u0001"+ + "\u0000\u0000\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u0321\u0001"+ + "\u0000\u0000\u0000\u0321\u0325\u0003u2\u0000\u0322\u0324\u0003K\u001d"+ + "\u0000\u0323\u0322\u0001\u0000\u0000\u0000\u0324\u0327\u0001\u0000\u0000"+ + "\u0000\u0325\u0323\u0001\u0000\u0000\u0000\u0325\u0326\u0001\u0000\u0000"+ + "\u0000\u0326\u0347\u0001\u0000\u0000\u0000\u0327\u0325\u0001\u0000\u0000"+ + "\u0000\u0328\u032a\u0003u2\u0000\u0329\u032b\u0003K\u001d\u0000\u032a"+ + "\u0329\u0001\u0000\u0000\u0000\u032b\u032c\u0001\u0000\u0000\u0000\u032c"+ + "\u032a\u0001\u0000\u0000\u0000\u032c\u032d\u0001\u0000\u0000\u0000\u032d"+ + "\u0347\u0001\u0000\u0000\u0000\u032e\u0330\u0003K\u001d\u0000\u032f\u032e"+ + "\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000\u0000\u0000\u0331\u032f"+ + "\u0001\u0000\u0000\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332\u033a"+ + "\u0001\u0000\u0000\u0000\u0333\u0337\u0003u2\u0000\u0334\u0336\u0003K"+ + "\u001d\u0000\u0335\u0334\u0001\u0000\u0000\u0000\u0336\u0339\u0001\u0000"+ + "\u0000\u0000\u0337\u0335\u0001\u0000\u0000\u0000\u0337\u0338\u0001\u0000"+ + "\u0000\u0000\u0338\u033b\u0001\u0000\u0000\u0000\u0339\u0337\u0001\u0000"+ + "\u0000\u0000\u033a\u0333\u0001\u0000\u0000\u0000\u033a\u033b\u0001\u0000"+ + "\u0000\u0000\u033b\u033c\u0001\u0000\u0000\u0000\u033c\u033d\u0003S!\u0000"+ + "\u033d\u0347\u0001\u0000\u0000\u0000\u033e\u0340\u0003u2\u0000\u033f\u0341"+ + "\u0003K\u001d\u0000\u0340\u033f\u0001\u0000\u0000\u0000\u0341\u0342\u0001"+ + "\u0000\u0000\u0000\u0342\u0340\u0001\u0000\u0000\u0000\u0342\u0343\u0001"+ + "\u0000\u0000\u0000\u0343\u0344\u0001\u0000\u0000\u0000\u0344\u0345\u0003"+ + "S!\u0000\u0345\u0347\u0001\u0000\u0000\u0000\u0346\u031d\u0001\u0000\u0000"+ + "\u0000\u0346\u0328\u0001\u0000\u0000\u0000\u0346\u032f\u0001\u0000\u0000"+ + "\u0000\u0346\u033e\u0001\u0000\u0000\u0000\u0347d\u0001\u0000\u0000\u0000"+ + "\u0348\u0349\u0007\u001e\u0000\u0000\u0349\u034a\u0007\u001f\u0000\u0000"+ + "\u034af\u0001\u0000\u0000\u0000\u034b\u034c\u0007\f\u0000\u0000\u034c"+ + "\u034d\u0007\t\u0000\u0000\u034d\u034e\u0007\u0000\u0000\u0000\u034eh"+ + "\u0001\u0000\u0000\u0000\u034f\u0350\u0007\f\u0000\u0000\u0350\u0351\u0007"+ + "\u0002\u0000\u0000\u0351\u0352\u0007\u0004\u0000\u0000\u0352j\u0001\u0000"+ + "\u0000\u0000\u0353\u0354\u0005=\u0000\u0000\u0354l\u0001\u0000\u0000\u0000"+ + "\u0355\u0356\u0005:\u0000\u0000\u0356\u0357\u0005:\u0000\u0000\u0357n"+ + "\u0001\u0000\u0000\u0000\u0358\u0359\u0005:\u0000\u0000\u0359p\u0001\u0000"+ + "\u0000\u0000\u035a\u035b\u0005,\u0000\u0000\u035br\u0001\u0000\u0000\u0000"+ + "\u035c\u035d\u0007\u0000\u0000\u0000\u035d\u035e\u0007\u0003\u0000\u0000"+ + "\u035e\u035f\u0007\u0002\u0000\u0000\u035f\u0360\u0007\u0004\u0000\u0000"+ + 
"\u0360t\u0001\u0000\u0000\u0000\u0361\u0362\u0005.\u0000\u0000\u0362v"+ + "\u0001\u0000\u0000\u0000\u0363\u0364\u0007\u000f\u0000\u0000\u0364\u0365"+ + "\u0007\f\u0000\u0000\u0365\u0366\u0007\r\u0000\u0000\u0366\u0367\u0007"+ + "\u0002\u0000\u0000\u0367\u0368\u0007\u0003\u0000\u0000\u0368x\u0001\u0000"+ + "\u0000\u0000\u0369\u036a\u0007\u000f\u0000\u0000\u036a\u036b\u0007\u0001"+ + "\u0000\u0000\u036b\u036c\u0007\u0006\u0000\u0000\u036c\u036d\u0007\u0002"+ + "\u0000\u0000\u036d\u036e\u0007\u0005\u0000\u0000\u036ez\u0001\u0000\u0000"+ + "\u0000\u036f\u0370\u0007\u0001\u0000\u0000\u0370\u0371\u0007\t\u0000\u0000"+ + "\u0371|\u0001\u0000\u0000\u0000\u0372\u0373\u0007\u0001\u0000\u0000\u0373"+ + "\u0374\u0007\u0002\u0000\u0000\u0374~\u0001\u0000\u0000\u0000\u0375\u0376"+ + "\u0007\r\u0000\u0000\u0376\u0377\u0007\f\u0000\u0000\u0377\u0378\u0007"+ + "\u0002\u0000\u0000\u0378\u0379\u0007\u0005\u0000\u0000\u0379\u0080\u0001"+ + "\u0000\u0000\u0000\u037a\u037b\u0007\r\u0000\u0000\u037b\u037c\u0007\u0001"+ + "\u0000\u0000\u037c\u037d\u0007\u0012\u0000\u0000\u037d\u037e\u0007\u0003"+ + "\u0000\u0000\u037e\u0082\u0001\u0000\u0000\u0000\u037f\u0380\u0005(\u0000"+ + "\u0000\u0380\u0084\u0001\u0000\u0000\u0000\u0381\u0382\u0007\t\u0000\u0000"+ + "\u0382\u0383\u0007\u0007\u0000\u0000\u0383\u0384\u0007\u0005\u0000\u0000"+ + "\u0384\u0086\u0001\u0000\u0000\u0000\u0385\u0386\u0007\t\u0000\u0000\u0386"+ + "\u0387\u0007\u0014\u0000\u0000\u0387\u0388\u0007\r\u0000\u0000\u0388\u0389"+ + "\u0007\r\u0000\u0000\u0389\u0088\u0001\u0000\u0000\u0000\u038a\u038b\u0007"+ + "\t\u0000\u0000\u038b\u038c\u0007\u0014\u0000\u0000\u038c\u038d\u0007\r"+ + "\u0000\u0000\u038d\u038e\u0007\r\u0000\u0000\u038e\u038f\u0007\u0002\u0000"+ + "\u0000\u038f\u008a\u0001\u0000\u0000\u0000\u0390\u0391\u0007\u0007\u0000"+ + "\u0000\u0391\u0392\u0007\u0006\u0000\u0000\u0392\u008c\u0001\u0000\u0000"+ + "\u0000\u0393\u0394\u0005?\u0000\u0000\u0394\u008e\u0001\u0000\u0000\u0000"+ + "\u0395\u0396\u0007\u0006\u0000\u0000\u0396\u0397\u0007\r\u0000\u0000\u0397"+ + "\u0398\u0007\u0001\u0000\u0000\u0398\u0399\u0007\u0012\u0000\u0000\u0399"+ + "\u039a\u0007\u0003\u0000\u0000\u039a\u0090\u0001\u0000\u0000\u0000\u039b"+ + "\u039c\u0005)\u0000\u0000\u039c\u0092\u0001\u0000\u0000\u0000\u039d\u039e"+ + "\u0007\u0005\u0000\u0000\u039e\u039f\u0007\u0006\u0000\u0000\u039f\u03a0"+ + "\u0007\u0014\u0000\u0000\u03a0\u03a1\u0007\u0003\u0000\u0000\u03a1\u0094"+ + "\u0001\u0000\u0000\u0000\u03a2\u03a3\u0005=\u0000\u0000\u03a3\u03a4\u0005"+ + "=\u0000\u0000\u03a4\u0096\u0001\u0000\u0000\u0000\u03a5\u03a6\u0005=\u0000"+ + "\u0000\u03a6\u03a7\u0005~\u0000\u0000\u03a7\u0098\u0001\u0000\u0000\u0000"+ + "\u03a8\u03a9\u0005!\u0000\u0000\u03a9\u03aa\u0005=\u0000\u0000\u03aa\u009a"+ + "\u0001\u0000\u0000\u0000\u03ab\u03ac\u0005<\u0000\u0000\u03ac\u009c\u0001"+ + "\u0000\u0000\u0000\u03ad\u03ae\u0005<\u0000\u0000\u03ae\u03af\u0005=\u0000"+ + "\u0000\u03af\u009e\u0001\u0000\u0000\u0000\u03b0\u03b1\u0005>\u0000\u0000"+ + "\u03b1\u00a0\u0001\u0000\u0000\u0000\u03b2\u03b3\u0005>\u0000\u0000\u03b3"+ + "\u03b4\u0005=\u0000\u0000\u03b4\u00a2\u0001\u0000\u0000\u0000\u03b5\u03b6"+ + "\u0005+\u0000\u0000\u03b6\u00a4\u0001\u0000\u0000\u0000\u03b7\u03b8\u0005"+ + "-\u0000\u0000\u03b8\u00a6\u0001\u0000\u0000\u0000\u03b9\u03ba\u0005*\u0000"+ + "\u0000\u03ba\u00a8\u0001\u0000\u0000\u0000\u03bb\u03bc\u0005/\u0000\u0000"+ + "\u03bc\u00aa\u0001\u0000\u0000\u0000\u03bd\u03be\u0005%\u0000\u0000\u03be"+ + "\u00ac\u0001\u0000\u0000\u0000\u03bf\u03c0\u0005{\u0000\u0000\u03c0\u00ae"+ + 
"\u0001\u0000\u0000\u0000\u03c1\u03c2\u0005}\u0000\u0000\u03c2\u00b0\u0001"+ + "\u0000\u0000\u0000\u03c3\u03c4\u0003/\u000f\u0000\u03c4\u03c5\u0001\u0000"+ + "\u0000\u0000\u03c5\u03c6\u0006P\r\u0000\u03c6\u00b2\u0001\u0000\u0000"+ + "\u0000\u03c7\u03ca\u0003\u008d>\u0000\u03c8\u03cb\u0003M\u001e\u0000\u03c9"+ + "\u03cb\u0003[%\u0000\u03ca\u03c8\u0001\u0000\u0000\u0000\u03ca\u03c9\u0001"+ + "\u0000\u0000\u0000\u03cb\u03cf\u0001\u0000\u0000\u0000\u03cc\u03ce\u0003"+ + "]&\u0000\u03cd\u03cc\u0001\u0000\u0000\u0000\u03ce\u03d1\u0001\u0000\u0000"+ + "\u0000\u03cf\u03cd\u0001\u0000\u0000\u0000\u03cf\u03d0\u0001\u0000\u0000"+ + "\u0000\u03d0\u03d9\u0001\u0000\u0000\u0000\u03d1\u03cf\u0001\u0000\u0000"+ + "\u0000\u03d2\u03d4\u0003\u008d>\u0000\u03d3\u03d5\u0003K\u001d\u0000\u03d4"+ + "\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000\u03d6"+ + "\u03d4\u0001\u0000\u0000\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7"+ + "\u03d9\u0001\u0000\u0000\u0000\u03d8\u03c7\u0001\u0000\u0000\u0000\u03d8"+ + "\u03d2\u0001\u0000\u0000\u0000\u03d9\u00b4\u0001\u0000\u0000\u0000\u03da"+ + "\u03db\u0005[\u0000\u0000\u03db\u03dc\u0001\u0000\u0000\u0000\u03dc\u03dd"+ + "\u0006R\u0000\u0000\u03dd\u03de\u0006R\u0000\u0000\u03de\u00b6\u0001\u0000"+ + "\u0000\u0000\u03df\u03e0\u0005]\u0000\u0000\u03e0\u03e1\u0001\u0000\u0000"+ + "\u0000\u03e1\u03e2\u0006S\f\u0000\u03e2\u03e3\u0006S\f\u0000\u03e3\u00b8"+ + "\u0001\u0000\u0000\u0000\u03e4\u03e8\u0003M\u001e\u0000\u03e5\u03e7\u0003"+ + "]&\u0000\u03e6\u03e5\u0001\u0000\u0000\u0000\u03e7\u03ea\u0001\u0000\u0000"+ + "\u0000\u03e8\u03e6\u0001\u0000\u0000\u0000\u03e8\u03e9\u0001\u0000\u0000"+ + "\u0000\u03e9\u03f5\u0001\u0000\u0000\u0000\u03ea\u03e8\u0001\u0000\u0000"+ + "\u0000\u03eb\u03ee\u0003[%\u0000\u03ec\u03ee\u0003U\"\u0000\u03ed\u03eb"+ + "\u0001\u0000\u0000\u0000\u03ed\u03ec\u0001\u0000\u0000\u0000\u03ee\u03f0"+ + "\u0001\u0000\u0000\u0000\u03ef\u03f1\u0003]&\u0000\u03f0\u03ef\u0001\u0000"+ + "\u0000\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f0\u0001\u0000"+ + "\u0000\u0000\u03f2\u03f3\u0001\u0000\u0000\u0000\u03f3\u03f5\u0001\u0000"+ + "\u0000\u0000\u03f4\u03e4\u0001\u0000\u0000\u0000\u03f4\u03ed\u0001\u0000"+ + "\u0000\u0000\u03f5\u00ba\u0001\u0000\u0000\u0000\u03f6\u03f8\u0003W#\u0000"+ + "\u03f7\u03f9\u0003Y$\u0000\u03f8\u03f7\u0001\u0000\u0000\u0000\u03f9\u03fa"+ + "\u0001\u0000\u0000\u0000\u03fa\u03f8\u0001\u0000\u0000\u0000\u03fa\u03fb"+ + "\u0001\u0000\u0000\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd"+ + "\u0003W#\u0000\u03fd\u00bc\u0001\u0000\u0000\u0000\u03fe\u03ff\u0003\u00bb"+ + "U\u0000\u03ff\u00be\u0001\u0000\u0000\u0000\u0400\u0401\u0003C\u0019\u0000"+ + "\u0401\u0402\u0001\u0000\u0000\u0000\u0402\u0403\u0006W\u000b\u0000\u0403"+ + "\u00c0\u0001\u0000\u0000\u0000\u0404\u0405\u0003E\u001a\u0000\u0405\u0406"+ + "\u0001\u0000\u0000\u0000\u0406\u0407\u0006X\u000b\u0000\u0407\u00c2\u0001"+ + "\u0000\u0000\u0000\u0408\u0409\u0003G\u001b\u0000\u0409\u040a\u0001\u0000"+ + "\u0000\u0000\u040a\u040b\u0006Y\u000b\u0000\u040b\u00c4\u0001\u0000\u0000"+ + "\u0000\u040c\u040d\u0003\u00b5R\u0000\u040d\u040e\u0001\u0000\u0000\u0000"+ + "\u040e\u040f\u0006Z\u000e\u0000\u040f\u0410\u0006Z\u000f\u0000\u0410\u00c6"+ + "\u0001\u0000\u0000\u0000\u0411\u0412\u0003I\u001c\u0000\u0412\u0413\u0001"+ + "\u0000\u0000\u0000\u0413\u0414\u0006[\u0010\u0000\u0414\u0415\u0006[\f"+ + "\u0000\u0415\u00c8\u0001\u0000\u0000\u0000\u0416\u0417\u0003G\u001b\u0000"+ + "\u0417\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006\\\u000b\u0000\u0419"+ + 
"\u00ca\u0001\u0000\u0000\u0000\u041a\u041b\u0003C\u0019\u0000\u041b\u041c"+ + "\u0001\u0000\u0000\u0000\u041c\u041d\u0006]\u000b\u0000\u041d\u00cc\u0001"+ + "\u0000\u0000\u0000\u041e\u041f\u0003E\u001a\u0000\u041f\u0420\u0001\u0000"+ + "\u0000\u0000\u0420\u0421\u0006^\u000b\u0000\u0421\u00ce\u0001\u0000\u0000"+ + "\u0000\u0422\u0423\u0003I\u001c\u0000\u0423\u0424\u0001\u0000\u0000\u0000"+ + "\u0424\u0425\u0006_\u0010\u0000\u0425\u0426\u0006_\f\u0000\u0426\u00d0"+ + "\u0001\u0000\u0000\u0000\u0427\u0428\u0003\u00b5R\u0000\u0428\u0429\u0001"+ + "\u0000\u0000\u0000\u0429\u042a\u0006`\u000e\u0000\u042a\u00d2\u0001\u0000"+ + "\u0000\u0000\u042b\u042c\u0003\u00b7S\u0000\u042c\u042d\u0001\u0000\u0000"+ + "\u0000\u042d\u042e\u0006a\u0011\u0000\u042e\u00d4\u0001\u0000\u0000\u0000"+ + "\u042f\u0430\u0003o/\u0000\u0430\u0431\u0001\u0000\u0000\u0000\u0431\u0432"+ + "\u0006b\u0012\u0000\u0432\u00d6\u0001\u0000\u0000\u0000\u0433\u0434\u0003"+ + "q0\u0000\u0434\u0435\u0001\u0000\u0000\u0000\u0435\u0436\u0006c\u0013"+ + "\u0000\u0436\u00d8\u0001\u0000\u0000\u0000\u0437\u0438\u0003k-\u0000\u0438"+ + "\u0439\u0001\u0000\u0000\u0000\u0439\u043a\u0006d\u0014\u0000\u043a\u00da"+ + "\u0001\u0000\u0000\u0000\u043b\u043c\u0007\u0010\u0000\u0000\u043c\u043d"+ + "\u0007\u0003\u0000\u0000\u043d\u043e\u0007\u0005\u0000\u0000\u043e\u043f"+ + "\u0007\f\u0000\u0000\u043f\u0440\u0007\u0000\u0000\u0000\u0440\u0441\u0007"+ + "\f\u0000\u0000\u0441\u0442\u0007\u0005\u0000\u0000\u0442\u0443\u0007\f"+ + "\u0000\u0000\u0443\u00dc\u0001\u0000\u0000\u0000\u0444\u0448\b \u0000"+ + "\u0000\u0445\u0446\u0005/\u0000\u0000\u0446\u0448\b!\u0000\u0000\u0447"+ + "\u0444\u0001\u0000\u0000\u0000\u0447\u0445\u0001\u0000\u0000\u0000\u0448"+ + "\u00de\u0001\u0000\u0000\u0000\u0449\u044b\u0003\u00ddf\u0000\u044a\u0449"+ + "\u0001\u0000\u0000\u0000\u044b\u044c\u0001\u0000\u0000\u0000\u044c\u044a"+ + "\u0001\u0000\u0000\u0000\u044c\u044d\u0001\u0000\u0000\u0000\u044d\u00e0"+ + "\u0001\u0000\u0000\u0000\u044e\u044f\u0003\u00dfg\u0000\u044f\u0450\u0001"+ + "\u0000\u0000\u0000\u0450\u0451\u0006h\u0015\u0000\u0451\u00e2\u0001\u0000"+ + "\u0000\u0000\u0452\u0453\u0003_\'\u0000\u0453\u0454\u0001\u0000\u0000"+ + "\u0000\u0454\u0455\u0006i\u0016\u0000\u0455\u00e4\u0001\u0000\u0000\u0000"+ + "\u0456\u0457\u0003C\u0019\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458"+ + "\u0459\u0006j\u000b\u0000\u0459\u00e6\u0001\u0000\u0000\u0000\u045a\u045b"+ + "\u0003E\u001a\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c\u045d\u0006"+ + "k\u000b\u0000\u045d\u00e8\u0001\u0000\u0000\u0000\u045e\u045f\u0003G\u001b"+ + "\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006l\u000b\u0000"+ + "\u0461\u00ea\u0001\u0000\u0000\u0000\u0462\u0463\u0003I\u001c\u0000\u0463"+ + "\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006m\u0010\u0000\u0465\u0466"+ + "\u0006m\f\u0000\u0466\u00ec\u0001\u0000\u0000\u0000\u0467\u0468\u0003"+ + "u2\u0000\u0468\u0469\u0001\u0000\u0000\u0000\u0469\u046a\u0006n\u0017"+ + "\u0000\u046a\u00ee\u0001\u0000\u0000\u0000\u046b\u046c\u0003q0\u0000\u046c"+ + "\u046d\u0001\u0000\u0000\u0000\u046d\u046e\u0006o\u0013\u0000\u046e\u00f0"+ + "\u0001\u0000\u0000\u0000\u046f\u0470\u0003\u008d>\u0000\u0470\u0471\u0001"+ + "\u0000\u0000\u0000\u0471\u0472\u0006p\u0018\u0000\u0472\u00f2\u0001\u0000"+ + "\u0000\u0000\u0473\u0474\u0003\u00b3Q\u0000\u0474\u0475\u0001\u0000\u0000"+ + "\u0000\u0475\u0476\u0006q\u0019\u0000\u0476\u00f4\u0001\u0000\u0000\u0000"+ + "\u0477\u047c\u0003M\u001e\u0000\u0478\u047c\u0003K\u001d\u0000\u0479\u047c"+ + 
"\u0003[%\u0000\u047a\u047c\u0003\u00a7K\u0000\u047b\u0477\u0001\u0000"+ + "\u0000\u0000\u047b\u0478\u0001\u0000\u0000\u0000\u047b\u0479\u0001\u0000"+ + "\u0000\u0000\u047b\u047a\u0001\u0000\u0000\u0000\u047c\u00f6\u0001\u0000"+ + "\u0000\u0000\u047d\u0480\u0003M\u001e\u0000\u047e\u0480\u0003\u00a7K\u0000"+ + "\u047f\u047d\u0001\u0000\u0000\u0000\u047f\u047e\u0001\u0000\u0000\u0000"+ + "\u0480\u0484\u0001\u0000\u0000\u0000\u0481\u0483\u0003\u00f5r\u0000\u0482"+ + "\u0481\u0001\u0000\u0000\u0000\u0483\u0486\u0001\u0000\u0000\u0000\u0484"+ + "\u0482\u0001\u0000\u0000\u0000\u0484\u0485\u0001\u0000\u0000\u0000\u0485"+ + "\u0491\u0001\u0000\u0000\u0000\u0486\u0484\u0001\u0000\u0000\u0000\u0487"+ + "\u048a\u0003[%\u0000\u0488\u048a\u0003U\"\u0000\u0489\u0487\u0001\u0000"+ + "\u0000\u0000\u0489\u0488\u0001\u0000\u0000\u0000\u048a\u048c\u0001\u0000"+ + "\u0000\u0000\u048b\u048d\u0003\u00f5r\u0000\u048c\u048b\u0001\u0000\u0000"+ + "\u0000\u048d\u048e\u0001\u0000\u0000\u0000\u048e\u048c\u0001\u0000\u0000"+ + "\u0000\u048e\u048f\u0001\u0000\u0000\u0000\u048f\u0491\u0001\u0000\u0000"+ + "\u0000\u0490\u047f\u0001\u0000\u0000\u0000\u0490\u0489\u0001\u0000\u0000"+ + "\u0000\u0491\u00f8\u0001\u0000\u0000\u0000\u0492\u0495\u0003\u00f7s\u0000"+ + "\u0493\u0495\u0003\u00bbU\u0000\u0494\u0492\u0001\u0000\u0000\u0000\u0494"+ + "\u0493\u0001\u0000\u0000\u0000\u0495\u0496\u0001\u0000\u0000\u0000\u0496"+ + "\u0494\u0001\u0000\u0000\u0000\u0496\u0497\u0001\u0000\u0000\u0000\u0497"+ + "\u00fa\u0001\u0000\u0000\u0000\u0498\u0499\u0003C\u0019\u0000\u0499\u049a"+ + "\u0001\u0000\u0000\u0000\u049a\u049b\u0006u\u000b\u0000\u049b\u00fc\u0001"+ + "\u0000\u0000\u0000\u049c\u049d\u0003E\u001a\u0000\u049d\u049e\u0001\u0000"+ + "\u0000\u0000\u049e\u049f\u0006v\u000b\u0000\u049f\u00fe\u0001\u0000\u0000"+ + "\u0000\u04a0\u04a1\u0003G\u001b\u0000\u04a1\u04a2\u0001\u0000\u0000\u0000"+ + "\u04a2\u04a3\u0006w\u000b\u0000\u04a3\u0100\u0001\u0000\u0000\u0000\u04a4"+ + "\u04a5\u0003I\u001c\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000\u04a6\u04a7"+ + "\u0006x\u0010\u0000\u04a7\u04a8\u0006x\f\u0000\u04a8\u0102\u0001\u0000"+ + "\u0000\u0000\u04a9\u04aa\u0003k-\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000"+ + "\u04ab\u04ac\u0006y\u0014\u0000\u04ac\u0104\u0001\u0000\u0000\u0000\u04ad"+ + "\u04ae\u0003q0\u0000\u04ae\u04af\u0001\u0000\u0000\u0000\u04af\u04b0\u0006"+ + "z\u0013\u0000\u04b0\u0106\u0001\u0000\u0000\u0000\u04b1\u04b2\u0003u2"+ + "\u0000\u04b2\u04b3\u0001\u0000\u0000\u0000\u04b3\u04b4\u0006{\u0017\u0000"+ + "\u04b4\u0108\u0001\u0000\u0000\u0000\u04b5\u04b6\u0003\u008d>\u0000\u04b6"+ + "\u04b7\u0001\u0000\u0000\u0000\u04b7\u04b8\u0006|\u0018\u0000\u04b8\u010a"+ + "\u0001\u0000\u0000\u0000\u04b9\u04ba\u0003\u00b3Q\u0000\u04ba\u04bb\u0001"+ + "\u0000\u0000\u0000\u04bb\u04bc\u0006}\u0019\u0000\u04bc\u010c\u0001\u0000"+ + "\u0000\u0000\u04bd\u04be\u0007\f\u0000\u0000\u04be\u04bf\u0007\u0002\u0000"+ + "\u0000\u04bf\u010e\u0001\u0000\u0000\u0000\u04c0\u04c1\u0003\u00f9t\u0000"+ + "\u04c1\u04c2\u0001\u0000\u0000\u0000\u04c2\u04c3\u0006\u007f\u001a\u0000"+ + "\u04c3\u0110\u0001\u0000\u0000\u0000\u04c4\u04c5\u0003C\u0019\u0000\u04c5"+ + "\u04c6\u0001\u0000\u0000\u0000\u04c6\u04c7\u0006\u0080\u000b\u0000\u04c7"+ + "\u0112\u0001\u0000\u0000\u0000\u04c8\u04c9\u0003E\u001a\u0000\u04c9\u04ca"+ + "\u0001\u0000\u0000\u0000\u04ca\u04cb\u0006\u0081\u000b\u0000\u04cb\u0114"+ + "\u0001\u0000\u0000\u0000\u04cc\u04cd\u0003G\u001b\u0000\u04cd\u04ce\u0001"+ + "\u0000\u0000\u0000\u04ce\u04cf\u0006\u0082\u000b\u0000\u04cf\u0116\u0001"+ + 
"\u0000\u0000\u0000\u04d0\u04d1\u0003I\u001c\u0000\u04d1\u04d2\u0001\u0000"+ + "\u0000\u0000\u04d2\u04d3\u0006\u0083\u0010\u0000\u04d3\u04d4\u0006\u0083"+ + "\f\u0000\u04d4\u0118\u0001\u0000\u0000\u0000\u04d5\u04d6\u0003\u00b5R"+ + "\u0000\u04d6\u04d7\u0001\u0000\u0000\u0000\u04d7\u04d8\u0006\u0084\u000e"+ + "\u0000\u04d8\u04d9\u0006\u0084\u001b\u0000\u04d9\u011a\u0001\u0000\u0000"+ + "\u0000\u04da\u04db\u0007\u0007\u0000\u0000\u04db\u04dc\u0007\t\u0000\u0000"+ + "\u04dc\u04dd\u0001\u0000\u0000\u0000\u04dd\u04de\u0006\u0085\u001c\u0000"+ + "\u04de\u011c\u0001\u0000\u0000\u0000\u04df\u04e0\u0007\u0013\u0000\u0000"+ + "\u04e0\u04e1\u0007\u0001\u0000\u0000\u04e1\u04e2\u0007\u0005\u0000\u0000"+ + "\u04e2\u04e3\u0007\n\u0000\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4"+ + "\u04e5\u0006\u0086\u001c\u0000\u04e5\u011e\u0001\u0000\u0000\u0000\u04e6"+ + "\u04e7\b\"\u0000\u0000\u04e7\u0120\u0001\u0000\u0000\u0000\u04e8\u04ea"+ + "\u0003\u011f\u0087\u0000\u04e9\u04e8\u0001\u0000\u0000\u0000\u04ea\u04eb"+ + "\u0001\u0000\u0000\u0000\u04eb\u04e9\u0001\u0000\u0000\u0000\u04eb\u04ec"+ + "\u0001\u0000\u0000\u0000\u04ec\u04ed\u0001\u0000\u0000\u0000\u04ed\u04ee"+ + "\u0003o/\u0000\u04ee\u04f0\u0001\u0000\u0000\u0000\u04ef\u04e9\u0001\u0000"+ + "\u0000\u0000\u04ef\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f2\u0001\u0000"+ + "\u0000\u0000\u04f1\u04f3\u0003\u011f\u0087\u0000\u04f2\u04f1\u0001\u0000"+ + "\u0000\u0000\u04f3\u04f4\u0001\u0000\u0000\u0000\u04f4\u04f2\u0001\u0000"+ + "\u0000\u0000\u04f4\u04f5\u0001\u0000\u0000\u0000\u04f5\u0122\u0001\u0000"+ + "\u0000\u0000\u04f6\u04f7\u0003\u0121\u0088\u0000\u04f7\u04f8\u0001\u0000"+ + "\u0000\u0000\u04f8\u04f9\u0006\u0089\u001d\u0000\u04f9\u0124\u0001\u0000"+ + "\u0000\u0000\u04fa\u04fb\u0003C\u0019\u0000\u04fb\u04fc\u0001\u0000\u0000"+ + "\u0000\u04fc\u04fd\u0006\u008a\u000b\u0000\u04fd\u0126\u0001\u0000\u0000"+ + "\u0000\u04fe\u04ff\u0003E\u001a\u0000\u04ff\u0500\u0001\u0000\u0000\u0000"+ + "\u0500\u0501\u0006\u008b\u000b\u0000\u0501\u0128\u0001\u0000\u0000\u0000"+ + "\u0502\u0503\u0003G\u001b\u0000\u0503\u0504\u0001\u0000\u0000\u0000\u0504"+ + "\u0505\u0006\u008c\u000b\u0000\u0505\u012a\u0001\u0000\u0000\u0000\u0506"+ + "\u0507\u0003I\u001c\u0000\u0507\u0508\u0001\u0000\u0000\u0000\u0508\u0509"+ + "\u0006\u008d\u0010\u0000\u0509\u050a\u0006\u008d\f\u0000\u050a\u050b\u0006"+ + "\u008d\f\u0000\u050b\u012c\u0001\u0000\u0000\u0000\u050c\u050d\u0003k"+ + "-\u0000\u050d\u050e\u0001\u0000\u0000\u0000\u050e\u050f\u0006\u008e\u0014"+ + "\u0000\u050f\u012e\u0001\u0000\u0000\u0000\u0510\u0511\u0003q0\u0000\u0511"+ + "\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006\u008f\u0013\u0000\u0513"+ + "\u0130\u0001\u0000\u0000\u0000\u0514\u0515\u0003u2\u0000\u0515\u0516\u0001"+ + "\u0000\u0000\u0000\u0516\u0517\u0006\u0090\u0017\u0000\u0517\u0132\u0001"+ + "\u0000\u0000\u0000\u0518\u0519\u0003\u011d\u0086\u0000\u0519\u051a\u0001"+ + "\u0000\u0000\u0000\u051a\u051b\u0006\u0091\u001e\u0000\u051b\u0134\u0001"+ + "\u0000\u0000\u0000\u051c\u051d\u0003\u00f9t\u0000\u051d\u051e\u0001\u0000"+ + "\u0000\u0000\u051e\u051f\u0006\u0092\u001a\u0000\u051f\u0136\u0001\u0000"+ + "\u0000\u0000\u0520\u0521\u0003\u00bdV\u0000\u0521\u0522\u0001\u0000\u0000"+ + "\u0000\u0522\u0523\u0006\u0093\u001f\u0000\u0523\u0138\u0001\u0000\u0000"+ + "\u0000\u0524\u0525\u0003\u008d>\u0000\u0525\u0526\u0001\u0000\u0000\u0000"+ + "\u0526\u0527\u0006\u0094\u0018\u0000\u0527\u013a\u0001\u0000\u0000\u0000"+ + "\u0528\u0529\u0003\u00b3Q\u0000\u0529\u052a\u0001\u0000\u0000\u0000\u052a"+ + 
"\u052b\u0006\u0095\u0019\u0000\u052b\u013c\u0001\u0000\u0000\u0000\u052c"+ + "\u052d\u0003C\u0019\u0000\u052d\u052e\u0001\u0000\u0000\u0000\u052e\u052f"+ + "\u0006\u0096\u000b\u0000\u052f\u013e\u0001\u0000\u0000\u0000\u0530\u0531"+ + "\u0003E\u001a\u0000\u0531\u0532\u0001\u0000\u0000\u0000\u0532\u0533\u0006"+ + "\u0097\u000b\u0000\u0533\u0140\u0001\u0000\u0000\u0000\u0534\u0535\u0003"+ + "G\u001b\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0537\u0006\u0098"+ + "\u000b\u0000\u0537\u0142\u0001\u0000\u0000\u0000\u0538\u0539\u0003I\u001c"+ + "\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u0099\u0010"+ + "\u0000\u053b\u053c\u0006\u0099\f\u0000\u053c\u0144\u0001\u0000\u0000\u0000"+ + "\u053d\u053e\u0003u2\u0000\u053e\u053f\u0001\u0000\u0000\u0000\u053f\u0540"+ + "\u0006\u009a\u0017\u0000\u0540\u0146\u0001\u0000\u0000\u0000\u0541\u0542"+ + "\u0003\u008d>\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543\u0544\u0006"+ + "\u009b\u0018\u0000\u0544\u0148\u0001\u0000\u0000\u0000\u0545\u0546\u0003"+ + "\u00b3Q\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u009c"+ + "\u0019\u0000\u0548\u014a\u0001\u0000\u0000\u0000\u0549\u054a\u0003\u00bd"+ + "V\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006\u009d\u001f"+ + "\u0000\u054c\u014c\u0001\u0000\u0000\u0000\u054d\u054e\u0003\u00b9T\u0000"+ + "\u054e\u054f\u0001\u0000\u0000\u0000\u054f\u0550\u0006\u009e \u0000\u0550"+ + "\u014e\u0001\u0000\u0000\u0000\u0551\u0552\u0003C\u0019\u0000\u0552\u0553"+ + "\u0001\u0000\u0000\u0000\u0553\u0554\u0006\u009f\u000b\u0000\u0554\u0150"+ + "\u0001\u0000\u0000\u0000\u0555\u0556\u0003E\u001a\u0000\u0556\u0557\u0001"+ + "\u0000\u0000\u0000\u0557\u0558\u0006\u00a0\u000b\u0000\u0558\u0152\u0001"+ + "\u0000\u0000\u0000\u0559\u055a\u0003G\u001b\u0000\u055a\u055b\u0001\u0000"+ + "\u0000\u0000\u055b\u055c\u0006\u00a1\u000b\u0000\u055c\u0154\u0001\u0000"+ + "\u0000\u0000\u055d\u055e\u0003I\u001c\u0000\u055e\u055f\u0001\u0000\u0000"+ + "\u0000\u055f\u0560\u0006\u00a2\u0010\u0000\u0560\u0561\u0006\u00a2\f\u0000"+ + "\u0561\u0156\u0001\u0000\u0000\u0000\u0562\u0563\u0007\u0001\u0000\u0000"+ + "\u0563\u0564\u0007\t\u0000\u0000\u0564\u0565\u0007\u000f\u0000\u0000\u0565"+ + "\u0566\u0007\u0007\u0000\u0000\u0566\u0158\u0001\u0000\u0000\u0000\u0567"+ + "\u0568\u0003C\u0019\u0000\u0568\u0569\u0001\u0000\u0000\u0000\u0569\u056a"+ + "\u0006\u00a4\u000b\u0000\u056a\u015a\u0001\u0000\u0000\u0000\u056b\u056c"+ + "\u0003E\u001a\u0000\u056c\u056d\u0001\u0000\u0000\u0000\u056d\u056e\u0006"+ + "\u00a5\u000b\u0000\u056e\u015c\u0001\u0000\u0000\u0000\u056f\u0570\u0003"+ + "G\u001b\u0000\u0570\u0571\u0001\u0000\u0000\u0000\u0571\u0572\u0006\u00a6"+ + "\u000b\u0000\u0572\u015e\u0001\u0000\u0000\u0000\u0573\u0574\u0003\u00b7"+ + "S\u0000\u0574\u0575\u0001\u0000\u0000\u0000\u0575\u0576\u0006\u00a7\u0011"+ + "\u0000\u0576\u0577\u0006\u00a7\f\u0000\u0577\u0160\u0001\u0000\u0000\u0000"+ + "\u0578\u0579\u0003o/\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b"+ + "\u0006\u00a8\u0012\u0000\u057b\u0162\u0001\u0000\u0000\u0000\u057c\u0582"+ + "\u0003U\"\u0000\u057d\u0582\u0003K\u001d\u0000\u057e\u0582\u0003u2\u0000"+ + "\u057f\u0582\u0003M\u001e\u0000\u0580\u0582\u0003[%\u0000\u0581\u057c"+ + "\u0001\u0000\u0000\u0000\u0581\u057d\u0001\u0000\u0000\u0000\u0581\u057e"+ + "\u0001\u0000\u0000\u0000\u0581\u057f\u0001\u0000\u0000\u0000\u0581\u0580"+ + "\u0001\u0000\u0000\u0000\u0582\u0583\u0001\u0000\u0000\u0000\u0583\u0581"+ + "\u0001\u0000\u0000\u0000\u0583\u0584\u0001\u0000\u0000\u0000\u0584\u0164"+ + 
"\u0001\u0000\u0000\u0000\u0585\u0586\u0003C\u0019\u0000\u0586\u0587\u0001"+ + "\u0000\u0000\u0000\u0587\u0588\u0006\u00aa\u000b\u0000\u0588\u0166\u0001"+ + "\u0000\u0000\u0000\u0589\u058a\u0003E\u001a\u0000\u058a\u058b\u0001\u0000"+ + "\u0000\u0000\u058b\u058c\u0006\u00ab\u000b\u0000\u058c\u0168\u0001\u0000"+ + "\u0000\u0000\u058d\u058e\u0003G\u001b\u0000\u058e\u058f\u0001\u0000\u0000"+ + "\u0000\u058f\u0590\u0006\u00ac\u000b\u0000\u0590\u016a\u0001\u0000\u0000"+ + "\u0000\u0591\u0592\u0003I\u001c\u0000\u0592\u0593\u0001\u0000\u0000\u0000"+ + "\u0593\u0594\u0006\u00ad\u0010\u0000\u0594\u0595\u0006\u00ad\f\u0000\u0595"+ + "\u016c\u0001\u0000\u0000\u0000\u0596\u0597\u0003o/\u0000\u0597\u0598\u0001"+ + "\u0000\u0000\u0000\u0598\u0599\u0006\u00ae\u0012\u0000\u0599\u016e\u0001"+ + "\u0000\u0000\u0000\u059a\u059b\u0003q0\u0000\u059b\u059c\u0001\u0000\u0000"+ + "\u0000\u059c\u059d\u0006\u00af\u0013\u0000\u059d\u0170\u0001\u0000\u0000"+ + "\u0000\u059e\u059f\u0003u2\u0000\u059f\u05a0\u0001\u0000\u0000\u0000\u05a0"+ + "\u05a1\u0006\u00b0\u0017\u0000\u05a1\u0172\u0001\u0000\u0000\u0000\u05a2"+ + "\u05a3\u0003\u011b\u0085\u0000\u05a3\u05a4\u0001\u0000\u0000\u0000\u05a4"+ + "\u05a5\u0006\u00b1!\u0000\u05a5\u05a6\u0006\u00b1\"\u0000\u05a6\u0174"+ + "\u0001\u0000\u0000\u0000\u05a7\u05a8\u0003\u00dfg\u0000\u05a8\u05a9\u0001"+ + "\u0000\u0000\u0000\u05a9\u05aa\u0006\u00b2\u0015\u0000\u05aa\u0176\u0001"+ + "\u0000\u0000\u0000\u05ab\u05ac\u0003_\'\u0000\u05ac\u05ad\u0001\u0000"+ + "\u0000\u0000\u05ad\u05ae\u0006\u00b3\u0016\u0000\u05ae\u0178\u0001\u0000"+ + "\u0000\u0000\u05af\u05b0\u0003C\u0019\u0000\u05b0\u05b1\u0001\u0000\u0000"+ + "\u0000\u05b1\u05b2\u0006\u00b4\u000b\u0000\u05b2\u017a\u0001\u0000\u0000"+ + "\u0000\u05b3\u05b4\u0003E\u001a\u0000\u05b4\u05b5\u0001\u0000\u0000\u0000"+ + "\u05b5\u05b6\u0006\u00b5\u000b\u0000\u05b6\u017c\u0001\u0000\u0000\u0000"+ + "\u05b7\u05b8\u0003G\u001b\u0000\u05b8\u05b9\u0001\u0000\u0000\u0000\u05b9"+ + "\u05ba\u0006\u00b6\u000b\u0000\u05ba\u017e\u0001\u0000\u0000\u0000\u05bb"+ + "\u05bc\u0003I\u001c\u0000\u05bc\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be"+ + "\u0006\u00b7\u0010\u0000\u05be\u05bf\u0006\u00b7\f\u0000\u05bf\u05c0\u0006"+ + "\u00b7\f\u0000\u05c0\u0180\u0001\u0000\u0000\u0000\u05c1\u05c2\u0003q"+ + "0\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3\u05c4\u0006\u00b8\u0013"+ + "\u0000\u05c4\u0182\u0001\u0000\u0000\u0000\u05c5\u05c6\u0003u2\u0000\u05c6"+ + "\u05c7\u0001\u0000\u0000\u0000\u05c7\u05c8\u0006\u00b9\u0017\u0000\u05c8"+ + "\u0184\u0001\u0000\u0000\u0000\u05c9\u05ca\u0003\u00f9t\u0000\u05ca\u05cb"+ + "\u0001\u0000\u0000\u0000\u05cb\u05cc\u0006\u00ba\u001a\u0000\u05cc\u0186"+ + "\u0001\u0000\u0000\u0000\u05cd\u05ce\u0003C\u0019\u0000\u05ce\u05cf\u0001"+ + "\u0000\u0000\u0000\u05cf\u05d0\u0006\u00bb\u000b\u0000\u05d0\u0188\u0001"+ + "\u0000\u0000\u0000\u05d1\u05d2\u0003E\u001a\u0000\u05d2\u05d3\u0001\u0000"+ + "\u0000\u0000\u05d3\u05d4\u0006\u00bc\u000b\u0000\u05d4\u018a\u0001\u0000"+ + "\u0000\u0000\u05d5\u05d6\u0003G\u001b\u0000\u05d6\u05d7\u0001\u0000\u0000"+ + "\u0000\u05d7\u05d8\u0006\u00bd\u000b\u0000\u05d8\u018c\u0001\u0000\u0000"+ + "\u0000\u05d9\u05da\u0003I\u001c\u0000\u05da\u05db\u0001\u0000\u0000\u0000"+ + "\u05db\u05dc\u0006\u00be\u0010\u0000\u05dc\u05dd\u0006\u00be\f\u0000\u05dd"+ + "\u018e\u0001\u0000\u0000\u0000\u05de\u05df\u0007#\u0000\u0000\u05df\u05e0"+ + "\u0007\u0007\u0000\u0000\u05e0\u05e1\u0007\u0001\u0000\u0000\u05e1\u05e2"+ + "\u0007\t\u0000\u0000\u05e2\u0190\u0001\u0000\u0000\u0000\u05e3\u05e4\u0003"+ + 
"\u010d~\u0000\u05e4\u05e5\u0001\u0000\u0000\u0000\u05e5\u05e6\u0006\u00c0"+ + "#\u0000\u05e6\u0192\u0001\u0000\u0000\u0000\u05e7\u05e8\u0003\u011b\u0085"+ + "\u0000\u05e8\u05e9\u0001\u0000\u0000\u0000\u05e9\u05ea\u0006\u00c1!\u0000"+ + "\u05ea\u05eb\u0006\u00c1\f\u0000\u05eb\u05ec\u0006\u00c1\u0000\u0000\u05ec"+ + "\u0194\u0001\u0000\u0000\u0000\u05ed\u05ee\u0007\u0014\u0000\u0000\u05ee"+ + "\u05ef\u0007\u0002\u0000\u0000\u05ef\u05f0\u0007\u0001\u0000\u0000\u05f0"+ + "\u05f1\u0007\t\u0000\u0000\u05f1\u05f2\u0007\u0011\u0000\u0000\u05f2\u05f3"+ + "\u0001\u0000\u0000\u0000\u05f3\u05f4\u0006\u00c2\f\u0000\u05f4\u05f5\u0006"+ + "\u00c2\u0000\u0000\u05f5\u0196\u0001\u0000\u0000\u0000\u05f6\u05f7\u0003"+ + "\u00dfg\u0000\u05f7\u05f8\u0001\u0000\u0000\u0000\u05f8\u05f9\u0006\u00c3"+ + "\u0015\u0000\u05f9\u0198\u0001\u0000\u0000\u0000\u05fa\u05fb\u0003_\'"+ + "\u0000\u05fb\u05fc\u0001\u0000\u0000\u0000\u05fc\u05fd\u0006\u00c4\u0016"+ + "\u0000\u05fd\u019a\u0001\u0000\u0000\u0000\u05fe\u05ff\u0003o/\u0000\u05ff"+ + "\u0600\u0001\u0000\u0000\u0000\u0600\u0601\u0006\u00c5\u0012\u0000\u0601"+ + "\u019c\u0001\u0000\u0000\u0000\u0602\u0603\u0003\u00b9T\u0000\u0603\u0604"+ + "\u0001\u0000\u0000\u0000\u0604\u0605\u0006\u00c6 \u0000\u0605\u019e\u0001"+ + "\u0000\u0000\u0000\u0606\u0607\u0003\u00bdV\u0000\u0607\u0608\u0001\u0000"+ + "\u0000\u0000\u0608\u0609\u0006\u00c7\u001f\u0000\u0609\u01a0\u0001\u0000"+ + "\u0000\u0000\u060a\u060b\u0003C\u0019\u0000\u060b\u060c\u0001\u0000\u0000"+ + "\u0000\u060c\u060d\u0006\u00c8\u000b\u0000\u060d\u01a2\u0001\u0000\u0000"+ + "\u0000\u060e\u060f\u0003E\u001a\u0000\u060f\u0610\u0001\u0000\u0000\u0000"+ + "\u0610\u0611\u0006\u00c9\u000b\u0000\u0611\u01a4\u0001\u0000\u0000\u0000"+ + "\u0612\u0613\u0003G\u001b\u0000\u0613\u0614\u0001\u0000\u0000\u0000\u0614"+ + "\u0615\u0006\u00ca\u000b\u0000\u0615\u01a6\u0001\u0000\u0000\u0000\u0616"+ + "\u0617\u0003I\u001c\u0000\u0617\u0618\u0001\u0000\u0000\u0000\u0618\u0619"+ + "\u0006\u00cb\u0010\u0000\u0619\u061a\u0006\u00cb\f\u0000\u061a\u01a8\u0001"+ + "\u0000\u0000\u0000\u061b\u061c\u0003\u00dfg\u0000\u061c\u061d\u0001\u0000"+ + "\u0000\u0000\u061d\u061e\u0006\u00cc\u0015\u0000\u061e\u061f\u0006\u00cc"+ + "\f\u0000\u061f\u0620\u0006\u00cc$\u0000\u0620\u01aa\u0001\u0000\u0000"+ + "\u0000\u0621\u0622\u0003_\'\u0000\u0622\u0623\u0001\u0000\u0000\u0000"+ + "\u0623\u0624\u0006\u00cd\u0016\u0000\u0624\u0625\u0006\u00cd\f\u0000\u0625"+ + "\u0626\u0006\u00cd$\u0000\u0626\u01ac\u0001\u0000\u0000\u0000\u0627\u0628"+ + "\u0003C\u0019\u0000\u0628\u0629\u0001\u0000\u0000\u0000\u0629\u062a\u0006"+ + "\u00ce\u000b\u0000\u062a\u01ae\u0001\u0000\u0000\u0000\u062b\u062c\u0003"+ + "E\u001a\u0000\u062c\u062d\u0001\u0000\u0000\u0000\u062d\u062e\u0006\u00cf"+ + "\u000b\u0000\u062e\u01b0\u0001\u0000\u0000\u0000\u062f\u0630\u0003G\u001b"+ + "\u0000\u0630\u0631\u0001\u0000\u0000\u0000\u0631\u0632\u0006\u00d0\u000b"+ + "\u0000\u0632\u01b2\u0001\u0000\u0000\u0000\u0633\u0634\u0003o/\u0000\u0634"+ + "\u0635\u0001\u0000\u0000\u0000\u0635\u0636\u0006\u00d1\u0012\u0000\u0636"+ + "\u0637\u0006\u00d1\f\u0000\u0637\u0638\u0006\u00d1\n\u0000\u0638\u01b4"+ + "\u0001\u0000\u0000\u0000\u0639\u063a\u0003q0\u0000\u063a\u063b\u0001\u0000"+ + "\u0000\u0000\u063b\u063c\u0006\u00d2\u0013\u0000\u063c\u063d\u0006\u00d2"+ + "\f\u0000\u063d\u063e\u0006\u00d2\n\u0000\u063e\u01b6\u0001\u0000\u0000"+ + "\u0000\u063f\u0640\u0003C\u0019\u0000\u0640\u0641\u0001\u0000\u0000\u0000"+ + "\u0641\u0642\u0006\u00d3\u000b\u0000\u0642\u01b8\u0001\u0000\u0000\u0000"+ + 
"\u0643\u0644\u0003E\u001a\u0000\u0644\u0645\u0001\u0000\u0000\u0000\u0645"+ + "\u0646\u0006\u00d4\u000b\u0000\u0646\u01ba\u0001\u0000\u0000\u0000\u0647"+ + "\u0648\u0003G\u001b\u0000\u0648\u0649\u0001\u0000\u0000\u0000\u0649\u064a"+ + "\u0006\u00d5\u000b\u0000\u064a\u01bc\u0001\u0000\u0000\u0000\u064b\u064c"+ + "\u0003\u00bdV\u0000\u064c\u064d\u0001\u0000\u0000\u0000\u064d\u064e\u0006"+ + "\u00d6\f\u0000\u064e\u064f\u0006\u00d6\u0000\u0000\u064f\u0650\u0006\u00d6"+ + "\u001f\u0000\u0650\u01be\u0001\u0000\u0000\u0000\u0651\u0652\u0003\u00b9"+ + "T\u0000\u0652\u0653\u0001\u0000\u0000\u0000\u0653\u0654\u0006\u00d7\f"+ + "\u0000\u0654\u0655\u0006\u00d7\u0000\u0000\u0655\u0656\u0006\u00d7 \u0000"+ + "\u0656\u01c0\u0001\u0000\u0000\u0000\u0657\u0658\u0003e*\u0000\u0658\u0659"+ + "\u0001\u0000\u0000\u0000\u0659\u065a\u0006\u00d8\f\u0000\u065a\u065b\u0006"+ + "\u00d8\u0000\u0000\u065b\u065c\u0006\u00d8%\u0000\u065c\u01c2\u0001\u0000"+ + "\u0000\u0000\u065d\u065e\u0003I\u001c\u0000\u065e\u065f\u0001\u0000\u0000"+ + "\u0000\u065f\u0660\u0006\u00d9\u0010\u0000\u0660\u0661\u0006\u00d9\f\u0000"+ + "\u0661\u01c4\u0001\u0000\u0000\u0000\u0662\u0663\u0003I\u001c\u0000\u0663"+ + "\u0664\u0001\u0000\u0000\u0000\u0664\u0665\u0006\u00da\u0010\u0000\u0665"+ + "\u0666\u0006\u00da\f\u0000\u0666\u01c6\u0001\u0000\u0000\u0000\u0667\u0668"+ + "\u0003\u00b9T\u0000\u0668\u0669\u0001\u0000\u0000\u0000\u0669\u066a\u0006"+ + "\u00db \u0000\u066a\u01c8\u0001\u0000\u0000\u0000\u066b\u066c\u0003G\u001b"+ + "\u0000\u066c\u066d\u0001\u0000\u0000\u0000\u066d\u066e\u0006\u00dc\u000b"+ + "\u0000\u066e\u01ca\u0001\u0000\u0000\u0000\u066f\u0670\u0003C\u0019\u0000"+ + "\u0670\u0671\u0001\u0000\u0000\u0000\u0671\u0672\u0006\u00dd\u000b\u0000"+ + "\u0672\u01cc\u0001\u0000\u0000\u0000\u0673\u0674\u0003E\u001a\u0000\u0674"+ + "\u0675\u0001\u0000\u0000\u0000\u0675\u0676\u0006\u00de\u000b\u0000\u0676"+ + "\u01ce\u0001\u0000\u0000\u0000C\u0000\u0001\u0002\u0003\u0004\u0005\u0006"+ + "\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u02a7\u02b1\u02b5\u02b8\u02c1"+ + "\u02c3\u02ce\u02e1\u02e6\u02ef\u02f6\u02fb\u02fd\u0308\u0310\u0313\u0315"+ + "\u031a\u031f\u0325\u032c\u0331\u0337\u033a\u0342\u0346\u03ca\u03cf\u03d6"+ + "\u03d8\u03e8\u03ed\u03f2\u03f4\u03fa\u0447\u044c\u047b\u047f\u0484\u0489"+ + "\u048e\u0490\u0494\u0496\u04eb\u04ef\u04f4\u0581\u0583&\u0005\u0001\u0000"+ + "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003\u0000"+ + "\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\r\u0000\u0005\u000b"+ + "\u0000\u0005\u000e\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0010"+ + "\u0000\u0007H\u0000\u0005\u0000\u0000\u0007\u001d\u0000\u0007I\u0000\u0007"+ + "&\u0000\u0007\'\u0000\u0007$\u0000\u0007S\u0000\u0007\u001e\u0000\u0007"+ + ")\u0000\u00075\u0000\u0007G\u0000\u0007W\u0000\u0005\n\u0000\u0005\u0007"+ + "\u0000\u0007a\u0000\u0007`\u0000\u0007K\u0000\u0007J\u0000\u0007_\u0000"+ + "\u0005\f\u0000\u0007[\u0000\u0005\u000f\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index a75d7e985c1d0..69fc4c9139d8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -16,7 +16,7 @@ null 'sort' 'stats' 'where' 
-null +'lookup' null null null @@ -120,6 +120,7 @@ null null null null +'join' 'USING' null null @@ -130,6 +131,9 @@ null null null null +null +null +null token symbolic names: null @@ -149,14 +153,14 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP DEV_INLINESTATS +DEV_INSIST DEV_LOOKUP DEV_METRICS -DEV_JOIN DEV_JOIN_FULL DEV_JOIN_LEFT DEV_JOIN_RIGHT -DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -253,6 +257,7 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +JOIN USING JOIN_LINE_COMMENT JOIN_MULTILINE_COMMENT @@ -263,6 +268,9 @@ METRICS_WS CLOSING_METRICS_LINE_COMMENT CLOSING_METRICS_MULTILINE_COMMENT CLOSING_METRICS_WS +INSIST_WS +INSIST_LINE_COMMENT +INSIST_MULTILINE_COMMENT rule names: singleStatement @@ -331,7 +339,8 @@ joinCommand joinTarget joinCondition joinPredicate +insistCommand atn: -[4, 1, 130, 650, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 1, 11, 1, 11, 3, 11, 299, 8, 11, 3, 11, 301, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 311, 8, 13, 10, 13, 12, 13, 314, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 330, 8, 17, 10, 17, 12, 17, 333, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 338, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 346, 8, 19, 10, 19, 12, 19, 349, 9, 19, 1, 19, 3, 19, 352, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 357, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 369, 8, 23, 10, 23, 12, 23, 372, 9, 23, 1, 24, 1, 24, 1, 24, 1, 
24, 5, 24, 378, 8, 24, 10, 24, 12, 24, 381, 9, 24, 1, 24, 3, 24, 384, 8, 24, 1, 24, 1, 24, 3, 24, 388, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 395, 8, 26, 1, 26, 1, 26, 3, 26, 399, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 404, 8, 27, 10, 27, 12, 27, 407, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 412, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 417, 8, 29, 10, 29, 12, 29, 420, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 425, 8, 30, 10, 30, 12, 30, 428, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 433, 8, 31, 10, 31, 12, 31, 436, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 443, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 469, 8, 34, 10, 34, 12, 34, 472, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 480, 8, 34, 10, 34, 12, 34, 483, 9, 34, 1, 34, 1, 34, 3, 34, 487, 8, 34, 1, 35, 1, 35, 3, 35, 491, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 496, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 505, 8, 38, 10, 38, 12, 38, 508, 9, 38, 1, 39, 1, 39, 3, 39, 512, 8, 39, 1, 39, 1, 39, 3, 39, 516, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 528, 8, 42, 10, 42, 12, 42, 531, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 541, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 553, 8, 47, 10, 47, 12, 47, 556, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 566, 8, 50, 1, 51, 3, 51, 569, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 574, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 596, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 602, 8, 58, 10, 58, 12, 58, 605, 9, 58, 3, 58, 607, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 612, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 625, 8, 61, 1, 62, 3, 62, 628, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 637, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 643, 8, 64, 10, 64, 12, 64, 646, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 677, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 304, 1, 0, 0, 0, 26, 306, 1, 0, 0, 0, 28, 317, 1, 0, 0, 0, 30, 321, 1, 0, 0, 0, 32, 323, 1, 0, 0, 0, 34, 326, 1, 0, 0, 0, 36, 337, 1, 0, 0, 0, 38, 341, 1, 0, 0, 0, 40, 356, 1, 0, 0, 0, 42, 360, 1, 0, 0, 0, 44, 362, 1, 0, 0, 0, 46, 364, 1, 0, 0, 0, 48, 373, 1, 0, 0, 0, 50, 389, 1, 0, 0, 0, 52, 392, 1, 0, 0, 0, 54, 400, 1, 0, 0, 0, 56, 408, 1, 0, 0, 0, 58, 413, 1, 0, 0, 0, 60, 421, 1, 0, 0, 0, 62, 429, 1, 0, 0, 0, 64, 437, 1, 0, 0, 0, 66, 442, 1, 0, 0, 0, 68, 486, 1, 0, 0, 0, 70, 490, 1, 0, 0, 0, 72, 495, 1, 0, 0, 0, 74, 497, 1, 0, 0, 0, 76, 500, 1, 0, 0, 0, 78, 509, 1, 0, 0, 0, 80, 517, 1, 0, 0, 0, 82, 520, 1, 0, 0, 0, 84, 523, 1, 0, 0, 0, 86, 
532, 1, 0, 0, 0, 88, 536, 1, 0, 0, 0, 90, 542, 1, 0, 0, 0, 92, 546, 1, 0, 0, 0, 94, 549, 1, 0, 0, 0, 96, 557, 1, 0, 0, 0, 98, 561, 1, 0, 0, 0, 100, 565, 1, 0, 0, 0, 102, 568, 1, 0, 0, 0, 104, 573, 1, 0, 0, 0, 106, 577, 1, 0, 0, 0, 108, 579, 1, 0, 0, 0, 110, 581, 1, 0, 0, 0, 112, 584, 1, 0, 0, 0, 114, 588, 1, 0, 0, 0, 116, 591, 1, 0, 0, 0, 118, 611, 1, 0, 0, 0, 120, 615, 1, 0, 0, 0, 122, 620, 1, 0, 0, 0, 124, 627, 1, 0, 0, 0, 126, 633, 1, 0, 0, 0, 128, 638, 1, 0, 0, 0, 130, 647, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 
29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 30, 15, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 1, 0, 0, 258, 263, 3, 18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 30, 15, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 300, 5, 48, 0, 0, 287, 301, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 298, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 297, 5, 39, 0, 0, 297, 299, 3, 26, 13, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 301, 1, 0, 0, 0, 300, 287, 1, 0, 0, 0, 300, 288, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 303, 5, 55, 0, 0, 303, 23, 1, 0, 0, 0, 304, 305, 3, 72, 36, 0, 305, 25, 1, 0, 0, 0, 306, 307, 5, 69, 0, 0, 307, 312, 3, 28, 14, 0, 308, 309, 5, 39, 0, 0, 309, 311, 3, 28, 14, 0, 310, 308, 1, 0, 0, 0, 311, 314, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 315, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 315, 316, 5, 70, 0, 0, 316, 27, 1, 0, 0, 0, 317, 318, 3, 106, 53, 0, 318, 319, 5, 38, 0, 0, 319, 320, 3, 68, 34, 0, 320, 29, 1, 0, 0, 0, 321, 322, 3, 64, 32, 0, 322, 31, 1, 0, 0, 0, 323, 324, 5, 12, 0, 0, 324, 325, 3, 34, 17, 0, 325, 33, 1, 0, 0, 0, 326, 331, 3, 36, 18, 0, 327, 328, 5, 39, 0, 0, 328, 330, 3, 36, 18, 0, 329, 327, 1, 0, 0, 0, 330, 333, 1, 0, 0, 0, 331, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 35, 1, 0, 0, 0, 333, 331, 1, 0, 0, 0, 334, 335, 3, 58, 29, 0, 335, 336, 5, 36, 0, 0, 336, 338, 1, 0, 0, 0, 337, 334, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 3, 10, 5, 0, 340, 37, 1, 0, 0, 0, 341, 342, 5, 6, 0, 0, 342, 347, 3, 40, 20, 0, 343, 344, 5, 39, 0, 0, 344, 346, 3, 40, 20, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 352, 3, 46, 23, 0, 351, 350, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 39, 1, 0, 0, 0, 353, 354, 3, 42, 21, 0, 354, 355, 5, 38, 0, 0, 355, 357, 1, 0, 0, 0, 356, 353, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 3, 44, 22, 0, 359, 41, 1, 0, 0, 0, 360, 361, 5, 83, 0, 0, 361, 43, 1, 0, 0, 0, 362, 363, 7, 2, 0, 0, 363, 45, 1, 0, 0, 0, 364, 365, 5, 82, 0, 0, 365, 370, 5, 83, 0, 0, 366, 367, 5, 39, 0, 0, 367, 369, 5, 83, 0, 0, 368, 366, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 368, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 47, 1, 0, 0, 0, 372, 370, 1, 0, 0, 0, 373, 374, 5, 19, 
0, 0, 374, 379, 3, 40, 20, 0, 375, 376, 5, 39, 0, 0, 376, 378, 3, 40, 20, 0, 377, 375, 1, 0, 0, 0, 378, 381, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 382, 384, 3, 54, 27, 0, 383, 382, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 386, 5, 33, 0, 0, 386, 388, 3, 34, 17, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 49, 1, 0, 0, 0, 389, 390, 5, 4, 0, 0, 390, 391, 3, 34, 17, 0, 391, 51, 1, 0, 0, 0, 392, 394, 5, 15, 0, 0, 393, 395, 3, 54, 27, 0, 394, 393, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 397, 5, 33, 0, 0, 397, 399, 3, 34, 17, 0, 398, 396, 1, 0, 0, 0, 398, 399, 1, 0, 0, 0, 399, 53, 1, 0, 0, 0, 400, 405, 3, 56, 28, 0, 401, 402, 5, 39, 0, 0, 402, 404, 3, 56, 28, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 55, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 411, 3, 36, 18, 0, 409, 410, 5, 16, 0, 0, 410, 412, 3, 10, 5, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 57, 1, 0, 0, 0, 413, 418, 3, 72, 36, 0, 414, 415, 5, 41, 0, 0, 415, 417, 3, 72, 36, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 59, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 426, 3, 66, 33, 0, 422, 423, 5, 41, 0, 0, 423, 425, 3, 66, 33, 0, 424, 422, 1, 0, 0, 0, 425, 428, 1, 0, 0, 0, 426, 424, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 61, 1, 0, 0, 0, 428, 426, 1, 0, 0, 0, 429, 434, 3, 60, 30, 0, 430, 431, 5, 39, 0, 0, 431, 433, 3, 60, 30, 0, 432, 430, 1, 0, 0, 0, 433, 436, 1, 0, 0, 0, 434, 432, 1, 0, 0, 0, 434, 435, 1, 0, 0, 0, 435, 63, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 65, 1, 0, 0, 0, 439, 443, 5, 87, 0, 0, 440, 441, 4, 33, 10, 0, 441, 443, 3, 70, 35, 0, 442, 439, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 67, 1, 0, 0, 0, 444, 487, 5, 50, 0, 0, 445, 446, 3, 104, 52, 0, 446, 447, 5, 74, 0, 0, 447, 487, 1, 0, 0, 0, 448, 487, 3, 102, 51, 0, 449, 487, 3, 104, 52, 0, 450, 487, 3, 98, 49, 0, 451, 487, 3, 70, 35, 0, 452, 487, 3, 106, 53, 0, 453, 454, 5, 72, 0, 0, 454, 459, 3, 100, 50, 0, 455, 456, 5, 39, 0, 0, 456, 458, 3, 100, 50, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 73, 0, 0, 463, 487, 1, 0, 0, 0, 464, 465, 5, 72, 0, 0, 465, 470, 3, 98, 49, 0, 466, 467, 5, 39, 0, 0, 467, 469, 3, 98, 49, 0, 468, 466, 1, 0, 0, 0, 469, 472, 1, 0, 0, 0, 470, 468, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 473, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 473, 474, 5, 73, 0, 0, 474, 487, 1, 0, 0, 0, 475, 476, 5, 72, 0, 0, 476, 481, 3, 106, 53, 0, 477, 478, 5, 39, 0, 0, 478, 480, 3, 106, 53, 0, 479, 477, 1, 0, 0, 0, 480, 483, 1, 0, 0, 0, 481, 479, 1, 0, 0, 0, 481, 482, 1, 0, 0, 0, 482, 484, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 484, 485, 5, 73, 0, 0, 485, 487, 1, 0, 0, 0, 486, 444, 1, 0, 0, 0, 486, 445, 1, 0, 0, 0, 486, 448, 1, 0, 0, 0, 486, 449, 1, 0, 0, 0, 486, 450, 1, 0, 0, 0, 486, 451, 1, 0, 0, 0, 486, 452, 1, 0, 0, 0, 486, 453, 1, 0, 0, 0, 486, 464, 1, 0, 0, 0, 486, 475, 1, 0, 0, 0, 487, 69, 1, 0, 0, 0, 488, 491, 5, 53, 0, 0, 489, 491, 5, 71, 0, 0, 490, 488, 1, 0, 0, 0, 490, 489, 1, 0, 0, 0, 491, 71, 1, 0, 0, 0, 492, 496, 3, 64, 32, 0, 493, 494, 4, 36, 11, 0, 494, 496, 3, 70, 35, 0, 495, 492, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 73, 1, 0, 0, 0, 497, 498, 5, 9, 0, 0, 498, 499, 5, 31, 0, 0, 499, 75, 1, 0, 0, 0, 500, 501, 5, 14, 0, 0, 501, 506, 3, 78, 39, 0, 502, 503, 5, 39, 0, 0, 503, 505, 3, 78, 39, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 
506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 77, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 511, 3, 10, 5, 0, 510, 512, 7, 4, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 5, 51, 0, 0, 514, 516, 7, 5, 0, 0, 515, 513, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 79, 1, 0, 0, 0, 517, 518, 5, 8, 0, 0, 518, 519, 3, 62, 31, 0, 519, 81, 1, 0, 0, 0, 520, 521, 5, 2, 0, 0, 521, 522, 3, 62, 31, 0, 522, 83, 1, 0, 0, 0, 523, 524, 5, 11, 0, 0, 524, 529, 3, 86, 43, 0, 525, 526, 5, 39, 0, 0, 526, 528, 3, 86, 43, 0, 527, 525, 1, 0, 0, 0, 528, 531, 1, 0, 0, 0, 529, 527, 1, 0, 0, 0, 529, 530, 1, 0, 0, 0, 530, 85, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 532, 533, 3, 60, 30, 0, 533, 534, 5, 91, 0, 0, 534, 535, 3, 60, 30, 0, 535, 87, 1, 0, 0, 0, 536, 537, 5, 1, 0, 0, 537, 538, 3, 20, 10, 0, 538, 540, 3, 106, 53, 0, 539, 541, 3, 94, 47, 0, 540, 539, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 89, 1, 0, 0, 0, 542, 543, 5, 7, 0, 0, 543, 544, 3, 20, 10, 0, 544, 545, 3, 106, 53, 0, 545, 91, 1, 0, 0, 0, 546, 547, 5, 10, 0, 0, 547, 548, 3, 58, 29, 0, 548, 93, 1, 0, 0, 0, 549, 554, 3, 96, 48, 0, 550, 551, 5, 39, 0, 0, 551, 553, 3, 96, 48, 0, 552, 550, 1, 0, 0, 0, 553, 556, 1, 0, 0, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 95, 1, 0, 0, 0, 556, 554, 1, 0, 0, 0, 557, 558, 3, 64, 32, 0, 558, 559, 5, 36, 0, 0, 559, 560, 3, 68, 34, 0, 560, 97, 1, 0, 0, 0, 561, 562, 7, 6, 0, 0, 562, 99, 1, 0, 0, 0, 563, 566, 3, 102, 51, 0, 564, 566, 3, 104, 52, 0, 565, 563, 1, 0, 0, 0, 565, 564, 1, 0, 0, 0, 566, 101, 1, 0, 0, 0, 567, 569, 7, 0, 0, 0, 568, 567, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 5, 32, 0, 0, 571, 103, 1, 0, 0, 0, 572, 574, 7, 0, 0, 0, 573, 572, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 5, 31, 0, 0, 576, 105, 1, 0, 0, 0, 577, 578, 5, 30, 0, 0, 578, 107, 1, 0, 0, 0, 579, 580, 7, 7, 0, 0, 580, 109, 1, 0, 0, 0, 581, 582, 5, 5, 0, 0, 582, 583, 3, 112, 56, 0, 583, 111, 1, 0, 0, 0, 584, 585, 5, 72, 0, 0, 585, 586, 3, 2, 1, 0, 586, 587, 5, 73, 0, 0, 587, 113, 1, 0, 0, 0, 588, 589, 5, 13, 0, 0, 589, 590, 5, 107, 0, 0, 590, 115, 1, 0, 0, 0, 591, 592, 5, 3, 0, 0, 592, 595, 5, 97, 0, 0, 593, 594, 5, 95, 0, 0, 594, 596, 3, 60, 30, 0, 595, 593, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 606, 1, 0, 0, 0, 597, 598, 5, 96, 0, 0, 598, 603, 3, 118, 59, 0, 599, 600, 5, 39, 0, 0, 600, 602, 3, 118, 59, 0, 601, 599, 1, 0, 0, 0, 602, 605, 1, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 607, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 606, 597, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 117, 1, 0, 0, 0, 608, 609, 3, 60, 30, 0, 609, 610, 5, 36, 0, 0, 610, 612, 1, 0, 0, 0, 611, 608, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 3, 60, 30, 0, 614, 119, 1, 0, 0, 0, 615, 616, 5, 18, 0, 0, 616, 617, 3, 40, 20, 0, 617, 618, 5, 95, 0, 0, 618, 619, 3, 62, 31, 0, 619, 121, 1, 0, 0, 0, 620, 621, 5, 17, 0, 0, 621, 624, 3, 54, 27, 0, 622, 623, 5, 33, 0, 0, 623, 625, 3, 34, 17, 0, 624, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 123, 1, 0, 0, 0, 626, 628, 7, 8, 0, 0, 627, 626, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 5, 20, 0, 0, 630, 631, 3, 126, 63, 0, 631, 632, 3, 128, 64, 0, 632, 125, 1, 0, 0, 0, 633, 636, 3, 40, 20, 0, 634, 635, 5, 91, 0, 0, 635, 637, 3, 64, 32, 0, 636, 634, 1, 0, 0, 0, 636, 637, 1, 0, 0, 0, 637, 127, 1, 0, 0, 0, 638, 639, 5, 95, 0, 0, 639, 644, 3, 130, 65, 0, 640, 641, 5, 39, 0, 0, 641, 643, 3, 130, 65, 0, 642, 640, 1, 0, 0, 0, 643, 646, 1, 0, 0, 0, 644, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 129, 1, 0, 0, 0, 646, 
644, 1, 0, 0, 0, 647, 648, 3, 16, 8, 0, 648, 131, 1, 0, 0, 0, 63, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 238, 248, 254, 262, 264, 275, 282, 293, 298, 300, 312, 331, 337, 347, 351, 356, 370, 379, 383, 387, 394, 398, 405, 411, 418, 426, 434, 442, 459, 470, 481, 486, 490, 495, 506, 511, 515, 529, 540, 554, 565, 568, 573, 595, 603, 606, 611, 624, 627, 636, 644] \ No newline at end of file +[4, 1, 134, 649, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 144, 8, 1, 10, 1, 12, 1, 147, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 155, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 176, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 188, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 195, 8, 5, 10, 5, 12, 5, 198, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 205, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 210, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 218, 8, 5, 10, 5, 12, 5, 221, 9, 5, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 232, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 237, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 242, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 252, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 258, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 266, 8, 9, 10, 9, 12, 9, 269, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 279, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 284, 8, 10, 10, 10, 12, 10, 287, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 295, 8, 11, 10, 11, 12, 11, 298, 9, 11, 1, 11, 1, 11, 3, 11, 302, 8, 11, 3, 11, 304, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 314, 8, 13, 10, 13, 12, 13, 317, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 333, 8, 17, 10, 17, 12, 17, 336, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 341, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 349, 8, 19, 10, 19, 12, 19, 352, 9, 19, 1, 19, 3, 19, 355, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 360, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 372, 8, 23, 10, 23, 12, 23, 375, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 381, 8, 24, 10, 24, 12, 24, 384, 9, 24, 1, 24, 3, 24, 387, 8, 24, 1, 24, 1, 24, 3, 24, 391, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 398, 8, 26, 1, 26, 1, 26, 3, 26, 402, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 407, 8, 27, 10, 27, 12, 27, 410, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 415, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 420, 8, 29, 10, 29, 12, 29, 423, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 428, 8, 30, 10, 30, 
12, 30, 431, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 436, 8, 31, 10, 31, 12, 31, 439, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 445, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 460, 8, 34, 10, 34, 12, 34, 463, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 471, 8, 34, 10, 34, 12, 34, 474, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 482, 8, 34, 10, 34, 12, 34, 485, 9, 34, 1, 34, 1, 34, 3, 34, 489, 8, 34, 1, 35, 1, 35, 3, 35, 493, 8, 35, 1, 36, 1, 36, 3, 36, 497, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 506, 8, 38, 10, 38, 12, 38, 509, 9, 38, 1, 39, 1, 39, 3, 39, 513, 8, 39, 1, 39, 1, 39, 3, 39, 517, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 529, 8, 42, 10, 42, 12, 42, 532, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 542, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 554, 8, 47, 10, 47, 12, 47, 557, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 567, 8, 50, 1, 51, 3, 51, 570, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 575, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 597, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 603, 8, 58, 10, 58, 12, 58, 606, 9, 58, 3, 58, 608, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 613, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 626, 8, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 639, 8, 64, 10, 64, 12, 64, 642, 9, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 0, 4, 2, 10, 18, 20, 67, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 2, 0, 17, 17, 23, 24, 674, 0, 134, 1, 0, 0, 0, 2, 137, 1, 0, 0, 0, 4, 154, 1, 0, 0, 0, 6, 175, 1, 0, 0, 0, 8, 177, 1, 0, 0, 0, 10, 209, 1, 0, 0, 0, 12, 236, 1, 0, 0, 0, 14, 238, 1, 0, 0, 0, 16, 251, 1, 0, 0, 0, 18, 257, 1, 0, 0, 0, 20, 278, 1, 0, 0, 0, 22, 288, 1, 0, 0, 0, 24, 307, 1, 0, 0, 0, 26, 309, 1, 0, 0, 0, 28, 320, 1, 0, 0, 0, 30, 324, 1, 0, 0, 0, 32, 326, 1, 0, 0, 0, 34, 329, 1, 0, 0, 0, 36, 340, 1, 0, 0, 0, 38, 344, 1, 0, 0, 0, 40, 359, 1, 0, 0, 0, 42, 363, 1, 0, 0, 0, 44, 365, 1, 0, 0, 0, 46, 367, 1, 0, 0, 0, 48, 376, 1, 0, 0, 0, 50, 392, 1, 0, 0, 0, 52, 395, 1, 0, 0, 0, 54, 403, 1, 0, 0, 0, 56, 411, 1, 0, 0, 0, 58, 416, 1, 0, 0, 0, 60, 424, 1, 0, 0, 0, 62, 432, 1, 0, 0, 0, 64, 440, 1, 0, 0, 0, 66, 444, 1, 0, 0, 0, 68, 488, 1, 0, 0, 0, 70, 492, 1, 0, 0, 0, 72, 496, 1, 0, 0, 0, 74, 498, 1, 0, 0, 0, 76, 501, 1, 0, 0, 0, 78, 510, 1, 0, 0, 0, 80, 518, 1, 0, 0, 0, 82, 521, 1, 0, 0, 0, 84, 524, 1, 0, 0, 0, 86, 533, 1, 0, 0, 0, 88, 537, 1, 0, 0, 0, 90, 543, 1, 0, 0, 0, 92, 547, 1, 0, 0, 0, 94, 550, 1, 0, 0, 0, 96, 558, 1, 0, 0, 0, 98, 562, 1, 0, 0, 0, 100, 566, 1, 0, 0, 0, 102, 569, 1, 0, 0, 0, 104, 574, 1, 0, 0, 0, 106, 578, 1, 0, 0, 0, 108, 580, 1, 0, 0, 0, 110, 582, 1, 0, 0, 0, 112, 585, 1, 0, 0, 0, 114, 589, 1, 0, 0, 0, 116, 592, 1, 0, 0, 0, 118, 612, 1, 0, 0, 0, 120, 616, 1, 0, 0, 0, 122, 621, 1, 0, 0, 0, 124, 627, 1, 0, 0, 0, 126, 632, 1, 0, 
0, 0, 128, 634, 1, 0, 0, 0, 130, 643, 1, 0, 0, 0, 132, 645, 1, 0, 0, 0, 134, 135, 3, 2, 1, 0, 135, 136, 5, 0, 0, 1, 136, 1, 1, 0, 0, 0, 137, 138, 6, 1, -1, 0, 138, 139, 3, 4, 2, 0, 139, 145, 1, 0, 0, 0, 140, 141, 10, 1, 0, 0, 141, 142, 5, 29, 0, 0, 142, 144, 3, 6, 3, 0, 143, 140, 1, 0, 0, 0, 144, 147, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 145, 146, 1, 0, 0, 0, 146, 3, 1, 0, 0, 0, 147, 145, 1, 0, 0, 0, 148, 155, 3, 110, 55, 0, 149, 155, 3, 38, 19, 0, 150, 155, 3, 32, 16, 0, 151, 155, 3, 114, 57, 0, 152, 153, 4, 2, 1, 0, 153, 155, 3, 48, 24, 0, 154, 148, 1, 0, 0, 0, 154, 149, 1, 0, 0, 0, 154, 150, 1, 0, 0, 0, 154, 151, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 155, 5, 1, 0, 0, 0, 156, 176, 3, 50, 25, 0, 157, 176, 3, 8, 4, 0, 158, 176, 3, 80, 40, 0, 159, 176, 3, 74, 37, 0, 160, 176, 3, 52, 26, 0, 161, 176, 3, 76, 38, 0, 162, 176, 3, 82, 41, 0, 163, 176, 3, 84, 42, 0, 164, 176, 3, 88, 44, 0, 165, 176, 3, 90, 45, 0, 166, 176, 3, 116, 58, 0, 167, 176, 3, 92, 46, 0, 168, 176, 3, 124, 62, 0, 169, 170, 4, 3, 2, 0, 170, 176, 3, 122, 61, 0, 171, 172, 4, 3, 3, 0, 172, 176, 3, 120, 60, 0, 173, 174, 4, 3, 4, 0, 174, 176, 3, 132, 66, 0, 175, 156, 1, 0, 0, 0, 175, 157, 1, 0, 0, 0, 175, 158, 1, 0, 0, 0, 175, 159, 1, 0, 0, 0, 175, 160, 1, 0, 0, 0, 175, 161, 1, 0, 0, 0, 175, 162, 1, 0, 0, 0, 175, 163, 1, 0, 0, 0, 175, 164, 1, 0, 0, 0, 175, 165, 1, 0, 0, 0, 175, 166, 1, 0, 0, 0, 175, 167, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 175, 169, 1, 0, 0, 0, 175, 171, 1, 0, 0, 0, 175, 173, 1, 0, 0, 0, 176, 7, 1, 0, 0, 0, 177, 178, 5, 16, 0, 0, 178, 179, 3, 10, 5, 0, 179, 9, 1, 0, 0, 0, 180, 181, 6, 5, -1, 0, 181, 182, 5, 49, 0, 0, 182, 210, 3, 10, 5, 8, 183, 210, 3, 16, 8, 0, 184, 210, 3, 12, 6, 0, 185, 187, 3, 16, 8, 0, 186, 188, 5, 49, 0, 0, 187, 186, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 5, 44, 0, 0, 190, 191, 5, 48, 0, 0, 191, 196, 3, 16, 8, 0, 192, 193, 5, 39, 0, 0, 193, 195, 3, 16, 8, 0, 194, 192, 1, 0, 0, 0, 195, 198, 1, 0, 0, 0, 196, 194, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 199, 1, 0, 0, 0, 198, 196, 1, 0, 0, 0, 199, 200, 5, 55, 0, 0, 200, 210, 1, 0, 0, 0, 201, 202, 3, 16, 8, 0, 202, 204, 5, 45, 0, 0, 203, 205, 5, 49, 0, 0, 204, 203, 1, 0, 0, 0, 204, 205, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 207, 5, 50, 0, 0, 207, 210, 1, 0, 0, 0, 208, 210, 3, 14, 7, 0, 209, 180, 1, 0, 0, 0, 209, 183, 1, 0, 0, 0, 209, 184, 1, 0, 0, 0, 209, 185, 1, 0, 0, 0, 209, 201, 1, 0, 0, 0, 209, 208, 1, 0, 0, 0, 210, 219, 1, 0, 0, 0, 211, 212, 10, 5, 0, 0, 212, 213, 5, 34, 0, 0, 213, 218, 3, 10, 5, 6, 214, 215, 10, 4, 0, 0, 215, 216, 5, 52, 0, 0, 216, 218, 3, 10, 5, 5, 217, 211, 1, 0, 0, 0, 217, 214, 1, 0, 0, 0, 218, 221, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 11, 1, 0, 0, 0, 221, 219, 1, 0, 0, 0, 222, 224, 3, 16, 8, 0, 223, 225, 5, 49, 0, 0, 224, 223, 1, 0, 0, 0, 224, 225, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 227, 5, 47, 0, 0, 227, 228, 3, 106, 53, 0, 228, 237, 1, 0, 0, 0, 229, 231, 3, 16, 8, 0, 230, 232, 5, 49, 0, 0, 231, 230, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 234, 5, 54, 0, 0, 234, 235, 3, 106, 53, 0, 235, 237, 1, 0, 0, 0, 236, 222, 1, 0, 0, 0, 236, 229, 1, 0, 0, 0, 237, 13, 1, 0, 0, 0, 238, 241, 3, 58, 29, 0, 239, 240, 5, 37, 0, 0, 240, 242, 3, 30, 15, 0, 241, 239, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 5, 38, 0, 0, 244, 245, 3, 68, 34, 0, 245, 15, 1, 0, 0, 0, 246, 252, 3, 18, 9, 0, 247, 248, 3, 18, 9, 0, 248, 249, 3, 108, 54, 0, 249, 250, 3, 18, 9, 0, 250, 252, 1, 0, 0, 0, 251, 246, 1, 0, 0, 0, 251, 247, 1, 0, 0, 0, 252, 17, 1, 0, 0, 0, 253, 
254, 6, 9, -1, 0, 254, 258, 3, 20, 10, 0, 255, 256, 7, 0, 0, 0, 256, 258, 3, 18, 9, 3, 257, 253, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 258, 267, 1, 0, 0, 0, 259, 260, 10, 2, 0, 0, 260, 261, 7, 1, 0, 0, 261, 266, 3, 18, 9, 3, 262, 263, 10, 1, 0, 0, 263, 264, 7, 0, 0, 0, 264, 266, 3, 18, 9, 2, 265, 259, 1, 0, 0, 0, 265, 262, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 19, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 270, 271, 6, 10, -1, 0, 271, 279, 3, 68, 34, 0, 272, 279, 3, 58, 29, 0, 273, 279, 3, 22, 11, 0, 274, 275, 5, 48, 0, 0, 275, 276, 3, 10, 5, 0, 276, 277, 5, 55, 0, 0, 277, 279, 1, 0, 0, 0, 278, 270, 1, 0, 0, 0, 278, 272, 1, 0, 0, 0, 278, 273, 1, 0, 0, 0, 278, 274, 1, 0, 0, 0, 279, 285, 1, 0, 0, 0, 280, 281, 10, 1, 0, 0, 281, 282, 5, 37, 0, 0, 282, 284, 3, 30, 15, 0, 283, 280, 1, 0, 0, 0, 284, 287, 1, 0, 0, 0, 285, 283, 1, 0, 0, 0, 285, 286, 1, 0, 0, 0, 286, 21, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 288, 289, 3, 24, 12, 0, 289, 303, 5, 48, 0, 0, 290, 304, 5, 66, 0, 0, 291, 296, 3, 10, 5, 0, 292, 293, 5, 39, 0, 0, 293, 295, 3, 10, 5, 0, 294, 292, 1, 0, 0, 0, 295, 298, 1, 0, 0, 0, 296, 294, 1, 0, 0, 0, 296, 297, 1, 0, 0, 0, 297, 301, 1, 0, 0, 0, 298, 296, 1, 0, 0, 0, 299, 300, 5, 39, 0, 0, 300, 302, 3, 26, 13, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 304, 1, 0, 0, 0, 303, 290, 1, 0, 0, 0, 303, 291, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, 5, 55, 0, 0, 306, 23, 1, 0, 0, 0, 307, 308, 3, 72, 36, 0, 308, 25, 1, 0, 0, 0, 309, 310, 5, 69, 0, 0, 310, 315, 3, 28, 14, 0, 311, 312, 5, 39, 0, 0, 312, 314, 3, 28, 14, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 318, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 319, 5, 70, 0, 0, 319, 27, 1, 0, 0, 0, 320, 321, 3, 106, 53, 0, 321, 322, 5, 38, 0, 0, 322, 323, 3, 68, 34, 0, 323, 29, 1, 0, 0, 0, 324, 325, 3, 64, 32, 0, 325, 31, 1, 0, 0, 0, 326, 327, 5, 12, 0, 0, 327, 328, 3, 34, 17, 0, 328, 33, 1, 0, 0, 0, 329, 334, 3, 36, 18, 0, 330, 331, 5, 39, 0, 0, 331, 333, 3, 36, 18, 0, 332, 330, 1, 0, 0, 0, 333, 336, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 35, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337, 338, 3, 58, 29, 0, 338, 339, 5, 36, 0, 0, 339, 341, 1, 0, 0, 0, 340, 337, 1, 0, 0, 0, 340, 341, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 343, 3, 10, 5, 0, 343, 37, 1, 0, 0, 0, 344, 345, 5, 6, 0, 0, 345, 350, 3, 40, 20, 0, 346, 347, 5, 39, 0, 0, 347, 349, 3, 40, 20, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 354, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 355, 3, 46, 23, 0, 354, 353, 1, 0, 0, 0, 354, 355, 1, 0, 0, 0, 355, 39, 1, 0, 0, 0, 356, 357, 3, 42, 21, 0, 357, 358, 5, 38, 0, 0, 358, 360, 1, 0, 0, 0, 359, 356, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 361, 1, 0, 0, 0, 361, 362, 3, 44, 22, 0, 362, 41, 1, 0, 0, 0, 363, 364, 7, 2, 0, 0, 364, 43, 1, 0, 0, 0, 365, 366, 7, 2, 0, 0, 366, 45, 1, 0, 0, 0, 367, 368, 5, 82, 0, 0, 368, 373, 5, 83, 0, 0, 369, 370, 5, 39, 0, 0, 370, 372, 5, 83, 0, 0, 371, 369, 1, 0, 0, 0, 372, 375, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 47, 1, 0, 0, 0, 375, 373, 1, 0, 0, 0, 376, 377, 5, 21, 0, 0, 377, 382, 3, 40, 20, 0, 378, 379, 5, 39, 0, 0, 379, 381, 3, 40, 20, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 3, 54, 27, 0, 386, 385, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 390, 1, 0, 0, 0, 388, 389, 5, 33, 0, 0, 389, 391, 3, 34, 17, 0, 390, 388, 1, 0, 0, 0, 390, 
391, 1, 0, 0, 0, 391, 49, 1, 0, 0, 0, 392, 393, 5, 4, 0, 0, 393, 394, 3, 34, 17, 0, 394, 51, 1, 0, 0, 0, 395, 397, 5, 15, 0, 0, 396, 398, 3, 54, 27, 0, 397, 396, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 400, 5, 33, 0, 0, 400, 402, 3, 34, 17, 0, 401, 399, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 53, 1, 0, 0, 0, 403, 408, 3, 56, 28, 0, 404, 405, 5, 39, 0, 0, 405, 407, 3, 56, 28, 0, 406, 404, 1, 0, 0, 0, 407, 410, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 408, 409, 1, 0, 0, 0, 409, 55, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 411, 414, 3, 36, 18, 0, 412, 413, 5, 16, 0, 0, 413, 415, 3, 10, 5, 0, 414, 412, 1, 0, 0, 0, 414, 415, 1, 0, 0, 0, 415, 57, 1, 0, 0, 0, 416, 421, 3, 72, 36, 0, 417, 418, 5, 41, 0, 0, 418, 420, 3, 72, 36, 0, 419, 417, 1, 0, 0, 0, 420, 423, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 59, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 424, 429, 3, 66, 33, 0, 425, 426, 5, 41, 0, 0, 426, 428, 3, 66, 33, 0, 427, 425, 1, 0, 0, 0, 428, 431, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 429, 430, 1, 0, 0, 0, 430, 61, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 437, 3, 60, 30, 0, 433, 434, 5, 39, 0, 0, 434, 436, 3, 60, 30, 0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 63, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 441, 7, 3, 0, 0, 441, 65, 1, 0, 0, 0, 442, 445, 5, 87, 0, 0, 443, 445, 3, 70, 35, 0, 444, 442, 1, 0, 0, 0, 444, 443, 1, 0, 0, 0, 445, 67, 1, 0, 0, 0, 446, 489, 5, 50, 0, 0, 447, 448, 3, 104, 52, 0, 448, 449, 5, 74, 0, 0, 449, 489, 1, 0, 0, 0, 450, 489, 3, 102, 51, 0, 451, 489, 3, 104, 52, 0, 452, 489, 3, 98, 49, 0, 453, 489, 3, 70, 35, 0, 454, 489, 3, 106, 53, 0, 455, 456, 5, 72, 0, 0, 456, 461, 3, 100, 50, 0, 457, 458, 5, 39, 0, 0, 458, 460, 3, 100, 50, 0, 459, 457, 1, 0, 0, 0, 460, 463, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 464, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 464, 465, 5, 73, 0, 0, 465, 489, 1, 0, 0, 0, 466, 467, 5, 72, 0, 0, 467, 472, 3, 98, 49, 0, 468, 469, 5, 39, 0, 0, 469, 471, 3, 98, 49, 0, 470, 468, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 475, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 476, 5, 73, 0, 0, 476, 489, 1, 0, 0, 0, 477, 478, 5, 72, 0, 0, 478, 483, 3, 106, 53, 0, 479, 480, 5, 39, 0, 0, 480, 482, 3, 106, 53, 0, 481, 479, 1, 0, 0, 0, 482, 485, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 486, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 486, 487, 5, 73, 0, 0, 487, 489, 1, 0, 0, 0, 488, 446, 1, 0, 0, 0, 488, 447, 1, 0, 0, 0, 488, 450, 1, 0, 0, 0, 488, 451, 1, 0, 0, 0, 488, 452, 1, 0, 0, 0, 488, 453, 1, 0, 0, 0, 488, 454, 1, 0, 0, 0, 488, 455, 1, 0, 0, 0, 488, 466, 1, 0, 0, 0, 488, 477, 1, 0, 0, 0, 489, 69, 1, 0, 0, 0, 490, 493, 5, 53, 0, 0, 491, 493, 5, 71, 0, 0, 492, 490, 1, 0, 0, 0, 492, 491, 1, 0, 0, 0, 493, 71, 1, 0, 0, 0, 494, 497, 3, 64, 32, 0, 495, 497, 3, 70, 35, 0, 496, 494, 1, 0, 0, 0, 496, 495, 1, 0, 0, 0, 497, 73, 1, 0, 0, 0, 498, 499, 5, 9, 0, 0, 499, 500, 5, 31, 0, 0, 500, 75, 1, 0, 0, 0, 501, 502, 5, 14, 0, 0, 502, 507, 3, 78, 39, 0, 503, 504, 5, 39, 0, 0, 504, 506, 3, 78, 39, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 77, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 512, 3, 10, 5, 0, 511, 513, 7, 4, 0, 0, 512, 511, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 515, 5, 51, 0, 0, 515, 517, 7, 5, 0, 0, 516, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 79, 1, 0, 0, 0, 518, 519, 5, 8, 0, 0, 519, 520, 3, 62, 31, 0, 520, 81, 1, 0, 0, 0, 521, 522, 5, 2, 0, 0, 522, 523, 3, 62, 31, 0, 
523, 83, 1, 0, 0, 0, 524, 525, 5, 11, 0, 0, 525, 530, 3, 86, 43, 0, 526, 527, 5, 39, 0, 0, 527, 529, 3, 86, 43, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 85, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 60, 30, 0, 534, 535, 5, 91, 0, 0, 535, 536, 3, 60, 30, 0, 536, 87, 1, 0, 0, 0, 537, 538, 5, 1, 0, 0, 538, 539, 3, 20, 10, 0, 539, 541, 3, 106, 53, 0, 540, 542, 3, 94, 47, 0, 541, 540, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 89, 1, 0, 0, 0, 543, 544, 5, 7, 0, 0, 544, 545, 3, 20, 10, 0, 545, 546, 3, 106, 53, 0, 546, 91, 1, 0, 0, 0, 547, 548, 5, 10, 0, 0, 548, 549, 3, 58, 29, 0, 549, 93, 1, 0, 0, 0, 550, 555, 3, 96, 48, 0, 551, 552, 5, 39, 0, 0, 552, 554, 3, 96, 48, 0, 553, 551, 1, 0, 0, 0, 554, 557, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 95, 1, 0, 0, 0, 557, 555, 1, 0, 0, 0, 558, 559, 3, 64, 32, 0, 559, 560, 5, 36, 0, 0, 560, 561, 3, 68, 34, 0, 561, 97, 1, 0, 0, 0, 562, 563, 7, 6, 0, 0, 563, 99, 1, 0, 0, 0, 564, 567, 3, 102, 51, 0, 565, 567, 3, 104, 52, 0, 566, 564, 1, 0, 0, 0, 566, 565, 1, 0, 0, 0, 567, 101, 1, 0, 0, 0, 568, 570, 7, 0, 0, 0, 569, 568, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 5, 32, 0, 0, 572, 103, 1, 0, 0, 0, 573, 575, 7, 0, 0, 0, 574, 573, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 5, 31, 0, 0, 577, 105, 1, 0, 0, 0, 578, 579, 5, 30, 0, 0, 579, 107, 1, 0, 0, 0, 580, 581, 7, 7, 0, 0, 581, 109, 1, 0, 0, 0, 582, 583, 5, 5, 0, 0, 583, 584, 3, 112, 56, 0, 584, 111, 1, 0, 0, 0, 585, 586, 5, 72, 0, 0, 586, 587, 3, 2, 1, 0, 587, 588, 5, 73, 0, 0, 588, 113, 1, 0, 0, 0, 589, 590, 5, 13, 0, 0, 590, 591, 5, 107, 0, 0, 591, 115, 1, 0, 0, 0, 592, 593, 5, 3, 0, 0, 593, 596, 5, 97, 0, 0, 594, 595, 5, 95, 0, 0, 595, 597, 3, 60, 30, 0, 596, 594, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 607, 1, 0, 0, 0, 598, 599, 5, 96, 0, 0, 599, 604, 3, 118, 59, 0, 600, 601, 5, 39, 0, 0, 601, 603, 3, 118, 59, 0, 602, 600, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 608, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 598, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 117, 1, 0, 0, 0, 609, 610, 3, 60, 30, 0, 610, 611, 5, 36, 0, 0, 611, 613, 1, 0, 0, 0, 612, 609, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 3, 60, 30, 0, 615, 119, 1, 0, 0, 0, 616, 617, 5, 20, 0, 0, 617, 618, 3, 40, 20, 0, 618, 619, 5, 95, 0, 0, 619, 620, 3, 62, 31, 0, 620, 121, 1, 0, 0, 0, 621, 622, 5, 18, 0, 0, 622, 625, 3, 54, 27, 0, 623, 624, 5, 33, 0, 0, 624, 626, 3, 34, 17, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 123, 1, 0, 0, 0, 627, 628, 7, 8, 0, 0, 628, 629, 5, 121, 0, 0, 629, 630, 3, 126, 63, 0, 630, 631, 3, 128, 64, 0, 631, 125, 1, 0, 0, 0, 632, 633, 3, 40, 20, 0, 633, 127, 1, 0, 0, 0, 634, 635, 5, 95, 0, 0, 635, 640, 3, 130, 65, 0, 636, 637, 5, 39, 0, 0, 637, 639, 3, 130, 65, 0, 638, 636, 1, 0, 0, 0, 639, 642, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 129, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 643, 644, 3, 16, 8, 0, 644, 131, 1, 0, 0, 0, 645, 646, 5, 19, 0, 0, 646, 647, 3, 62, 31, 0, 647, 133, 1, 0, 0, 0, 61, 145, 154, 175, 187, 196, 204, 209, 217, 219, 224, 231, 236, 241, 251, 257, 265, 267, 278, 285, 296, 301, 303, 315, 334, 340, 350, 354, 359, 373, 382, 386, 390, 397, 401, 408, 414, 421, 429, 437, 444, 461, 472, 483, 488, 492, 496, 507, 512, 516, 530, 541, 555, 566, 569, 574, 596, 604, 607, 612, 625, 640] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 1e0a636d67182..294a76abada75 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -27,8 +27,8 @@ public class EsqlBaseParser extends ParserConfig { public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, - DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, + WHERE=16, JOIN_LOOKUP=17, DEV_INLINESTATS=18, DEV_INSIST=19, DEV_LOOKUP=20, + DEV_METRICS=21, DEV_JOIN_FULL=22, DEV_JOIN_LEFT=23, DEV_JOIN_RIGHT=24, UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, @@ -48,10 +48,11 @@ public class EsqlBaseParser extends ParserConfig { SHOW_MULTILINE_COMMENT=109, SHOW_WS=110, SETTING=111, SETTING_LINE_COMMENT=112, SETTTING_MULTILINE_COMMENT=113, SETTING_WS=114, LOOKUP_LINE_COMMENT=115, LOOKUP_MULTILINE_COMMENT=116, LOOKUP_WS=117, LOOKUP_FIELD_LINE_COMMENT=118, - LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, USING=121, JOIN_LINE_COMMENT=122, - JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, METRICS_MULTILINE_COMMENT=126, - METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, CLOSING_METRICS_MULTILINE_COMMENT=129, - CLOSING_METRICS_WS=130; + LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, JOIN=121, USING=122, + JOIN_LINE_COMMENT=123, JOIN_MULTILINE_COMMENT=124, JOIN_WS=125, METRICS_LINE_COMMENT=126, + METRICS_MULTILINE_COMMENT=127, METRICS_WS=128, CLOSING_METRICS_LINE_COMMENT=129, + CLOSING_METRICS_MULTILINE_COMMENT=130, CLOSING_METRICS_WS=131, INSIST_WS=132, + INSIST_LINE_COMMENT=133, INSIST_MULTILINE_COMMENT=134; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ -73,7 +74,7 @@ public class EsqlBaseParser extends ParserConfig { RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, RULE_inlinestatsCommand = 61, RULE_joinCommand = 62, RULE_joinTarget = 63, - RULE_joinCondition = 64, RULE_joinPredicate = 65; + RULE_joinCondition = 64, RULE_joinPredicate = 65, RULE_insistCommand = 66; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", @@ -89,7 +90,8 @@ private static String[] makeRuleNames() { "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", - "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate" + "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate", + "insistCommand" }; } public static final String[] ruleNames = makeRuleNames(); @@ -98,17 +100,17 @@ private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", 
"'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", - "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", - "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'{'", "'}'", - null, null, "']'", null, null, null, null, null, null, null, null, "'metadata'", - null, null, null, null, null, null, null, null, "'as'", null, null, null, - "'on'", "'with'", null, null, null, null, null, null, null, null, null, - null, "'info'", null, null, null, null, null, null, null, null, null, - null, null, null, null, "'USING'" + "'sort'", "'stats'", "'where'", "'lookup'", null, null, null, null, null, + null, null, null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", + "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", + "'{'", "'}'", null, null, "']'", null, null, null, null, null, null, + null, null, "'metadata'", null, null, null, null, null, null, null, null, + "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, + null, null, null, null, null, "'info'", null, null, null, null, null, + null, null, null, null, null, null, null, null, "'join'", "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -116,13 +118,13 @@ private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", - "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", - "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", - "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "WHERE", "JOIN_LOOKUP", "DEV_INLINESTATS", "DEV_INSIST", "DEV_LOOKUP", + "DEV_METRICS", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", + "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", + "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", @@ -137,9 +139,10 @@ private static String[] makeSymbolicNames() { "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", 
"LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", - "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" + "JOIN", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", + "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", + "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS", "INSIST_WS", + "INSIST_LINE_COMMENT", "INSIST_MULTILINE_COMMENT" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -226,9 +229,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(132); + setState(134); query(0); - setState(133); + setState(135); match(EOF); } } @@ -324,11 +327,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(136); + setState(138); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(143); + setState(145); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -339,16 +342,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(138); + setState(140); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(139); + setState(141); match(PIPE); - setState(140); + setState(142); processingCommand(); } } } - setState(145); + setState(147); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -406,43 +409,43 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(152); + setState(154); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(146); + setState(148); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(147); + setState(149); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(148); + setState(150); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(149); + setState(151); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(150); + setState(152); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(151); + setState(153); metricsCommand(); } break; @@ -497,14 +500,17 @@ public EnrichCommandContext enrichCommand() { public MvExpandCommandContext mvExpandCommand() { return getRuleContext(MvExpandCommandContext.class,0); } + public JoinCommandContext joinCommand() { + return getRuleContext(JoinCommandContext.class,0); + } public InlinestatsCommandContext inlinestatsCommand() { return getRuleContext(InlinestatsCommandContext.class,0); } public LookupCommandContext lookupCommand() { return getRuleContext(LookupCommandContext.class,0); } - public JoinCommandContext joinCommand() { - return getRuleContext(JoinCommandContext.class,0); + public InsistCommandContext insistCommand() { + return 
getRuleContext(InsistCommandContext.class,0); } @SuppressWarnings("this-escape") public ProcessingCommandContext(ParserRuleContext parent, int invokingState) { @@ -530,118 +536,125 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(172); + setState(175); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(154); + setState(156); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(155); + setState(157); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(156); + setState(158); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(157); + setState(159); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(158); + setState(160); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(159); + setState(161); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(160); + setState(162); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(161); + setState(163); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(162); + setState(164); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(163); + setState(165); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(164); + setState(166); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(165); + setState(167); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(166); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(167); - inlinestatsCommand(); + setState(168); + joinCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(168); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); setState(169); - lookupCommand(); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(170); + inlinestatsCommand(); } break; case 15: enterOuterAlt(_localctx, 15); { - setState(170); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); setState(171); - joinCommand(); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(172); + lookupCommand(); + } + break; + case 16: + enterOuterAlt(_localctx, 16); + { + setState(173); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(174); + insistCommand(); } break; } @@ -689,9 +702,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(174); + setState(177); match(WHERE); - setState(175); + setState(178); booleanExpression(0); } } @@ -907,7 +920,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(206); + setState(209); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -916,9 +929,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(178); + setState(181); match(NOT); - setState(179); + setState(182); 
booleanExpression(8); } break; @@ -927,7 +940,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(180); + setState(183); valueExpression(); } break; @@ -936,7 +949,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(181); + setState(184); regexBooleanExpression(); } break; @@ -945,41 +958,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(182); + setState(185); valueExpression(); - setState(184); + setState(187); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(183); + setState(186); match(NOT); } } - setState(186); + setState(189); match(IN); - setState(187); + setState(190); match(LP); - setState(188); + setState(191); valueExpression(); - setState(193); + setState(196); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(189); + setState(192); match(COMMA); - setState(190); + setState(193); valueExpression(); } } - setState(195); + setState(198); _errHandler.sync(this); _la = _input.LA(1); } - setState(196); + setState(199); match(RP); } break; @@ -988,21 +1001,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(198); + setState(201); valueExpression(); - setState(199); + setState(202); match(IS); - setState(201); + setState(204); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(200); + setState(203); match(NOT); } } - setState(203); + setState(206); match(NULL); } break; @@ -1011,13 +1024,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(205); + setState(208); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(216); + setState(219); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1025,7 +1038,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(214); + setState(217); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1033,11 +1046,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(208); + setState(211); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(209); + setState(212); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(210); + setState(213); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1046,18 +1059,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); 
((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(211); + setState(214); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(212); + setState(215); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(213); + setState(216); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(218); + setState(221); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1112,48 +1125,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(233); + setState(236); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(219); + setState(222); valueExpression(); - setState(221); + setState(224); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(220); + setState(223); match(NOT); } } - setState(223); + setState(226); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(224); + setState(227); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(226); + setState(229); valueExpression(); - setState(228); + setState(231); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(227); + setState(230); match(NOT); } } - setState(230); + setState(233); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(231); + setState(234); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1213,23 +1226,23 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(235); - ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); setState(238); + ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); + setState(241); _errHandler.sync(this); _la = _input.LA(1); if (_la==CAST_OP) { { - setState(236); + setState(239); match(CAST_OP); - setState(237); + setState(240); ((MatchBooleanExpressionContext)_localctx).fieldType = dataType(); } } - setState(240); + setState(243); match(COLON); - setState(241); + setState(244); ((MatchBooleanExpressionContext)_localctx).matchQuery = constant(); } } @@ -1313,14 +1326,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(248); + setState(251); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(243); + setState(246); operatorExpression(0); } break; @@ -1328,11 +1341,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(244); + setState(247); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(245); + setState(248); comparisonOperator(); - setState(246); + setState(249); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1457,7 +1470,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 
1); { - setState(254); + setState(257); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1466,7 +1479,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(251); + setState(254); primaryExpression(0); } break; @@ -1475,7 +1488,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(252); + setState(255); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1486,13 +1499,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(253); + setState(256); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(264); + setState(267); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1500,7 +1513,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(262); + setState(265); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: @@ -1508,9 +1521,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(256); + setState(259); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(257); + setState(260); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { @@ -1521,7 +1534,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(258); + setState(261); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1530,9 +1543,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(259); + setState(262); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(260); + setState(263); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1543,14 +1556,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(261); + setState(264); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(266); + setState(269); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); } @@ -1708,7 +1721,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(275); + setState(278); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,17,_ctx) ) { case 1: @@ -1717,7 +1730,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(268); + setState(271); constant(); } break; @@ -1726,7 +1739,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(269); + setState(272); qualifiedName(); } break; @@ -1735,7 +1748,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(270); + setState(273); functionExpression(); } break; @@ -1744,17 +1757,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(271); + setState(274); match(LP); - setState(272); + setState(275); booleanExpression(0); - setState(273); + setState(276); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(282); + setState(285); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1765,16 +1778,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(277); + setState(280); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(278); + setState(281); match(CAST_OP); - setState(279); + setState(282); dataType(); } } } - setState(284); + setState(287); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); } @@ -1840,50 +1853,64 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx int _alt; enterOuterAlt(_localctx, 1); { - setState(285); + setState(288); functionName(); - setState(286); + setState(289); match(LP); - setState(300); + setState(303); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { - case 1: + switch (_input.LA(1)) { + case ASTERISK: { - setState(287); + setState(290); match(ASTERISK); } break; - case 2: + case QUOTED_STRING: + case INTEGER_LITERAL: + case DECIMAL_LITERAL: + case FALSE: + case LP: + case NOT: + case NULL: + case PARAM: + case TRUE: + case PLUS: + case MINUS: + case NAMED_OR_POSITIONAL_PARAM: + case OPENING_BRACKET: + case UNQUOTED_IDENTIFIER: + case QUOTED_IDENTIFIER: { { - setState(288); + setState(291); booleanExpression(0); - setState(293); + setState(296); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(289); + setState(292); match(COMMA); - setState(290); + setState(293); booleanExpression(0); } } } - setState(295); + setState(298); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } - setState(298); + setState(301); _errHandler.sync(this); _la = _input.LA(1); if (_la==COMMA) { { - setState(296); + setState(299); match(COMMA); - setState(297); + setState(300); mapExpression(); } } @@ -1891,8 +1918,12 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } } break; + case RP: 
+ break; + default: + break; } - setState(302); + setState(305); match(RP); } } @@ -1938,7 +1969,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(304); + setState(307); identifierOrParameter(); } } @@ -1994,27 +2025,27 @@ public final MapExpressionContext mapExpression() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(306); + setState(309); match(LEFT_BRACES); - setState(307); + setState(310); entryExpression(); - setState(312); + setState(315); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(308); + setState(311); match(COMMA); - setState(309); + setState(312); entryExpression(); } } - setState(314); + setState(317); _errHandler.sync(this); _la = _input.LA(1); } - setState(315); + setState(318); match(RIGHT_BRACES); } } @@ -2066,11 +2097,11 @@ public final EntryExpressionContext entryExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(317); + setState(320); ((EntryExpressionContext)_localctx).key = string(); - setState(318); + setState(321); match(COLON); - setState(319); + setState(322); ((EntryExpressionContext)_localctx).value = constant(); } } @@ -2128,7 +2159,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(321); + setState(324); identifier(); } } @@ -2175,9 +2206,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(323); + setState(326); match(ROW); - setState(324); + setState(327); fields(); } } @@ -2231,23 +2262,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(326); + setState(329); field(); - setState(331); + setState(334); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(327); + setState(330); match(COMMA); - setState(328); + setState(331); field(); } } } - setState(333); + setState(336); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } @@ -2299,19 +2330,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(337); + setState(340); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(334); + setState(337); qualifiedName(); - setState(335); + setState(338); match(ASSIGN); } break; } - setState(339); + setState(342); booleanExpression(0); } } @@ -2369,34 +2400,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(341); + setState(344); match(FROM); - setState(342); + setState(345); indexPattern(); - setState(347); + setState(350); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(343); + setState(346); match(COMMA); - setState(344); + setState(347); indexPattern(); } } } - setState(349); + setState(352); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } - setState(351); + setState(354); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(350); + 
setState(353); metadata(); } break; @@ -2449,19 +2480,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(356); + setState(359); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(353); + setState(356); clusterString(); - setState(354); + setState(357); match(COLON); } break; } - setState(358); + setState(361); indexString(); } } @@ -2479,6 +2510,7 @@ public final IndexPatternContext indexPattern() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class ClusterStringContext extends ParserRuleContext { public TerminalNode UNQUOTED_SOURCE() { return getToken(EsqlBaseParser.UNQUOTED_SOURCE, 0); } + public TerminalNode QUOTED_STRING() { return getToken(EsqlBaseParser.QUOTED_STRING, 0); } @SuppressWarnings("this-escape") public ClusterStringContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2502,11 +2534,20 @@ public T accept(ParseTreeVisitor visitor) { public final ClusterStringContext clusterString() throws RecognitionException { ClusterStringContext _localctx = new ClusterStringContext(_ctx, getState()); enterRule(_localctx, 42, RULE_clusterString); + int _la; try { enterOuterAlt(_localctx, 1); { - setState(360); - match(UNQUOTED_SOURCE); + setState(363); + _la = _input.LA(1); + if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } } } catch (RecognitionException re) { @@ -2551,7 +2592,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(362); + setState(365); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2612,25 +2653,25 @@ public final MetadataContext metadata() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(364); + setState(367); match(METADATA); - setState(365); + setState(368); match(UNQUOTED_SOURCE); - setState(370); + setState(373); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(366); + setState(369); match(COMMA); - setState(367); + setState(370); match(UNQUOTED_SOURCE); } } } - setState(372); + setState(375); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2696,46 +2737,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(373); + setState(376); match(DEV_METRICS); - setState(374); + setState(377); indexPattern(); - setState(379); + setState(382); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(375); + setState(378); match(COMMA); - setState(376); + setState(379); indexPattern(); } } } - setState(381); + setState(384); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } - setState(383); + setState(386); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(382); + setState(385); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(387); + 
setState(390); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(385); + setState(388); match(BY); - setState(386); + setState(389); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2785,9 +2826,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(389); + setState(392); match(EVAL); - setState(390); + setState(393); fields(); } } @@ -2840,26 +2881,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(392); + setState(395); match(STATS); - setState(394); + setState(397); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(393); + setState(396); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(398); + setState(401); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(396); + setState(399); match(BY); - setState(397); + setState(400); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2916,23 +2957,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(400); + setState(403); aggField(); - setState(405); + setState(408); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(401); + setState(404); match(COMMA); - setState(402); + setState(405); aggField(); } } } - setState(407); + setState(410); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -2984,16 +3025,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(408); - field(); setState(411); + field(); + setState(414); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(409); + setState(412); match(WHERE); - setState(410); + setState(413); booleanExpression(0); } break; @@ -3050,23 +3091,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(413); + setState(416); identifierOrParameter(); - setState(418); + setState(421); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(414); + setState(417); match(DOT); - setState(415); + setState(418); identifierOrParameter(); } } } - setState(420); + setState(423); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3122,23 +3163,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(421); + setState(424); identifierPattern(); - setState(426); + setState(429); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(422); + setState(425); match(DOT); - setState(423); + setState(426); identifierPattern(); } } } - setState(428); + setState(431); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3194,23 +3235,23 @@ public final QualifiedNamePatternsContext 
qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(429); + setState(432); qualifiedNamePattern(); - setState(434); + setState(437); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(430); + setState(433); match(COMMA); - setState(431); + setState(434); qualifiedNamePattern(); } } } - setState(436); + setState(439); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -3258,7 +3299,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(437); + setState(440); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3311,25 +3352,26 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(442); + setState(444); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { - case 1: + switch (_input.LA(1)) { + case ID_PATTERN: enterOuterAlt(_localctx, 1); { - setState(439); + setState(442); match(ID_PATTERN); } break; - case 2: + case PARAM: + case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(440); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(441); + setState(443); parameter(); } break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -3599,14 +3641,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(486); + setState(488); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(444); + setState(446); match(NULL); } break; @@ -3614,9 +3656,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(445); + setState(447); integerValue(); - setState(446); + setState(448); match(UNQUOTED_IDENTIFIER); } break; @@ -3624,7 +3666,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(448); + setState(450); decimalValue(); } break; @@ -3632,7 +3674,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(449); + setState(451); integerValue(); } break; @@ -3640,7 +3682,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(450); + setState(452); booleanValue(); } break; @@ -3648,7 +3690,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(451); + setState(453); parameter(); } break; @@ -3656,7 +3698,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(452); 
+ setState(454); string(); } break; @@ -3664,27 +3706,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(453); + setState(455); match(OPENING_BRACKET); - setState(454); + setState(456); numericValue(); - setState(459); + setState(461); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(455); + setState(457); match(COMMA); - setState(456); + setState(458); numericValue(); } } - setState(461); + setState(463); _errHandler.sync(this); _la = _input.LA(1); } - setState(462); + setState(464); match(CLOSING_BRACKET); } break; @@ -3692,27 +3734,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(464); + setState(466); match(OPENING_BRACKET); - setState(465); + setState(467); booleanValue(); - setState(470); + setState(472); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(466); + setState(468); match(COMMA); - setState(467); + setState(469); booleanValue(); } } - setState(472); + setState(474); _errHandler.sync(this); _la = _input.LA(1); } - setState(473); + setState(475); match(CLOSING_BRACKET); } break; @@ -3720,27 +3762,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(475); + setState(477); match(OPENING_BRACKET); - setState(476); + setState(478); string(); - setState(481); + setState(483); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(477); + setState(479); match(COMMA); - setState(478); + setState(480); string(); } } - setState(483); + setState(485); _errHandler.sync(this); _la = _input.LA(1); } - setState(484); + setState(486); match(CLOSING_BRACKET); } break; @@ -3814,14 +3856,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(490); + setState(492); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(488); + setState(490); match(PARAM); } break; @@ -3829,7 +3871,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(489); + setState(491); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3880,25 +3922,27 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(495); + setState(496); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { - case 1: + switch (_input.LA(1)) { + case UNQUOTED_IDENTIFIER: + case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(492); + setState(494); identifier(); } break; - case 2: + case PARAM: + case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(493); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(494); + setState(495); parameter(); } break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { 
@@ -3942,9 +3986,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(497); - match(LIMIT); setState(498); + match(LIMIT); + setState(499); match(INTEGER_LITERAL); } } @@ -3999,25 +4043,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(500); - match(SORT); setState(501); + match(SORT); + setState(502); orderExpression(); - setState(506); + setState(507); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(502); - match(COMMA); setState(503); + match(COMMA); + setState(504); orderExpression(); } } } - setState(508); + setState(509); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } @@ -4073,14 +4117,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(509); + setState(510); booleanExpression(0); - setState(511); + setState(512); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(510); + setState(511); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4094,14 +4138,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(515); + setState(516); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(513); - match(NULLS); setState(514); + match(NULLS); + setState(515); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4160,9 +4204,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(517); - match(KEEP); setState(518); + match(KEEP); + setState(519); qualifiedNamePatterns(); } } @@ -4209,9 +4253,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(520); - match(DROP); setState(521); + match(DROP); + setState(522); qualifiedNamePatterns(); } } @@ -4266,25 +4310,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(523); - match(RENAME); setState(524); + match(RENAME); + setState(525); renameClause(); - setState(529); + setState(530); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(525); - match(COMMA); setState(526); + match(COMMA); + setState(527); renameClause(); } } } - setState(531); + setState(532); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4338,11 +4382,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(532); - ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); setState(533); - match(AS); + ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); setState(534); + match(AS); + setState(535); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4395,18 +4439,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try 
{ enterOuterAlt(_localctx, 1); { - setState(536); - match(DISSECT); setState(537); - primaryExpression(0); + match(DISSECT); setState(538); + primaryExpression(0); + setState(539); string(); - setState(540); + setState(541); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(539); + setState(540); commandOptions(); } break; @@ -4459,11 +4503,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(542); - match(GROK); setState(543); - primaryExpression(0); + match(GROK); setState(544); + primaryExpression(0); + setState(545); string(); } } @@ -4510,9 +4554,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(546); - match(MV_EXPAND); setState(547); + match(MV_EXPAND); + setState(548); qualifiedName(); } } @@ -4566,23 +4610,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(549); + setState(550); commandOption(); - setState(554); + setState(555); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(550); - match(COMMA); setState(551); + match(COMMA); + setState(552); commandOption(); } } } - setState(556); + setState(557); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); } @@ -4634,11 +4678,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(557); - identifier(); setState(558); - match(ASSIGN); + identifier(); setState(559); + match(ASSIGN); + setState(560); constant(); } } @@ -4684,7 +4728,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(561); + setState(562); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4739,20 +4783,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(565); + setState(566); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(563); + setState(564); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(564); + setState(565); integerValue(); } break; @@ -4801,12 +4845,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(568); + setState(569); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(567); + setState(568); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4819,7 +4863,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(570); + setState(571); match(DECIMAL_LITERAL); } } @@ -4866,12 +4910,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(573); + setState(574); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(572); + setState(573); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { 
_errHandler.recoverInline(this); @@ -4884,7 +4928,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(575); + setState(576); match(INTEGER_LITERAL); } } @@ -4928,7 +4972,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(577); + setState(578); match(QUOTED_STRING); } } @@ -4978,7 +5022,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(579); + setState(580); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); @@ -5033,9 +5077,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(581); - match(EXPLAIN); setState(582); + match(EXPLAIN); + setState(583); subqueryExpression(); } } @@ -5083,11 +5127,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(584); - match(OPENING_BRACKET); setState(585); - query(0); + match(OPENING_BRACKET); setState(586); + query(0); + setState(587); match(CLOSING_BRACKET); } } @@ -5144,9 +5188,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(588); - match(SHOW); setState(589); + match(SHOW); + setState(590); match(INFO); } } @@ -5209,46 +5253,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(591); - match(ENRICH); setState(592); + match(ENRICH); + setState(593); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(595); + setState(596); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(593); - match(ON); setState(594); + match(ON); + setState(595); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(606); + setState(607); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(597); - match(WITH); setState(598); + match(WITH); + setState(599); enrichWithClause(); - setState(603); + setState(604); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,56,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(599); - match(COMMA); setState(600); + match(COMMA); + setState(601); enrichWithClause(); } } } - setState(605); + setState(606); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,56,_ctx); } @@ -5305,19 +5349,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(611); + setState(612); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(608); - ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); setState(609); + ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); + setState(610); match(ASSIGN); } break; } - setState(613); + setState(614); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5370,13 +5414,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(615); - 
match(DEV_LOOKUP); setState(616); - ((LookupCommandContext)_localctx).tableName = indexPattern(); + match(DEV_LOOKUP); setState(617); - match(ON); + ((LookupCommandContext)_localctx).tableName = indexPattern(); setState(618); + match(ON); + setState(619); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5429,18 +5473,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(620); - match(DEV_INLINESTATS); setState(621); + match(DEV_INLINESTATS); + setState(622); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(624); + setState(625); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: { - setState(622); - match(BY); setState(623); + match(BY); + setState(624); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5461,14 +5505,14 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx @SuppressWarnings("CheckReturnValue") public static class JoinCommandContext extends ParserRuleContext { public Token type; - public TerminalNode DEV_JOIN() { return getToken(EsqlBaseParser.DEV_JOIN, 0); } + public TerminalNode JOIN() { return getToken(EsqlBaseParser.JOIN, 0); } public JoinTargetContext joinTarget() { return getRuleContext(JoinTargetContext.class,0); } public JoinConditionContext joinCondition() { return getRuleContext(JoinConditionContext.class,0); } - public TerminalNode DEV_JOIN_LOOKUP() { return getToken(EsqlBaseParser.DEV_JOIN_LOOKUP, 0); } + public TerminalNode JOIN_LOOKUP() { return getToken(EsqlBaseParser.JOIN_LOOKUP, 0); } public TerminalNode DEV_JOIN_LEFT() { return getToken(EsqlBaseParser.DEV_JOIN_LEFT, 0); } public TerminalNode DEV_JOIN_RIGHT() { return getToken(EsqlBaseParser.DEV_JOIN_RIGHT, 0); } @SuppressWarnings("this-escape") @@ -5499,29 +5543,21 @@ public final JoinCommandContext joinCommand() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(627); - _errHandler.sync(this); + ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { - { - setState(626); - ((JoinCommandContext)_localctx).type = _input.LT(1); - _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { - ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } - } + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 25296896L) != 0)) ) { + ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this); } - + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + setState(628); + match(JOIN); setState(629); - match(DEV_JOIN); - setState(630); joinTarget(); - setState(631); + setState(630); joinCondition(); } } @@ -5539,14 +5575,9 @@ public final JoinCommandContext joinCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class JoinTargetContext extends ParserRuleContext { public IndexPatternContext index; - public IdentifierContext alias; public IndexPatternContext indexPattern() { return getRuleContext(IndexPatternContext.class,0); } - public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); } - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } 
@SuppressWarnings("this-escape") public JoinTargetContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -5570,24 +5601,11 @@ public T accept(ParseTreeVisitor visitor) { public final JoinTargetContext joinTarget() throws RecognitionException { JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState()); enterRule(_localctx, 126, RULE_joinTarget); - int _la; try { enterOuterAlt(_localctx, 1); { - setState(633); + setState(632); ((JoinTargetContext)_localctx).index = indexPattern(); - setState(636); - _errHandler.sync(this); - _la = _input.LA(1); - if (_la==AS) { - { - setState(634); - match(AS); - setState(635); - ((JoinTargetContext)_localctx).alias = identifier(); - } - } - } } catch (RecognitionException re) { @@ -5641,27 +5659,27 @@ public final JoinConditionContext joinCondition() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(638); + setState(634); match(ON); - setState(639); + setState(635); joinPredicate(); - setState(644); + setState(640); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,62,_ctx); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(640); + setState(636); match(COMMA); - setState(641); + setState(637); joinPredicate(); } } } - setState(646); + setState(642); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,62,_ctx); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); } } } @@ -5707,7 +5725,7 @@ public final JoinPredicateContext joinPredicate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(647); + setState(643); valueExpression(); } } @@ -5722,6 +5740,55 @@ public final JoinPredicateContext joinPredicate() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class InsistCommandContext extends ParserRuleContext { + public TerminalNode DEV_INSIST() { return getToken(EsqlBaseParser.DEV_INSIST, 0); } + public QualifiedNamePatternsContext qualifiedNamePatterns() { + return getRuleContext(QualifiedNamePatternsContext.class,0); + } + @SuppressWarnings("this-escape") + public InsistCommandContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_insistCommand; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterInsistCommand(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitInsistCommand(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitInsistCommand(this); + else return visitor.visitChildren(this); + } + } + + public final InsistCommandContext insistCommand() throws RecognitionException { + InsistCommandContext _localctx = new InsistCommandContext(_ctx, getState()); + enterRule(_localctx, 132, RULE_insistCommand); + try { + enterOuterAlt(_localctx, 1); + { + setState(645); + match(DEV_INSIST); + setState(646); + qualifiedNamePatterns(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + 
} + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: @@ -5736,10 +5803,6 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); - case 33: - return identifierPattern_sempred((IdentifierPatternContext)_localctx, predIndex); - case 36: - return identifierOrParameter_sempred((IdentifierOrParameterContext)_localctx, predIndex); } return true; } @@ -5793,23 +5856,9 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } - private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { - switch (predIndex) { - case 10: - return this.isDevVersion(); - } - return true; - } - private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { - switch (predIndex) { - case 11: - return this.isDevVersion(); - } - return true; - } public static final String _serializedATN = - "\u0004\u0001\u0082\u028a\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0086\u0289\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -5826,399 +5875,397 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ - "@\u0002A\u0007A\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u008e"+ - "\b\u0001\n\u0001\f\u0001\u0091\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0099\b\u0002\u0001\u0003"+ + "@\u0002A\u0007A\u0002B\u0007B\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005"+ + "\u0001\u0090\b\u0001\n\u0001\f\u0001\u0093\t\u0001\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u009b\b\u0002"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003"+ - "\u00ad\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ - "\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ - "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ - "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - 
"\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ - "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00ef\b"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0003\b\u00f9\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ff"+ - "\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u0107\b\t"+ - "\n\t\f\t\u010a\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0003\n\u0114\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0119\b\n"+ - "\n\n\f\n\u011c\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0005\u000b\u0124\b\u000b\n\u000b\f\u000b\u0127\t\u000b"+ - "\u0001\u000b\u0001\u000b\u0003\u000b\u012b\b\u000b\u0003\u000b\u012d\b"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r"+ - "\u0001\r\u0005\r\u0137\b\r\n\r\f\r\u013a\t\r\u0001\r\u0001\r\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011"+ - "\u014a\b\u0011\n\u0011\f\u0011\u014d\t\u0011\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0003\u0012\u0152\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u015a\b\u0013\n\u0013\f\u0013"+ - "\u015d\t\u0013\u0001\u0013\u0003\u0013\u0160\b\u0013\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0003\u0014\u0165\b\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0005\u0017\u0171\b\u0017\n\u0017\f\u0017\u0174\t\u0017"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u017a\b\u0018"+ - "\n\u0018\f\u0018\u017d\t\u0018\u0001\u0018\u0003\u0018\u0180\b\u0018\u0001"+ - "\u0018\u0001\u0018\u0003\u0018\u0184\b\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018b\b\u001a\u0001\u001a\u0001"+ - "\u001a\u0003\u001a\u018f\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ - "\u001b\u0194\b\u001b\n\u001b\f\u001b\u0197\t\u001b\u0001\u001c\u0001\u001c"+ - "\u0001\u001c\u0003\u001c\u019c\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0005\u001d\u01a1\b\u001d\n\u001d\f\u001d\u01a4\t\u001d\u0001\u001e\u0001"+ - "\u001e\u0001\u001e\u0005\u001e\u01a9\b\u001e\n\u001e\f\u001e\u01ac\t\u001e"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b1\b\u001f\n\u001f"+ - "\f\u001f\u01b4\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01bb"+ - "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\""+ - "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01d5\b\"\n\""+ - "\f\"\u01d8\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ - "\u01e0\b\"\n\"\f\"\u01e3\t\"\u0001\"\u0001\"\u0003\"\u01e7\b\"\u0001#"+ - "\u0001#\u0003#\u01eb\b#\u0001$\u0001$\u0001$\u0003$\u01f0\b$\u0001%\u0001"+ - "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01f9\b&\n&\f&\u01fc\t&\u0001"+ - "\'\u0001\'\u0003\'\u0200\b\'\u0001\'\u0001\'\u0003\'\u0204\b\'\u0001("+ - "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005"+ - "*\u0210\b*\n*\f*\u0213\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - ",\u0001,\u0003,\u021d\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ - ".\u0001/\u0001/\u0001/\u0005/\u0229\b/\n/\f/\u022c\t/\u00010\u00010\u0001"+ - "0\u00010\u00011\u00011\u00012\u00012\u00032\u0236\b2\u00013\u00033\u0239"+ - 
"\b3\u00013\u00013\u00014\u00034\u023e\b4\u00014\u00014\u00015\u00015\u0001"+ - "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u0001"+ - "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0254\b:\u0001:\u0001:\u0001"+ - ":\u0001:\u0005:\u025a\b:\n:\f:\u025d\t:\u0003:\u025f\b:\u0001;\u0001;"+ - "\u0001;\u0003;\u0264\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ - "<\u0001=\u0001=\u0001=\u0001=\u0003=\u0271\b=\u0001>\u0003>\u0274\b>\u0001"+ - ">\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u027d\b?\u0001@\u0001"+ - "@\u0001@\u0001@\u0005@\u0283\b@\n@\f@\u0286\t@\u0001A\u0001A\u0001A\u0000"+ - "\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000@A\u0001\u0000"+ + "\u0001\u0003\u0003\u0003\u00b0\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0003\u0005\u00bc\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00c3\b\u0005\n\u0005\f\u0005\u00c6"+ + "\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ + "\u0005\u00cd\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00d2"+ + "\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0005\u0005\u00da\b\u0005\n\u0005\f\u0005\u00dd\t\u0005\u0001\u0006"+ + "\u0001\u0006\u0003\u0006\u00e1\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0003\u0006\u00e8\b\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0003\u0006\u00ed\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0003\u0007\u00f2\b\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00fc\b\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0003\t\u0102\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0005\t\u010a\b\t\n\t\f\t\u010d\t\t\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0117\b\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u011c\b\n\n\n\f\n\u011f\t\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0127\b\u000b\n\u000b"+ + "\f\u000b\u012a\t\u000b\u0001\u000b\u0001\u000b\u0003\u000b\u012e\b\u000b"+ + "\u0003\u000b\u0130\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0005\r\u013a\b\r\n\r\f\r\u013d\t\r\u0001\r"+ + "\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001"+ + "\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0005\u0011\u014d\b\u0011\n\u0011\f\u0011\u0150\t\u0011\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0003\u0012\u0155\b\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u015d\b\u0013"+ + "\n\u0013\f\u0013\u0160\t\u0013\u0001\u0013\u0003\u0013\u0163\b\u0013\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0003\u0014\u0168\b\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0174\b\u0017\n\u0017\f\u0017"+ + "\u0177\t\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018"+ + "\u017d\b\u0018\n\u0018\f\u0018\u0180\t\u0018\u0001\u0018\u0003\u0018\u0183"+ + "\b\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0187\b\u0018\u0001\u0019"+ + "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018e\b\u001a"+ + 
"\u0001\u001a\u0001\u001a\u0003\u001a\u0192\b\u001a\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0005\u001b\u0197\b\u001b\n\u001b\f\u001b\u019a\t\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001c\u0003\u001c\u019f\b\u001c\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0005\u001d\u01a4\b\u001d\n\u001d\f\u001d\u01a7\t\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u01ac\b\u001e\n\u001e"+ + "\f\u001e\u01af\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f"+ + "\u01b4\b\u001f\n\u001f\f\u001f\u01b7\t\u001f\u0001 \u0001 \u0001!\u0001"+ + "!\u0003!\u01bd\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01cc\b\"\n"+ + "\"\f\"\u01cf\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ + "\"\u01d7\b\"\n\"\f\"\u01da\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ + "\u0001\"\u0005\"\u01e2\b\"\n\"\f\"\u01e5\t\"\u0001\"\u0001\"\u0003\"\u01e9"+ + "\b\"\u0001#\u0001#\u0003#\u01ed\b#\u0001$\u0001$\u0003$\u01f1\b$\u0001"+ + "%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01fa\b&\n&\f&\u01fd"+ + "\t&\u0001\'\u0001\'\u0003\'\u0201\b\'\u0001\'\u0001\'\u0003\'\u0205\b"+ + "\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001"+ + "*\u0005*\u0211\b*\n*\f*\u0214\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001"+ + ",\u0001,\u0001,\u0003,\u021e\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001"+ + ".\u0001.\u0001/\u0001/\u0001/\u0005/\u022a\b/\n/\f/\u022d\t/\u00010\u0001"+ + "0\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0237\b2\u00013\u0003"+ + "3\u023a\b3\u00013\u00013\u00014\u00034\u023f\b4\u00014\u00014\u00015\u0001"+ + "5\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u0001"+ + "9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0255\b:\u0001:\u0001"+ + ":\u0001:\u0001:\u0005:\u025b\b:\n:\f:\u025e\t:\u0003:\u0260\b:\u0001;"+ + "\u0001;\u0001;\u0003;\u0265\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001"+ + "<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u0272\b=\u0001>\u0001>\u0001"+ + ">\u0001>\u0001>\u0001?\u0001?\u0001@\u0001@\u0001@\u0001@\u0005@\u027f"+ + "\b@\n@\f@\u0282\t@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001B\u0000\u0004"+ + "\u0002\n\u0012\u0014C\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+ + "\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\"+ + "^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0000\t\u0001\u0000@A\u0001\u0000"+ "BD\u0002\u0000\u001e\u001eSS\u0001\u0000JK\u0002\u0000##((\u0002\u0000"+ - "++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u02a5\u0000"+ - "\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004"+ - "\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000\u0000\u0000\b\u00ae"+ - "\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000\u0000\f\u00e9\u0001"+ - "\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000\u0010\u00f8\u0001"+ - "\u0000\u0000\u0000\u0012\u00fe\u0001\u0000\u0000\u0000\u0014\u0113\u0001"+ - "\u0000\u0000\u0000\u0016\u011d\u0001\u0000\u0000\u0000\u0018\u0130\u0001"+ - "\u0000\u0000\u0000\u001a\u0132\u0001\u0000\u0000\u0000\u001c\u013d\u0001"+ - "\u0000\u0000\u0000\u001e\u0141\u0001\u0000\u0000\u0000 \u0143\u0001\u0000"+ - "\u0000\u0000\"\u0146\u0001\u0000\u0000\u0000$\u0151\u0001\u0000\u0000"+ - "\u0000&\u0155\u0001\u0000\u0000\u0000(\u0164\u0001\u0000\u0000\u0000*"+ - "\u0168\u0001\u0000\u0000\u0000,\u016a\u0001\u0000\u0000\u0000.\u016c\u0001"+ - "\u0000\u0000\u00000\u0175\u0001\u0000\u0000\u00002\u0185\u0001\u0000\u0000"+ - 
"\u00004\u0188\u0001\u0000\u0000\u00006\u0190\u0001\u0000\u0000\u00008"+ - "\u0198\u0001\u0000\u0000\u0000:\u019d\u0001\u0000\u0000\u0000<\u01a5\u0001"+ - "\u0000\u0000\u0000>\u01ad\u0001\u0000\u0000\u0000@\u01b5\u0001\u0000\u0000"+ - "\u0000B\u01ba\u0001\u0000\u0000\u0000D\u01e6\u0001\u0000\u0000\u0000F"+ - "\u01ea\u0001\u0000\u0000\u0000H\u01ef\u0001\u0000\u0000\u0000J\u01f1\u0001"+ - "\u0000\u0000\u0000L\u01f4\u0001\u0000\u0000\u0000N\u01fd\u0001\u0000\u0000"+ - "\u0000P\u0205\u0001\u0000\u0000\u0000R\u0208\u0001\u0000\u0000\u0000T"+ - "\u020b\u0001\u0000\u0000\u0000V\u0214\u0001\u0000\u0000\u0000X\u0218\u0001"+ - "\u0000\u0000\u0000Z\u021e\u0001\u0000\u0000\u0000\\\u0222\u0001\u0000"+ - "\u0000\u0000^\u0225\u0001\u0000\u0000\u0000`\u022d\u0001\u0000\u0000\u0000"+ - "b\u0231\u0001\u0000\u0000\u0000d\u0235\u0001\u0000\u0000\u0000f\u0238"+ - "\u0001\u0000\u0000\u0000h\u023d\u0001\u0000\u0000\u0000j\u0241\u0001\u0000"+ - "\u0000\u0000l\u0243\u0001\u0000\u0000\u0000n\u0245\u0001\u0000\u0000\u0000"+ - "p\u0248\u0001\u0000\u0000\u0000r\u024c\u0001\u0000\u0000\u0000t\u024f"+ - "\u0001\u0000\u0000\u0000v\u0263\u0001\u0000\u0000\u0000x\u0267\u0001\u0000"+ - "\u0000\u0000z\u026c\u0001\u0000\u0000\u0000|\u0273\u0001\u0000\u0000\u0000"+ - "~\u0279\u0001\u0000\u0000\u0000\u0080\u027e\u0001\u0000\u0000\u0000\u0082"+ - "\u0287\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085"+ - "\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087"+ - "\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000"+ - "\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b"+ - "\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d"+ - "\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f"+ - "\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090"+ - "\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092"+ - "\u0099\u0003n7\u0000\u0093\u0099\u0003&\u0013\u0000\u0094\u0099\u0003"+ - " \u0010\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004\u0002\u0001"+ - "\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000\u0000\u0000"+ - "\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000\u0000\u0000"+ - "\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ - "\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019\u0000\u009b"+ - "\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d\u00ad\u0003"+ - "J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003L&\u0000\u00a0"+ - "\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad\u0003X,\u0000"+ - "\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5\u00ad\u0003"+ - "\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad\u0003z=\u0000"+ - "\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x<\u0000\u00aa\u00ab"+ - "\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000\u00ac\u009a\u0001\u0000"+ - "\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000\u00ac\u009c\u0001\u0000"+ - "\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000\u00ac\u009e\u0001\u0000"+ - "\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000\u00ac\u00a0\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000\u00ac\u00a2\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000\u00ac\u00a4\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000\u00ac\u00a6\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000"+ - 
"\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000\u00ae\u00af\u0005\u0010"+ - "\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0\t\u0001\u0000\u0000"+ - "\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2\u00b3\u00051\u0000"+ - "\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003\u0010\b\u0000\u00b5"+ - "\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010\b\u0000\u00b7\u00b9"+ - "\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001"+ - "\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u0005"+ - ",\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc\u00c1\u0003\u0010\b"+ - "\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0\u0003\u0010\b\u0000"+ - "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001\u0000\u0000\u0000"+ - "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001\u0000\u0000\u0000"+ - "\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ - "\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000\u0000\u0000\u00c6"+ - "\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000\u0000\u00c8\u00ca"+ - "\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ - "\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"+ - "2\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e"+ - "\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce\u00b4\u0001\u0000"+ - "\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce\u00b6\u0001\u0000"+ - "\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce\u00cd\u0001\u0000"+ - "\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0005\u0000"+ - "\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7\u0003\n\u0005\u0006"+ - "\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u00054\u0000\u0000\u00d5"+ - "\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3"+ - "\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6"+ - "\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u000b"+ - "\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00dd"+ - "\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000\u0000\u00dd\u00dc\u0001"+ - "\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0001"+ - "\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000\u00e0\u00e1\u0003j5"+ - "\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4\u0003\u0010\b\u0000"+ - "\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001\u0000\u0000\u0000\u00e4"+ - "\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000\u0000\u0000\u00e6"+ - "\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5\u0000\u00e8\u00ea\u0001"+ - "\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000\u0000\u00e9\u00e2\u0001"+ - "\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000\u00eb\u00ee\u0003:"+ - "\u001d\u0000\u00ec\u00ed\u0005%\u0000\u0000\u00ed\u00ef\u0003\u001e\u000f"+ - "\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001\u0000\u0000"+ - "\u0000\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005&\u0000\u0000"+ - "\u00f1\u00f2\u0003D\"\u0000\u00f2\u000f\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f9\u0003\u0012\t\u0000\u00f4\u00f5\u0003\u0012\t\u0000\u00f5\u00f6"+ - "\u0003l6\u0000\u00f6\u00f7\u0003\u0012\t\u0000\u00f7\u00f9\u0001\u0000"+ - "\u0000\u0000\u00f8\u00f3\u0001\u0000\u0000\u0000\u00f8\u00f4\u0001\u0000"+ - "\u0000\u0000\u00f9\u0011\u0001\u0000\u0000\u0000\u00fa\u00fb\u0006\t\uffff"+ - "\uffff\u0000\u00fb\u00ff\u0003\u0014\n\u0000\u00fc\u00fd\u0007\u0000\u0000"+ - 
"\u0000\u00fd\u00ff\u0003\u0012\t\u0003\u00fe\u00fa\u0001\u0000\u0000\u0000"+ - "\u00fe\u00fc\u0001\u0000\u0000\u0000\u00ff\u0108\u0001\u0000\u0000\u0000"+ - "\u0100\u0101\n\u0002\u0000\u0000\u0101\u0102\u0007\u0001\u0000\u0000\u0102"+ - "\u0107\u0003\u0012\t\u0003\u0103\u0104\n\u0001\u0000\u0000\u0104\u0105"+ - "\u0007\u0000\u0000\u0000\u0105\u0107\u0003\u0012\t\u0002\u0106\u0100\u0001"+ - "\u0000\u0000\u0000\u0106\u0103\u0001\u0000\u0000\u0000\u0107\u010a\u0001"+ - "\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000\u0000\u0108\u0109\u0001"+ - "\u0000\u0000\u0000\u0109\u0013\u0001\u0000\u0000\u0000\u010a\u0108\u0001"+ - "\u0000\u0000\u0000\u010b\u010c\u0006\n\uffff\uffff\u0000\u010c\u0114\u0003"+ - "D\"\u0000\u010d\u0114\u0003:\u001d\u0000\u010e\u0114\u0003\u0016\u000b"+ - "\u0000\u010f\u0110\u00050\u0000\u0000\u0110\u0111\u0003\n\u0005\u0000"+ - "\u0111\u0112\u00057\u0000\u0000\u0112\u0114\u0001\u0000\u0000\u0000\u0113"+ - "\u010b\u0001\u0000\u0000\u0000\u0113\u010d\u0001\u0000\u0000\u0000\u0113"+ - "\u010e\u0001\u0000\u0000\u0000\u0113\u010f\u0001\u0000\u0000\u0000\u0114"+ - "\u011a\u0001\u0000\u0000\u0000\u0115\u0116\n\u0001\u0000\u0000\u0116\u0117"+ - "\u0005%\u0000\u0000\u0117\u0119\u0003\u001e\u000f\u0000\u0118\u0115\u0001"+ - "\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001"+ - "\u0000\u0000\u0000\u011a\u011b\u0001\u0000\u0000\u0000\u011b\u0015\u0001"+ - "\u0000\u0000\u0000\u011c\u011a\u0001\u0000\u0000\u0000\u011d\u011e\u0003"+ - "\u0018\f\u0000\u011e\u012c\u00050\u0000\u0000\u011f\u012d\u0005B\u0000"+ - "\u0000\u0120\u0125\u0003\n\u0005\u0000\u0121\u0122\u0005\'\u0000\u0000"+ - "\u0122\u0124\u0003\n\u0005\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0124"+ - "\u0127\u0001\u0000\u0000\u0000\u0125\u0123\u0001\u0000\u0000\u0000\u0125"+ - "\u0126\u0001\u0000\u0000\u0000\u0126\u012a\u0001\u0000\u0000\u0000\u0127"+ - "\u0125\u0001\u0000\u0000\u0000\u0128\u0129\u0005\'\u0000\u0000\u0129\u012b"+ - "\u0003\u001a\r\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001"+ - "\u0000\u0000\u0000\u012b\u012d\u0001\u0000\u0000\u0000\u012c\u011f\u0001"+ - "\u0000\u0000\u0000\u012c\u0120\u0001\u0000\u0000\u0000\u012c\u012d\u0001"+ - "\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u012f\u0005"+ - "7\u0000\u0000\u012f\u0017\u0001\u0000\u0000\u0000\u0130\u0131\u0003H$"+ - "\u0000\u0131\u0019\u0001\u0000\u0000\u0000\u0132\u0133\u0005E\u0000\u0000"+ - "\u0133\u0138\u0003\u001c\u000e\u0000\u0134\u0135\u0005\'\u0000\u0000\u0135"+ - "\u0137\u0003\u001c\u000e\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137"+ - "\u013a\u0001\u0000\u0000\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0138"+ - "\u0139\u0001\u0000\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a"+ - "\u0138\u0001\u0000\u0000\u0000\u013b\u013c\u0005F\u0000\u0000\u013c\u001b"+ - "\u0001\u0000\u0000\u0000\u013d\u013e\u0003j5\u0000\u013e\u013f\u0005&"+ - "\u0000\u0000\u013f\u0140\u0003D\"\u0000\u0140\u001d\u0001\u0000\u0000"+ - "\u0000\u0141\u0142\u0003@ \u0000\u0142\u001f\u0001\u0000\u0000\u0000\u0143"+ - "\u0144\u0005\f\u0000\u0000\u0144\u0145\u0003\"\u0011\u0000\u0145!\u0001"+ - "\u0000\u0000\u0000\u0146\u014b\u0003$\u0012\u0000\u0147\u0148\u0005\'"+ - "\u0000\u0000\u0148\u014a\u0003$\u0012\u0000\u0149\u0147\u0001\u0000\u0000"+ - "\u0000\u014a\u014d\u0001\u0000\u0000\u0000\u014b\u0149\u0001\u0000\u0000"+ - "\u0000\u014b\u014c\u0001\u0000\u0000\u0000\u014c#\u0001\u0000\u0000\u0000"+ - "\u014d\u014b\u0001\u0000\u0000\u0000\u014e\u014f\u0003:\u001d\u0000\u014f"+ - 
"\u0150\u0005$\u0000\u0000\u0150\u0152\u0001\u0000\u0000\u0000\u0151\u014e"+ - "\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0153"+ - "\u0001\u0000\u0000\u0000\u0153\u0154\u0003\n\u0005\u0000\u0154%\u0001"+ - "\u0000\u0000\u0000\u0155\u0156\u0005\u0006\u0000\u0000\u0156\u015b\u0003"+ - "(\u0014\u0000\u0157\u0158\u0005\'\u0000\u0000\u0158\u015a\u0003(\u0014"+ - "\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u015a\u015d\u0001\u0000\u0000"+ - "\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000"+ - "\u0000\u015c\u015f\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000"+ - "\u0000\u015e\u0160\u0003.\u0017\u0000\u015f\u015e\u0001\u0000\u0000\u0000"+ - "\u015f\u0160\u0001\u0000\u0000\u0000\u0160\'\u0001\u0000\u0000\u0000\u0161"+ - "\u0162\u0003*\u0015\u0000\u0162\u0163\u0005&\u0000\u0000\u0163\u0165\u0001"+ - "\u0000\u0000\u0000\u0164\u0161\u0001\u0000\u0000\u0000\u0164\u0165\u0001"+ - "\u0000\u0000\u0000\u0165\u0166\u0001\u0000\u0000\u0000\u0166\u0167\u0003"+ - ",\u0016\u0000\u0167)\u0001\u0000\u0000\u0000\u0168\u0169\u0005S\u0000"+ - "\u0000\u0169+\u0001\u0000\u0000\u0000\u016a\u016b\u0007\u0002\u0000\u0000"+ - "\u016b-\u0001\u0000\u0000\u0000\u016c\u016d\u0005R\u0000\u0000\u016d\u0172"+ - "\u0005S\u0000\u0000\u016e\u016f\u0005\'\u0000\u0000\u016f\u0171\u0005"+ - "S\u0000\u0000\u0170\u016e\u0001\u0000\u0000\u0000\u0171\u0174\u0001\u0000"+ - "\u0000\u0000\u0172\u0170\u0001\u0000\u0000\u0000\u0172\u0173\u0001\u0000"+ - "\u0000\u0000\u0173/\u0001\u0000\u0000\u0000\u0174\u0172\u0001\u0000\u0000"+ - "\u0000\u0175\u0176\u0005\u0013\u0000\u0000\u0176\u017b\u0003(\u0014\u0000"+ - "\u0177\u0178\u0005\'\u0000\u0000\u0178\u017a\u0003(\u0014\u0000\u0179"+ - "\u0177\u0001\u0000\u0000\u0000\u017a\u017d\u0001\u0000\u0000\u0000\u017b"+ - "\u0179\u0001\u0000\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c"+ - "\u017f\u0001\u0000\u0000\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e"+ - "\u0180\u00036\u001b\u0000\u017f\u017e\u0001\u0000\u0000\u0000\u017f\u0180"+ - "\u0001\u0000\u0000\u0000\u0180\u0183\u0001\u0000\u0000\u0000\u0181\u0182"+ - "\u0005!\u0000\u0000\u0182\u0184\u0003\"\u0011\u0000\u0183\u0181\u0001"+ - "\u0000\u0000\u0000\u0183\u0184\u0001\u0000\u0000\u0000\u01841\u0001\u0000"+ - "\u0000\u0000\u0185\u0186\u0005\u0004\u0000\u0000\u0186\u0187\u0003\"\u0011"+ - "\u0000\u01873\u0001\u0000\u0000\u0000\u0188\u018a\u0005\u000f\u0000\u0000"+ - "\u0189\u018b\u00036\u001b\u0000\u018a\u0189\u0001\u0000\u0000\u0000\u018a"+ - "\u018b\u0001\u0000\u0000\u0000\u018b\u018e\u0001\u0000\u0000\u0000\u018c"+ - "\u018d\u0005!\u0000\u0000\u018d\u018f\u0003\"\u0011\u0000\u018e\u018c"+ - "\u0001\u0000\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000\u018f5\u0001"+ - "\u0000\u0000\u0000\u0190\u0195\u00038\u001c\u0000\u0191\u0192\u0005\'"+ - "\u0000\u0000\u0192\u0194\u00038\u001c\u0000\u0193\u0191\u0001\u0000\u0000"+ - "\u0000\u0194\u0197\u0001\u0000\u0000\u0000\u0195\u0193\u0001\u0000\u0000"+ - "\u0000\u0195\u0196\u0001\u0000\u0000\u0000\u01967\u0001\u0000\u0000\u0000"+ - "\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019b\u0003$\u0012\u0000\u0199"+ - "\u019a\u0005\u0010\u0000\u0000\u019a\u019c\u0003\n\u0005\u0000\u019b\u0199"+ - "\u0001\u0000\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000\u019c9\u0001"+ - "\u0000\u0000\u0000\u019d\u01a2\u0003H$\u0000\u019e\u019f\u0005)\u0000"+ - "\u0000\u019f\u01a1\u0003H$\u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1"+ - "\u01a4\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2"+ - 
"\u01a3\u0001\u0000\u0000\u0000\u01a3;\u0001\u0000\u0000\u0000\u01a4\u01a2"+ - "\u0001\u0000\u0000\u0000\u01a5\u01aa\u0003B!\u0000\u01a6\u01a7\u0005)"+ - "\u0000\u0000\u01a7\u01a9\u0003B!\u0000\u01a8\u01a6\u0001\u0000\u0000\u0000"+ - "\u01a9\u01ac\u0001\u0000\u0000\u0000\u01aa\u01a8\u0001\u0000\u0000\u0000"+ - "\u01aa\u01ab\u0001\u0000\u0000\u0000\u01ab=\u0001\u0000\u0000\u0000\u01ac"+ - "\u01aa\u0001\u0000\u0000\u0000\u01ad\u01b2\u0003<\u001e\u0000\u01ae\u01af"+ - "\u0005\'\u0000\u0000\u01af\u01b1\u0003<\u001e\u0000\u01b0\u01ae\u0001"+ - "\u0000\u0000\u0000\u01b1\u01b4\u0001\u0000\u0000\u0000\u01b2\u01b0\u0001"+ - "\u0000\u0000\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3?\u0001\u0000"+ - "\u0000\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b5\u01b6\u0007\u0003"+ - "\u0000\u0000\u01b6A\u0001\u0000\u0000\u0000\u01b7\u01bb\u0005W\u0000\u0000"+ - "\u01b8\u01b9\u0004!\n\u0000\u01b9\u01bb\u0003F#\u0000\u01ba\u01b7\u0001"+ - "\u0000\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bbC\u0001\u0000"+ - "\u0000\u0000\u01bc\u01e7\u00052\u0000\u0000\u01bd\u01be\u0003h4\u0000"+ - "\u01be\u01bf\u0005J\u0000\u0000\u01bf\u01e7\u0001\u0000\u0000\u0000\u01c0"+ - "\u01e7\u0003f3\u0000\u01c1\u01e7\u0003h4\u0000\u01c2\u01e7\u0003b1\u0000"+ - "\u01c3\u01e7\u0003F#\u0000\u01c4\u01e7\u0003j5\u0000\u01c5\u01c6\u0005"+ - "H\u0000\u0000\u01c6\u01cb\u0003d2\u0000\u01c7\u01c8\u0005\'\u0000\u0000"+ - "\u01c8\u01ca\u0003d2\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd"+ - "\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc"+ - "\u0001\u0000\u0000\u0000\u01cc\u01ce\u0001\u0000\u0000\u0000\u01cd\u01cb"+ - "\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005I\u0000\u0000\u01cf\u01e7\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d1\u0005H\u0000\u0000\u01d1\u01d6\u0003b1"+ - "\u0000\u01d2\u01d3\u0005\'\u0000\u0000\u01d3\u01d5\u0003b1\u0000\u01d4"+ - "\u01d2\u0001\u0000\u0000\u0000\u01d5\u01d8\u0001\u0000\u0000\u0000\u01d6"+ - "\u01d4\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7"+ - "\u01d9\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d9"+ - "\u01da\u0005I\u0000\u0000\u01da\u01e7\u0001\u0000\u0000\u0000\u01db\u01dc"+ - "\u0005H\u0000\u0000\u01dc\u01e1\u0003j5\u0000\u01dd\u01de\u0005\'\u0000"+ - "\u0000\u01de\u01e0\u0003j5\u0000\u01df\u01dd\u0001\u0000\u0000\u0000\u01e0"+ - "\u01e3\u0001\u0000\u0000\u0000\u01e1\u01df\u0001\u0000\u0000\u0000\u01e1"+ - "\u01e2\u0001\u0000\u0000\u0000\u01e2\u01e4\u0001\u0000\u0000\u0000\u01e3"+ - "\u01e1\u0001\u0000\u0000\u0000\u01e4\u01e5\u0005I\u0000\u0000\u01e5\u01e7"+ - "\u0001\u0000\u0000\u0000\u01e6\u01bc\u0001\u0000\u0000\u0000\u01e6\u01bd"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c0\u0001\u0000\u0000\u0000\u01e6\u01c1"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c2\u0001\u0000\u0000\u0000\u01e6\u01c3"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c4\u0001\u0000\u0000\u0000\u01e6\u01c5"+ - "\u0001\u0000\u0000\u0000\u01e6\u01d0\u0001\u0000\u0000\u0000\u01e6\u01db"+ - "\u0001\u0000\u0000\u0000\u01e7E\u0001\u0000\u0000\u0000\u01e8\u01eb\u0005"+ - "5\u0000\u0000\u01e9\u01eb\u0005G\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000"+ - "\u0000\u01ea\u01e9\u0001\u0000\u0000\u0000\u01ebG\u0001\u0000\u0000\u0000"+ - "\u01ec\u01f0\u0003@ \u0000\u01ed\u01ee\u0004$\u000b\u0000\u01ee\u01f0"+ - "\u0003F#\u0000\u01ef\u01ec\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001\u0000"+ - "\u0000\u0000\u01f0I\u0001\u0000\u0000\u0000\u01f1\u01f2\u0005\t\u0000"+ - "\u0000\u01f2\u01f3\u0005\u001f\u0000\u0000\u01f3K\u0001\u0000\u0000\u0000"+ - 
"\u01f4\u01f5\u0005\u000e\u0000\u0000\u01f5\u01fa\u0003N\'\u0000\u01f6"+ - "\u01f7\u0005\'\u0000\u0000\u01f7\u01f9\u0003N\'\u0000\u01f8\u01f6\u0001"+ - "\u0000\u0000\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001"+ - "\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fbM\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01ff\u0003\n\u0005"+ - "\u0000\u01fe\u0200\u0007\u0004\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000"+ - "\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000\u0000"+ - "\u0000\u0201\u0202\u00053\u0000\u0000\u0202\u0204\u0007\u0005\u0000\u0000"+ - "\u0203\u0201\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000"+ - "\u0204O\u0001\u0000\u0000\u0000\u0205\u0206\u0005\b\u0000\u0000\u0206"+ - "\u0207\u0003>\u001f\u0000\u0207Q\u0001\u0000\u0000\u0000\u0208\u0209\u0005"+ - "\u0002\u0000\u0000\u0209\u020a\u0003>\u001f\u0000\u020aS\u0001\u0000\u0000"+ - "\u0000\u020b\u020c\u0005\u000b\u0000\u0000\u020c\u0211\u0003V+\u0000\u020d"+ - "\u020e\u0005\'\u0000\u0000\u020e\u0210\u0003V+\u0000\u020f\u020d\u0001"+ - "\u0000\u0000\u0000\u0210\u0213\u0001\u0000\u0000\u0000\u0211\u020f\u0001"+ - "\u0000\u0000\u0000\u0211\u0212\u0001\u0000\u0000\u0000\u0212U\u0001\u0000"+ - "\u0000\u0000\u0213\u0211\u0001\u0000\u0000\u0000\u0214\u0215\u0003<\u001e"+ - "\u0000\u0215\u0216\u0005[\u0000\u0000\u0216\u0217\u0003<\u001e\u0000\u0217"+ - "W\u0001\u0000\u0000\u0000\u0218\u0219\u0005\u0001\u0000\u0000\u0219\u021a"+ - "\u0003\u0014\n\u0000\u021a\u021c\u0003j5\u0000\u021b\u021d\u0003^/\u0000"+ - "\u021c\u021b\u0001\u0000\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000"+ - "\u021dY\u0001\u0000\u0000\u0000\u021e\u021f\u0005\u0007\u0000\u0000\u021f"+ - "\u0220\u0003\u0014\n\u0000\u0220\u0221\u0003j5\u0000\u0221[\u0001\u0000"+ - "\u0000\u0000\u0222\u0223\u0005\n\u0000\u0000\u0223\u0224\u0003:\u001d"+ - "\u0000\u0224]\u0001\u0000\u0000\u0000\u0225\u022a\u0003`0\u0000\u0226"+ - "\u0227\u0005\'\u0000\u0000\u0227\u0229\u0003`0\u0000\u0228\u0226\u0001"+ - "\u0000\u0000\u0000\u0229\u022c\u0001\u0000\u0000\u0000\u022a\u0228\u0001"+ - "\u0000\u0000\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b_\u0001\u0000"+ - "\u0000\u0000\u022c\u022a\u0001\u0000\u0000\u0000\u022d\u022e\u0003@ \u0000"+ - "\u022e\u022f\u0005$\u0000\u0000\u022f\u0230\u0003D\"\u0000\u0230a\u0001"+ - "\u0000\u0000\u0000\u0231\u0232\u0007\u0006\u0000\u0000\u0232c\u0001\u0000"+ - "\u0000\u0000\u0233\u0236\u0003f3\u0000\u0234\u0236\u0003h4\u0000\u0235"+ - "\u0233\u0001\u0000\u0000\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0236"+ - "e\u0001\u0000\u0000\u0000\u0237\u0239\u0007\u0000\u0000\u0000\u0238\u0237"+ - "\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239\u023a"+ - "\u0001\u0000\u0000\u0000\u023a\u023b\u0005 \u0000\u0000\u023bg\u0001\u0000"+ - "\u0000\u0000\u023c\u023e\u0007\u0000\u0000\u0000\u023d\u023c\u0001\u0000"+ - "\u0000\u0000\u023d\u023e\u0001\u0000\u0000\u0000\u023e\u023f\u0001\u0000"+ - "\u0000\u0000\u023f\u0240\u0005\u001f\u0000\u0000\u0240i\u0001\u0000\u0000"+ - "\u0000\u0241\u0242\u0005\u001e\u0000\u0000\u0242k\u0001\u0000\u0000\u0000"+ - "\u0243\u0244\u0007\u0007\u0000\u0000\u0244m\u0001\u0000\u0000\u0000\u0245"+ - "\u0246\u0005\u0005\u0000\u0000\u0246\u0247\u0003p8\u0000\u0247o\u0001"+ - "\u0000\u0000\u0000\u0248\u0249\u0005H\u0000\u0000\u0249\u024a\u0003\u0002"+ - "\u0001\u0000\u024a\u024b\u0005I\u0000\u0000\u024bq\u0001\u0000\u0000\u0000"+ - "\u024c\u024d\u0005\r\u0000\u0000\u024d\u024e\u0005k\u0000\u0000\u024e"+ - 
"s\u0001\u0000\u0000\u0000\u024f\u0250\u0005\u0003\u0000\u0000\u0250\u0253"+ - "\u0005a\u0000\u0000\u0251\u0252\u0005_\u0000\u0000\u0252\u0254\u0003<"+ - "\u001e\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000"+ - "\u0000\u0000\u0254\u025e\u0001\u0000\u0000\u0000\u0255\u0256\u0005`\u0000"+ - "\u0000\u0256\u025b\u0003v;\u0000\u0257\u0258\u0005\'\u0000\u0000\u0258"+ - "\u025a\u0003v;\u0000\u0259\u0257\u0001\u0000\u0000\u0000\u025a\u025d\u0001"+ - "\u0000\u0000\u0000\u025b\u0259\u0001\u0000\u0000\u0000\u025b\u025c\u0001"+ - "\u0000\u0000\u0000\u025c\u025f\u0001\u0000\u0000\u0000\u025d\u025b\u0001"+ - "\u0000\u0000\u0000\u025e\u0255\u0001\u0000\u0000\u0000\u025e\u025f\u0001"+ - "\u0000\u0000\u0000\u025fu\u0001\u0000\u0000\u0000\u0260\u0261\u0003<\u001e"+ - "\u0000\u0261\u0262\u0005$\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000"+ - "\u0263\u0260\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000"+ - "\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0003<\u001e\u0000\u0266"+ - "w\u0001\u0000\u0000\u0000\u0267\u0268\u0005\u0012\u0000\u0000\u0268\u0269"+ - "\u0003(\u0014\u0000\u0269\u026a\u0005_\u0000\u0000\u026a\u026b\u0003>"+ - "\u001f\u0000\u026by\u0001\u0000\u0000\u0000\u026c\u026d\u0005\u0011\u0000"+ - "\u0000\u026d\u0270\u00036\u001b\u0000\u026e\u026f\u0005!\u0000\u0000\u026f"+ - "\u0271\u0003\"\u0011\u0000\u0270\u026e\u0001\u0000\u0000\u0000\u0270\u0271"+ - "\u0001\u0000\u0000\u0000\u0271{\u0001\u0000\u0000\u0000\u0272\u0274\u0007"+ - "\b\u0000\u0000\u0273\u0272\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000"+ - "\u0000\u0000\u0274\u0275\u0001\u0000\u0000\u0000\u0275\u0276\u0005\u0014"+ - "\u0000\u0000\u0276\u0277\u0003~?\u0000\u0277\u0278\u0003\u0080@\u0000"+ - "\u0278}\u0001\u0000\u0000\u0000\u0279\u027c\u0003(\u0014\u0000\u027a\u027b"+ - "\u0005[\u0000\u0000\u027b\u027d\u0003@ \u0000\u027c\u027a\u0001\u0000"+ - "\u0000\u0000\u027c\u027d\u0001\u0000\u0000\u0000\u027d\u007f\u0001\u0000"+ - "\u0000\u0000\u027e\u027f\u0005_\u0000\u0000\u027f\u0284\u0003\u0082A\u0000"+ - "\u0280\u0281\u0005\'\u0000\u0000\u0281\u0283\u0003\u0082A\u0000\u0282"+ - "\u0280\u0001\u0000\u0000\u0000\u0283\u0286\u0001\u0000\u0000\u0000\u0284"+ - "\u0282\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285"+ - "\u0081\u0001\u0000\u0000\u0000\u0286\u0284\u0001\u0000\u0000\u0000\u0287"+ - "\u0288\u0003\u0010\b\u0000\u0288\u0083\u0001\u0000\u0000\u0000?\u008f"+ - "\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9\u00ee"+ - "\u00f8\u00fe\u0106\u0108\u0113\u011a\u0125\u012a\u012c\u0138\u014b\u0151"+ - "\u015b\u015f\u0164\u0172\u017b\u017f\u0183\u018a\u018e\u0195\u019b\u01a2"+ - "\u01aa\u01b2\u01ba\u01cb\u01d6\u01e1\u01e6\u01ea\u01ef\u01fa\u01ff\u0203"+ - "\u0211\u021c\u022a\u0235\u0238\u023d\u0253\u025b\u025e\u0263\u0270\u0273"+ - "\u027c\u0284"; + "++..\u0002\u0000**88\u0002\u000099;?\u0002\u0000\u0011\u0011\u0017\u0018"+ + "\u02a2\u0000\u0086\u0001\u0000\u0000\u0000\u0002\u0089\u0001\u0000\u0000"+ + "\u0000\u0004\u009a\u0001\u0000\u0000\u0000\u0006\u00af\u0001\u0000\u0000"+ + "\u0000\b\u00b1\u0001\u0000\u0000\u0000\n\u00d1\u0001\u0000\u0000\u0000"+ + "\f\u00ec\u0001\u0000\u0000\u0000\u000e\u00ee\u0001\u0000\u0000\u0000\u0010"+ + "\u00fb\u0001\u0000\u0000\u0000\u0012\u0101\u0001\u0000\u0000\u0000\u0014"+ + "\u0116\u0001\u0000\u0000\u0000\u0016\u0120\u0001\u0000\u0000\u0000\u0018"+ + "\u0133\u0001\u0000\u0000\u0000\u001a\u0135\u0001\u0000\u0000\u0000\u001c"+ + "\u0140\u0001\u0000\u0000\u0000\u001e\u0144\u0001\u0000\u0000\u0000 \u0146"+ + 
"\u0001\u0000\u0000\u0000\"\u0149\u0001\u0000\u0000\u0000$\u0154\u0001"+ + "\u0000\u0000\u0000&\u0158\u0001\u0000\u0000\u0000(\u0167\u0001\u0000\u0000"+ + "\u0000*\u016b\u0001\u0000\u0000\u0000,\u016d\u0001\u0000\u0000\u0000."+ + "\u016f\u0001\u0000\u0000\u00000\u0178\u0001\u0000\u0000\u00002\u0188\u0001"+ + "\u0000\u0000\u00004\u018b\u0001\u0000\u0000\u00006\u0193\u0001\u0000\u0000"+ + "\u00008\u019b\u0001\u0000\u0000\u0000:\u01a0\u0001\u0000\u0000\u0000<"+ + "\u01a8\u0001\u0000\u0000\u0000>\u01b0\u0001\u0000\u0000\u0000@\u01b8\u0001"+ + "\u0000\u0000\u0000B\u01bc\u0001\u0000\u0000\u0000D\u01e8\u0001\u0000\u0000"+ + "\u0000F\u01ec\u0001\u0000\u0000\u0000H\u01f0\u0001\u0000\u0000\u0000J"+ + "\u01f2\u0001\u0000\u0000\u0000L\u01f5\u0001\u0000\u0000\u0000N\u01fe\u0001"+ + "\u0000\u0000\u0000P\u0206\u0001\u0000\u0000\u0000R\u0209\u0001\u0000\u0000"+ + "\u0000T\u020c\u0001\u0000\u0000\u0000V\u0215\u0001\u0000\u0000\u0000X"+ + "\u0219\u0001\u0000\u0000\u0000Z\u021f\u0001\u0000\u0000\u0000\\\u0223"+ + "\u0001\u0000\u0000\u0000^\u0226\u0001\u0000\u0000\u0000`\u022e\u0001\u0000"+ + "\u0000\u0000b\u0232\u0001\u0000\u0000\u0000d\u0236\u0001\u0000\u0000\u0000"+ + "f\u0239\u0001\u0000\u0000\u0000h\u023e\u0001\u0000\u0000\u0000j\u0242"+ + "\u0001\u0000\u0000\u0000l\u0244\u0001\u0000\u0000\u0000n\u0246\u0001\u0000"+ + "\u0000\u0000p\u0249\u0001\u0000\u0000\u0000r\u024d\u0001\u0000\u0000\u0000"+ + "t\u0250\u0001\u0000\u0000\u0000v\u0264\u0001\u0000\u0000\u0000x\u0268"+ + "\u0001\u0000\u0000\u0000z\u026d\u0001\u0000\u0000\u0000|\u0273\u0001\u0000"+ + "\u0000\u0000~\u0278\u0001\u0000\u0000\u0000\u0080\u027a\u0001\u0000\u0000"+ + "\u0000\u0082\u0283\u0001\u0000\u0000\u0000\u0084\u0285\u0001\u0000\u0000"+ + "\u0000\u0086\u0087\u0003\u0002\u0001\u0000\u0087\u0088\u0005\u0000\u0000"+ + "\u0001\u0088\u0001\u0001\u0000\u0000\u0000\u0089\u008a\u0006\u0001\uffff"+ + "\uffff\u0000\u008a\u008b\u0003\u0004\u0002\u0000\u008b\u0091\u0001\u0000"+ + "\u0000\u0000\u008c\u008d\n\u0001\u0000\u0000\u008d\u008e\u0005\u001d\u0000"+ + "\u0000\u008e\u0090\u0003\u0006\u0003\u0000\u008f\u008c\u0001\u0000\u0000"+ + "\u0000\u0090\u0093\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000"+ + "\u0000\u0091\u0092\u0001\u0000\u0000\u0000\u0092\u0003\u0001\u0000\u0000"+ + "\u0000\u0093\u0091\u0001\u0000\u0000\u0000\u0094\u009b\u0003n7\u0000\u0095"+ + "\u009b\u0003&\u0013\u0000\u0096\u009b\u0003 \u0010\u0000\u0097\u009b\u0003"+ + "r9\u0000\u0098\u0099\u0004\u0002\u0001\u0000\u0099\u009b\u00030\u0018"+ + "\u0000\u009a\u0094\u0001\u0000\u0000\u0000\u009a\u0095\u0001\u0000\u0000"+ + "\u0000\u009a\u0096\u0001\u0000\u0000\u0000\u009a\u0097\u0001\u0000\u0000"+ + "\u0000\u009a\u0098\u0001\u0000\u0000\u0000\u009b\u0005\u0001\u0000\u0000"+ + "\u0000\u009c\u00b0\u00032\u0019\u0000\u009d\u00b0\u0003\b\u0004\u0000"+ + "\u009e\u00b0\u0003P(\u0000\u009f\u00b0\u0003J%\u0000\u00a0\u00b0\u0003"+ + "4\u001a\u0000\u00a1\u00b0\u0003L&\u0000\u00a2\u00b0\u0003R)\u0000\u00a3"+ + "\u00b0\u0003T*\u0000\u00a4\u00b0\u0003X,\u0000\u00a5\u00b0\u0003Z-\u0000"+ + "\u00a6\u00b0\u0003t:\u0000\u00a7\u00b0\u0003\\.\u0000\u00a8\u00b0\u0003"+ + "|>\u0000\u00a9\u00aa\u0004\u0003\u0002\u0000\u00aa\u00b0\u0003z=\u0000"+ + "\u00ab\u00ac\u0004\u0003\u0003\u0000\u00ac\u00b0\u0003x<\u0000\u00ad\u00ae"+ + "\u0004\u0003\u0004\u0000\u00ae\u00b0\u0003\u0084B\u0000\u00af\u009c\u0001"+ + "\u0000\u0000\u0000\u00af\u009d\u0001\u0000\u0000\u0000\u00af\u009e\u0001"+ + "\u0000\u0000\u0000\u00af\u009f\u0001\u0000\u0000\u0000\u00af\u00a0\u0001"+ + 
"\u0000\u0000\u0000\u00af\u00a1\u0001\u0000\u0000\u0000\u00af\u00a2\u0001"+ + "\u0000\u0000\u0000\u00af\u00a3\u0001\u0000\u0000\u0000\u00af\u00a4\u0001"+ + "\u0000\u0000\u0000\u00af\u00a5\u0001\u0000\u0000\u0000\u00af\u00a6\u0001"+ + "\u0000\u0000\u0000\u00af\u00a7\u0001\u0000\u0000\u0000\u00af\u00a8\u0001"+ + "\u0000\u0000\u0000\u00af\u00a9\u0001\u0000\u0000\u0000\u00af\u00ab\u0001"+ + "\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000\u0000\u00b0\u0007\u0001"+ + "\u0000\u0000\u0000\u00b1\u00b2\u0005\u0010\u0000\u0000\u00b2\u00b3\u0003"+ + "\n\u0005\u0000\u00b3\t\u0001\u0000\u0000\u0000\u00b4\u00b5\u0006\u0005"+ + "\uffff\uffff\u0000\u00b5\u00b6\u00051\u0000\u0000\u00b6\u00d2\u0003\n"+ + "\u0005\b\u00b7\u00d2\u0003\u0010\b\u0000\u00b8\u00d2\u0003\f\u0006\u0000"+ + "\u00b9\u00bb\u0003\u0010\b\u0000\u00ba\u00bc\u00051\u0000\u0000\u00bb"+ + "\u00ba\u0001\u0000\u0000\u0000\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc"+ + "\u00bd\u0001\u0000\u0000\u0000\u00bd\u00be\u0005,\u0000\u0000\u00be\u00bf"+ + "\u00050\u0000\u0000\u00bf\u00c4\u0003\u0010\b\u0000\u00c0\u00c1\u0005"+ + "\'\u0000\u0000\u00c1\u00c3\u0003\u0010\b\u0000\u00c2\u00c0\u0001\u0000"+ + "\u0000\u0000\u00c3\u00c6\u0001\u0000\u0000\u0000\u00c4\u00c2\u0001\u0000"+ + "\u0000\u0000\u00c4\u00c5\u0001\u0000\u0000\u0000\u00c5\u00c7\u0001\u0000"+ + "\u0000\u0000\u00c6\u00c4\u0001\u0000\u0000\u0000\u00c7\u00c8\u00057\u0000"+ + "\u0000\u00c8\u00d2\u0001\u0000\u0000\u0000\u00c9\u00ca\u0003\u0010\b\u0000"+ + "\u00ca\u00cc\u0005-\u0000\u0000\u00cb\u00cd\u00051\u0000\u0000\u00cc\u00cb"+ + "\u0001\u0000\u0000\u0000\u00cc\u00cd\u0001\u0000\u0000\u0000\u00cd\u00ce"+ + "\u0001\u0000\u0000\u0000\u00ce\u00cf\u00052\u0000\u0000\u00cf\u00d2\u0001"+ + "\u0000\u0000\u0000\u00d0\u00d2\u0003\u000e\u0007\u0000\u00d1\u00b4\u0001"+ + "\u0000\u0000\u0000\u00d1\u00b7\u0001\u0000\u0000\u0000\u00d1\u00b8\u0001"+ + "\u0000\u0000\u0000\u00d1\u00b9\u0001\u0000\u0000\u0000\u00d1\u00c9\u0001"+ + "\u0000\u0000\u0000\u00d1\u00d0\u0001\u0000\u0000\u0000\u00d2\u00db\u0001"+ + "\u0000\u0000\u0000\u00d3\u00d4\n\u0005\u0000\u0000\u00d4\u00d5\u0005\""+ + "\u0000\u0000\u00d5\u00da\u0003\n\u0005\u0006\u00d6\u00d7\n\u0004\u0000"+ + "\u0000\u00d7\u00d8\u00054\u0000\u0000\u00d8\u00da\u0003\n\u0005\u0005"+ + "\u00d9\u00d3\u0001\u0000\u0000\u0000\u00d9\u00d6\u0001\u0000\u0000\u0000"+ + "\u00da\u00dd\u0001\u0000\u0000\u0000\u00db\u00d9\u0001\u0000\u0000\u0000"+ + "\u00db\u00dc\u0001\u0000\u0000\u0000\u00dc\u000b\u0001\u0000\u0000\u0000"+ + "\u00dd\u00db\u0001\u0000\u0000\u0000\u00de\u00e0\u0003\u0010\b\u0000\u00df"+ + "\u00e1\u00051\u0000\u0000\u00e0\u00df\u0001\u0000\u0000\u0000\u00e0\u00e1"+ + "\u0001\u0000\u0000\u0000\u00e1\u00e2\u0001\u0000\u0000\u0000\u00e2\u00e3"+ + "\u0005/\u0000\u0000\u00e3\u00e4\u0003j5\u0000\u00e4\u00ed\u0001\u0000"+ + "\u0000\u0000\u00e5\u00e7\u0003\u0010\b\u0000\u00e6\u00e8\u00051\u0000"+ + "\u0000\u00e7\u00e6\u0001\u0000\u0000\u0000\u00e7\u00e8\u0001\u0000\u0000"+ + "\u0000\u00e8\u00e9\u0001\u0000\u0000\u0000\u00e9\u00ea\u00056\u0000\u0000"+ + "\u00ea\u00eb\u0003j5\u0000\u00eb\u00ed\u0001\u0000\u0000\u0000\u00ec\u00de"+ + "\u0001\u0000\u0000\u0000\u00ec\u00e5\u0001\u0000\u0000\u0000\u00ed\r\u0001"+ + "\u0000\u0000\u0000\u00ee\u00f1\u0003:\u001d\u0000\u00ef\u00f0\u0005%\u0000"+ + "\u0000\u00f0\u00f2\u0003\u001e\u000f\u0000\u00f1\u00ef\u0001\u0000\u0000"+ + "\u0000\u00f1\u00f2\u0001\u0000\u0000\u0000\u00f2\u00f3\u0001\u0000\u0000"+ + "\u0000\u00f3\u00f4\u0005&\u0000\u0000\u00f4\u00f5\u0003D\"\u0000\u00f5"+ + 
"\u000f\u0001\u0000\u0000\u0000\u00f6\u00fc\u0003\u0012\t\u0000\u00f7\u00f8"+ + "\u0003\u0012\t\u0000\u00f8\u00f9\u0003l6\u0000\u00f9\u00fa\u0003\u0012"+ + "\t\u0000\u00fa\u00fc\u0001\u0000\u0000\u0000\u00fb\u00f6\u0001\u0000\u0000"+ + "\u0000\u00fb\u00f7\u0001\u0000\u0000\u0000\u00fc\u0011\u0001\u0000\u0000"+ + "\u0000\u00fd\u00fe\u0006\t\uffff\uffff\u0000\u00fe\u0102\u0003\u0014\n"+ + "\u0000\u00ff\u0100\u0007\u0000\u0000\u0000\u0100\u0102\u0003\u0012\t\u0003"+ + "\u0101\u00fd\u0001\u0000\u0000\u0000\u0101\u00ff\u0001\u0000\u0000\u0000"+ + "\u0102\u010b\u0001\u0000\u0000\u0000\u0103\u0104\n\u0002\u0000\u0000\u0104"+ + "\u0105\u0007\u0001\u0000\u0000\u0105\u010a\u0003\u0012\t\u0003\u0106\u0107"+ + "\n\u0001\u0000\u0000\u0107\u0108\u0007\u0000\u0000\u0000\u0108\u010a\u0003"+ + "\u0012\t\u0002\u0109\u0103\u0001\u0000\u0000\u0000\u0109\u0106\u0001\u0000"+ + "\u0000\u0000\u010a\u010d\u0001\u0000\u0000\u0000\u010b\u0109\u0001\u0000"+ + "\u0000\u0000\u010b\u010c\u0001\u0000\u0000\u0000\u010c\u0013\u0001\u0000"+ + "\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000\u010e\u010f\u0006\n\uffff"+ + "\uffff\u0000\u010f\u0117\u0003D\"\u0000\u0110\u0117\u0003:\u001d\u0000"+ + "\u0111\u0117\u0003\u0016\u000b\u0000\u0112\u0113\u00050\u0000\u0000\u0113"+ + "\u0114\u0003\n\u0005\u0000\u0114\u0115\u00057\u0000\u0000\u0115\u0117"+ + "\u0001\u0000\u0000\u0000\u0116\u010e\u0001\u0000\u0000\u0000\u0116\u0110"+ + "\u0001\u0000\u0000\u0000\u0116\u0111\u0001\u0000\u0000\u0000\u0116\u0112"+ + "\u0001\u0000\u0000\u0000\u0117\u011d\u0001\u0000\u0000\u0000\u0118\u0119"+ + "\n\u0001\u0000\u0000\u0119\u011a\u0005%\u0000\u0000\u011a\u011c\u0003"+ + "\u001e\u000f\u0000\u011b\u0118\u0001\u0000\u0000\u0000\u011c\u011f\u0001"+ + "\u0000\u0000\u0000\u011d\u011b\u0001\u0000\u0000\u0000\u011d\u011e\u0001"+ + "\u0000\u0000\u0000\u011e\u0015\u0001\u0000\u0000\u0000\u011f\u011d\u0001"+ + "\u0000\u0000\u0000\u0120\u0121\u0003\u0018\f\u0000\u0121\u012f\u00050"+ + "\u0000\u0000\u0122\u0130\u0005B\u0000\u0000\u0123\u0128\u0003\n\u0005"+ + "\u0000\u0124\u0125\u0005\'\u0000\u0000\u0125\u0127\u0003\n\u0005\u0000"+ + "\u0126\u0124\u0001\u0000\u0000\u0000\u0127\u012a\u0001\u0000\u0000\u0000"+ + "\u0128\u0126\u0001\u0000\u0000\u0000\u0128\u0129\u0001\u0000\u0000\u0000"+ + "\u0129\u012d\u0001\u0000\u0000\u0000\u012a\u0128\u0001\u0000\u0000\u0000"+ + "\u012b\u012c\u0005\'\u0000\u0000\u012c\u012e\u0003\u001a\r\u0000\u012d"+ + "\u012b\u0001\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e"+ + "\u0130\u0001\u0000\u0000\u0000\u012f\u0122\u0001\u0000\u0000\u0000\u012f"+ + "\u0123\u0001\u0000\u0000\u0000\u012f\u0130\u0001\u0000\u0000\u0000\u0130"+ + "\u0131\u0001\u0000\u0000\u0000\u0131\u0132\u00057\u0000\u0000\u0132\u0017"+ + "\u0001\u0000\u0000\u0000\u0133\u0134\u0003H$\u0000\u0134\u0019\u0001\u0000"+ + "\u0000\u0000\u0135\u0136\u0005E\u0000\u0000\u0136\u013b\u0003\u001c\u000e"+ + "\u0000\u0137\u0138\u0005\'\u0000\u0000\u0138\u013a\u0003\u001c\u000e\u0000"+ + "\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000\u0000"+ + "\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000\u0000"+ + "\u013c\u013e\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ + "\u013e\u013f\u0005F\u0000\u0000\u013f\u001b\u0001\u0000\u0000\u0000\u0140"+ + "\u0141\u0003j5\u0000\u0141\u0142\u0005&\u0000\u0000\u0142\u0143\u0003"+ + "D\"\u0000\u0143\u001d\u0001\u0000\u0000\u0000\u0144\u0145\u0003@ \u0000"+ + "\u0145\u001f\u0001\u0000\u0000\u0000\u0146\u0147\u0005\f\u0000\u0000\u0147"+ + 
"\u0148\u0003\"\u0011\u0000\u0148!\u0001\u0000\u0000\u0000\u0149\u014e"+ + "\u0003$\u0012\u0000\u014a\u014b\u0005\'\u0000\u0000\u014b\u014d\u0003"+ + "$\u0012\u0000\u014c\u014a\u0001\u0000\u0000\u0000\u014d\u0150\u0001\u0000"+ + "\u0000\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014e\u014f\u0001\u0000"+ + "\u0000\u0000\u014f#\u0001\u0000\u0000\u0000\u0150\u014e\u0001\u0000\u0000"+ + "\u0000\u0151\u0152\u0003:\u001d\u0000\u0152\u0153\u0005$\u0000\u0000\u0153"+ + "\u0155\u0001\u0000\u0000\u0000\u0154\u0151\u0001\u0000\u0000\u0000\u0154"+ + "\u0155\u0001\u0000\u0000\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156"+ + "\u0157\u0003\n\u0005\u0000\u0157%\u0001\u0000\u0000\u0000\u0158\u0159"+ + "\u0005\u0006\u0000\u0000\u0159\u015e\u0003(\u0014\u0000\u015a\u015b\u0005"+ + "\'\u0000\u0000\u015b\u015d\u0003(\u0014\u0000\u015c\u015a\u0001\u0000"+ + "\u0000\u0000\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000"+ + "\u0000\u0000\u015e\u015f\u0001\u0000\u0000\u0000\u015f\u0162\u0001\u0000"+ + "\u0000\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0161\u0163\u0003.\u0017"+ + "\u0000\u0162\u0161\u0001\u0000\u0000\u0000\u0162\u0163\u0001\u0000\u0000"+ + "\u0000\u0163\'\u0001\u0000\u0000\u0000\u0164\u0165\u0003*\u0015\u0000"+ + "\u0165\u0166\u0005&\u0000\u0000\u0166\u0168\u0001\u0000\u0000\u0000\u0167"+ + "\u0164\u0001\u0000\u0000\u0000\u0167\u0168\u0001\u0000\u0000\u0000\u0168"+ + "\u0169\u0001\u0000\u0000\u0000\u0169\u016a\u0003,\u0016\u0000\u016a)\u0001"+ + "\u0000\u0000\u0000\u016b\u016c\u0007\u0002\u0000\u0000\u016c+\u0001\u0000"+ + "\u0000\u0000\u016d\u016e\u0007\u0002\u0000\u0000\u016e-\u0001\u0000\u0000"+ + "\u0000\u016f\u0170\u0005R\u0000\u0000\u0170\u0175\u0005S\u0000\u0000\u0171"+ + "\u0172\u0005\'\u0000\u0000\u0172\u0174\u0005S\u0000\u0000\u0173\u0171"+ + "\u0001\u0000\u0000\u0000\u0174\u0177\u0001\u0000\u0000\u0000\u0175\u0173"+ + "\u0001\u0000\u0000\u0000\u0175\u0176\u0001\u0000\u0000\u0000\u0176/\u0001"+ + "\u0000\u0000\u0000\u0177\u0175\u0001\u0000\u0000\u0000\u0178\u0179\u0005"+ + "\u0015\u0000\u0000\u0179\u017e\u0003(\u0014\u0000\u017a\u017b\u0005\'"+ + "\u0000\u0000\u017b\u017d\u0003(\u0014\u0000\u017c\u017a\u0001\u0000\u0000"+ + "\u0000\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000"+ + "\u0000\u017e\u017f\u0001\u0000\u0000\u0000\u017f\u0182\u0001\u0000\u0000"+ + "\u0000\u0180\u017e\u0001\u0000\u0000\u0000\u0181\u0183\u00036\u001b\u0000"+ + "\u0182\u0181\u0001\u0000\u0000\u0000\u0182\u0183\u0001\u0000\u0000\u0000"+ + "\u0183\u0186\u0001\u0000\u0000\u0000\u0184\u0185\u0005!\u0000\u0000\u0185"+ + "\u0187\u0003\"\u0011\u0000\u0186\u0184\u0001\u0000\u0000\u0000\u0186\u0187"+ + "\u0001\u0000\u0000\u0000\u01871\u0001\u0000\u0000\u0000\u0188\u0189\u0005"+ + "\u0004\u0000\u0000\u0189\u018a\u0003\"\u0011\u0000\u018a3\u0001\u0000"+ + "\u0000\u0000\u018b\u018d\u0005\u000f\u0000\u0000\u018c\u018e\u00036\u001b"+ + "\u0000\u018d\u018c\u0001\u0000\u0000\u0000\u018d\u018e\u0001\u0000\u0000"+ + "\u0000\u018e\u0191\u0001\u0000\u0000\u0000\u018f\u0190\u0005!\u0000\u0000"+ + "\u0190\u0192\u0003\"\u0011\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0191"+ + "\u0192\u0001\u0000\u0000\u0000\u01925\u0001\u0000\u0000\u0000\u0193\u0198"+ + "\u00038\u001c\u0000\u0194\u0195\u0005\'\u0000\u0000\u0195\u0197\u0003"+ + "8\u001c\u0000\u0196\u0194\u0001\u0000\u0000\u0000\u0197\u019a\u0001\u0000"+ + "\u0000\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0198\u0199\u0001\u0000"+ + "\u0000\u0000\u01997\u0001\u0000\u0000\u0000\u019a\u0198\u0001\u0000\u0000"+ + 
"\u0000\u019b\u019e\u0003$\u0012\u0000\u019c\u019d\u0005\u0010\u0000\u0000"+ + "\u019d\u019f\u0003\n\u0005\u0000\u019e\u019c\u0001\u0000\u0000\u0000\u019e"+ + "\u019f\u0001\u0000\u0000\u0000\u019f9\u0001\u0000\u0000\u0000\u01a0\u01a5"+ + "\u0003H$\u0000\u01a1\u01a2\u0005)\u0000\u0000\u01a2\u01a4\u0003H$\u0000"+ + "\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a4\u01a7\u0001\u0000\u0000\u0000"+ + "\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000\u0000"+ + "\u01a6;\u0001\u0000\u0000\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a8"+ + "\u01ad\u0003B!\u0000\u01a9\u01aa\u0005)\u0000\u0000\u01aa\u01ac\u0003"+ + "B!\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ac\u01af\u0001\u0000\u0000"+ + "\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ad\u01ae\u0001\u0000\u0000"+ + "\u0000\u01ae=\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000"+ + "\u01b0\u01b5\u0003<\u001e\u0000\u01b1\u01b2\u0005\'\u0000\u0000\u01b2"+ + "\u01b4\u0003<\u001e\u0000\u01b3\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7"+ + "\u0001\u0000\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6"+ + "\u0001\u0000\u0000\u0000\u01b6?\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001"+ + "\u0000\u0000\u0000\u01b8\u01b9\u0007\u0003\u0000\u0000\u01b9A\u0001\u0000"+ + "\u0000\u0000\u01ba\u01bd\u0005W\u0000\u0000\u01bb\u01bd\u0003F#\u0000"+ + "\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bb\u0001\u0000\u0000\u0000"+ + "\u01bdC\u0001\u0000\u0000\u0000\u01be\u01e9\u00052\u0000\u0000\u01bf\u01c0"+ + "\u0003h4\u0000\u01c0\u01c1\u0005J\u0000\u0000\u01c1\u01e9\u0001\u0000"+ + "\u0000\u0000\u01c2\u01e9\u0003f3\u0000\u01c3\u01e9\u0003h4\u0000\u01c4"+ + "\u01e9\u0003b1\u0000\u01c5\u01e9\u0003F#\u0000\u01c6\u01e9\u0003j5\u0000"+ + "\u01c7\u01c8\u0005H\u0000\u0000\u01c8\u01cd\u0003d2\u0000\u01c9\u01ca"+ + "\u0005\'\u0000\u0000\u01ca\u01cc\u0003d2\u0000\u01cb\u01c9\u0001\u0000"+ + "\u0000\u0000\u01cc\u01cf\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000"+ + "\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000\u0000\u01ce\u01d0\u0001\u0000"+ + "\u0000\u0000\u01cf\u01cd\u0001\u0000\u0000\u0000\u01d0\u01d1\u0005I\u0000"+ + "\u0000\u01d1\u01e9\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005H\u0000\u0000"+ + "\u01d3\u01d8\u0003b1\u0000\u01d4\u01d5\u0005\'\u0000\u0000\u01d5\u01d7"+ + "\u0003b1\u0000\u01d6\u01d4\u0001\u0000\u0000\u0000\u01d7\u01da\u0001\u0000"+ + "\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d8\u01d9\u0001\u0000"+ + "\u0000\u0000\u01d9\u01db\u0001\u0000\u0000\u0000\u01da\u01d8\u0001\u0000"+ + "\u0000\u0000\u01db\u01dc\u0005I\u0000\u0000\u01dc\u01e9\u0001\u0000\u0000"+ + "\u0000\u01dd\u01de\u0005H\u0000\u0000\u01de\u01e3\u0003j5\u0000\u01df"+ + "\u01e0\u0005\'\u0000\u0000\u01e0\u01e2\u0003j5\u0000\u01e1\u01df\u0001"+ + "\u0000\u0000\u0000\u01e2\u01e5\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001"+ + "\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e6\u0001"+ + "\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005"+ + "I\u0000\u0000\u01e7\u01e9\u0001\u0000\u0000\u0000\u01e8\u01be\u0001\u0000"+ + "\u0000\u0000\u01e8\u01bf\u0001\u0000\u0000\u0000\u01e8\u01c2\u0001\u0000"+ + "\u0000\u0000\u01e8\u01c3\u0001\u0000\u0000\u0000\u01e8\u01c4\u0001\u0000"+ + "\u0000\u0000\u01e8\u01c5\u0001\u0000\u0000\u0000\u01e8\u01c6\u0001\u0000"+ + "\u0000\u0000\u01e8\u01c7\u0001\u0000\u0000\u0000\u01e8\u01d2\u0001\u0000"+ + "\u0000\u0000\u01e8\u01dd\u0001\u0000\u0000\u0000\u01e9E\u0001\u0000\u0000"+ + "\u0000\u01ea\u01ed\u00055\u0000\u0000\u01eb\u01ed\u0005G\u0000\u0000\u01ec"+ + 
"\u01ea\u0001\u0000\u0000\u0000\u01ec\u01eb\u0001\u0000\u0000\u0000\u01ed"+ + "G\u0001\u0000\u0000\u0000\u01ee\u01f1\u0003@ \u0000\u01ef\u01f1\u0003"+ + "F#\u0000\u01f0\u01ee\u0001\u0000\u0000\u0000\u01f0\u01ef\u0001\u0000\u0000"+ + "\u0000\u01f1I\u0001\u0000\u0000\u0000\u01f2\u01f3\u0005\t\u0000\u0000"+ + "\u01f3\u01f4\u0005\u001f\u0000\u0000\u01f4K\u0001\u0000\u0000\u0000\u01f5"+ + "\u01f6\u0005\u000e\u0000\u0000\u01f6\u01fb\u0003N\'\u0000\u01f7\u01f8"+ + "\u0005\'\u0000\u0000\u01f8\u01fa\u0003N\'\u0000\u01f9\u01f7\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fd\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000"+ + "\u0000\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fcM\u0001\u0000\u0000"+ + "\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000\u01fe\u0200\u0003\n\u0005\u0000"+ + "\u01ff\u0201\u0007\u0004\u0000\u0000\u0200\u01ff\u0001\u0000\u0000\u0000"+ + "\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u00053\u0000\u0000\u0203\u0205\u0007\u0005\u0000\u0000\u0204"+ + "\u0202\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205"+ + "O\u0001\u0000\u0000\u0000\u0206\u0207\u0005\b\u0000\u0000\u0207\u0208"+ + "\u0003>\u001f\u0000\u0208Q\u0001\u0000\u0000\u0000\u0209\u020a\u0005\u0002"+ + "\u0000\u0000\u020a\u020b\u0003>\u001f\u0000\u020bS\u0001\u0000\u0000\u0000"+ + "\u020c\u020d\u0005\u000b\u0000\u0000\u020d\u0212\u0003V+\u0000\u020e\u020f"+ + "\u0005\'\u0000\u0000\u020f\u0211\u0003V+\u0000\u0210\u020e\u0001\u0000"+ + "\u0000\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000"+ + "\u0000\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213U\u0001\u0000\u0000"+ + "\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u0216\u0003<\u001e\u0000"+ + "\u0216\u0217\u0005[\u0000\u0000\u0217\u0218\u0003<\u001e\u0000\u0218W"+ + "\u0001\u0000\u0000\u0000\u0219\u021a\u0005\u0001\u0000\u0000\u021a\u021b"+ + "\u0003\u0014\n\u0000\u021b\u021d\u0003j5\u0000\u021c\u021e\u0003^/\u0000"+ + "\u021d\u021c\u0001\u0000\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000"+ + "\u021eY\u0001\u0000\u0000\u0000\u021f\u0220\u0005\u0007\u0000\u0000\u0220"+ + "\u0221\u0003\u0014\n\u0000\u0221\u0222\u0003j5\u0000\u0222[\u0001\u0000"+ + "\u0000\u0000\u0223\u0224\u0005\n\u0000\u0000\u0224\u0225\u0003:\u001d"+ + "\u0000\u0225]\u0001\u0000\u0000\u0000\u0226\u022b\u0003`0\u0000\u0227"+ + "\u0228\u0005\'\u0000\u0000\u0228\u022a\u0003`0\u0000\u0229\u0227\u0001"+ + "\u0000\u0000\u0000\u022a\u022d\u0001\u0000\u0000\u0000\u022b\u0229\u0001"+ + "\u0000\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c_\u0001\u0000"+ + "\u0000\u0000\u022d\u022b\u0001\u0000\u0000\u0000\u022e\u022f\u0003@ \u0000"+ + "\u022f\u0230\u0005$\u0000\u0000\u0230\u0231\u0003D\"\u0000\u0231a\u0001"+ + "\u0000\u0000\u0000\u0232\u0233\u0007\u0006\u0000\u0000\u0233c\u0001\u0000"+ + "\u0000\u0000\u0234\u0237\u0003f3\u0000\u0235\u0237\u0003h4\u0000\u0236"+ + "\u0234\u0001\u0000\u0000\u0000\u0236\u0235\u0001\u0000\u0000\u0000\u0237"+ + "e\u0001\u0000\u0000\u0000\u0238\u023a\u0007\u0000\u0000\u0000\u0239\u0238"+ + "\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000\u023a\u023b"+ + "\u0001\u0000\u0000\u0000\u023b\u023c\u0005 \u0000\u0000\u023cg\u0001\u0000"+ + "\u0000\u0000\u023d\u023f\u0007\u0000\u0000\u0000\u023e\u023d\u0001\u0000"+ + "\u0000\u0000\u023e\u023f\u0001\u0000\u0000\u0000\u023f\u0240\u0001\u0000"+ + "\u0000\u0000\u0240\u0241\u0005\u001f\u0000\u0000\u0241i\u0001\u0000\u0000"+ + "\u0000\u0242\u0243\u0005\u001e\u0000\u0000\u0243k\u0001\u0000\u0000\u0000"+ + 
"\u0244\u0245\u0007\u0007\u0000\u0000\u0245m\u0001\u0000\u0000\u0000\u0246"+ + "\u0247\u0005\u0005\u0000\u0000\u0247\u0248\u0003p8\u0000\u0248o\u0001"+ + "\u0000\u0000\u0000\u0249\u024a\u0005H\u0000\u0000\u024a\u024b\u0003\u0002"+ + "\u0001\u0000\u024b\u024c\u0005I\u0000\u0000\u024cq\u0001\u0000\u0000\u0000"+ + "\u024d\u024e\u0005\r\u0000\u0000\u024e\u024f\u0005k\u0000\u0000\u024f"+ + "s\u0001\u0000\u0000\u0000\u0250\u0251\u0005\u0003\u0000\u0000\u0251\u0254"+ + "\u0005a\u0000\u0000\u0252\u0253\u0005_\u0000\u0000\u0253\u0255\u0003<"+ + "\u001e\u0000\u0254\u0252\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000"+ + "\u0000\u0000\u0255\u025f\u0001\u0000\u0000\u0000\u0256\u0257\u0005`\u0000"+ + "\u0000\u0257\u025c\u0003v;\u0000\u0258\u0259\u0005\'\u0000\u0000\u0259"+ + "\u025b\u0003v;\u0000\u025a\u0258\u0001\u0000\u0000\u0000\u025b\u025e\u0001"+ + "\u0000\u0000\u0000\u025c\u025a\u0001\u0000\u0000\u0000\u025c\u025d\u0001"+ + "\u0000\u0000\u0000\u025d\u0260\u0001\u0000\u0000\u0000\u025e\u025c\u0001"+ + "\u0000\u0000\u0000\u025f\u0256\u0001\u0000\u0000\u0000\u025f\u0260\u0001"+ + "\u0000\u0000\u0000\u0260u\u0001\u0000\u0000\u0000\u0261\u0262\u0003<\u001e"+ + "\u0000\u0262\u0263\u0005$\u0000\u0000\u0263\u0265\u0001\u0000\u0000\u0000"+ + "\u0264\u0261\u0001\u0000\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000"+ + "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0003<\u001e\u0000\u0267"+ + "w\u0001\u0000\u0000\u0000\u0268\u0269\u0005\u0014\u0000\u0000\u0269\u026a"+ + "\u0003(\u0014\u0000\u026a\u026b\u0005_\u0000\u0000\u026b\u026c\u0003>"+ + "\u001f\u0000\u026cy\u0001\u0000\u0000\u0000\u026d\u026e\u0005\u0012\u0000"+ + "\u0000\u026e\u0271\u00036\u001b\u0000\u026f\u0270\u0005!\u0000\u0000\u0270"+ + "\u0272\u0003\"\u0011\u0000\u0271\u026f\u0001\u0000\u0000\u0000\u0271\u0272"+ + "\u0001\u0000\u0000\u0000\u0272{\u0001\u0000\u0000\u0000\u0273\u0274\u0007"+ + "\b\u0000\u0000\u0274\u0275\u0005y\u0000\u0000\u0275\u0276\u0003~?\u0000"+ + "\u0276\u0277\u0003\u0080@\u0000\u0277}\u0001\u0000\u0000\u0000\u0278\u0279"+ + "\u0003(\u0014\u0000\u0279\u007f\u0001\u0000\u0000\u0000\u027a\u027b\u0005"+ + "_\u0000\u0000\u027b\u0280\u0003\u0082A\u0000\u027c\u027d\u0005\'\u0000"+ + "\u0000\u027d\u027f\u0003\u0082A\u0000\u027e\u027c\u0001\u0000\u0000\u0000"+ + "\u027f\u0282\u0001\u0000\u0000\u0000\u0280\u027e\u0001\u0000\u0000\u0000"+ + "\u0280\u0281\u0001\u0000\u0000\u0000\u0281\u0081\u0001\u0000\u0000\u0000"+ + "\u0282\u0280\u0001\u0000\u0000\u0000\u0283\u0284\u0003\u0010\b\u0000\u0284"+ + "\u0083\u0001\u0000\u0000\u0000\u0285\u0286\u0005\u0013\u0000\u0000\u0286"+ + "\u0287\u0003>\u001f\u0000\u0287\u0085\u0001\u0000\u0000\u0000=\u0091\u009a"+ + "\u00af\u00bb\u00c4\u00cc\u00d1\u00d9\u00db\u00e0\u00e7\u00ec\u00f1\u00fb"+ + "\u0101\u0109\u010b\u0116\u011d\u0128\u012d\u012f\u013b\u014e\u0154\u015e"+ + "\u0162\u0167\u0175\u017e\u0182\u0186\u018d\u0191\u0198\u019e\u01a5\u01ad"+ + "\u01b5\u01bc\u01cd\u01d8\u01e3\u01e8\u01ec\u01f0\u01fb\u0200\u0204\u0212"+ + "\u021d\u022b\u0236\u0239\u023e\u0254\u025c\u025f\u0264\u0271\u0280"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index df6d8f1e35013..71dec06682b44 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ 
@@ -1100,6 +1100,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener {
   *
   * <p>The default implementation does nothing.</p>
   */
  @Override public void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation does nothing.</p>
+   */
+  @Override public void enterInsistCommand(EsqlBaseParser.InsistCommandContext ctx) { }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation does nothing.</p>
+   */
+  @Override public void exitInsistCommand(EsqlBaseParser.InsistCommandContext ctx) { }
  /**
   * {@inheritDoc}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
index 885ba91e20dcd..2b3d1f224c545 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
@@ -650,4 +650,11 @@ public class EsqlBaseParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> im
   * {@link #visitChildren} on {@code ctx}.</p>
   */
  @Override public T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { return visitChildren(ctx); }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation returns the result of calling
+   * {@link #visitChildren} on {@code ctx}.</p>
+   */
+  @Override public T visitInsistCommand(EsqlBaseParser.InsistCommandContext ctx) { return visitChildren(ctx); }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
index 2c1faa374695e..9790ee17ac5ec 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
@@ -981,4 +981,14 @@ public interface EsqlBaseParserListener extends ParseTreeListener {
   * @param ctx the parse tree
   */
  void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx);
+  /**
+   * Enter a parse tree produced by {@link EsqlBaseParser#insistCommand}.
+   * @param ctx the parse tree
+   */
+  void enterInsistCommand(EsqlBaseParser.InsistCommandContext ctx);
+  /**
+   * Exit a parse tree produced by {@link EsqlBaseParser#insistCommand}.
+   * @param ctx the parse tree
+   */
+  void exitInsistCommand(EsqlBaseParser.InsistCommandContext ctx);
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
index 73afd23393cdb..a0097b039edbb 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
@@ -591,4 +591,10 @@ public interface EsqlBaseParserVisitor<T> extends ParseTreeVisitor<T> {
   * @return the visitor result
   */
  T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx);
+  /**
+   * Visit a parse tree produced by {@link EsqlBaseParser#insistCommand}.
+   * @param ctx the parse tree
+   * @return the visitor result
+   */
+  T visitInsistCommand(EsqlBaseParser.InsistCommandContext ctx);
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
index 78c3044257f9f..9d7c3dd908477 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
@@ -795,9 +795,9 @@ public NamedExpression visitEnrichWithClause(EsqlBaseParser.EnrichWithClauseCont
 
     private NamedExpression enrichFieldName(EsqlBaseParser.QualifiedNamePatternContext ctx) {
         return visitQualifiedNamePattern(ctx, ne -> {
-            if (ne instanceof UnresolvedNamePattern up) {
+            if (ne instanceof UnresolvedNamePattern || ne instanceof UnresolvedStar) {
                 var src = ne.source();
-                throw new ParsingException(src, "Using wildcards [*] in ENRICH WITH projections is not allowed [{}]", up.pattern());
+                throw new ParsingException(src, "Using wildcards [*] in ENRICH WITH projections is not allowed, found [{}]", src.text());
             }
         });
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java
index ae2379318474b..fdcd7fc961b9c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.cluster.metadata.MetadataCreateIndexService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.indices.InvalidIndexNameException;
+import org.elasticsearch.transport.RemoteClusterService;
 import org.elasticsearch.xpack.esql.core.util.Holder;
 import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IdentifierContext;
 import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IndexStringContext;
@@ -51,10 +52,24 @@ protected static String quoteIdString(String unquotedString) {
         return "`" + unquotedString.replace("`", "``") + "`";
     }
 
+    @Override
+    public String visitClusterString(EsqlBaseParser.ClusterStringContext ctx) {
+        if (ctx == null) {
+            return null;
+        } else if (ctx.UNQUOTED_SOURCE() != null) {
+            return ctx.UNQUOTED_SOURCE().getText();
+        } else {
+            return unquote(ctx.QUOTED_STRING().getText());
+        }
+    }
+
     @Override
     public String visitIndexString(IndexStringContext ctx) {
-        TerminalNode n = ctx.UNQUOTED_SOURCE();
-        return n != null ? n.getText() : unquote(ctx.QUOTED_STRING().getText());
+        if (ctx.UNQUOTED_SOURCE() != null) {
+            return ctx.UNQUOTED_SOURCE().getText();
+        } else {
+            return unquote(ctx.QUOTED_STRING().getText());
+        }
     }
 
     public String visitIndexPattern(List<EsqlBaseParser.IndexPatternContext> ctx) {
@@ -62,18 +77,26 @@ public String visitIndexPattern(List<EsqlBaseParser.IndexPatternContext> ctx) {
         Holder<Boolean> hasSeenStar = new Holder<>(false);
         ctx.forEach(c -> {
             String indexPattern = visitIndexString(c.indexString());
-            String clusterString = c.clusterString() != null ? c.clusterString().getText() : null;
+            String clusterString = visitClusterString(c.clusterString());
             // skip validating index on remote cluster, because the behavior of remote cluster is not consistent with local cluster
             // For example, invalid#index is an invalid index name, however FROM *:invalid#index does not return an error
             if (clusterString == null) {
                 hasSeenStar.set(indexPattern.contains(WILDCARD) || hasSeenStar.get());
                 validateIndexPattern(indexPattern, c, hasSeenStar.get());
+            } else {
+                validateClusterString(clusterString, c);
             }
             patterns.add(clusterString != null ? clusterString + REMOTE_CLUSTER_INDEX_SEPARATOR + indexPattern : indexPattern);
         });
         return Strings.collectionToDelimitedString(patterns, ",");
     }
 
+    protected static void validateClusterString(String clusterString, EsqlBaseParser.IndexPatternContext ctx) {
+        if (clusterString.indexOf(RemoteClusterService.REMOTE_CLUSTER_INDEX_SEPARATOR) != -1) {
+            throw new ParsingException(source(ctx), "cluster string [{}] must not contain ':'", clusterString);
+        }
+    }
+
     private static void validateIndexPattern(String indexPattern, EsqlBaseParser.IndexPatternContext ctx, boolean hasSeenStar) {
         // multiple index names can be in the same double quote, e.g. indexPattern = "idx1, *, -idx2"
         String[] indices = indexPattern.split(",");
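A minimal standalone sketch of the rule the new validateClusterString enforces (the helper below is illustrative, not ES code; only the error text mirrors the diff): a quoted cluster alias is unquoted first, and may not itself contain the remote-cluster separator ':'.

    // Hypothetical helper mirroring validateClusterString's check.
    static void checkClusterAlias(String alias) {
        if (alias.indexOf(':') != -1) {
            throw new IllegalArgumentException("cluster string [" + alias + "] must not contain ':'");
        }
    }
    // checkClusterAlias("remote1");  // ok, pattern becomes remote1:<index>
    // checkClusterAlias("rem:ote1"); // throws, matching the ParsingException above
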
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
index 46c1de31bb471..86458b8048e9d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
@@ -18,6 +18,7 @@
 import org.elasticsearch.index.IndexMode;
 import org.elasticsearch.transport.RemoteClusterAware;
 import org.elasticsearch.xpack.esql.VerificationException;
+import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.elasticsearch.xpack.esql.capabilities.TelemetryAware;
 import org.elasticsearch.xpack.esql.common.Failure;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
@@ -47,6 +48,7 @@
 import org.elasticsearch.xpack.esql.plan.logical.Filter;
 import org.elasticsearch.xpack.esql.plan.logical.Grok;
 import org.elasticsearch.xpack.esql.plan.logical.InlineStats;
+import org.elasticsearch.xpack.esql.plan.logical.Insist;
 import org.elasticsearch.xpack.esql.plan.logical.Keep;
 import org.elasticsearch.xpack.esql.plan.logical.Limit;
 import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
@@ -107,17 +109,7 @@ protected LogicalPlan plan(ParseTree ctx) {
         if (errors.hasNext() == false) {
             return p;
         } else {
-            StringBuilder message = new StringBuilder();
-            int i = 0;
-
-            while (errors.hasNext()) {
-                if (i > 0) {
-                    message.append("; ");
-                }
-                message.append(errors.next().getMessage());
-                i++;
-            }
-            throw new ParsingException(message.toString());
+            throw ParsingException.combineParsingExceptions(errors);
         }
     }
 
@@ -294,6 +286,22 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) {
         );
     }
 
+    @Override
+    public PlanFactory visitInsistCommand(EsqlBaseParser.InsistCommandContext ctx) {
+        var source = source(ctx);
+        List<NamedExpression> fields = visitQualifiedNamePatterns(ctx.qualifiedNamePatterns(), ne -> {
+            if (ne instanceof UnresolvedStar || ne instanceof UnresolvedNamePattern) {
+                Source neSource = ne.source();
+                throw new ParsingException(neSource, "INSIST doesn't support wildcards, found [{}]", neSource.text());
+            }
+        });
+        return input -> new Insist(
+            source,
+            input,
+            fields.stream().map(ne -> (Attribute) new UnresolvedAttribute(ne.source(), ne.name())).toList()
+        );
+    }
+
     @Override
     public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) {
         final Stats stats = stats(source(ctx), ctx.grouping, ctx.stats);
@@ -431,7 +439,11 @@ public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) {
             : matchField instanceof UnresolvedStar ? WILDCARD
             : null;
         if (patternString != null) {
-            throw new ParsingException(source, "Using wildcards [*] in ENRICH WITH projections is not allowed [{}]", patternString);
+            throw new ParsingException(
+                source,
+                "Using wildcards [*] in ENRICH WITH projections is not allowed, found [{}]",
+                patternString
+            );
         }
 
         List<NamedExpression> keepClauses = visitList(this, ctx.enrichWithClause(), NamedExpression.class);
@@ -525,11 +537,11 @@ public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) {
 
     public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) {
         var source = source(ctx);
-        if (false == Build.current().isSnapshot()) {
+        if (false == EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()) {
             throw new ParsingException(source, "JOIN is in preview and only available in SNAPSHOT build");
         }
 
-        if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.DEV_JOIN_LOOKUP) {
+        if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.JOIN_LOOKUP) {
             String joinType = ctx.type == null ? "(INNER)" : ctx.type.getText();
             throw new ParsingException(source, "only LOOKUP JOIN available, {} JOIN unsupported at the moment", joinType);
         }
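For orientation, a hedged sketch of how the factory returned by visitInsistCommand is consumed (the parse context and child plan are assumed placeholders; PlanFactory is applied to the upstream plan as elsewhere in this builder):

    // Hypothetical usage: wrap an upstream plan in an Insist node.
    PlanFactory insist = builder.visitInsistCommand(insistCtx); // insistCtx: assumed parse context
    LogicalPlan plan = insist.apply(childPlan);                 // childPlan: assumed upstream plan
    // A wildcard such as `a*` never gets this far; the visitor throws
    // "INSIST doesn't support wildcards, found [a*]" while building the factory.
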
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java
index c25ab92437bfc..119e96bbd865c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java
@@ -9,6 +9,8 @@
 import org.elasticsearch.xpack.esql.EsqlClientException;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 
+import java.util.Iterator;
+
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 
 public class ParsingException extends EsqlClientException {
@@ -21,6 +23,10 @@ public ParsingException(String message, Exception cause, int line, int charPosit
         this.charPositionInLine = charPositionInLine + 1;
     }
 
+    /**
+     * To be used only if the exception cannot be associated with a specific position in the query.
+     * The error message will start with {@code line -1:-1:} instead of a specific location.
+     */
     public ParsingException(String message, Object... args) {
         this(Source.EMPTY, message, args);
     }
@@ -37,6 +43,38 @@ public ParsingException(Exception cause, Source source, String message, Object..
         this.charPositionInLine = source.source().getColumnNumber();
     }
 
+    private ParsingException(int line, int charPositionInLine, String message, Object... args) {
+        super(message, args);
+        this.line = line;
+        this.charPositionInLine = charPositionInLine;
+    }
+
+    /**
+     * Combines multiple {@code ParsingException}s into one; used by {@code LogicalPlanBuilder} to
+     * consolidate multiple named-parameter-related {@code ParsingException}s.
+     */
+    public static ParsingException combineParsingExceptions(Iterator<ParsingException> parsingExceptions) {
+        StringBuilder message = new StringBuilder();
+        int i = 0;
+        int line = -1;
+        int charPositionInLine = -1;
+
+        while (parsingExceptions.hasNext()) {
+            ParsingException e = parsingExceptions.next();
+            if (i > 0) {
+                message.append("; ");
+                message.append(e.getMessage());
+            } else {
+                // the line and column numbers are taken from the first error
+                line = e.getLineNumber();
+                charPositionInLine = e.getColumnNumber();
+                message.append(e.getErrorMessage());
+            }
+            i++;
+        }
+        return new ParsingException(line, charPositionInLine, message.toString());
+    }
+
     public int getLineNumber() {
         return line;
     }
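A short sketch of the combining semantics (the sources and messages below are hypothetical; the constructor and accessors are the ones shown above): the combined exception keeps the first error's position and joins the messages with "; ".

    // Hypothetical illustration of combineParsingExceptions.
    List<ParsingException> errors = List.of(
        new ParsingException(firstSource, "unknown query parameter [p1]"),   // e.g. line 1:8
        new ParsingException(secondSource, "unknown query parameter [p2]")   // e.g. line 1:20
    );
    ParsingException combined = ParsingException.combineParsingExceptions(errors.iterator());
    // combined.getLineNumber()/getColumnNumber() come from the first error;
    // later messages keep their own "line x:y:" prefix, since getMessage() includes it.
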
final List removals; public Drop(Source source, LogicalPlan child, List removals) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 4e9fc87318029..11e9a57064e5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -49,7 +49,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware, TelemetryAware { +public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Enrich", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 448085df1e831..e3c562d3d630e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -226,4 +226,8 @@ public static void writeIndexMode(StreamOutput out, IndexMode indexMode) throws throw new IllegalStateException("not ready to support index mode [" + indexMode + "]"); } } + + public EsRelation withAttributes(List newAttributes) { + return new EsRelation(source(), indexPattern, indexMode, indexNameWithModes, newAttributes); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java index 7c437dac03409..af81e26d57c60 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java @@ -38,7 +38,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware, TelemetryAware { +public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Eval", Eval::new); private final List fields; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java index 6931c320007fe..7a1726ea59e97 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java @@ -29,7 +29,7 @@ * {@code SELECT x FROM y WHERE z ..} the "WHERE" clause is a Filter. A * {@code Filter} has a "condition" Expression that does the filtering. 
*/ -public class Filter extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { +public class Filter extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Filter", Filter::new); private final Expression condition; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 527ba28d377f1..724aa2da25983 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -37,7 +37,7 @@ * underlying aggregate. *
*/ -public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan, TelemetryAware { +public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "InlineStats", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Insist.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Insist.java new file mode 100644 index 0000000000000..78d342ca7e3ad --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Insist.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.plan.logical; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.NamedExpressions; +import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class Insist extends UnaryPlan implements SurrogateLogicalPlan { + private final List insistedAttributes; + private @Nullable List lazyOutput = null; + + public Insist(Source source, LogicalPlan child, List insistedAttributes) { + super(source, child); + this.insistedAttributes = insistedAttributes; + } + + @Override + public List output() { + if (lazyOutput == null) { + lazyOutput = NamedExpressions.mergeOutputAttributes(insistedAttributes, child().output()); + } + return lazyOutput; + } + + public List insistedAttributes() { + return insistedAttributes; + } + + @Override + public Insist replaceChild(LogicalPlan newChild) { + return new Insist(source(), newChild, insistedAttributes); + } + + @Override + public boolean expressionsResolved() { + // Like EsqlProject, we allow unsupported attributes to flow through the engine. 
+ return insistedAttributes().stream().allMatch(a -> a.resolved() || a instanceof UnsupportedAttribute); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create( + this, + (source, insistedAttributes1, child) -> new Insist(source, child, insistedAttributes1), + insistedAttributes, + child() + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("doesn't escape the coordinator node"); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException("doesn't escape the coordinator node"); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), Objects.hashCode(insistedAttributes)); + } + + @Override + public boolean equals(Object obj) { + return super.equals(obj) && Objects.equals(((Insist) obj).insistedAttributes, insistedAttributes); + } + + @Override + public LogicalPlan surrogate() { + return new Project(source(), child(), output()); + } + + public Insist withAttributes(List attributes) { + return new Insist(source(), child(), attributes); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java index 67108afb94668..268c6bbe17242 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java @@ -15,7 +15,7 @@ import java.util.List; import java.util.Objects; -public class Keep extends Project implements TelemetryAware { +public class Keep extends Project implements TelemetryAware, SortAgnostic { public Keep(Source source, LogicalPlan child, List projections) { super(source, child, projections); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 1c05ceb124529..56dae7b1f16c0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -32,7 +32,7 @@ * Looks up values from the associated {@code tables}. * The class is supposed to be substituted by a {@link Join}. 
 */
-public class Lookup extends UnaryPlan implements SurrogateLogicalPlan, TelemetryAware {
+public class Lookup extends UnaryPlan implements SurrogateLogicalPlan, TelemetryAware, SortAgnostic {
     public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Lookup", Lookup::new);

     private final Expression tableName;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java
index e700ad90afdab..f65811fc26526 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java
@@ -23,7 +23,7 @@
 import java.util.List;
 import java.util.Objects;

-public class MvExpand extends UnaryPlan implements TelemetryAware {
+public class MvExpand extends UnaryPlan implements TelemetryAware, SortAgnostic {
     public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "MvExpand", MvExpand::new);

     private final NamedExpression target;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java
index 051e2c7769bde..ddb07e0490db3 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware;
+import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware;
 import org.elasticsearch.xpack.esql.capabilities.TelemetryAware;
 import org.elasticsearch.xpack.esql.common.Failures;
 import org.elasticsearch.xpack.esql.core.capabilities.Resolvables;
@@ -25,7 +26,12 @@
 import static org.elasticsearch.xpack.esql.common.Failure.fail;

-public class OrderBy extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware {
+public class OrderBy extends UnaryPlan
+    implements
+        PostAnalysisVerificationAware,
+        PostOptimizationVerificationAware,
+        TelemetryAware,
+        SortAgnostic {
     public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "OrderBy", OrderBy::new);

     private final List order;

@@ -109,4 +115,9 @@ public void postAnalysisVerification(Failures failures) {
             }
         });
     }
+
+    @Override
+    public void postOptimizationVerification(Failures failures) {
+        failures.add(fail(this, "Unbounded sort not supported yet [{}], please add a limit", this.sourceText()));
+    }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java
index e12a8cb557fde..a36341f60525a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java
@@ -25,7 +25,7 @@
 /**
  * A {@code Project} is a {@code Plan} with one child. In {@code SELECT x FROM y}, the "SELECT" statement is a Project.
*/ -public class Project extends UnaryPlan { +public class Project extends UnaryPlan implements SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Project", Project::new); private final List projections; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java index d691507b62cb3..f111b5d03edb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java @@ -24,7 +24,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public abstract class RegexExtract extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware { +public abstract class RegexExtract extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware, SortAgnostic { protected final Expression input; protected final List extractedFields; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java index 7887d8ed66b99..c609bfdae87e7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java @@ -21,7 +21,7 @@ import java.util.List; import java.util.Objects; -public class Rename extends UnaryPlan implements TelemetryAware { +public class Rename extends UnaryPlan implements TelemetryAware, SortAgnostic { private final List renamings; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/SortAgnostic.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/SortAgnostic.java new file mode 100644 index 0000000000000..3955b542ca496 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/SortAgnostic.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical; + +/** + * This interface is intended to check redundancy of a previous SORT. + *
+ * An example is with commands that compute values record by record, regardless of the input order
+ * and that don't rely on the context (intended as previous/next records).
+ *
+ * Example 1: if a MY_COMMAND that implements this interface is used between two sorts,
+ * then we can assume that
+ *
+ *   | SORT x, y, z | MY_COMMAND | SORT a, b, c
+ *
+ * is equivalent to
+ *
+ *   | MY_COMMAND | SORT a, b, c
+ *
+ * Example 2: commands that make previous order irrelevant, e.g. because they collapse the results;
+ * STATS is one of them, e.g.
+ *
+ *   | SORT x, y, z | STATS count(*)
+ *
+ * is equivalent to
+ *
+ *   | STATS count(*)
+ *
+ * and if MY_COMMAND implements this interface, then
+ *
+ *   | SORT x, y, z | MY_COMMAND | STATS count(*)
+ *
+ * is equivalent to
+ *
+ *   | MY_COMMAND | STATS count(*)
+ *
+ * In all the other cases, e.g. if the command does not implement this interface,
+ * then we assume that the previous SORT is still relevant and cannot be pruned.
+ *
+ * E.g. LIMIT does not implement this interface, because
+ *
+ *   | SORT x, y, z | LIMIT 10 | SORT a, b, c
+ *
+ * is NOT equivalent to
+ *
+ *   | LIMIT 10 | SORT a, b, c
+ * + * For n-ary plans that implement this interface, + * we assume that the above applies to all the children + * + */ +public interface SortAgnostic {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/InlineJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/InlineJoin.java index 87c9db1db4807..e3daa4fcbfb99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/InlineJoin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/InlineJoin.java @@ -16,10 +16,9 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; @@ -71,10 +70,9 @@ public static LogicalPlan inlineData(InlineJoin target, LocalRelation data) { List aliases = new ArrayList<>(schema.size()); for (int i = 0; i < schema.size(); i++) { Attribute attr = schema.get(i); - aliases.add(new Alias(attr.source(), attr.name(), Literal.of(attr, BlockUtils.toJavaObject(blocks[i], 0)))); + aliases.add(new Alias(attr.source(), attr.name(), Literal.of(attr, BlockUtils.toJavaObject(blocks[i], 0)), attr.id())); } - LogicalPlan left = target.left(); - return new Project(target.source(), left, CollectionUtils.combine(left.output(), aliases)); + return new Eval(target.source(), target.left(), aliases); } else { return target.replaceRight(data); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index 997bff70663bd..14877abb62272 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.plan.logical.BinaryPlan; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.SortAgnostic; import java.io.IOException; import java.util.ArrayList; @@ -32,7 +33,7 @@ import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT; -public class Join extends BinaryPlan implements PostAnalysisVerificationAware { +public class Join extends BinaryPlan implements PostAnalysisVerificationAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Join", Join::new); private final JoinConfig config; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java index 42cf3528f2ae6..22ee88f8119ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java @@ -121,8 +121,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); } else { if (concreteIndices().keySet().equals(Set.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY))) { - String concreteIndex = concreteIndices().get(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); - new EsIndex(concreteIndex, Map.of(), Map.of(concreteIndex, IndexMode.STANDARD)).writeTo(out); + String enrichIndex = concreteIndices().get(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + EsIndex esIndex = new EsIndex(enrichIndex, Map.of(), Map.of(enrichIndex, IndexMode.STANDARD)); + esIndex.writeTo(out); } else { throw new IllegalStateException("expected a single concrete enrich index; got " + concreteIndices()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/OrderExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/OrderExec.java deleted file mode 100644 index 9d53e828f4f81..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/OrderExec.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.Order; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class OrderExec extends UnaryExec { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - PhysicalPlan.class, - "OrderExec", - OrderExec::new - ); - - private final List order; - - public OrderExec(Source source, PhysicalPlan child, List order) { - super(source, child); - this.order = order; - } - - private OrderExec(StreamInput in) throws IOException { - this( - Source.readFrom((PlanStreamInput) in), - in.readNamedWriteable(PhysicalPlan.class), - in.readCollectionAsList(org.elasticsearch.xpack.esql.expression.Order::new) - ); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); - out.writeNamedWriteable(child()); - out.writeCollection(order()); - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, OrderExec::new, child(), order); - } - - @Override - public OrderExec replaceChild(PhysicalPlan newChild) { - return new OrderExec(source(), newChild, order); - } - - public List order() { - return order; - } - - @Override - public int hashCode() { - return Objects.hash(order, child()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - OrderExec other = (OrderExec) obj; - - return Objects.equals(order, other.order) && Objects.equals(child(), 
other.child()); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 072bae21da2a3..8fb51457b6a8a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -95,7 +95,7 @@ public final PhysicalOperation groupingPhysicalOperation( aggregatorMode, sourceLayout, false, // non-grouping - s -> aggregatorFactories.add(s.supplier.aggregatorFactory(s.mode)) + s -> aggregatorFactories.add(s.supplier.aggregatorFactory(s.mode, s.channels)) ); if (aggregatorFactories.isEmpty() == false) { @@ -169,7 +169,7 @@ else if (aggregatorMode.isOutputPartial()) { aggregatorMode, sourceLayout, true, // grouping - s -> aggregatorFactories.add(s.supplier.groupingAggregatorFactory(s.mode)) + s -> aggregatorFactories.add(s.supplier.groupingAggregatorFactory(s.mode, s.channels)) ); if (groupSpecs.size() == 1 && groupSpecs.get(0).channel == null) { @@ -251,7 +251,7 @@ public static List intermediateAttributes(List channels, AggregatorMode mode) {} private void aggregatesToFactory( @@ -308,11 +308,12 @@ else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { } else { throw new EsqlIllegalArgumentException("illegal aggregation mode"); } + + AggregatorFunctionSupplier aggSupplier = supplier(aggregateFunction); + List inputChannels = sourceAttr.stream().map(attr -> layout.get(attr.id()).channel()).toList(); assert inputChannels.stream().allMatch(i -> i >= 0) : inputChannels; - AggregatorFunctionSupplier aggSupplier = supplier(aggregateFunction, inputChannels); - // apply the filter only in the initial phase - as the rest of the data is already filtered if (aggregateFunction.hasFilter() && mode.isInputPartial() == false) { EvalOperator.ExpressionEvaluator.Factory evalFactory = EvalMapper.toEvaluator( @@ -322,15 +323,15 @@ else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { ); aggSupplier = new FilteredAggregatorFunctionSupplier(aggSupplier, evalFactory); } - consumer.accept(new AggFunctionSupplierContext(aggSupplier, mode)); + consumer.accept(new AggFunctionSupplierContext(aggSupplier, inputChannels, mode)); } } } } - private static AggregatorFunctionSupplier supplier(AggregateFunction aggregateFunction, List inputChannels) { + private static AggregatorFunctionSupplier supplier(AggregateFunction aggregateFunction) { if (aggregateFunction instanceof ToAggregator delegate) { - return delegate.supplier(inputChannels); + return delegate.supplier(); } throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index a66a302354df2..740e39ea77bb4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.core.Tuple; import 
org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -20,89 +19,20 @@ import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; -import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; -import org.elasticsearch.xpack.esql.expression.function.aggregate.FromPartial; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; -import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; -import org.elasticsearch.xpack.esql.expression.function.aggregate.NumericAggregate; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; -import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; -import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent; -import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; -import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; -import java.lang.invoke.MethodHandle; -import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; import java.util.HashMap; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; import java.util.stream.Stream; /** * Static class used to convert aggregate expressions to the named expressions that represent their intermediate state. - *
- * At class load time, the mapper is populated with all supported aggregate functions and their intermediate state.
- *
- * Reflection is used to call the {@code intermediateStateDesc()} static method of the aggregate functions,
- * but the function classes are found based on the existing information within this class.
- *
- * This class must be updated when aggregations are created or updated, by adding the new aggs or types to the corresponding methods.
- *
*/ final class AggregateMapper { - private static final List NUMERIC = List.of("Int", "Long", "Double"); - private static final List SPATIAL_EXTRA_CONFIGS = List.of("SourceValues", "DocValues"); - - /** List of all mappable ESQL agg functions (excludes surrogates like AVG = SUM/COUNT). */ - private static final List> AGG_FUNCTIONS = List.of( - Count.class, - CountDistinct.class, - Max.class, - MedianAbsoluteDeviation.class, - Min.class, - Percentile.class, - SpatialCentroid.class, - SpatialExtent.class, - StdDev.class, - Sum.class, - Values.class, - Top.class, - Rate.class, - - // internal function - FromPartial.class, - ToPartial.class - ); - - /** Record of agg Class, type, and grouping (or non-grouping). */ - private record AggDef(Class aggClazz, String type, String extra, boolean grouping) { - public AggDef withoutExtra() { - return new AggDef(aggClazz, type, "", grouping); - } - } - - /** Map of AggDef types to intermediate named expressions. */ - private static final Map> MAPPER = AGG_FUNCTIONS.stream() - .flatMap(AggregateMapper::typeAndNames) - .flatMap(AggregateMapper::groupingAndNonGrouping) - .collect(Collectors.toUnmodifiableMap(aggDef -> aggDef, AggregateMapper::lookupIntermediateState)); - + // TODO: Do we need this cache? /** Cache of aggregates to intermediate expressions. */ private final HashMap> cache; @@ -148,143 +78,21 @@ private static List computeEntryForAgg(String aggAlias, Express } private static List entryForAgg(String aggAlias, AggregateFunction aggregateFunction, boolean grouping) { - var aggDef = new AggDef( - aggregateFunction.getClass(), - dataTypeToString(aggregateFunction.field().dataType(), aggregateFunction.getClass()), - aggregateFunction instanceof SpatialAggregateFunction ? "SourceValues" : "", - grouping - ); - var is = getNonNull(aggDef); - return isToNE(is, aggAlias).toList(); - } - - /** Gets the agg from the mapper - wrapper around map::get for more informative failure.*/ - private static List getNonNull(AggDef aggDef) { - var l = MAPPER.getOrDefault(aggDef, MAPPER.get(aggDef.withoutExtra())); - if (l == null) { - throw new EsqlIllegalArgumentException("Cannot find intermediate state for: " + aggDef); - } - return l; - } - - private static Stream, Tuple>> typeAndNames(Class clazz) { - List types; - List extraConfigs = List.of(""); - if (NumericAggregate.class.isAssignableFrom(clazz)) { - types = NUMERIC; - } else if (Max.class.isAssignableFrom(clazz) || Min.class.isAssignableFrom(clazz)) { - types = List.of("Boolean", "Int", "Long", "Double", "Ip", "BytesRef"); - } else if (clazz == Count.class) { - types = List.of(""); // no extra type distinction - } else if (clazz == SpatialCentroid.class) { - types = List.of("GeoPoint", "CartesianPoint"); - extraConfigs = SPATIAL_EXTRA_CONFIGS; - } else if (clazz == SpatialExtent.class) { - types = List.of("GeoPoint", "CartesianPoint", "GeoShape", "CartesianShape"); - extraConfigs = SPATIAL_EXTRA_CONFIGS; - } else if (Values.class.isAssignableFrom(clazz)) { - // TODO can't we figure this out from the function itself? 
- types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); - } else if (Top.class.isAssignableFrom(clazz)) { - types = List.of("Boolean", "Int", "Long", "Double", "Ip", "BytesRef"); - } else if (Rate.class.isAssignableFrom(clazz) || StdDev.class.isAssignableFrom(clazz)) { - types = List.of("Int", "Long", "Double"); - } else if (FromPartial.class.isAssignableFrom(clazz) || ToPartial.class.isAssignableFrom(clazz)) { - types = List.of(""); // no type - } else if (CountDistinct.class.isAssignableFrom(clazz)) { - types = Stream.concat(NUMERIC.stream(), Stream.of("Boolean", "BytesRef")).toList(); - } else { - assert false : "unknown aggregate type " + clazz; - throw new IllegalArgumentException("unknown aggregate type " + clazz); - } - return combine(clazz, types, extraConfigs); - } - - private static Stream, Tuple>> combine(Class clazz, List types, List extraConfigs) { - return combinations(types, extraConfigs).map(combo -> new Tuple<>(clazz, combo)); - } - - private static Stream> combinations(List types, List extraConfigs) { - return types.stream().flatMap(type -> extraConfigs.stream().map(config -> new Tuple<>(type, config))); - } - - private static Stream groupingAndNonGrouping(Tuple, Tuple> tuple) { - if (tuple.v1().isAssignableFrom(Rate.class)) { - // rate doesn't support non-grouping aggregations - return Stream.of(new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), true)); - } else if (tuple.v2().v1().equals("AggregateMetricDouble")) { - // TODO: support grouping aggregations for aggregate metric double - return Stream.of(new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), false)); + List intermediateState; + if (aggregateFunction instanceof ToAggregator toAggregator) { + var supplier = toAggregator.supplier(); + intermediateState = grouping ? supplier.groupingIntermediateStateDesc() : supplier.nonGroupingIntermediateStateDesc(); } else { - return Stream.of( - new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), true), - new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), false) - ); - } - } - - /** Retrieves the intermediate state description for a given class, type, and grouping. */ - private static List lookupIntermediateState(AggDef aggDef) { - try { - return (List) lookup(aggDef.aggClazz(), aggDef.type(), aggDef.extra(), aggDef.grouping()).invokeExact(); - } catch (Throwable t) { - // invokeExact forces us to handle any Throwable thrown by lookup. - throw new EsqlIllegalArgumentException(t); - } - } - - /** Looks up the intermediate state method for a given class, type, and grouping. */ - private static MethodHandle lookup(Class clazz, String type, String extra, boolean grouping) { - try { - return lookupRetry(clazz, type, extra, grouping); - } catch (IllegalAccessException | NoSuchMethodException | ClassNotFoundException e) { - throw new EsqlIllegalArgumentException(e); + throw new EsqlIllegalArgumentException("Aggregate has no defined intermediate state: " + aggregateFunction); } - } - - private static MethodHandle lookupRetry(Class clazz, String type, String extra, boolean grouping) throws IllegalAccessException, - NoSuchMethodException, ClassNotFoundException { - try { - return MethodHandles.lookup() - .findStatic( - Class.forName(determineAggName(clazz, type, extra, grouping)), - "intermediateStateDesc", - MethodType.methodType(List.class) - ); - } catch (NoSuchMethodException ignore) { - // Retry without the extra information. 
- return MethodHandles.lookup() - .findStatic( - Class.forName(determineAggName(clazz, type, "", grouping)), - "intermediateStateDesc", - MethodType.methodType(List.class) - ); - } - } - - /** Determines the engines agg class name, for the given class, type, and grouping. */ - private static String determineAggName(Class clazz, String type, String extra, boolean grouping) { - StringBuilder sb = new StringBuilder(); - sb.append(determinePackageName(clazz)).append("."); - sb.append(clazz.getSimpleName()); - sb.append(type); - sb.append(extra); - sb.append(grouping ? "Grouping" : ""); - sb.append("AggregatorFunction"); - return sb.toString(); - } - - /** Determines the engine agg package name, for the given class. */ - private static String determinePackageName(Class clazz) { - if (clazz.getSimpleName().startsWith("Spatial")) { - // All spatial aggs are in the spatial sub-package - return "org.elasticsearch.compute.aggregation.spatial"; - } - return "org.elasticsearch.compute.aggregation"; + return intermediateStateToNamedExpressions(intermediateState, aggAlias).toList(); } /** Maps intermediate state description to named expressions. */ - private static Stream isToNE(List intermediateStateDescs, String aggAlias) { + private static Stream intermediateStateToNamedExpressions( + List intermediateStateDescs, + String aggAlias + ) { return intermediateStateDescs.stream().map(is -> { final DataType dataType; if (Strings.isEmpty(is.dataType())) { @@ -308,37 +116,4 @@ private static DataType toDataType(ElementType elementType) { case FLOAT, NULL, DOC, COMPOSITE, UNKNOWN -> throw new EsqlIllegalArgumentException("unsupported agg type: " + elementType); }; } - - /** Returns the string representation for the data type. This reflects the engine's aggs naming structure. 
*/ - private static String dataTypeToString(DataType type, Class aggClass) { - if (aggClass == Count.class) { - return ""; // no type distinction - } - if (aggClass == ToPartial.class || aggClass == FromPartial.class) { - return ""; - } - if ((aggClass == Max.class || aggClass == Min.class) && type.equals(DataType.IP)) { - return "Ip"; - } - if (aggClass == Top.class && type.equals(DataType.IP)) { - return "Ip"; - } - - return switch (type) { - case DataType.BOOLEAN -> "Boolean"; - case DataType.INTEGER, DataType.COUNTER_INTEGER -> "Int"; - case DataType.LONG, DataType.DATETIME, DataType.COUNTER_LONG, DataType.DATE_NANOS -> "Long"; - case DataType.DOUBLE, DataType.COUNTER_DOUBLE -> "Double"; - case DataType.KEYWORD, DataType.IP, DataType.VERSION, DataType.TEXT, DataType.SEMANTIC_TEXT -> "BytesRef"; - case GEO_POINT -> "GeoPoint"; - case CARTESIAN_POINT -> "CartesianPoint"; - case GEO_SHAPE -> "GeoShape"; - case CARTESIAN_SHAPE -> "CartesianShape"; - case AGGREGATE_METRIC_DOUBLE -> "AggregateMetricDouble"; - case UNSUPPORTED, NULL, UNSIGNED_LONG, SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, SOURCE, DATE_PERIOD, TIME_DURATION, - DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException( - "illegal agg type: " + type.typeName() - ); - }; - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index 112f101ad842b..10c380f2db56d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -26,11 +26,13 @@ import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.SourceLoader; @@ -49,7 +51,9 @@ import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.KeywordEsField; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; +import org.elasticsearch.xpack.esql.core.type.PotentiallyUnmappedKeywordEsField; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -62,6 +66,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Function; @@ -110,28 +115,29 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi int docChannel = source.layout.get(sourceAttr.id()).channel(); for (Attribute attr : 
fieldExtractExec.attributesToExtract()) { layout.append(attr); - var unionTypes = findUnionTypes(attr); DataType dataType = attr.dataType(); MappedFieldType.FieldExtractPreference fieldExtractPreference = fieldExtractExec.fieldExtractPreference(attr); ElementType elementType = PlannerUtils.toElementType(dataType, fieldExtractPreference); - // Do not use the field attribute name, this can deviate from the field name for union types. - String fieldName = attr instanceof FieldAttribute fa ? fa.fieldName() : attr.name(); - boolean isUnsupported = dataType == DataType.UNSUPPORTED; - IntFunction loader = s -> getBlockLoaderFor(s, fieldName, isUnsupported, fieldExtractPreference, unionTypes); - fields.add(new ValuesSourceReaderOperator.FieldInfo(fieldName, elementType, loader)); + IntFunction loader = s -> getBlockLoaderFor(s, attr, fieldExtractPreference); + fields.add(new ValuesSourceReaderOperator.FieldInfo(getFieldName(attr), elementType, loader)); } return source.with(new ValuesSourceReaderOperator.Factory(fields, readers, docChannel), layout.build()); } - private BlockLoader getBlockLoaderFor( - int shardId, - String fieldName, - boolean isUnsupported, - MappedFieldType.FieldExtractPreference fieldExtractPreference, - MultiTypeEsField unionTypes - ) { + private static String getFieldName(Attribute attr) { + // Do not use the field attribute name, this can deviate from the field name for union types. + return attr instanceof FieldAttribute fa ? fa.fieldName() : attr.name(); + } + + private BlockLoader getBlockLoaderFor(int shardId, Attribute attr, MappedFieldType.FieldExtractPreference fieldExtractPreference) { DefaultShardContext shardContext = (DefaultShardContext) shardContexts.get(shardId); - BlockLoader blockLoader = shardContext.blockLoader(fieldName, isUnsupported, fieldExtractPreference); + if (attr instanceof FieldAttribute fa && fa.field() instanceof PotentiallyUnmappedKeywordEsField kf) { + shardContext = new DefaultShardContextForUnmappedField(shardContext, kf); + } + + boolean isUnsupported = attr.dataType() == DataType.UNSUPPORTED; + BlockLoader blockLoader = shardContext.blockLoader(getFieldName(attr), isUnsupported, fieldExtractPreference); + MultiTypeEsField unionTypes = findUnionTypes(attr); if (unionTypes != null) { String indexName = shardContext.ctx.index().getName(); Expression conversion = unionTypes.getConversionExpressionForIndex(indexName); @@ -142,7 +148,25 @@ private BlockLoader getBlockLoaderFor( return blockLoader; } - private MultiTypeEsField findUnionTypes(Attribute attr) { + /** A hack to pretend an unmapped field still exists. */ + private static class DefaultShardContextForUnmappedField extends DefaultShardContext { + private final KeywordEsField unmappedEsField; + + DefaultShardContextForUnmappedField(DefaultShardContext ctx, PotentiallyUnmappedKeywordEsField unmappedEsField) { + super(ctx.index, ctx.ctx, ctx.aliasFilter); + this.unmappedEsField = unmappedEsField; + } + + @Override + protected @Nullable MappedFieldType fieldType(String name) { + var superResult = super.fieldType(name); + return superResult == null && name.equals(unmappedEsField.getName()) + ? 
new KeywordFieldMapper.KeywordFieldType(name, false /* isIndexed */, false /* hasDocValues */, Map.of() /* meta */) + : superResult; + } + } + + private static @Nullable MultiTypeEsField findUnionTypes(Attribute attr) { if (attr instanceof FieldAttribute fa && fa.field() instanceof MultiTypeEsField multiTypeEsField) { return multiTypeEsField; } @@ -237,12 +261,8 @@ public final Operator.OperatorFactory ordinalGroupingOperatorFactory( .toList(); // The grouping-by values are ready, let's group on them directly. // Costin: why are they ready and not already exposed in the layout? - boolean isUnsupported = attrSource.dataType() == DataType.UNSUPPORTED; - var unionTypes = findUnionTypes(attrSource); - // Do not use the field attribute name, this can deviate from the field name for union types. - String fieldName = attrSource instanceof FieldAttribute fa ? fa.fieldName() : attrSource.name(); return new OrdinalsGroupingOperator.OrdinalsGroupingOperatorFactory( - shardIdx -> getBlockLoaderFor(shardIdx, fieldName, isUnsupported, NONE, unionTypes), + shardIdx -> getBlockLoaderFor(shardIdx, attrSource, NONE), vsShardContexts, groupElementType, docChannel, @@ -315,7 +335,7 @@ public BlockLoader blockLoader( if (asUnsupportedSource) { return BlockLoader.CONSTANT_NULLS; } - MappedFieldType fieldType = ctx.getFieldType(name); + MappedFieldType fieldType = fieldType(name); if (fieldType == null) { // the field does not exist in this context return BlockLoader.CONSTANT_NULLS; @@ -363,6 +383,10 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { return loader; } + + protected @Nullable MappedFieldType fieldType(String name) { + return ctx.getFieldType(name); + } } private static class TypeConvertingBlockLoader implements BlockLoader { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index aa24ea113cb48..c4985b029cfcd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -169,7 +169,7 @@ public LocalExecutionPlanner( /** * turn the given plan into a list of drivers to execute */ - public LocalExecutionPlan plan(FoldContext foldCtx, PhysicalPlan localPhysicalPlan) { + public LocalExecutionPlan plan(String taskDescription, FoldContext foldCtx, PhysicalPlan localPhysicalPlan) { var context = new LocalExecutionPlannerContext( new ArrayList<>(), new Holder<>(DriverParallelism.SINGLE), @@ -190,7 +190,7 @@ public LocalExecutionPlan plan(FoldContext foldCtx, PhysicalPlan localPhysicalPl final TimeValue statusInterval = configuration.pragmas().statusInterval(); context.addDriverFactory( new DriverFactory( - new DriverSupplier(context.bigArrays, context.blockFactory, physicalOperation, statusInterval, settings), + new DriverSupplier(taskDescription, context.bigArrays, context.blockFactory, physicalOperation, statusInterval, settings), context.driverParallelism().get() ) ); @@ -831,6 +831,7 @@ int pageSize(Integer estimatedRowSize) { } record DriverSupplier( + String taskDescription, BigArrays bigArrays, BlockFactory blockFactory, PhysicalOperation physicalOperation, @@ -857,6 +858,7 @@ public Driver apply(String sessionId) { success = true; return new Driver( sessionId, + taskDescription, System.currentTimeMillis(), System.nanoTime(), driverContext, diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ToAggregator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ToAggregator.java index 62bc0a96ab873..053d4bc839f11 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ToAggregator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ToAggregator.java @@ -9,11 +9,9 @@ import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; -import java.util.List; - /** * Expressions that have a mapping to an {@link AggregatorFunctionSupplier}. */ public interface ToAggregator { - AggregatorFunctionSupplier supplier(List inputChannels); + AggregatorFunctionSupplier supplier(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java index f95ae0e0783e5..217737de5309b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.plan.logical.LeafPlan; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; @@ -28,7 +27,6 @@ import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; -import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -81,10 +79,6 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { return new LimitExec(limit.source(), mappedChild, limit.limit()); } - if (unary instanceof OrderBy o) { - return new OrderExec(o.source(), mappedChild, o.order()); - } - if (unary instanceof TopN topN) { return new TopNExec(topN.source(), mappedChild, topN.order(), topN.limit(), null); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java index 8a4325ed84b2a..b4560b2e33555 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; @@ -32,7 +33,6 @@ import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; -import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import 
org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; @@ -105,7 +105,7 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { return enrichExec.child(); } if (f instanceof UnaryExec unaryExec) { - if (f instanceof LimitExec || f instanceof ExchangeExec || f instanceof OrderExec || f instanceof TopNExec) { + if (f instanceof LimitExec || f instanceof ExchangeExec || f instanceof TopNExec) { return f; } else { return unaryExec.child(); @@ -161,11 +161,6 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { return new LimitExec(limit.source(), mappedChild, limit.limit()); } - if (unary instanceof OrderBy o) { - mappedChild = addExchangeForFragment(o, mappedChild); - return new OrderExec(o.source(), mappedChild, o.order()); - } - if (unary instanceof TopN topN) { mappedChild = addExchangeForFragment(topN, mappedChild); return new TopNExec(topN.source(), mappedChild, topN.order(), topN.limit(), null); @@ -184,6 +179,10 @@ private PhysicalPlan mapBinary(BinaryPlan bp) { throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]"); } + if (join instanceof InlineJoin) { + return new FragmentExec(bp); + } + PhysicalPlan left = map(bp.left()); // only broadcast joins supported for now - hence push down as a streaming operator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java index 19ed77405daa2..21319a830a16e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java @@ -197,8 +197,7 @@ void runComputeOnRemoteCluster( }))) { var exchangeSource = new ExchangeSourceHandler( configuration.pragmas().exchangeBufferSize(), - transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), - computeListener.acquireAvoid() + transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) ); try (Releasable ignored = exchangeSource.addEmptySink()) { exchangeSink.addCompletionListener(computeListener.acquireAvoid()); @@ -206,6 +205,7 @@ void runComputeOnRemoteCluster( parentTask, new ComputeContext( localSessionId, + "remote_reduce", clusterAlias, List.of(), configuration, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java index 82943d23581fd..86af106ea7e42 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java @@ -19,6 +19,7 @@ record ComputeContext( String sessionId, + String taskDescription, String clusterAlias, List searchContexts, Configuration configuration, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java index 3d358b8c7a8a2..c8b8e84fd2478 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java @@ -47,7 +47,8 @@ final class ComputeListener implements Releasable { * Acquires a new listener 
that doesn't collect result */ ActionListener acquireAvoid() { - return refs.acquire().delegateResponse((l, e) -> { + var listener = ActionListener.assertAtLeastOnce(refs.acquire()); + return listener.delegateResponse((l, e) -> { try { runOnFailure.run(); } finally { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 71c2a65037e9a..3ddbef95f43b7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -155,6 +155,7 @@ public void execute( } var computeContext = new ComputeContext( newChildSession(sessionId), + "single", LOCAL_CLUSTER, List.of(), configuration, @@ -190,16 +191,16 @@ public void execute( * entire plan. */ List outputAttributes = physicalPlan.output(); + var exchangeSource = new ExchangeSourceHandler( + queryPragmas.exchangeBufferSize(), + transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) + ); + listener = ActionListener.runBefore(listener, () -> exchangeService.removeExchangeSourceHandler(sessionId)); + exchangeService.addExchangeSourceHandler(sessionId, exchangeSource); try (var computeListener = new ComputeListener(transportService.getThreadPool(), cancelQueryOnFailure, listener.map(profiles -> { execInfo.markEndQuery(); // TODO: revisit this time recording model as part of INLINESTATS improvements return new Result(outputAttributes, collectedPages, profiles, execInfo); }))) { - var exchangeSource = new ExchangeSourceHandler( - queryPragmas.exchangeBufferSize(), - transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), - ActionListener.runBefore(computeListener.acquireAvoid(), () -> exchangeService.removeExchangeSourceHandler(sessionId)) - ); - exchangeService.addExchangeSourceHandler(sessionId, exchangeSource); try (Releasable ignored = exchangeSource.addEmptySink()) { // run compute on the coordinator final AtomicBoolean localClusterWasInterrupted = new AtomicBoolean(); @@ -226,6 +227,7 @@ public void execute( rootTask, new ComputeContext( sessionId, + "final", LOCAL_CLUSTER, List.of(), configuration, @@ -394,7 +396,7 @@ public SourceProvider createSourceProvider() { // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) // it's doing this in the planning of EsQueryExec (the source of the data) // see also EsPhysicalOperationProviders.sourcePhysicalOperation - LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(context.foldCtx(), plan); + LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(context.taskDescription(), context.foldCtx(), plan); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Local execution plan:\n{}", localExecutionPlan.describe()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java index 7020932819421..ef3c162aa9b69 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java @@ -372,6 +372,7 @@ public void onFailure(Exception e) { assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH, ESQL_WORKER_THREAD_POOL_NAME); var 
computeContext = new ComputeContext( sessionId, + "data", clusterAlias, searchContexts, configuration, @@ -428,13 +429,14 @@ private void runComputeOnDataNode( task.addListener( () -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled())) ); - var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor, computeListener.acquireAvoid()); + var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, true, () -> {}, 1, ActionListener.noop()); var reductionListener = computeListener.acquireCompute(); computeService.runCompute( task, new ComputeContext( request.sessionId(), + "node_reduce", request.clusterAlias(), List.of(), request.configuration(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java index a4007a520ed30..f5f51029ae8a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java @@ -7,7 +7,8 @@ package org.elasticsearch.xpack.esql.plugin; -import org.elasticsearch.ResourceNotFoundException; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ActionFilters; @@ -16,10 +17,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -32,12 +31,11 @@ import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.esql.action.EsqlAsyncStopAction; +import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.action.EsqlQueryTask; import java.io.IOException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; @@ -55,6 +53,8 @@ public class TransportEsqlAsyncStopAction extends HandledTransportAction listener) { String asyncIdStr = asyncId.getEncoded(); - TransportEsqlQueryAction.EsqlQueryListener asyncListener = queryAction.getAsyncListener(asyncIdStr); - if (asyncListener == null) { + EsqlQueryTask asyncTask = getEsqlQueryTask(asyncId); + GetAsyncResultRequest getAsyncResultRequest = new GetAsyncResultRequest(asyncIdStr); + if (asyncTask == null) { // This should mean one of the two things: either bad request ID, or the query has already finished // In both cases, let regular async get deal with it. 
- var getAsyncResultRequest = new GetAsyncResultRequest(asyncIdStr); - // TODO: this should not be happening, but if the listener is not registered and the query is not finished, - // we give it some time to finish - getAsyncResultRequest.setWaitForCompletionTimeout(new TimeValue(1, TimeUnit.SECONDS)); + logger.debug("Async stop for task {}, no task present - passing to GetAsyncResultRequest", asyncIdStr); getResultsAction.execute(task, getAsyncResultRequest, listener); return; } - try { - EsqlQueryTask asyncTask = AsyncTaskIndexService.getTask(taskManager, asyncId, EsqlQueryTask.class); - if (false == security.currentUserHasAccessToTask(asyncTask)) { - throw new ResourceNotFoundException(asyncId + " not found"); + logger.debug("Async stop for task {} - stopping", asyncIdStr); + final EsqlExecutionInfo esqlExecutionInfo = asyncTask.executionInfo(); + if (esqlExecutionInfo != null) { + esqlExecutionInfo.markAsPartial(); + } + Runnable getResults = () -> getResultsAction.execute(task, getAsyncResultRequest, listener); + exchangeService.finishSessionEarly(sessionID(asyncId), ActionListener.running(() -> { + if (asyncTask.addCompletionListener(() -> ActionListener.running(getResults)) == false) { + getResults.run(); } + })); + } + + private EsqlQueryTask getEsqlQueryTask(AsyncExecutionId asyncId) { + try { + return AsyncTaskIndexService.getTaskAndCheckAuthentication(taskManager, security, asyncId, EsqlQueryTask.class); } catch (IOException e) { - throw new ResourceNotFoundException(asyncId + " not found", e); - } - // Here we will wait for both the response to become available and for the finish operation to complete - var responseHolder = new AtomicReference(); - try (var refs = new EsqlRefCountingListener(listener.map(unused -> responseHolder.get()))) { - asyncListener.addListener(refs.acquire().map(r -> { - responseHolder.set(r); - return null; - })); - asyncListener.markAsPartial(); - exchangeService.finishSessionEarly(sessionID(asyncId), refs.acquire()); + return null; } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index a32b4591943f4..d83239545c383 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -13,14 +13,12 @@ import org.elasticsearch.action.admin.cluster.stats.CCSUsageTelemetry; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; @@ -83,8 +81,6 @@ public class TransportEsqlQueryAction extends HandledTransportAction asyncListeners = ConcurrentCollections.newConcurrentMap(); @Inject @SuppressWarnings("this-escape") @@ -183,41 +179,11 
@@ private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener } } - // Subscribable listener that can keep track of the EsqlExecutionInfo - // Used to mark an async query as partial if it is stopped - public static class EsqlQueryListener extends SubscribableListener { - private EsqlExecutionInfo executionInfo; - - public EsqlQueryListener(EsqlExecutionInfo executionInfo) { - this.executionInfo = executionInfo; - } - - public EsqlExecutionInfo getExecutionInfo() { - return executionInfo; - } - - public void markAsPartial() { - if (executionInfo != null) { - executionInfo.markAsPartial(); - } - } - } - @Override public void execute(EsqlQueryRequest request, EsqlQueryTask task, ActionListener listener) { // set EsqlExecutionInfo on async-search task so that it is accessible to GET _query/async while the query is still running task.setExecutionInfo(createEsqlExecutionInfo(request)); - // Since the request is async here, we need to wrap the listener in a SubscribableListener so that we can collect the results from - // other endpoints, such as _query/async/stop - EsqlQueryListener subListener = new EsqlQueryListener(task.executionInfo()); - String asyncExecutionId = task.getExecutionId().getEncoded(); - subListener.addListener(ActionListener.runAfter(listener, () -> asyncListeners.remove(asyncExecutionId))); - asyncListeners.put(asyncExecutionId, subListener); - ActionListener.run(subListener, l -> innerExecute(task, request, l)); - } - - public EsqlQueryListener getAsyncListener(String executionId) { - return asyncListeners.get(executionId); + ActionListener.run(listener, l -> innerExecute(task, request, l)); } private void innerExecute(Task task, EsqlQueryRequest request, ActionListener listener) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java index b11a8580a1e18..9187ba567c405 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.esql.action.EsqlResolveFieldsAction; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DateEsField; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -50,7 +51,7 @@ public class IndexResolver { public static final Set ALL_FIELDS = Set.of("*"); - public static final Set INDEX_METADATA_FIELD = Set.of("_index"); + public static final Set INDEX_METADATA_FIELD = Set.of(MetadataAttribute.INDEX); public static final String UNMAPPED = "unmapped"; public static final IndicesOptions FIELD_CAPS_INDICES_OPTIONS = IndicesOptions.builder() @@ -91,7 +92,8 @@ public void resolveAsMergedMapping( } // public for testing only - public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResponse fieldCapsResponse) { + public static IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResponse fieldCapsResponse) { + var numberOfIndices = fieldCapsResponse.getIndexResponses().size(); assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH_COORDINATION); // too expensive to run this on a transport worker if (fieldCapsResponse.getIndexResponses().isEmpty()) { return 
IndexResolution.notFound(indexPattern); @@ -104,6 +106,7 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp String[] names = fieldsCaps.keySet().toArray(new String[0]); Arrays.sort(names); Map rootFields = new HashMap<>(); + Set partiallyUnmappedFields = new HashSet<>(); for (String name : names) { Map fields = rootFields; String fullName = name; @@ -128,8 +131,9 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp } // TODO we're careful to make isAlias match IndexResolver - but do we use it? + List fcs = fieldsCaps.get(fullName); EsField field = firstUnsupportedParent == null - ? createField(fieldCapsResponse, name, fullName, fieldsCaps.get(fullName), isAlias) + ? createField(fieldCapsResponse, name, fullName, fcs, isAlias) : new UnsupportedEsField( fullName, firstUnsupportedParent.getOriginalType(), @@ -137,6 +141,11 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp new HashMap<>() ); fields.put(name, field); + + var isPartiallyUnmapped = fcs.size() < numberOfIndices; + if (isPartiallyUnmapped) { + partiallyUnmappedFields.add(fullName); + } } Map unavailableRemotes = EsqlSessionCCSUtils.determineUnavailableRemoteClusters( @@ -152,11 +161,9 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp for (FieldCapabilitiesIndexResponse ir : fieldCapsResponse.getIndexResponses()) { allEmpty &= ir.get().isEmpty(); } - if (allEmpty) { - // If all the mappings are empty we return an empty set of resolved indices to line up with QL - return IndexResolution.valid(new EsIndex(indexPattern, rootFields, Map.of()), concreteIndices.keySet(), unavailableRemotes); - } - return IndexResolution.valid(new EsIndex(indexPattern, rootFields, concreteIndices), concreteIndices.keySet(), unavailableRemotes); + // If all the mappings are empty we return an empty set of resolved indices to line up with QL + var index = new EsIndex(indexPattern, rootFields, allEmpty ? Map.of() : concreteIndices, partiallyUnmappedFields); + return IndexResolution.valid(index, concreteIndices.keySet(), unavailableRemotes); } private static Map> collectFieldCaps(FieldCapabilitiesResponse fieldCapsResponse) { @@ -178,7 +185,7 @@ private static Map> collectFieldCaps(FieldC return fieldsCaps; } - private EsField createField( + private static EsField createField( FieldCapabilitiesResponse fieldCapsResponse, String name, String fullName, @@ -226,12 +233,12 @@ private EsField createField( return new EsField(name, type, new HashMap<>(), aggregatable, isAlias); } - private UnsupportedEsField unsupported(String name, IndexFieldCapabilities fc) { + private static UnsupportedEsField unsupported(String name, IndexFieldCapabilities fc) { String originalType = fc.metricType() == TimeSeriesParams.MetricType.COUNTER ? 
"counter" : fc.type(); return new UnsupportedEsField(name, originalType); } - private EsField conflictingTypes(String name, String fullName, FieldCapabilitiesResponse fieldCapsResponse) { + private static EsField conflictingTypes(String name, String fullName, FieldCapabilitiesResponse fieldCapsResponse) { Map> typesToIndices = new TreeMap<>(); for (FieldCapabilitiesIndexResponse ir : fieldCapsResponse.getIndexResponses()) { IndexFieldCapabilities fc = ir.get().get(fullName); @@ -246,7 +253,7 @@ private EsField conflictingTypes(String name, String fullName, FieldCapabilities return new InvalidMappedField(name, typesToIndices); } - private EsField conflictingMetricTypes(String name, String fullName, FieldCapabilitiesResponse fieldCapsResponse) { + private static EsField conflictingMetricTypes(String name, String fullName, FieldCapabilitiesResponse fieldCapsResponse) { TreeSet indices = new TreeSet<>(); for (FieldCapabilitiesIndexResponse ir : fieldCapsResponse.getIndexResponses()) { IndexFieldCapabilities fc = ir.get().get(fullName); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index bae20bb9b26d3..364d77bdd444c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -278,6 +278,10 @@ public final void test() throws Throwable { "CSV tests cannot correctly handle the field caps change", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.SEMANTIC_TEXT_FIELD_CAPS.capabilityName()) ); + assumeFalse( + "CSV tests cannot currently handle the _source field mapping directives", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.SOURCE_FIELD_MAPPING.capabilityName()) + ); if (Build.current().isSnapshot()) { assertThat( "Capability is not included in the enabled list capabilities on a snapshot build. 
Spelling mistake?", @@ -354,7 +358,10 @@ private static IndexResolution loadIndexResolution(CsvTestsDataLoader.MultiIndex .stream() .map(ds -> new MappingPerIndex(ds.indexName(), createMappingForIndex(ds))) .toList(); - return IndexResolution.valid(new EsIndex(datasets.indexPattern(), mergeMappings(mappings), indexModes)); + var mergedMappings = mergeMappings(mappings); + return IndexResolution.valid( + new EsIndex(datasets.indexPattern(), mergedMappings.mapping, indexModes, mergedMappings.partiallyUnmappedFields) + ); } private static Map createMappingForIndex(CsvTestsDataLoader.TestDataset dataset) { @@ -375,7 +382,10 @@ private static Map createMappingForIndex(CsvTestsDataLoader.Tes record MappingPerIndex(String index, Map mapping) {} - private static Map mergeMappings(List mappingsPerIndex) { + record MergedResult(Map mapping, Set partiallyUnmappedFields) {} + + private static MergedResult mergeMappings(List mappingsPerIndex) { + int numberOfIndices = mappingsPerIndex.size(); Map> columnNamesToFieldByIndices = new HashMap<>(); for (var mappingPerIndex : mappingsPerIndex) { for (var entry : mappingPerIndex.mapping().entrySet()) { @@ -385,9 +395,15 @@ private static Map mergeMappings(List mappings } } - return columnNamesToFieldByIndices.entrySet() + var partiallyUnmappedFields = columnNamesToFieldByIndices.entrySet() + .stream() + .filter(e -> e.getValue().size() < numberOfIndices) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + var mappings = columnNamesToFieldByIndices.entrySet() .stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> mergeFields(e.getKey(), e.getValue()))); + return new MergedResult(mappings, partiallyUnmappedFields); } private static EsField mergeFields(String index, Map columnNameToField) { @@ -493,7 +509,8 @@ private static TestPhysicalOperationProviders testOperationProviders( var indexPages = new ArrayList(); for (CsvTestsDataLoader.TestDataset dataset : datasets.datasets()) { var testData = loadPageFromCsv(CsvTests.class.getResource("/data/" + dataset.dataFileName()), dataset.typeMapping()); - indexPages.add(new TestPhysicalOperationProviders.IndexPage(dataset.indexName(), testData.v1(), testData.v2())); + Set mappedFields = loadMapping(dataset.mappingFileName()).keySet(); + indexPages.add(new TestPhysicalOperationProviders.IndexPage(dataset.indexName(), testData.v1(), testData.v2(), mappedFields)); } return TestPhysicalOperationProviders.create(foldCtx, indexPages); } @@ -615,7 +632,7 @@ void executeSubPlan( bigArrays, ByteSizeValue.ofBytes(randomLongBetween(1, BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE.getBytes() * 2)) ); - ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler(between(1, 64), executor, ActionListener.noop()); + ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler(between(1, 64), executor); ExchangeSinkHandler exchangeSink = new ExchangeSinkHandler(blockFactory, between(1, 64), threadPool::relativeTimeInMillis); LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner( @@ -639,6 +656,7 @@ void executeSubPlan( // replace fragment inside the coordinator plan List drivers = new ArrayList<>(); LocalExecutionPlan coordinatorNodeExecutionPlan = executionPlanner.plan( + "final", foldCtx, new OutputExec(coordinatorPlan, collectedPages::add) ); @@ -660,7 +678,7 @@ void executeSubPlan( throw new AssertionError("expected no failure", e); }) ); - LocalExecutionPlan dataNodeExecutionPlan = executionPlanner.plan(foldCtx, csvDataNodePhysicalPlan); + LocalExecutionPlan dataNodeExecutionPlan = 
executionPlanner.plan("data", foldCtx, csvDataNodePhysicalPlan); drivers.addAll(dataNodeExecutionPlan.createDrivers(getTestName())); Randomness.shuffle(drivers); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java index 3392ae9b6e3bc..df8cacff67f44 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java @@ -78,14 +78,15 @@ public static String randomIndexPattern(Feature... features) { index.insert(0, "-"); } - var pattern = maybeQuote(index.toString()); - if (canAdd(Features.CROSS_CLUSTER, features)) { - var cluster = randomIdentifier(); - pattern = maybeQuote(cluster + ":" + pattern); + var pattern = index.toString(); + if (pattern.contains("|")) { + pattern = quote(pattern); } + pattern = maybeQuote(pattern); - if (pattern.contains("|") && pattern.contains("\"") == false) { - pattern = quote(pattern); + if (canAdd(Features.CROSS_CLUSTER, features)) { + var cluster = maybeQuote(randomIdentifier()); + pattern = maybeQuote(cluster + ":" + pattern); } return pattern; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index dcb83dadfcf96..698291a54fa68 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -144,10 +144,6 @@ public void testNamedParams() throws IOException { } public void testNamedParamsForIdentifiersPatterns() throws IOException { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); String query = randomAlphaOfLengthBetween(1, 100); boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); @@ -260,10 +256,6 @@ public void testInvalidParams() throws IOException { } public void testInvalidParamsForIdentifiersPatterns() throws IOException { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); String query = randomAlphaOfLengthBetween(1, 100); boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java index ebfe1c8147073..cc4e70632d678 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.action; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.data.BlockWritables; @@ -49,6 +51,7 @@ private List randomDriverProfiles() { private DriverProfile randomDriverProfile() { return new 
DriverProfile( + RandomStrings.randomAsciiLettersOfLength(random(), 5), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 4fdb4a7bf042b..065495cbad937 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -723,6 +723,7 @@ public void testProfileXContent() { new EsqlQueryResponse.Profile( List.of( new DriverProfile( + "test", 1723489812649L, 1723489819929L, 20021, @@ -757,6 +758,7 @@ public void testProfileXContent() { "profile" : { "drivers" : [ { + "task_description" : "test", "start_millis" : 1723489812649, "stop_millis" : 1723489819929, "took_nanos" : 20021, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 1c288a9bc33f9..b575ddf4ce92a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.Build; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.fieldcaps.IndexFieldCapabilities; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.analysis.IndexAnalyzers; @@ -29,9 +30,12 @@ import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.PotentiallyUnmappedKeywordEsField; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; @@ -47,6 +51,7 @@ import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Insist; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; @@ -85,10 +90,12 @@ import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import 
static org.hamcrest.Matchers.matchesRegex; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; @@ -2344,10 +2351,6 @@ public void testCoalesceWithMixedNumericTypes() { } public void testNamedParamsForIdentifiers() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); assertProjectionWithMapping( """ from test @@ -2438,10 +2441,6 @@ public void testNamedParamsForIdentifiers() { } public void testInvalidNamedParamsForIdentifiers() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // missing field assertError( """ @@ -2511,10 +2510,6 @@ public void testInvalidNamedParamsForIdentifiers() { } public void testNamedParamsForIdentifierPatterns() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); assertProjectionWithMapping( """ from test @@ -2545,10 +2540,6 @@ public void testNamedParamsForIdentifierPatterns() { } public void testInvalidNamedParamsForIdentifierPatterns() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // missing pattern assertError( """ @@ -2603,6 +2594,176 @@ public void testFunctionNamedParamsAsFunctionArgument() { assertEquals(DataType.DOUBLE, ee.dataType()); } + public void testResolveInsist_fieldExists_insistedOutputContainsNoUnmappedFields() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + LogicalPlan plan = analyze("FROM test | INSIST_🐔 emp_no"); + + Attribute last = plan.output().getLast(); + assertThat(last.name(), is("emp_no")); + assertThat(last.dataType(), is(DataType.INTEGER)); + assertThat( + plan.output() + .stream() + .filter(a -> a instanceof FieldAttribute fa && fa.field() instanceof PotentiallyUnmappedKeywordEsField) + .toList(), + is(empty()) + ); + } + + public void testInsist_afterRowThrowsException() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + VerificationException e = expectThrows( + VerificationException.class, + () -> analyze("ROW x = 1 | INSIST_🐔 x", analyzer(TEST_VERIFIER)) + ); + assertThat(e.getMessage(), containsString("[insist] can only be used after [from] or [insist] commands, but was [ROW x = 1]")); + } + + public void testResolveInsist_fieldDoesNotExist_createsUnmappedField() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + LogicalPlan plan = analyze("FROM test | INSIST_🐔 foo"); + + var limit = as(plan, Limit.class); + var insist = as(limit.child(), Insist.class); + assertThat(insist.output(), hasSize(analyze("FROM test").output().size() + 1)); + var expectedAttribute = new FieldAttribute(Source.EMPTY, "foo", new PotentiallyUnmappedKeywordEsField("foo")); + assertThat(insist.insistedAttributes(), is(List.of(expectedAttribute))); + assertThat(insist.output().getLast(), is(expectedAttribute)); + } + + public void testResolveInsist_multiIndexFieldPartiallyMappedWithSingleKeywordType_createsUnmappedField() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + 
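(The resolutions built in these INSIST_🐔 tests exercise the partially-unmapped bookkeeping added to IndexResolver.mergedMappings earlier in this diff. A minimal sketch of that rule, reusing the diff's own local names purely for illustration:)

    // A field counts as partially unmapped when fewer index responses carry
    // capabilities for it than the number of indices that answered field-caps.
    var numberOfIndices = fieldCapsResponse.getIndexResponses().size();
    List<IndexFieldCapabilities> fcs = fieldsCaps.get(fullName);
    if (fcs.size() < numberOfIndices) {
        partiallyUnmappedFields.add(fullName); // e.g. "message" mapped in [foo] but absent from [bar]
    }

In the test below only [foo] maps "message" (as keyword), so INSIST_🐔 resolves it to a PotentiallyUnmappedKeywordEsField instead of failing.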
IndexResolution resolution = IndexResolver.mergedMappings( + "foo, bar", + new FieldCapabilitiesResponse( + List.of( + fieldCapabilitiesIndexResponse("foo", messageResponseMap("keyword")), + fieldCapabilitiesIndexResponse("bar", Map.of()) + ), + List.of() + ) + ); + + String query = "FROM foo, bar | INSIST_🐔 message"; + var plan = analyze(query, analyzer(resolution, TEST_VERIFIER, configuration(query))); + var limit = as(plan, Limit.class); + var insist = as(limit.child(), Insist.class); + var attribute = (FieldAttribute) EsqlTestUtils.singleValue(insist.output()); + assertThat(attribute.name(), is("message")); + assertThat(attribute.field(), is(new PotentiallyUnmappedKeywordEsField("message"))); + } + + public void testResolveInsist_multiIndexFieldExistsWithSingleTypeButIsNotKeywordAndMissingCast_createsAnInvalidMappedField() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + IndexResolution resolution = IndexResolver.mergedMappings( + "foo, bar", + new FieldCapabilitiesResponse( + List.of(fieldCapabilitiesIndexResponse("foo", messageResponseMap("long")), fieldCapabilitiesIndexResponse("bar", Map.of())), + List.of() + ) + ); + var plan = analyze("FROM foo, bar | INSIST_🐔 message", analyzer(resolution, TEST_VERIFIER)); + var limit = as(plan, Limit.class); + var insist = as(limit.child(), Insist.class); + var attribute = (UnsupportedAttribute) EsqlTestUtils.singleValue(insist.output()); + assertThat(attribute.name(), is("message")); + + String expected = "Cannot use field [message] due to ambiguities being mapped as [2] incompatible types: " + + "[keyword] enforced by INSIST command, and [long] in index mappings"; + assertThat(attribute.unresolvedMessage(), is(expected)); + } + + public void testResolveInsist_multiIndexFieldPartiallyExistsWithMultiTypesNoKeyword_createsAnInvalidMappedField() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + IndexResolution resolution = IndexResolver.mergedMappings( + "foo, bar", + new FieldCapabilitiesResponse( + List.of( + fieldCapabilitiesIndexResponse("foo", messageResponseMap("long")), + fieldCapabilitiesIndexResponse("bar", messageResponseMap("date")), + fieldCapabilitiesIndexResponse("bazz", Map.of()) + ), + List.of() + ) + ); + var plan = analyze("FROM foo, bar | INSIST_🐔 message", analyzer(resolution, TEST_VERIFIER)); + var limit = as(plan, Limit.class); + var insist = as(limit.child(), Insist.class); + var attr = (UnsupportedAttribute) EsqlTestUtils.singleValue(insist.output()); + + String expected = "Cannot use field [message] due to ambiguities being mapped as [3] incompatible types: " + + "[keyword] enforced by INSIST command, [datetime] in [bar], [long] in [foo]"; + assertThat(attr.unresolvedMessage(), is(expected)); + } + + public void testResolveInsist_multiIndexFieldPartiallyExistsWithMultiTypesWithKeyword_createsAnInvalidMappedField() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + IndexResolution resolution = IndexResolver.mergedMappings( + "foo, bar", + new FieldCapabilitiesResponse( + List.of( + fieldCapabilitiesIndexResponse("foo", messageResponseMap("long")), + fieldCapabilitiesIndexResponse("bar", messageResponseMap("date")), + fieldCapabilitiesIndexResponse("bazz", messageResponseMap("keyword")), + fieldCapabilitiesIndexResponse("qux", Map.of()) + ), + List.of() + ) + ); + var plan = analyze("FROM foo, bar | INSIST_🐔 message", analyzer(resolution, TEST_VERIFIER)); + var limit = as(plan, 
Limit.class); + var insist = as(limit.child(), Insist.class); + var attr = (UnsupportedAttribute) EsqlTestUtils.singleValue(insist.output()); + + String expected = "Cannot use field [message] due to ambiguities being mapped as [3] incompatible types: " + + "[datetime] in [bar], [keyword] enforced by INSIST command and in [bazz], [long] in [foo]"; + assertThat(attr.unresolvedMessage(), is(expected)); + } + + public void testResolveInsist_multiIndexFieldPartiallyExistsWithMultiTypesWithCast_castsAreNotSupported() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + IndexResolution resolution = IndexResolver.mergedMappings( + "foo, bar", + new FieldCapabilitiesResponse( + List.of( + fieldCapabilitiesIndexResponse("foo", messageResponseMap("long")), + fieldCapabilitiesIndexResponse("bar", messageResponseMap("date")), + fieldCapabilitiesIndexResponse("bazz", Map.of()) + ), + List.of() + ) + ); + VerificationException e = expectThrows( + VerificationException.class, + () -> analyze("FROM multi_index | INSIST_🐔 message | EVAL message = message :: keyword", analyzer(resolution, TEST_VERIFIER)) + ); + // This isn't the most informative error, but it'll do for now. + assertThat( + e.getMessage(), + containsString("EVAL does not support type [unsupported] as the return data type of expression [message]") + ); + } + + // TODO There's too much boilerplate involved here! We need a better way of creating FieldCapabilitiesResponses from a mapping or index. + private static FieldCapabilitiesIndexResponse fieldCapabilitiesIndexResponse( + String indexName, + Map fields + ) { + return new FieldCapabilitiesIndexResponse(indexName, indexName, fields, false, IndexMode.STANDARD); + } + + private static Map messageResponseMap(String type) { + return Map.of("message", new IndexFieldCapabilities("message", type, false, true, true, false, null, null)); + } + private void verifyUnsupported(String query, String errorMessage) { verifyUnsupported(query, errorMessage, "mapping-multi-field-variation.json"); } @@ -2654,7 +2815,7 @@ private static LogicalPlan analyzeWithEmptyFieldCapsResponse(String query) throw new FieldCapabilitiesIndexResponse("idx", "idx", Map.of(), true, IndexMode.STANDARD) ); FieldCapabilitiesResponse caps = new FieldCapabilitiesResponse(idxResponses, List.of()); - IndexResolution resolution = new IndexResolver(null).mergedMappings("test*", caps); + IndexResolution resolution = IndexResolver.mergedMappings("test*", caps); var analyzer = analyzer(resolution, TEST_VERIFIER, configuration(query)); return analyze(query, analyzer); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 4403477e51125..dac9b67ecfe33 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -2077,6 +2077,15 @@ public void testMatchOptions() { ); } + public void testInsistNotOnTopOfFrom() { + assumeTrue("requires snapshot builds", Build.current().isSnapshot()); + + assertThat( + error("FROM test | EVAL foo = 42 | INSIST_🐔 bar"), + containsString("1:29: [insist] can only be used after [from] or [insist] commands, but was [EVAL foo = 42]") + ); + } + private void query(String query) { query(query, defaultAnalyzer); } diff --git
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index 87ea6315d4f3b..e86fc1ffa2771 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -399,15 +399,15 @@ private Expression resolveSurrogates(Expression expression) { } private Aggregator aggregator(Expression expression, List inputChannels, AggregatorMode mode) { - AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(inputChannels); + AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(); - return new Aggregator(aggregatorFunctionSupplier.aggregator(driverContext()), mode); + return new Aggregator(aggregatorFunctionSupplier.aggregator(driverContext(), inputChannels), mode); } private GroupingAggregator groupingAggregator(Expression expression, List inputChannels, AggregatorMode mode) { - AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(inputChannels); + AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(); - return new GroupingAggregator(aggregatorFunctionSupplier.groupingAggregator(driverContext()), mode); + return new GroupingAggregator(aggregatorFunctionSupplier.groupingAggregator(driverContext(), inputChannels), mode); } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index f089e81621990..65e0a2f1b20ac 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -924,7 +924,7 @@ protected static void renderDocs(String name) throws IOException { description.returnType(), description.description(), description.variadic(), - description.isAggregation() + description.type() ); } renderTypes(name, description.args()); @@ -1216,7 +1216,11 @@ private static void renderKibanaFunctionDefinition( builder.field("operator", info.operator()); assertThat(isAggregation(), equalTo(false)); } else { - builder.field("type", isAggregation() ? 
"agg" : "eval"); + builder.field("type", switch (info.type()) { + case SCALAR -> "scalar"; + case AGGREGATE -> "agg"; + case GROUPING -> "grouping"; + }); } builder.field("name", name); builder.field("description", removeAsciidocLinks(info.description())); @@ -1253,7 +1257,7 @@ private static void renderKibanaFunctionDefinition( builder.startObject(); builder.field("name", arg.name()); if (arg.mapArg()) { - builder.field("type", "function named parameters"); + builder.field("type", "function_named_parameters"); builder.field( "mapParams", arg.mapParams() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index 7f04f076ed15f..53e666738e182 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.junit.AfterClass; @@ -147,8 +148,8 @@ public String appendix() { } @Override - public boolean isAggregation() { - return orig.isAggregation(); + public FunctionType type() { + return orig.type(); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c80e374540d09..6e48ea55e8428 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.core.type.PotentiallyUnmappedKeywordEsField; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.Order; @@ -96,6 +97,7 @@ import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.optimizer.rules.logical.LiteralsOnTheRight; import org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneRedundantOrderBy; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownAndCombineLimits; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownEnrich; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownEval; @@ -134,6 +136,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.function.BiFunction; import java.util.function.Function; @@ -156,6 +159,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.localSource; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.referenceAttribute; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.singleValue; import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; @@ -191,23 +195,24 @@ //@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") public class LogicalPlanOptimizerTests extends ESTestCase { - private static EsqlParser parser; - private static Analyzer analyzer; private static LogicalOptimizerContext logicalOptimizerCtx; private static LogicalPlanOptimizer logicalOptimizer; + private static Map mapping; + private static Analyzer analyzer; private static Map mappingAirports; - private static Map mappingTypes; private static Analyzer analyzerAirports; + private static Map mappingTypes; private static Analyzer analyzerTypes; private static Map mappingExtra; private static Analyzer analyzerExtra; - private static EnrichResolution enrichResolution; - private static final LiteralsOnTheRight LITERALS_ON_THE_RIGHT = new LiteralsOnTheRight(); - private static Map metricMapping; private static Analyzer metricsAnalyzer; + private static Analyzer multiIndexAnalyzer; + + private static EnrichResolution enrichResolution; + private static final LiteralsOnTheRight LITERALS_ON_THE_RIGHT = new LiteralsOnTheRight(); private static class SubstitutionOnlyOptimizer extends LogicalPlanOptimizer { static SubstitutionOnlyOptimizer INSTANCE = new SubstitutionOnlyOptimizer(unboundLogicalOptimizerContext()); @@ -278,6 +283,21 @@ public static void init() { new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), metricsIndex, enrichResolution), TEST_VERIFIER ); + + var multiIndexMapping = loadMapping("mapping-basic.json"); + multiIndexMapping.put("partial_type_keyword", new EsField("partial_type_keyword", KEYWORD, emptyMap(), true)); + var multiIndex = IndexResolution.valid( + new EsIndex( + "multi_index", + multiIndexMapping, + Map.of("test1", IndexMode.STANDARD, "test2", IndexMode.STANDARD), + Set.of("partial_type_keyword") + ) + ); + multiIndexAnalyzer = new Analyzer( + new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), multiIndex, enrichResolution), + TEST_VERIFIER + ); } public void testEmptyProjections() { @@ -1839,10 +1859,9 @@ public void testCombineOrderByThroughFilter() { /** * Expected - * TopN[[Order[first_name{f}#170,ASC,LAST]],1000[INTEGER]] - * \_MvExpand[first_name{f}#170] - * \_TopN[[Order[emp_no{f}#169,ASC,LAST]],1000[INTEGER]] - * \_EsRelation[test][avg_worked_seconds{f}#167, birth_date{f}#168, emp_n..] + * TopN[[Order[first_name{r}#5575,ASC,LAST]],1000[INTEGER]] + * \_MvExpand[first_name{f}#5565,first_name{r}#5575,null] + * \_EsRelation[test][_meta_field{f}#5570, emp_no{f}#5564, first_name{f}#..] 
*/ public void testDontCombineOrderByThroughMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1854,9 +1873,7 @@ public void testDontCombineOrderByThroughMvExpand() { var topN = as(plan, TopN.class); assertThat(orderNames(topN), contains("first_name")); var mvExpand = as(topN.child(), MvExpand.class); - topN = as(mvExpand.child(), TopN.class); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExpand.child(), EsRelation.class); } /** @@ -2065,12 +2082,10 @@ public void testMultipleLookupJoinWithSortAndLimit() { } /** - * Expected - * EsqlProject[[emp_no{f}#350, first_name{f}#351, salary{f}#352]] - * \_TopN[[Order[salary{f}#352,ASC,LAST], Order[first_name{f}#351,ASC,LAST]],5[INTEGER]] - * \_MvExpand[first_name{f}#351] - * \_TopN[[Order[emp_no{f}#350,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#350, first_name{f}#351, salary{f}#352] + * EsqlProject[[emp_no{f}#10, first_name{r}#21, salary{f}#15]] + * \_TopN[[Order[salary{f}#15,ASC,LAST], Order[first_name{r}#21,ASC,LAST]],5[INTEGER]] + * \_MvExpand[first_name{f}#11,first_name{r}#21,null] + * \_EsRelation[test][_meta_field{f}#16, emp_no{f}#10, first_name{f}#11, ..] */ public void testPushDownLimitThroughMultipleSort_AfterMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2086,20 +2101,16 @@ public void testPushDownLimitThroughMultipleSort_AfterMvExpand() { assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary", "first_name")); var mvExp = as(topN.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#361, first_name{f}#362, salary{f}#363]] - * \_TopN[[Order[first_name{f}#362,ASC,LAST]],5[INTEGER]] - * \_TopN[[Order[salary{f}#363,ASC,LAST]],5[INTEGER]] - * \_MvExpand[first_name{f}#362] - * \_TopN[[Order[emp_no{f}#361,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#361, first_name{f}#362, salary{f}#363] + * EsqlProject[[emp_no{f}#2560, first_name{r}#2571, salary{f}#2565]] + * \_TopN[[Order[first_name{r}#2571,ASC,LAST]],5[INTEGER]] + * \_TopN[[Order[salary{f}#2565,ASC,LAST]],5[INTEGER]] + * \_MvExpand[first_name{f}#2561,first_name{r}#2571,null] + * \_EsRelation[test][_meta_field{f}#2566, emp_no{f}#2560, first_name{f}#..] 
*/ public void testPushDownLimitThroughMultipleSort_AfterMvExpand2() { LogicalPlan plan = optimizedPlan(""" @@ -2119,10 +2130,7 @@ public void testPushDownLimitThroughMultipleSort_AfterMvExpand2() { assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary")); var mvExp = as(topN.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** @@ -2231,8 +2239,7 @@ public void testPushDown_TheRightLimit_PastLookupJoin() { * \_TopN[[Order[salary{f}#12,ASC,LAST]],5[INTEGER]] * \_Eval[[100[INTEGER] AS b]] * \_MvExpand[first_name{f}#11] - * \_TopN[[Order[first_name{f}#11,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#10, first_name{f}#11, salary{f}#12] + * \_EsRelation[employees][emp_no{f}#10, first_name{f}#11, salary{f}#12] */ public void testPushDownLimit_PastEvalAndMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2250,22 +2257,18 @@ public void testPushDownLimit_PastEvalAndMvExpand() { assertThat(orderNames(topN), contains("salary")); var eval = as(topN.child(), Eval.class); var mvExp = as(eval.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("first_name")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#12, first_name{r}#22, salary{f}#17]] - * \_TopN[[Order[salary{f}#17,ASC,LAST], Order[first_name{r}#22,ASC,LAST]],1000[INTEGER]] - * \_Filter[gender{f}#14 == [46][KEYWORD] AND WILDCARDLIKE(first_name{r}#22)] - * \_MvExpand[first_name{f}#13,first_name{r}#22,null] - * \_TopN[[Order[emp_no{f}#12,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] - */ - public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField_ResultTruncationDefaultSize() { + * EsqlProject[[emp_no{f}#5885, first_name{r}#5896, salary{f}#5890]] + * \_TopN[[Order[salary{f}#5890,ASC,LAST], Order[first_name{r}#5896,ASC,LAST]],1000[INTEGER]] + * \_Filter[gender{f}#5887 == [46][KEYWORD] AND WILDCARDLIKE(first_name{r}#5896)] + * \_MvExpand[first_name{f}#5886,first_name{r}#5896,null] + * \_EsRelation[test][_meta_field{f}#5891, emp_no{f}#5885, first_name{f}#..] + */ + public void testRedundantSort_BeforeMvExpand_WithFilterOnExpandedField_ResultTruncationDefaultSize() { LogicalPlan plan = optimizedPlan(""" from test | sort emp_no @@ -2282,9 +2285,7 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField_ResultT var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); // TODO is it correct? Double-check AddDefaultTopN rule - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** @@ -2367,8 +2368,7 @@ public void testMultiMvExpand_SortDownBelow() { var mvExpand = as(topN.child(), MvExpand.class); var filter = as(mvExpand.child(), Filter.class); mvExpand = as(filter.child(), MvExpand.class); - var topN2 = as(mvExpand.child(), TopN.class); // TODO is it correct? 
Double-check AddDefaultTopN rule - as(topN2.child(), EsRelation.class); + as(mvExpand.child(), EsRelation.class); } /** @@ -2463,20 +2463,18 @@ public void testRemoveUnusedSortBeforeMvExpand_DefaultLimit10000() { assertThat(orderNames(topN), contains("first_name")); assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); var mvExpand = as(topN.child(), MvExpand.class); - var topN2 = as(mvExpand.child(), TopN.class); // TODO is it correct? Double-check AddDefaultTopN rule - as(topN2.child(), EsRelation.class); + as(mvExpand.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#104, first_name{f}#105, salary{f}#106]] - * \_TopN[[Order[salary{f}#106,ASC,LAST], Order[first_name{f}#105,ASC,LAST]],15[INTEGER]] - * \_Filter[gender{f}#215 == [46][KEYWORD] AND WILDCARDLIKE(first_name{f}#105)] - * \_MvExpand[first_name{f}#105] - * \_TopN[[Order[emp_no{f}#104,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#104, first_name{f}#105, salary{f}#106] - */ - public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField() { + * EsqlProject[[emp_no{f}#3517, first_name{r}#3528, salary{f}#3522]] + * \_TopN[[Order[salary{f}#3522,ASC,LAST], Order[first_name{r}#3528,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#3519 == [46][KEYWORD] AND WILDCARDLIKE(first_name{r}#3528)] + * \_MvExpand[first_name{f}#3518,first_name{r}#3528,null] + * \_EsRelation[test][_meta_field{f}#3523, emp_no{f}#3517, first_name{f}#..] + */ + public void testRedundantSort_BeforeMvExpand_WithFilterOnExpandedField() { LogicalPlan plan = optimizedPlan(""" from test | sort emp_no @@ -2494,24 +2492,18 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField() { var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - // the filter acts on first_name (the one used in mv_expand), so the limit 15 is not pushed down past mv_expand - // instead the default limit is added - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#104, first_name{f}#105, salary{f}#106]] - * \_TopN[[Order[salary{f}#106,ASC,LAST], Order[first_name{f}#105,ASC,LAST]],15[INTEGER]] - * \_Filter[gender{f}#215 == [46][KEYWORD] AND salary{f}#106 > 60000[INTEGER]] - * \_MvExpand[first_name{f}#105] - * \_TopN[[Order[emp_no{f}#104,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#104, first_name{f}#105, salary{f}#106] - */ - public void testAddDefaultLimit_BeforeMvExpand_WithFilter_NOT_OnExpandedField() { + * EsqlProject[[emp_no{f}#3421, first_name{r}#3432, salary{f}#3426]] + * \_TopN[[Order[salary{f}#3426,ASC,LAST], Order[first_name{r}#3432,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#3423 == [46][KEYWORD] AND salary{f}#3426 > 60000[INTEGER]] + * \_MvExpand[first_name{f}#3422,first_name{r}#3432,null] + * \_EsRelation[test][_meta_field{f}#3427, emp_no{f}#3421, first_name{f}#..] 
+ */ + public void testRedundantSort_BeforeMvExpand_WithFilter_NOT_OnExpandedField() { LogicalPlan plan = optimizedPlan(""" from test | sort emp_no @@ -2529,24 +2521,18 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilter_NOT_OnExpandedField() var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - // the filters after mv_expand do not act on the expanded field values, as such the limit 15 is the one being pushed down - // otherwise that limit wouldn't have pushed down and the default limit was instead being added by default before mv_expanded - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#116, first_name{f}#117 AS x, salary{f}#119]] - * \_TopN[[Order[salary{f}#119,ASC,LAST], Order[first_name{f}#117,ASC,LAST]],15[INTEGER]] - * \_Filter[gender{f}#118 == [46][KEYWORD] AND WILDCARDLIKE(first_name{f}#117)] - * \_MvExpand[first_name{f}#117] - * \_TopN[[Order[gender{f}#118,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#116, first_name{f}#117, gender{f}#118, sa..] - */ - public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedFieldAlias() { + * EsqlProject[[emp_no{f}#2085, first_name{r}#2096 AS x, salary{f}#2090]] + * \_TopN[[Order[salary{f}#2090,ASC,LAST], Order[first_name{r}#2096,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#2087 == [46][KEYWORD] AND WILDCARDLIKE(first_name{r}#2096)] + * \_MvExpand[first_name{f}#2086,first_name{r}#2096,null] + * \_EsRelation[test][_meta_field{f}#2091, emp_no{f}#2085, first_name{f}#..] 
+ */ + public void testRedundantSort_BeforeMvExpand_WithFilterOnExpandedFieldAlias() { LogicalPlan plan = optimizedPlan(""" from test | sort gender @@ -2565,11 +2551,7 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedFieldAlias() var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - // the filter uses an alias ("x") to the expanded field ("first_name"), so the default limit is used and not the one provided - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("gender")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** @@ -2937,6 +2919,45 @@ public void testPruneRedundantSortClausesUsingAlias() { ); } + public void testInsist_fieldDoesNotExist_createsUnmappedFieldInRelation() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + LogicalPlan plan = optimizedPlan("FROM test | INSIST_🐔 foo"); + + var project = as(plan, Project.class); + var limit = as(project.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + assertPartialTypeKeyword(relation, "foo"); + } + + public void testInsist_multiIndexFieldPartiallyExistsAndIsKeyword_castsAreNotSupported() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + var plan = planMultiIndex("FROM multi_index | INSIST_🐔 partial_type_keyword"); + var project = as(plan, Project.class); + var limit = as(project.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + + assertPartialTypeKeyword(relation, "partial_type_keyword"); + } + + public void testInsist_multipleInsistClauses_insistsAreFolded() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + var plan = planMultiIndex("FROM multi_index | INSIST_🐔 partial_type_keyword | INSIST_🐔 foo"); + var project = as(plan, Project.class); + var limit = as(project.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + + assertPartialTypeKeyword(relation, "partial_type_keyword"); + assertPartialTypeKeyword(relation, "foo"); + } + + private static void assertPartialTypeKeyword(EsRelation relation, String name) { + var attribute = (FieldAttribute) singleValue(relation.output().stream().filter(attr -> attr.name().equals(name)).toList()); + assertThat(attribute.field(), instanceOf(PotentiallyUnmappedKeywordEsField.class)); + } + public void testSimplifyLikeNoWildcard() { LogicalPlan plan = optimizedPlan(""" from test @@ -5925,6 +5946,10 @@ private LogicalPlan planTypes(String query) { return logicalOptimizer.optimize(analyzerTypes.analyze(parser.createStatement(query))); } + private LogicalPlan planMultiIndex(String query) { + return logicalOptimizer.optimize(multiIndexAnalyzer.analyze(parser.createStatement(query))); + } + private EsqlBinaryComparison extractPlannedBinaryComparison(String expression) { LogicalPlan plan = planTypes("FROM types | WHERE " + expression); @@ -7302,4 +7327,349 @@ public void testFunctionNamedParamsAsFunctionArgument() { assertEquals(new Literal(EMPTY, 2.0, DataType.DOUBLE), ee.value()); assertEquals(DataType.DOUBLE, ee.dataType()); } + + /** + * TopN[[Order[emp_no{f}#11,ASC,LAST]],1000[INTEGER]] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#22]] + * |_EsqlProject[[_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, 
gender{f}#13, hire_date{f}#18, job{f}#19, job.raw{f}#20, l + * anguages{f}#14 AS language_code, last_name{f}#15, long_noidx{f}#21, salary{f}#16, foo{r}#7]] + * | \_Eval[[[62 61 72][KEYWORD] AS foo]] + * | \_Filter[languages{f}#14 > 1[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#22, language_name{f}#23] + */ + public void testRedundantSortOnJoin() { + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + + var plan = optimizedPlan(""" + FROM test + | SORT languages + | RENAME languages AS language_code + | EVAL foo = "bar" + | LOOKUP JOIN languages_lookup ON language_code + | WHERE language_code > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var join = as(topN.child(), Join.class); + var project = as(join.left(), EsqlProject.class); + var eval = as(project.child(), Eval.class); + var filter = as(eval.child(), Filter.class); + as(filter.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#9,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#9 > 1[INTEGER]] + * \_MvExpand[languages{f}#12,languages{r}#20,null] + * \_Eval[[[62 61 72][KEYWORD] AS foo]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] + */ + public void testRedundantSortOnMvExpand() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL foo = "bar" + | MV_EXPAND languages + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var eval = as(mvExpand.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#11,ASC,LAST]],1000[INTEGER]] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#22]] + * |_Filter[emp_no{f}#11 > 1[INTEGER]] + * | \_MvExpand[languages{f}#14,languages{r}#24,null] + * | \_Eval[[languages{f}#14 AS language_code]] + * | \_EsRelation[test][_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#22, language_name{f}#23] + */ + public void testRedundantSortOnMvExpandAndJoin() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL language_code = languages + | MV_EXPAND languages + | WHERE emp_no > 1 + | LOOKUP JOIN languages_lookup ON language_code + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var join = as(topN.child(), Join.class); + var filter = as(join.left(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var eval = as(mvExpand.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#12,ASC,LAST]],1000[INTEGER]] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#23]] + * |_Filter[emp_no{f}#12 > 1[INTEGER]] + * | \_MvExpand[languages{f}#15,languages{r}#25,null] + * | \_Eval[[languages{f}#15 AS language_code]] + * | \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#23, language_name{f}#24] + */ + public void testMultipleRedundantSortOnMvExpandAndJoin() { + var plan = optimizedPlan(""" + FROM test + | SORT first_name + | EVAL language_code = languages + | MV_EXPAND languages + | sort last_name + | WHERE emp_no > 1 + | LOOKUP JOIN languages_lookup ON language_code + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var join = as(topN.child(), Join.class); + var filter = as(join.left(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var eval = as(mvExpand.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#16,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#16 > 1[INTEGER]] + * \_MvExpand[languages{f}#19,languages{r}#31] + * \_Dissect[foo{r}#5,Parser[pattern=%{z}, appendSeparator=, parser=org.elasticsearch.dissect.DissectParser@26f2cab],[z{r}#10 + * ]] + * \_Grok[foo{r}#5,Parser[pattern=%{WORD:y}, grok=org.elasticsearch.grok.Grok@6ea44ccd],[y{r}#9]] + * \_Enrich[ANY,[6c 61 6e 67 75 61 67 65 73 5f 69 64 78][KEYWORD],foo{r}#5,{"match":{"indices":[],"match_field":"id","enrich_ + * fields":["language_code","language_name"]}},{=languages_idx},[language_code{r}#29, language_name{r}#30]] + * \_Eval[[TOSTRING(languages{f}#19) AS foo]] + * \_EsRelation[test][_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, ..] + */ + public void testRedundantSortOnMvExpandEnrichGrokDissect() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL foo = to_string(languages) + | ENRICH languages_idx on foo + | GROK foo "%{WORD:y}" + | DISSECT foo "%{z}" + | MV_EXPAND languages + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var dissect = as(mvExpand.child(), Dissect.class); + var grok = as(dissect.child(), Grok.class); + var enrich = as(grok.child(), Enrich.class); + var eval = as(enrich.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#20,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#20 > 1[INTEGER]] + * \_MvExpand[languages{f}#23,languages{r}#37] + * \_Dissect[foo{r}#5,Parser[pattern=%{z}, appendSeparator=, parser=org.elasticsearch.dissect.DissectParser@3e922db0],[z{r}#1 + * 4]] + * \_Grok[foo{r}#5,Parser[pattern=%{WORD:y}, grok=org.elasticsearch.grok.Grok@4d6ad024],[y{r}#13]] + * \_Enrich[ANY,[6c 61 6e 67 75 61 67 65 73 5f 69 64 78][KEYWORD],foo{r}#5,{"match":{"indices":[],"match_field":"id","enrich_ + * fields":["language_code","language_name"]}},{=languages_idx},[language_code{r}#35, language_name{r}#36]] + * \_Join[LEFT,[language_code{r}#8],[language_code{r}#8],[language_code{f}#31]] + * |_Eval[[TOSTRING(languages{f}#23) AS foo, languages{f}#23 AS language_code]] + * | \_EsRelation[test][_meta_field{f}#26, emp_no{f}#20, first_name{f}#21, ..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#31] + */ + public void testRedundantSortOnMvExpandJoinEnrichGrokDissect() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL foo = to_string(languages), language_code = languages + | LOOKUP JOIN languages_lookup ON language_code + | ENRICH languages_idx on foo + | GROK foo "%{WORD:y}" + | DISSECT foo "%{z}" + | MV_EXPAND languages + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var dissect = as(mvExpand.child(), Dissect.class); + var grok = as(dissect.child(), Grok.class); + var enrich = as(grok.child(), Enrich.class); + var join = as(enrich.child(), Join.class); + var eval = as(join.left(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#23,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#23 > 1[INTEGER]] + * \_MvExpand[languages{f}#26,languages{r}#36] + * \_EsqlProject[[language_name{f}#35, foo{r}#5 AS bar, languages{f}#26, emp_no{f}#23]] + * \_Join[LEFT,[language_code{r}#8],[language_code{r}#8],[language_code{f}#34]] + * |_Project[[_meta_field{f}#29, emp_no{f}#23, first_name{f}#24, gender{f}#25, hire_date{f}#30, job{f}#31, job.raw{f}#32, l + * anguages{f}#26, last_name{f}#27, long_noidx{f}#33, salary{f}#28, foo{r}#5, languages{f}#26 AS language_code]] + * | \_Eval[[TOSTRING(languages{f}#26) AS foo]] + * | \_EsRelation[test][_meta_field{f}#29, emp_no{f}#23, first_name{f}#24, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#34, language_name{f}#35] + */ + public void testRedundantSortOnMvExpandJoinKeepDropRename() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL foo = to_string(languages), language_code = languages + | LOOKUP JOIN languages_lookup ON language_code + | KEEP language_name, language_code, foo, languages, emp_no + | DROP language_code + | RENAME foo AS bar + | MV_EXPAND languages + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var project = as(mvExpand.child(), Project.class); + var join = as(project.child(), Join.class); + var project2 = as(join.left(), Project.class); + var eval = as(project2.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#15,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#15 > 1[INTEGER]] + * \_MvExpand[foo{r}#10,foo{r}#29] + * \_Eval[[CONCAT(language_name{r}#28,[66 6f 6f][KEYWORD]) AS foo]] + * \_MvExpand[language_name{f}#27,language_name{r}#28] + * \_Join[LEFT,[language_code{r}#3],[language_code{r}#3],[language_code{f}#26]] + * |_Eval[[1[INTEGER] AS language_code]] + * | \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#26, language_name{f}#27] + */ + public void testEvalLookupMultipleSorts() { + var plan = optimizedPlan(""" + FROM test + | EVAL language_code = 1 + | LOOKUP JOIN languages_lookup ON language_code + | SORT language_name + | MV_EXPAND language_name + | EVAL foo = concat(language_name, "foo") + | MV_EXPAND foo + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var eval = as(mvExpand.child(), Eval.class); + mvExpand = as(eval.child(), MvExpand.class); + var join = as(mvExpand.child(), Join.class); + eval = as(join.left(), Eval.class); + as(eval.child(), EsRelation.class); + + } + + public void testUnboundedSortSimple() { + var query = """ + ROW x = [1,2,3], y = 1 + | SORT y + | MV_EXPAND x + | WHERE x > 2 + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 2:5: Unbounded sort not supported yet [SORT y] please add a limit")); + } + + public void testUnboundedSortJoin() { + var query = """ + ROW x = [1,2,3], y = 2, language_code = 1 + | SORT y + | LOOKUP JOIN languages_lookup ON language_code + | WHERE language_name == "foo" + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 2:5: Unbounded sort not supported yet [SORT y] please add a limit")); + } + + public void testUnboundedSortWithMvExpandAndFilter() { + var query = """ + FROM test + | EVAL language_code = 1 + | LOOKUP JOIN languages_lookup ON language_code + | SORT language_name + | EVAL foo = concat(language_name, "foo") + | MV_EXPAND foo + | WHERE foo == "foo" + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 4:3: Unbounded sort not supported yet [SORT language_name] please add a limit")); + } + + public void testUnboundedSortWithLookupJoinAndFilter() { + var query = """ + FROM test + | EVAL language_code = 1 + | EVAL foo = concat(language_code::string, "foo") + | MV_EXPAND foo + | SORT foo + | LOOKUP JOIN languages_lookup ON language_code + | WHERE language_name == "foo" + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 5:3: Unbounded sort not supported yet [SORT foo] please add a limit")); + } + + public void testUnboundedSortExpandFilter() { + var query = """ + ROW x = [1,2,3], y = 1 + | SORT x + | MV_EXPAND x + | WHERE x > 2 + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 2:5: Unbounded sort not supported yet [SORT x] please add a limit")); + } + + public void testPruneRedundantOrderBy() { + var rule = new PruneRedundantOrderBy(); + + var query = """ + row x = [1,2,3], y = 1 + | sort x + | mv_expand x + | sort x + | mv_expand x + | sort y + """; + LogicalPlan analyzed = analyzer.analyze(parser.createStatement(query)); + LogicalPlan optimized = rule.apply(analyzed); + + // check that all the redundant SORTs are removed in a single run + var limit = as(optimized, Limit.class); + var orderBy = as(limit.child(), OrderBy.class); + var mvExpand = as(orderBy.child(), MvExpand.class); + var mvExpand2 = as(mvExpand.child(), MvExpand.class); + as(mvExpand2.child(), 
Row.class); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 78aaf1f354723..4bf0c0563cfb6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -7596,7 +7596,7 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP TestBlockFactory.getNonBreakingInstance(), Settings.EMPTY, config, - new ExchangeSourceHandler(10, null, null)::createExchangeSource, + new ExchangeSourceHandler(10, null)::createExchangeSource, () -> exchangeSinkHandler.createExchangeSink(() -> {}), null, null, @@ -7604,7 +7604,7 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP List.of() ); - return planner.plan(FoldContext.small(), plan); + return planner.plan("test", FoldContext.small(), plan); } private List> findFieldNamesInLookupJoinDescription(LocalExecutionPlanner.LocalExecutionPlan physicalOperations) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java index 18d8bc9fb0a75..2ca1d8c4d1288 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java @@ -19,7 +19,7 @@ public void testDevelopmentInline() throws Exception { } public void testDevelopmentLookup() throws Exception { - parse("row a = 1 | lookup \"foo\" on j", "lookup"); + parse("row a = 1 | lookup_\uD83D\uDC14 \"foo\" on j", "lookup_\uD83D\uDC14"); } public void testDevelopmentMetrics() throws Exception { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 9bbada3cca53b..c8ff6ef1b55ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -1074,15 +1074,28 @@ public void testEnrich() { processingCommand("enrich _" + mode.name() + ":countries ON country_code") ); - expectError("from a | enrich countries on foo* ", "Using wildcards [*] in ENRICH WITH projections is not allowed [foo*]"); - expectError("from a | enrich countries on foo with bar*", "Using wildcards [*] in ENRICH WITH projections is not allowed [bar*]"); + expectError("from a | enrich countries on foo* ", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [foo*]"); + expectError("from a | enrich countries on * ", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]"); + expectError( + "from a | enrich countries on foo with bar*", + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [bar*]" + ); + expectError("from a | enrich countries on foo with *", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]"); expectError( "from a | enrich countries on foo with x = bar* ", - "Using wildcards [*] in ENRICH WITH projections is not allowed [bar*]" + "Using 
wildcards [*] in ENRICH WITH projections is not allowed, found [bar*]" + ); + expectError( + "from a | enrich countries on foo with x = * ", + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]" ); expectError( "from a | enrich countries on foo with x* = bar ", - "Using wildcards [*] in ENRICH WITH projections is not allowed [x*]" + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [x*]" + ); + expectError( + "from a | enrich countries on foo with * = bar ", + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]" ); expectError( "from a | enrich typo:countries on foo", @@ -1581,10 +1594,6 @@ public void testIntervalParam() { } public void testParamForIdentifier() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // field names can appear in eval/where/stats/sort/keep/drop/rename/dissect/grok/enrich/mvexpand // eval, where assertEquals( @@ -1842,10 +1851,6 @@ public void testParamForIdentifier() { } public void testParamForIdentifierPattern() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // name patterns can appear in keep and drop // all patterns LogicalPlan plan = statement( @@ -1935,10 +1940,6 @@ public void testParamForIdentifierPattern() { } public void testParamInInvalidPosition() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // param for pattern is not supported in eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand // where/stats/sort/dissect/grok are covered in RestEsqlTestCase List invalidParamPositions = List.of("eval ?f1 = 1", "stats x = ?f1(*)", "mv_expand ?f1", "rename ?f1 as ?f2"); @@ -1979,7 +1980,7 @@ public void testParamInInvalidPosition() { expectError( "from idx1 | " + enrich, List.of(paramAsPattern("f1", pattern), paramAsIdentifier("f2", "f.2"), paramAsIdentifier("f3", "f.3*")), - "Using wildcards [*] in ENRICH WITH projections is not allowed [" + pattern + "]" + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [" + pattern + "]" ); expectError( "from idx1 | " + enrich, @@ -1990,10 +1991,6 @@ public void testParamInInvalidPosition() { } public void testMissingParam() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // cover all processing commands eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand/keep/drop String error = "Unknown query parameter [f1], did you mean [f4]?"; String errorMvExpandFunctionNameCommandOption = "Query parameter [?f1] is null or undefined, cannot be used as an identifier"; @@ -2283,6 +2280,10 @@ public void testInvalidAlias() { expectError("from test | eval A = coalesce(\"Å\", Å)", "line 1:36: token recognition error at: 'Å'"); } + public void testInvalidRemoteClusterPattern() { + expectError("from \"rem:ote\":index", "cluster string [rem:ote] must not contain ':'"); + } + private LogicalPlan unresolvedRelation(String index) { return new UnresolvedRelation(EMPTY, new IndexPattern(EMPTY, index), false, List.of(), IndexMode.STANDARD, null, "FROM"); } @@ -2466,8 +2467,25 @@ by 
fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt assertEquals(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields()); UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class); assertEquals(ur, relation("test")); + } + public void testNamedFunctionArgumentInMapWithNamedParameters() { // map entry values provided in named parameter, arrays are not supported by named parameters yet + LinkedHashMap expectedMap1 = new LinkedHashMap<>(4); + expectedMap1.put("option1", "string"); + expectedMap1.put("option2", 1); + expectedMap1.put("option3", List.of(2.0, 3.0, 4.0)); + expectedMap1.put("option4", List.of(true, false)); + LinkedHashMap expectedMap2 = new LinkedHashMap<>(4); + expectedMap2.put("option1", List.of("string1", "string2")); + expectedMap2.put("option2", List.of(1, 2, 3)); + expectedMap2.put("option3", 2.0); + expectedMap2.put("option4", true); + LinkedHashMap expectedMap3 = new LinkedHashMap<>(4); + expectedMap3.put("option1", "string"); + expectedMap3.put("option2", 2.0); + expectedMap3.put("option3", List.of(1, 2, 3)); + expectedMap3.put("option4", List.of(true, false)); assertEquals( new Filter( EMPTY, @@ -2565,7 +2583,7 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt ) ); - plan = statement( + LogicalPlan plan = statement( """ from test | dissect ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]}) "%{bar}" @@ -2585,16 +2603,16 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt ) ) ); - grok = as(plan, Grok.class); + Grok grok = as(plan, Grok.class); assertEquals(function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))), grok.input()); assertEquals("%{WORD:foo}", grok.parser().pattern()); assertEquals(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields()); - dissect = as(grok.child(), Dissect.class); + Dissect dissect = as(grok.child(), Dissect.class); assertEquals(function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1))), dissect.input()); assertEquals("%{bar}", dissect.parser().pattern()); assertEquals("", dissect.parser().appendSeparator()); assertEquals(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields()); - ur = as(dissect.child(), UnresolvedRelation.class); + UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class); assertEquals(ur, relation("test")); } @@ -2953,4 +2971,10 @@ public void testInvalidJoinPatterns() { ); } } + + public void testInvalidInsistAsterisk() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + expectError("FROM text | EVAL x = 4 | INSIST_🐔 *", "INSIST doesn't support wildcards, found [*]"); + expectError("FROM text | EVAL x = 4 | INSIST_🐔 foo*", "INSIST doesn't support wildcards, found [foo*]"); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/OrderExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/OrderExecSerializationTests.java deleted file mode 100644 index 755f1cd4f52da..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/OrderExecSerializationTests.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.Order; -import org.elasticsearch.xpack.esql.expression.OrderSerializationTests; - -import java.io.IOException; -import java.util.List; - -public class OrderExecSerializationTests extends AbstractPhysicalPlanSerializationTests { - public static OrderExec randomOrderExec(int depth) { - Source source = randomSource(); - PhysicalPlan child = randomChild(depth); - List order = randomList(1, 10, OrderSerializationTests::randomOrder); - return new OrderExec(source, child, order); - } - - @Override - protected OrderExec createTestInstance() { - return randomOrderExec(0); - } - - @Override - protected OrderExec mutateInstance(OrderExec instance) throws IOException { - PhysicalPlan child = instance.child(); - List order = instance.order(); - if (randomBoolean()) { - child = randomValueOtherThan(child, () -> randomChild(0)); - } else { - order = randomValueOtherThan(order, () -> randomList(1, 10, OrderSerializationTests::randomOrder)); - } - return new OrderExec(instance.source(), child, order); - } - - @Override - protected boolean alwaysEmptySource() { - return true; - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index e1e606a6e84b1..7e5143d5a3ac0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -84,6 +84,7 @@ public void closeIndex() throws IOException { public void testLuceneSourceOperatorHugeRowSize() throws IOException { int estimatedRowSize = randomEstimatedRowSize(estimatedRowSizeIsHuge); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, @@ -110,6 +111,7 @@ public void testLuceneTopNSourceOperator() throws IOException { EsQueryExec.FieldSort sort = new EsQueryExec.FieldSort(sortField, Order.OrderDirection.ASC, Order.NullsPosition.LAST); Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, @@ -136,6 +138,7 @@ public void testLuceneTopNSourceOperatorDistanceSort() throws IOException { EsQueryExec.GeoDistanceSort sort = new EsQueryExec.GeoDistanceSort(sortField, Order.OrderDirection.ASC, 1, -1); Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 780045077f7b8..cf2c5735310ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -48,8 +48,10 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; 
import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; +import org.elasticsearch.xpack.esql.core.type.PotentiallyUnmappedKeywordEsField; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; @@ -63,8 +65,9 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.OptionalInt; +import java.util.Optional; import java.util.Random; +import java.util.Set; import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Function; @@ -90,9 +93,10 @@ public static TestPhysicalOperationProviders create(FoldContext foldContext, Lis return new TestPhysicalOperationProviders(foldContext, indexPages, createAnalysisRegistry()); } - public record IndexPage(String index, Page page, List columnNames) { - OptionalInt columnIndex(String columnName) { - return IntStream.range(0, columnNames.size()).filter(i -> columnNames.get(i).equals(columnName)).findFirst(); + public record IndexPage(String index, Page page, List columnNames, Set mappedFields) { + Optional columnIndex(String columnName) { + var result = IntStream.range(0, columnNames.size()).filter(i -> columnNames.get(i).equals(columnName)).findFirst(); + return result.isPresent() ? Optional.of(result.getAsInt()) : Optional.empty(); } } @@ -263,39 +267,72 @@ public String describe() { private Block getBlock(DocBlock docBlock, Attribute attribute, FieldExtractPreference extractPreference) { if (attribute instanceof UnsupportedAttribute) { - return docBlock.blockFactory().newConstantNullBlock(docBlock.getPositionCount()); - } - return extractBlockForColumn( - docBlock, - attribute.dataType(), - extractPreference, - attribute instanceof FieldAttribute fa && fa.field() instanceof MultiTypeEsField multiTypeEsField - ? 
(indexDoc, blockCopier) -> getBlockForMultiType(indexDoc, multiTypeEsField, blockCopier) - : (indexDoc, blockCopier) -> extractBlockForSingleDoc(indexDoc, attribute.name(), blockCopier) - ); + return getNullsBlock(docBlock); + } + BiFunction blockExtraction = getBlockExtraction(attribute); + return extractBlockForColumn(docBlock, attribute.dataType(), extractPreference, blockExtraction); + } + + private BiFunction getBlockExtraction(Attribute attribute) { + if (attribute instanceof FieldAttribute fa) { + if (fa.field() instanceof MultiTypeEsField m) { + return (doc, copier) -> getBlockForMultiType(doc, m, copier); + + } + if (fa.field() instanceof PotentiallyUnmappedKeywordEsField k) { + return (doc, copier) -> switch (extractBlockForSingleDoc(doc, k.getName(), copier)) { + case BlockResultMissing unused -> getNullsBlock(doc); + case BlockResultSuccess success -> success.block; + }; + } + } + return (indexDoc, blockCopier) -> switch (extractBlockForSingleDoc(indexDoc, attribute.name(), blockCopier)) { + case BlockResultMissing missing -> throw new EsqlIllegalArgumentException( + "Cannot find column named [{}] in {}", + missing.columnName, + missing.columnNames + ); + case BlockResultSuccess success -> success.block; + }; } private Block getBlockForMultiType(DocBlock indexDoc, MultiTypeEsField multiTypeEsField, TestBlockCopier blockCopier) { - var indexId = indexDoc.asVector().shards().getInt(0); - var indexPage = indexPages.get(indexId); - var conversion = (AbstractConvertFunction) multiTypeEsField.getConversionExpressionForIndex(indexPage.index); - Supplier nulls = () -> indexDoc.blockFactory().newConstantNullBlock(indexDoc.getPositionCount()); + var conversion = (AbstractConvertFunction) multiTypeEsField.getConversionExpressionForIndex(getIndexPage(indexDoc).index); if (conversion == null) { - return nulls.get(); + return getNullsBlock(indexDoc); } - var field = (FieldAttribute) conversion.field(); - return indexPage.columnIndex(field.fieldName()).isEmpty() - ? 
nulls.get() - : TypeConverter.fromConvertFunction(conversion).convert(extractBlockForSingleDoc(indexDoc, field.fieldName(), blockCopier)); + return switch (extractBlockForSingleDoc(indexDoc, ((FieldAttribute) conversion.field()).fieldName(), blockCopier)) { + case BlockResultMissing unused -> getNullsBlock(indexDoc); + case BlockResultSuccess success -> TypeConverter.fromConvertFunction(conversion).convert(success.block); + }; + } + + private IndexPage getIndexPage(DocBlock indexDoc) { + return indexPages.get(indexDoc.asVector().shards().getInt(0)); } - private Block extractBlockForSingleDoc(DocBlock docBlock, String columnName, TestBlockCopier blockCopier) { + private static Block getNullsBlock(DocBlock indexDoc) { + return indexDoc.blockFactory().newConstantNullBlock(indexDoc.getPositionCount()); + } + + private sealed interface BlockResult {} + + private record BlockResultSuccess(Block block) implements BlockResult {} + + private record BlockResultMissing(String columnName, List columnNames) implements BlockResult {} + + private BlockResult extractBlockForSingleDoc(DocBlock docBlock, String columnName, TestBlockCopier blockCopier) { var indexId = docBlock.asVector().shards().getInt(0); var indexPage = indexPages.get(indexId); - int columnIndex = indexPage.columnIndex(columnName) - .orElseThrow(() -> new EsqlIllegalArgumentException("Cannot find column named [{}] in {}", columnName, indexPage.columnNames)); - var originalData = indexPage.page.getBlock(columnIndex); - return blockCopier.copyBlock(originalData); + if (MetadataAttribute.INDEX.equals(columnName)) { + return new BlockResultSuccess( + docBlock.blockFactory() + .newConstantBytesRefBlockWith(new BytesRef(indexPage.index), blockCopier.docIndices.getPositionCount()) + ); + } + return indexPage.columnIndex(columnName) + .map(columnIndex -> new BlockResultSuccess(blockCopier.copyBlock(indexPage.page.getBlock(columnIndex)))) + .orElseGet(() -> new BlockResultMissing(columnName, indexPage.columnNames())); } private static void foreachIndexDoc(DocBlock docBlock, Consumer indexDocConsumer) { @@ -410,8 +447,9 @@ private Block extractBlockForColumn( ) { foreachIndexDoc(docBlock, indexDoc -> { TestBlockCopier blockCopier = blockCopier(dataType, extractPreference, indexDoc.asVector().docs()); - Block blockForIndex = extractBlock.apply(indexDoc, blockCopier); - blockBuilder.copyFrom(blockForIndex, 0, blockForIndex.getPositionCount()); + try (Block blockForIndex = extractBlock.apply(indexDoc, blockCopier)) { + blockBuilder.copyFrom(blockForIndex, 0, blockForIndex.getPositionCount()); + } }); var result = blockBuilder.build(); assert result.getPositionCount() == docBlock.getPositionCount() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java index f3b1d84e507a5..e58824290c49e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java @@ -155,11 +155,14 @@ protected ClusterComputeRequest mutateInstance(ClusterComputeRequest in) throws public void testFallbackIndicesOptions() throws Exception { ClusterComputeRequest request = createTestInstance(); - var version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_14_0, TransportVersions.V_8_16_0); - ClusterComputeRequest cloned = copyInstance(request, version); + var oldVersion = 
TransportVersionUtils.randomVersionBetween( + random(), + TransportVersions.V_8_14_0, + TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_16_0) + ); + ClusterComputeRequest cloned = copyInstance(request, oldVersion); assertThat(cloned.clusterAlias(), equalTo(request.clusterAlias())); assertThat(cloned.sessionId(), equalTo(request.sessionId())); - assertThat(cloned.configuration(), equalTo(request.configuration())); RemoteClusterPlan plan = cloned.remoteClusterPlan(); assertThat(plan.plan(), equalTo(request.remoteClusterPlan().plan())); assertThat(plan.targetIndices(), equalTo(request.remoteClusterPlan().targetIndices())); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java index 7db3216d1736d..f4deaa45f1f87 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.plugin; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.PlainActionFuture; @@ -62,6 +64,7 @@ private List randomProfiles() { for (int i = 0; i < numProfiles; i++) { profiles.add( new DriverProfile( + RandomStrings.randomAsciiLettersOfLength(random(), 5), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index e7ea479d199d8..6b797710bec31 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -1575,6 +1575,84 @@ public void testMultiLookupJoinSameIndexKeepAfter() { ); } + public void testInsist_fieldIsMappedToNonKeywordSingleIndex() { + assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + assertFieldNames( + "FROM partial_mapping_sample_data | INSIST_🐔 client_ip | KEEP @timestamp, client_ip", + Set.of("@timestamp", "@timestamp.*", "client_ip", "client_ip.*"), + Set.of() + ); + } + + public void testInsist_fieldIsMappedToKeywordSingleIndex() { + assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + assertFieldNames( + "FROM partial_mapping_sample_data | INSIST_🐔 message | KEEP @timestamp, message", + Set.of("@timestamp", "@timestamp.*", "message", "message.*"), + Set.of() + ); + } + + public void testInsist_fieldDoesNotExistSingleIndex() { + assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + assertFieldNames( + "FROM partial_mapping_sample_data | INSIST_🐔 foo | KEEP @timestamp, foo", + Set.of("@timestamp", "@timestamp.*", "foo", "foo.*"), + Set.of() + ); + } + + public void testInsist_fieldIsUnmappedSingleIndex() { + assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + assertFieldNames( + "FROM partial_mapping_sample_data | INSIST_🐔 unmapped_message | KEEP @timestamp, 
unmapped_message", + Set.of("@timestamp", "@timestamp.*", "unmapped_message", "unmapped_message.*"), + Set.of() + ); + } + + public void testInsist_multiFieldTestSingleIndex() { + assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + assertFieldNames( + "FROM partial_mapping_sample_data | INSIST_🐔 message, unmapped_message, client_ip, foo | KEEP @timestamp, unmapped_message", + Set.of( + "@timestamp", + "@timestamp.*", + "message", + "message.*", + "unmapped_message", + "unmapped_message.*", + "client_ip", + "client_ip.*", + "foo", + "foo.*" + ), + Set.of() + ); + } + + public void testInsist_fieldIsMappedToDifferentTypesMultiIndex() { + assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + assertFieldNames( + "FROM sample_data_ts_long, sample_data METADATA _index | INSIST_🐔 @timestamp | KEEP _index, @timestamp", + Set.of("@timestamp", "@timestamp.*"), + Set.of() + ); + } + + public void testInsist_multiFieldMappedMultiIndex() { + assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + assertFieldNames( + """ + FROM sample_data_ts_long, sample_data METADATA _index + | INSIST_🐔 @timestamp, unmapped_message + | INSIST_🐔 message, foo + | KEEP _index, @timestamp, message, foo""", + Set.of("@timestamp", "@timestamp.*", "message", "message.*", "unmapped_message", "unmapped_message.*", "foo", "foo.*"), + Set.of() + ); + } + private Set fieldNames(String query, Set enrichPolicyMatchFields) { var preAnalysisResult = new EsqlSession.PreAnalysisResult(null); return EsqlSession.fieldNames(parser.createStatement(query), enrichPolicyMatchFields, preAnalysisResult).fieldNames(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java index e4e10a5c6af19..8cf9f08165b7a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java @@ -54,7 +54,7 @@ private void resolve(String esTypeName, TimeSeriesParams.MetricType metricType, FieldCapabilitiesResponse caps = new FieldCapabilitiesResponse(idxResponses, List.of()); // IndexResolver uses EsqlDataTypeRegistry directly - IndexResolution resolution = new IndexResolver(null).mergedMappings("idx-*", caps); + IndexResolution resolution = IndexResolver.mergedMappings("idx-*", caps); EsField f = resolution.get().mapping().get(field); assertThat(f.getDataType(), equalTo(expected)); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index 0a2200ff912ac..47f34fa486daf 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -40,9 +40,35 @@ public void testAttachToDeployment() throws IOException { is(Map.of("num_allocations", 1, "num_threads", 1, "model_id", "attach_to_deployment", "deployment_id", "existing_deployment")) ); + var getModel = 
getModel(inferenceId); + serviceSettings = getModel.get("service_settings"); + assertThat( + getModel.toString(), + serviceSettings, + is(Map.of("num_allocations", 1, "num_threads", 1, "model_id", "attach_to_deployment", "deployment_id", "existing_deployment")) + ); + var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); + var updatedNumAllocations = randomIntBetween(1, 10); + var updatedEndpointConfig = updateEndpoint(inferenceId, updatedEndpointConfig(updatedNumAllocations), TaskType.SPARSE_EMBEDDING); + assertThat( + updatedEndpointConfig.get("service_settings"), + is( + Map.of( + "num_allocations", + updatedNumAllocations, + "num_threads", + 1, + "model_id", + "attach_to_deployment", + "deployment_id", + "existing_deployment" + ) + ) + ); + deleteModel(inferenceId); // assert deployment not stopped var stats = (List>) getTrainedModelStats(modelId).get("trained_model_stats"); @@ -80,9 +106,46 @@ public void testAttachWithModelId() throws IOException { ) ); + var getModel = getModel(inferenceId); + serviceSettings = getModel.get("service_settings"); + assertThat( + getModel.toString(), + serviceSettings, + is( + Map.of( + "num_allocations", + 1, + "num_threads", + 1, + "model_id", + "attach_with_model_id", + "deployment_id", + "existing_deployment_with_model_id" + ) + ) + ); + var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); + var updatedNumAllocations = randomIntBetween(1, 10); + var updatedEndpointConfig = updateEndpoint(inferenceId, updatedEndpointConfig(updatedNumAllocations), TaskType.SPARSE_EMBEDDING); + assertThat( + updatedEndpointConfig.get("service_settings"), + is( + Map.of( + "num_allocations", + updatedNumAllocations, + "num_threads", + 1, + "model_id", + "attach_with_model_id", + "deployment_id", + "existing_deployment_with_model_id" + ) + ) + ); + stopMlNodeDeployment(deploymentId); } @@ -189,6 +252,16 @@ private String endpointConfig(String modelId, String deploymentId) { """, modelId, deploymentId); } + private String updatedEndpointConfig(int numAllocations) { + return Strings.format(""" + { + "service_settings": { + "num_allocations": %d + } + } + """, numAllocations); + } + private Response startMlNodeDeploymemnt(String modelId, String deploymentId) throws IOException { String endPoint = "/_ml/trained_models/" + modelId diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index bb3f3e9b46c4d..36a4b95a7ca23 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -238,6 +238,11 @@ static Map updateEndpoint(String inferenceID, String modelConfig return putRequest(endpoint, modelConfig); } + static Map updateEndpoint(String inferenceID, String modelConfig) throws IOException { + String endpoint = Strings.format("_inference/%s/_update", inferenceID); + return putRequest(endpoint, modelConfig); + } + protected Map putPipeline(String pipelineId, String modelId) throws IOException { String endpoint = Strings.format("_ingest/pipeline/%s", pipelineId); String body = """ @@ -355,8 +360,7 @@ protected Deque 
unifiedCompletionInferOnMockService( List input, @Nullable Consumer responseConsumerCallback ) throws Exception { - var route = randomBoolean() ? "_stream" : "_unified"; // TODO remove unified route - var endpoint = Strings.format("_inference/%s/%s/%s", taskType, modelId, route); + var endpoint = Strings.format("_inference/%s/%s/_stream", taskType, modelId); return callAsyncUnified(endpoint, input, "user", responseConsumerCallback); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index b786cd1298495..793b3f7a9a349 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -369,6 +369,61 @@ public void testUnifiedCompletionInference() throws Exception { } } + public void testUpdateEndpointWithWrongTaskTypeInURL() throws IOException { + putModel("sparse_embedding_model", mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var e = expectThrows( + ResponseException.class, + () -> updateEndpoint( + "sparse_embedding_model", + updateConfig(null, randomAlphaOfLength(10), randomIntBetween(1, 10)), + TaskType.TEXT_EMBEDDING + ) + ); + assertThat(e.getMessage(), containsString("Task type must match the task type of the existing endpoint")); + } + + public void testUpdateEndpointWithWrongTaskTypeInBody() throws IOException { + putModel("sparse_embedding_model", mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var e = expectThrows( + ResponseException.class, + () -> updateEndpoint( + "sparse_embedding_model", + updateConfig(TaskType.TEXT_EMBEDDING, randomAlphaOfLength(10), randomIntBetween(1, 10)) + ) + ); + assertThat(e.getMessage(), containsString("Task type must match the task type of the existing endpoint")); + } + + public void testUpdateEndpointWithTaskTypeInURL() throws IOException { + testUpdateEndpoint(false, true); + } + + public void testUpdateEndpointWithTaskTypeInBody() throws IOException { + testUpdateEndpoint(true, false); + } + + public void testUpdateEndpointWithTaskTypeInBodyAndURL() throws IOException { + testUpdateEndpoint(true, true); + } + + @SuppressWarnings("unchecked") + private void testUpdateEndpoint(boolean taskTypeInBody, boolean taskTypeInURL) throws IOException { + String endpointId = "sparse_embedding_model"; + putModel(endpointId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + + int temperature = randomIntBetween(1, 10); + var expectedConfig = updateConfig(taskTypeInBody ? 
TaskType.SPARSE_EMBEDDING : null, randomAlphaOfLength(1), temperature); + Map updatedEndpoint; + if (taskTypeInURL) { + updatedEndpoint = updateEndpoint(endpointId, expectedConfig, TaskType.SPARSE_EMBEDDING); + } else { + updatedEndpoint = updateEndpoint(endpointId, expectedConfig); + } + + Map updatedTaskSettings = (Map) updatedEndpoint.get("task_settings"); + assertEquals(temperature, updatedTaskSettings.get("temperature")); + } + private static Iterator expectedResultsIterator(List input) { // The Locale needs to be ROOT to match what the test service is going to respond with return Stream.concat(input.stream().map(s -> s.toUpperCase(Locale.ROOT)).map(InferenceCrudIT::expectedResult), Stream.of("[DONE]")) diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java new file mode 100644 index 0000000000000..201f1250427a8 --- /dev/null +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java @@ -0,0 +1,271 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.integration; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.MinimalServiceSettings; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceComponents; +import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; +import org.junit.After; +import org.junit.Before; + +import java.util.Collection; +import java.util.EnumSet; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.hamcrest.CoreMatchers.is; +import static org.mockito.Mockito.mock; + +public class 
InferenceRevokeDefaultEndpointsIT extends ESSingleNodeTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private ModelRegistry modelRegistry; + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private String gatewayUrl; + + @Before + public void createComponents() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + webServer.start(); + gatewayUrl = getUrl(webServer); + modelRegistry = new ModelRegistry(client()); + } + + @After + public void shutdown() { + terminate(threadPool); + webServer.close(); + } + + @Override + protected boolean resetNodeAfterTest() { + return true; + } + + @Override + protected Collection> getPlugins() { + return pluginList(ReindexPlugin.class); + } + + public void testDefaultConfigs_Returns_DefaultChatCompletion_V1_WhenTaskTypeIsCorrect() throws Exception { + String responseJson = """ + { + "models": [ + { + "model_name": "rainbow-sprinkles", + "task_types": ["chat"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId(".rainbow-sprinkles-elastic", MinimalServiceSettings.chatCompletion(), service) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + } + } + + public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationReturnsEmpty() throws Exception { + { + String responseJson = """ + { + "models": [ + { + "model_name": "rainbow-sprinkles", + "task_types": ["chat"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId( + ".rainbow-sprinkles-elastic", + MinimalServiceSettings.chatCompletion(), + service + ) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + + var getModelListener = new PlainActionFuture(); + // persists the default endpoints + modelRegistry.getModel(".rainbow-sprinkles-elastic", getModelListener); + + var inferenceEntity = getModelListener.actionGet(TIMEOUT); + assertThat(inferenceEntity.inferenceEntityId(), is(".rainbow-sprinkles-elastic")); + assertThat(inferenceEntity.taskType(), is(TaskType.CHAT_COMPLETION)); + } + } + { + String noAuthorizationResponseJson = """ + { + "models": [] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(noAuthorizationResponseJson)); + + try (var service = createElasticInferenceService()) { + 
service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.noneOf(TaskType.class))); + assertTrue(service.defaultConfigIds().isEmpty()); + assertThat(service.supportedTaskTypes(), is(EnumSet.noneOf(TaskType.class))); + + var getModelListener = new PlainActionFuture(); + modelRegistry.getModel(".rainbow-sprinkles-elastic", getModelListener); + + var exception = expectThrows(ResourceNotFoundException.class, () -> getModelListener.actionGet(TIMEOUT)); + assertThat(exception.getMessage(), is("Inference endpoint not found [.rainbow-sprinkles-elastic]")); + } + } + } + + public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationDoesNotReturnAuthForIt() throws Exception { + { + String responseJson = """ + { + "models": [ + { + "model_name": "rainbow-sprinkles", + "task_types": ["chat"] + }, + { + "model_name": "elser-v2", + "task_types": ["embed/text/sparse"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId( + ".rainbow-sprinkles-elastic", + MinimalServiceSettings.chatCompletion(), + service + ) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.SPARSE_EMBEDDING))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + + var getModelListener = new PlainActionFuture(); + // persists the default endpoints + modelRegistry.getModel(".rainbow-sprinkles-elastic", getModelListener); + + var inferenceEntity = getModelListener.actionGet(TIMEOUT); + assertThat(inferenceEntity.inferenceEntityId(), is(".rainbow-sprinkles-elastic")); + assertThat(inferenceEntity.taskType(), is(TaskType.CHAT_COMPLETION)); + } + } + { + String noAuthorizationResponseJson = """ + { + "models": [ + { + "model_name": "elser-v2", + "task_types": ["embed/text/sparse"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(noAuthorizationResponseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.noneOf(TaskType.class))); + assertTrue(service.defaultConfigIds().isEmpty()); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING))); + + var getModelListener = new PlainActionFuture(); + modelRegistry.getModel(".rainbow-sprinkles-elastic", getModelListener); + + var exception = expectThrows(ResourceNotFoundException.class, () -> getModelListener.actionGet(TIMEOUT)); + assertThat(exception.getMessage(), is("Inference endpoint not found [.rainbow-sprinkles-elastic]")); + } + } + } + + private ElasticInferenceService createElasticInferenceService() { + var httpManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, httpManager); + + return new ElasticInferenceService( + senderFactory, + createWithEmptySettings(threadPool), + new ElasticInferenceServiceComponents(gatewayUrl), + 
modelRegistry, + new ElasticInferenceServiceAuthorizationHandler(gatewayUrl, threadPool) + ); + } +} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index 51ee42cf2f7f2..4fad6977ab852 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -10,10 +10,12 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.InferenceService; @@ -51,7 +53,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Function; @@ -70,6 +74,7 @@ import static org.mockito.Mockito.mock; public class ModelRegistryIT extends ESSingleNodeTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); private ModelRegistry modelRegistry; @@ -195,6 +200,56 @@ public void testDeleteModel() throws Exception { assertThat(exceptionHolder.get().getMessage(), containsString("Inference endpoint not found [model1]")); } + public void testNonExistentDeleteModel_DoesNotThrowAnException() { + var listener = new PlainActionFuture(); + + modelRegistry.deleteModel("non-existent-model", listener); + assertTrue(listener.actionGet(TIMEOUT)); + } + + public void testRemoveDefaultConfigs_DoesNotThrowAnException_WhenSearchingForNonExistentInferenceEndpointIds() { + var listener = new PlainActionFuture(); + + modelRegistry.deleteModels(Set.of("non-existent-model", "abc"), listener); + assertTrue(listener.actionGet(TIMEOUT)); + } + + public void testRemoveDefaultConfigs_RemovesModelsFromPersistentStorage_AndInMemoryCache() { + var service = mock(InferenceService.class); + + var defaultConfigs = new ArrayList(); + var defaultIds = new ArrayList(); + for (var id : new String[] { "model1", "model2", "model3" }) { + var modelSettings = ModelRegistryTests.randomMinimalServiceSettings(); + defaultConfigs.add(createModel(id, modelSettings.taskType(), "name")); + defaultIds.add(new InferenceService.DefaultConfigId(id, modelSettings, service)); + } + + doAnswer(invocation -> { + ActionListener> listener = invocation.getArgument(0); + listener.onResponse(defaultConfigs); + return Void.TYPE; + }).when(service).defaultConfigs(any()); + + defaultIds.forEach(modelRegistry::addDefaultIds); + + var getModelsListener = new PlainActionFuture>(); + modelRegistry.getAllModels(true, getModelsListener); + var unparsedModels = getModelsListener.actionGet(TIMEOUT); + assertThat(unparsedModels.size(), is(3)); + + var removeModelsListener = new 
PlainActionFuture<Boolean>(); + + modelRegistry.removeDefaultConfigs(Set.of("model1", "model2", "model3"), removeModelsListener); + assertTrue(removeModelsListener.actionGet(TIMEOUT)); + + var getModelsAfterDeleteListener = new PlainActionFuture<List<UnparsedModel>>(); + // the models should have been removed from the in-memory cache; if not, they will be persisted again by this call + modelRegistry.getAllModels(true, getModelsAfterDeleteListener); + var unparsedModelsAfterDelete = getModelsAfterDeleteListener.actionGet(TIMEOUT); + assertThat(unparsedModelsAfterDelete.size(), is(0)); + } + public void testGetModelsByTaskType() throws InterruptedException { var service = "foo"; var sparseAndTextEmbeddingModels = new ArrayList<Model>(); @@ -315,8 +370,7 @@ public void testGetAllModels_WithDefaults() throws Exception { } doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(defaultConfigs); return Void.TYPE; }).when(service).defaultConfigs(any()); @@ -381,8 +435,7 @@ public void testGetAllModels_OnlyDefaults() throws Exception { } doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(defaultConfigs); return Void.TYPE; }).when(service).defaultConfigs(any()); @@ -424,8 +477,7 @@ public void testGetAllModels_withDoNotPersist() throws Exception { } doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(defaultConfigs); return Void.TYPE; }).when(service).defaultConfigs(any()); @@ -469,8 +521,7 @@ public void testGet_WithDefaults() throws InterruptedException { ); doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(defaultConfigs); return Void.TYPE; }).when(service).defaultConfigs(any()); @@ -523,8 +574,7 @@ public void testGetByTaskType_WithDefaults() throws Exception { defaultIds.add(new InferenceService.DefaultConfigId("default-chat", MinimalServiceSettings.completion(), service)); doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(List.of(defaultSparse, defaultChat, defaultText)); return Void.TYPE; }).when(service).defaultConfigs(any()); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index a22e179479dec..903961794b337 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -44,10 +44,12 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.XContentFormattedException; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventField; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; @@ -80,6 +82,7 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { private static final String REQUEST_COUNT = "request_count"; private static final String WITH_ERROR = "with_error"; private static final String ERROR_ROUTE = "/_inference_error"; + private static final String FORMATTED_ERROR_ROUTE = "/_formatted_inference_error"; private static final String NO_STREAM_ROUTE = "/_inference_no_stream"; private static final Exception expectedException = new IllegalStateException("hello there"); private static final String expectedExceptionAsServerSentEvent = """ @@ -88,6 +91,11 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { "type":"illegal_state_exception","reason":"hello there"},"status":500\ }"""; + private static final Exception expectedFormattedException = new XContentFormattedException( + expectedException, + RestStatus.INTERNAL_SERVER_ERROR + ); + @Override protected boolean addMockHttpTransport() { return false; @@ -145,6 +153,16 @@ public List routes() { public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { new ServerSentEventsRestActionListener(channel, threadPool).onFailure(expectedException); } + }, new RestHandler() { + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.POST, FORMATTED_ERROR_ROUTE)); + } + + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { + new ServerSentEventsRestActionListener(channel, threadPool).onFailure(expectedFormattedException); + } }, new RestHandler() { @Override public List routes() { @@ -424,6 +442,21 @@ public void testErrorMidStream() { assertThat(collector.stringsVerified.getLast(), equalTo(expectedExceptionAsServerSentEvent)); } + public void testFormattedError() throws IOException { + var request = new Request(RestRequest.Method.POST.name(), FORMATTED_ERROR_ROUTE); + + try { + getRestClient().performRequest(request); + fail("Expected an exception to be thrown from the error route"); + } catch (ResponseException e) { + var response = e.getResponse(); + assertThat(response.getStatusLine().getStatusCode(), is(HttpStatus.SC_INTERNAL_SERVER_ERROR)); + assertThat(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), equalTo(""" + \uFEFFevent: error + data:\s""" + expectedExceptionAsServerSentEvent + "\n\n")); + } + } + public void testNoStream() { var collector = new RandomStringCollector(); var expectedTestCount = randomIntBetween(2, 30); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 8c2be17777cca..e8fcb8dfe117d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -15,6 +15,7 @@ import java.util.Set; +import static 
org.elasticsearch.xpack.inference.queries.SemanticKnnVectorQueryRewriteInterceptor.SEMANTIC_KNN_FILTER_FIX; import static org.elasticsearch.xpack.inference.queries.SemanticKnnVectorQueryRewriteInterceptor.SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED; import static org.elasticsearch.xpack.inference.queries.SemanticMatchQueryRewriteInterceptor.SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED; import static org.elasticsearch.xpack.inference.queries.SemanticSparseVectorQueryRewriteInterceptor.SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED; @@ -35,6 +36,7 @@ public Set getTestFeatures() { SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_SKIP_INFERENCE_FIELDS, SEMANTIC_TEXT_HIGHLIGHTER, SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED, SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, @@ -42,7 +44,8 @@ public Set getTestFeatures() { SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_ALIAS_HANDLING_FIX, SemanticInferenceMetadataFieldsMapper.INFERENCE_METADATA_FIELDS_ENABLED_BY_DEFAULT, - SEMANTIC_TEXT_HIGHLIGHTER_DEFAULT + SEMANTIC_TEXT_HIGHLIGHTER_DEFAULT, + SEMANTIC_KNN_FILTER_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 6fc9870034018..e8dc763116707 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -75,6 +75,8 @@ import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; import org.elasticsearch.xpack.inference.services.jinaai.JinaAIServiceSettings; import org.elasticsearch.xpack.inference.services.jinaai.embeddings.JinaAIEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.jinaai.embeddings.JinaAIEmbeddingsTaskSettings; @@ -364,6 +366,17 @@ private static void addIbmWatsonxNamedWritables(List namedWriteables) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 6f302f944c005..e3604351c1937 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.GetInferenceServicesAction; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import 
org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; @@ -67,6 +68,7 @@ import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceServicesAction; import org.elasticsearch.xpack.inference.action.TransportInferenceAction; +import org.elasticsearch.xpack.inference.action.TransportInferenceActionProxy; import org.elasticsearch.xpack.inference.action.TransportInferenceUsageAction; import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportUnifiedCompletionInferenceAction; @@ -104,7 +106,6 @@ import org.elasticsearch.xpack.inference.rest.RestInferenceAction; import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestStreamInferenceAction; -import org.elasticsearch.xpack.inference.rest.RestUnifiedCompletionInferenceAction; import org.elasticsearch.xpack.inference.rest.RestUpdateInferenceModelAction; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.alibabacloudsearch.AlibabaCloudSearchService; @@ -195,6 +196,7 @@ public InferencePlugin(Settings settings) { public List> getActions() { return List.of( new ActionHandler<>(InferenceAction.INSTANCE, TransportInferenceAction.class), + new ActionHandler<>(InferenceActionProxy.INSTANCE, TransportInferenceActionProxy.class), new ActionHandler<>(GetInferenceModelAction.INSTANCE, TransportGetInferenceModelAction.class), new ActionHandler<>(PutInferenceModelAction.INSTANCE, TransportPutInferenceModelAction.class), new ActionHandler<>(UpdateInferenceModelAction.INSTANCE, TransportUpdateInferenceModelAction.class), @@ -226,8 +228,7 @@ public List getRestHandlers( new RestUpdateInferenceModelAction(), new RestDeleteInferenceEndpointAction(), new RestGetInferenceDiagnosticsAction(), - new RestGetInferenceServicesAction(), - new RestUnifiedCompletionInferenceAction(threadPoolSetOnce) + new RestGetInferenceServicesAction() ); } @@ -331,7 +332,6 @@ public Collection createComponents(PluginServices services) { // Add binding for interface -> implementation components.add(new PluginComponentBinding<>(InferenceServiceRateLimitCalculator.class, calculator)); - components.add(calculator); return components; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java index 08d74a36d6503..b390a51f6d3e2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java @@ -50,12 +50,13 @@ import java.io.IOException; import java.util.Random; import java.util.concurrent.Executor; +import java.util.concurrent.Flow; import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.InferencePlugin.INFERENCE_API_FEATURE; -import static 
org.elasticsearch.xpack.inference.common.InferenceAPIClusterAwareRateLimitingFeature.INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.modelAttributes; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.responseAttributes; @@ -188,10 +189,6 @@ private void validateRequest(Request request, UnparsedModel unparsedModel) { } private NodeRoutingDecision determineRouting(String serviceName, Request request, UnparsedModel unparsedModel) { - if (INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG.isEnabled() == false) { - return NodeRoutingDecision.handleLocally(); - } - var modelTaskType = unparsedModel.taskType(); // Rerouting not supported or request was already rerouted @@ -285,7 +282,9 @@ private void inferOnServiceWithMetrics( var instrumentedStream = new PublisherWithMetrics(timer, model); taskProcessor.subscribe(instrumentedStream); - listener.onResponse(new InferenceAction.Response(inferenceResults, instrumentedStream)); + var streamErrorHandler = streamErrorHandler(instrumentedStream); + + listener.onResponse(new InferenceAction.Response(inferenceResults, streamErrorHandler)); } else { recordMetrics(model, timer, null); listener.onResponse(new InferenceAction.Response(inferenceResults)); @@ -296,9 +295,13 @@ private void inferOnServiceWithMetrics( })); } + protected Flow.Publisher streamErrorHandler(Flow.Processor upstream) { + return upstream; + } + private void recordMetrics(Model model, InferenceTimer timer, @Nullable Throwable t) { try { - inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(model, t)); + inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(model, unwrapCause(t))); } catch (Exception e) { log.atDebug().withThrowable(e).log("Failed to record metrics with a parsed model, dropping metrics"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java index 242c2f79733eb..3b6901ae0c31d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java @@ -88,6 +88,17 @@ private void doExecuteForked( ClusterState state, ActionListener masterListener ) { + if (modelRegistry.containsDefaultConfigId(request.getInferenceEndpointId())) { + masterListener.onFailure( + new ElasticsearchStatusException( + "[{}] is a reserved inference endpoint. Cannot delete a reserved inference endpoint.", + RestStatus.BAD_REQUEST, + request.getInferenceEndpointId() + ) + ); + return; + } + SubscribableListener.newForked(modelConfigListener -> { // Get the model from the registry diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxy.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxy.java new file mode 100644 index 0000000000000..6d46f834d4873 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxy.java @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; +import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; + +import java.io.IOException; + +import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class TransportInferenceActionProxy extends HandledTransportAction<InferenceActionProxy.Request, InferenceAction.Response> { + private final ModelRegistry modelRegistry; + private final Client client; + + @Inject + public TransportInferenceActionProxy( + TransportService transportService, + ActionFilters actionFilters, + ModelRegistry modelRegistry, + Client client + ) { + super( + InferenceActionProxy.NAME, + transportService, + actionFilters, + InferenceActionProxy.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + this.modelRegistry = modelRegistry; + this.client = client; + } + + @Override + protected void doExecute(Task task, InferenceActionProxy.Request request, ActionListener<InferenceAction.Response> listener) { + try { + ActionListener<UnparsedModel> getModelListener = listener.delegateFailureAndWrap((l, unparsedModel) -> { + if (unparsedModel.taskType() == TaskType.CHAT_COMPLETION) { + sendUnifiedCompletionRequest(request, l); + } else { + sendInferenceActionRequest(request, l); + } + }); + + if (request.getTaskType() == TaskType.ANY) { + modelRegistry.getModelWithSecrets(request.getInferenceEntityId(), getModelListener); + } else if (request.getTaskType() == TaskType.CHAT_COMPLETION) { + sendUnifiedCompletionRequest(request, listener); + } else { + sendInferenceActionRequest(request, listener); + } + } catch (Exception e) { + listener.onFailure(e); + } + } + + private void sendUnifiedCompletionRequest(InferenceActionProxy.Request request, ActionListener<InferenceAction.Response> listener) { + // format any validation exceptions from the rest -> transport path as UnifiedChatCompletionException + var unifiedErrorFormatListener = listener.delegateResponse((l, e) -> l.onFailure(UnifiedChatCompletionException.fromThrowable(e))); + + try { + if (request.isStreaming() == false) { + throw new ElasticsearchStatusException( + "The [chat_completion] task type only supports streaming, please try again with the _stream API", + RestStatus.BAD_REQUEST + ); + } + + UnifiedCompletionAction.Request unifiedRequest; + try ( + var parser = 
XContentHelper.createParser(XContentParserConfiguration.EMPTY, request.getContent(), request.getContentType()) + ) { + unifiedRequest = UnifiedCompletionAction.Request.parseRequest( + request.getInferenceEntityId(), + request.getTaskType(), + request.getTimeout(), + parser + ); + } + + executeAsyncWithOrigin(client, INFERENCE_ORIGIN, UnifiedCompletionAction.INSTANCE, unifiedRequest, unifiedErrorFormatListener); + } catch (Exception e) { + unifiedErrorFormatListener.onFailure(e); + } + } + + private void sendInferenceActionRequest(InferenceActionProxy.Request request, ActionListener<InferenceAction.Response> listener) + throws IOException { + InferenceAction.Request.Builder inferenceActionRequestBuilder; + try (var parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, request.getContent(), request.getContentType())) { + inferenceActionRequestBuilder = InferenceAction.Request.parseRequest( + request.getInferenceEntityId(), + request.getTaskType(), + parser + ); + inferenceActionRequestBuilder.setInferenceTimeout(request.getTimeout()).setStream(request.isStreaming()); + } + + executeAsyncWithOrigin(client, INFERENCE_ORIGIN, InferenceAction.INSTANCE, inferenceActionRequestBuilder.build(), listener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java index 2e3090f2afd59..1144a11d86cc9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InferenceServiceResults; @@ -20,14 +21,19 @@ import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; +import java.util.concurrent.Flow; + public class TransportUnifiedCompletionInferenceAction extends BaseTransportInferenceAction<UnifiedCompletionAction.Request> { @Inject @@ -86,4 +92,40 @@ protected void doInference( ) { service.unifiedCompletionInfer(model, request.getUnifiedCompletionRequest(), null, listener); } + + @Override + protected void doExecute(Task task, UnifiedCompletionAction.Request request, ActionListener<InferenceAction.Response> listener) { + super.doExecute(task, request, listener.delegateResponse((l, e) -> 
l.onFailure(UnifiedChatCompletionException.fromThrowable(e)))); + } + + /** + * If we get any errors, either in {@link #doExecute} via the listener.onFailure or while streaming, make sure that they are formatted + * as {@link UnifiedChatCompletionException}. + */ + @Override + protected Flow.Publisher<ChunkedToXContent> streamErrorHandler(Flow.Processor<ChunkedToXContent, ChunkedToXContent> upstream) { + return downstream -> { + upstream.subscribe(new Flow.Subscriber<>() { + @Override + public void onSubscribe(Flow.Subscription subscription) { + downstream.onSubscribe(subscription); + } + + @Override + public void onNext(ChunkedToXContent item) { + downstream.onNext(item); + } + + @Override + public void onError(Throwable throwable) { + downstream.onError(UnifiedChatCompletionException.fromThrowable(throwable)); + } + + @Override + public void onComplete() { + downstream.onComplete(); + } + }); + }; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java index b857ef3068835..ed005a86d66b5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.inference.InferenceService; @@ -50,6 +51,7 @@ import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings; @@ -255,14 +257,13 @@ private void updateInClusterEndpoint( ActionListener<UpdateInferenceModelAction.Response> listener ) throws IOException { // The model we are trying to update must have a trained model associated with it if it is an in-cluster deployment - throwIfTrainedModelDoesntExist(request); + var deploymentId = getDeploymentIdForInClusterEndpoint(existingParsedModel); + throwIfTrainedModelDoesntExist(request.getInferenceEntityId(), deploymentId); Map<String, Object> serviceSettings = request.getContentAsSettings().serviceSettings(); if (serviceSettings != null && serviceSettings.get(NUM_ALLOCATIONS) instanceof Integer numAllocations) { - UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request( - request.getInferenceEntityId() - ); + UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request(deploymentId); updateRequest.setNumberOfAllocations(numAllocations); var delegate = listener.delegateFailure((l2, response) -> { @@ -270,7 +271,8 @@ private void updateInClusterEndpoint( }); logger.info( - "Updating trained model deployment for inference entity [{}] with [{}] num_allocations", + "Updating trained model deployment [{}] for inference entity [{}] with [{}] 
num_allocations", + deploymentId, request.getInferenceEntityId(), numAllocations ); @@ -293,12 +295,26 @@ private boolean isInClusterService(String name) { return List.of(ElasticsearchInternalService.NAME, ElasticsearchInternalService.OLD_ELSER_SERVICE_NAME).contains(name); } - private void throwIfTrainedModelDoesntExist(UpdateInferenceModelAction.Request request) throws ElasticsearchStatusException { - var assignments = TrainedModelAssignmentUtils.modelAssignments(request.getInferenceEntityId(), clusterService.state()); + private String getDeploymentIdForInClusterEndpoint(Model model) { + if (model instanceof ElasticsearchInternalModel esModel) { + return esModel.mlNodeDeploymentId(); + } else { + throw new IllegalStateException( + Strings.format( + "Cannot update inference endpoint [%s]. Class [%s] is not an Elasticsearch internal model", + model.getInferenceEntityId(), + model.getClass().getSimpleName() + ) + ); + } + } + + private void throwIfTrainedModelDoesntExist(String inferenceEntityId, String deploymentId) throws ElasticsearchStatusException { + var assignments = TrainedModelAssignmentUtils.modelAssignments(deploymentId, clusterService.state()); if ((assignments == null || assignments.isEmpty())) { throw ExceptionsHelper.entityNotFoundException( Messages.MODEL_ID_DOES_NOT_MATCH_EXISTING_MODEL_IDS_BUT_MUST_FOR_IN_CLUSTER_SERVICE, - request.getInferenceEntityId() + inferenceEntityId ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java index 7cad7c42bdcf1..6b1097256e97f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java @@ -12,9 +12,11 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.SenderExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.IbmWatsonxEmbeddingsRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.IbmWatsonxRerankRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import java.util.Map; import java.util.Objects; @@ -22,7 +24,6 @@ import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; public class IbmWatsonxActionCreator implements IbmWatsonxActionVisitor { - private final Sender sender; private final ServiceComponents serviceComponents; @@ -41,6 +42,17 @@ public ExecutableAction create(IbmWatsonxEmbeddingsModel model, Map taskSettings) { + var overriddenModel = IbmWatsonxRerankModel.of(model, taskSettings); + var requestCreator = IbmWatsonxRerankRequestManager.of(overriddenModel, serviceComponents.threadPool()); + var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( + overriddenModel.getServiceSettings().uri(), + "Ibm Watsonx rerank" + ); + return new SenderExecutableAction(sender, requestCreator, 
failedToSendRequestErrorMessage); + } + protected IbmWatsonxEmbeddingsRequestManager getEmbeddingsRequestManager( IbmWatsonxEmbeddingsModel model, Truncator truncator, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java index 0a13ec2fb4645..474533040e0c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java @@ -9,9 +9,12 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import java.util.Map; public interface IbmWatsonxActionVisitor { ExecutableAction create(IbmWatsonxEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(IbmWatsonxRerankModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java index c0bccb9b2cd49..9e9531af06c8f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java @@ -8,16 +8,23 @@ package org.elasticsearch.xpack.inference.external.elastic; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.openai.OpenAiUnifiedStreamingProcessor; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.response.elastic.ElasticInferenceServiceErrorResponseEntity; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; +import java.util.Locale; import java.util.concurrent.Flow; +import static org.elasticsearch.core.Strings.format; + public class ElasticInferenceServiceUnifiedChatCompletionResponseHandler extends ElasticInferenceServiceResponseHandler { public ElasticInferenceServiceUnifiedChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction); @@ -31,10 +38,54 @@ public boolean canHandleStreamingResponses() { @Override public InferenceServiceResults parseResult(Request request, Flow.Publisher flow) { var serverSentEventProcessor 
= new ServerSentEventProcessor(new ServerSentEventParser()); - var openAiProcessor = new OpenAiUnifiedStreamingProcessor(); // EIS uses the unified API spec + // EIS uses the unified API spec + var openAiProcessor = new OpenAiUnifiedStreamingProcessor((m, e) -> buildMidStreamError(request, m, e)); flow.subscribe(serverSentEventProcessor); serverSentEventProcessor.subscribe(openAiProcessor); return new StreamingUnifiedChatCompletionResults(openAiProcessor); } + + @Override + protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { + assert request.isStreaming() : "Only streaming requests support this format"; + var responseStatusCode = result.response().getStatusLine().getStatusCode(); + if (request.isStreaming()) { + var restStatus = toRestStatus(responseStatusCode); + return new UnifiedChatCompletionException( + restStatus, + errorMessage(message, request, result, errorResponse, responseStatusCode), + "error", + restStatus.name().toLowerCase(Locale.ROOT) + ); + } else { + return super.buildError(message, request, result, errorResponse); + } + } + + private static Exception buildMidStreamError(Request request, String message, Exception e) { + var errorResponse = ElasticInferenceServiceErrorResponseEntity.fromString(message); + if (errorResponse.errorStructureFound()) { + return new UnifiedChatCompletionException( + RestStatus.INTERNAL_SERVER_ERROR, + format( + "%s for request from inference entity id [%s]. Error message: [%s]", + SERVER_ERROR_OBJECT, + request.getInferenceEntityId(), + errorResponse.getErrorMessage() + ), + "error", + "stream_error" + ); + } else if (e != null) { + return UnifiedChatCompletionException.fromThrowable(e); + } else { + return new UnifiedChatCompletionException( + RestStatus.INTERNAL_SERVER_ERROR, + format("%s for request from inference entity id [%s]", SERVER_ERROR_OBJECT, request.getInferenceEntityId()), + "error", + "stream_error" + ); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java index 1b0dd893ada6f..52a2ffba0c36c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -91,31 +91,24 @@ protected Exception buildError(String message, Request request, HttpResult resul protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { var responseStatusCode = result.response().getStatusLine().getStatusCode(); + return new ElasticsearchStatusException( + errorMessage(message, request, result, errorResponse, responseStatusCode), + toRestStatus(responseStatusCode) + ); + } - if (errorResponse == null + protected String errorMessage(String message, Request request, HttpResult result, ErrorResponse errorResponse, int statusCode) { + return (errorResponse == null || errorResponse.errorStructureFound() == false - || Strings.isNullOrEmpty(errorResponse.getErrorMessage())) { - return new ElasticsearchStatusException( - format( - "%s for request from inference entity id [%s] status [%s]", + || Strings.isNullOrEmpty(errorResponse.getErrorMessage())) + ? 
format("%s for request from inference entity id [%s] status [%s]", message, request.getInferenceEntityId(), statusCode) + : format( + "%s for request from inference entity id [%s] status [%s]. Error message: [%s]", message, request.getInferenceEntityId(), - responseStatusCode - ), - toRestStatus(responseStatusCode) - ); - } - - return new ElasticsearchStatusException( - format( - "%s for request from inference entity id [%s] status [%s]. Error message: [%s]", - message, - request.getInferenceEntityId(), - responseStatusCode, - errorResponse.getErrorMessage() - ), - toRestStatus(responseStatusCode) - ); + statusCode, + errorResponse.getErrorMessage() + ); } public static RestStatus toRestStatus(int statusCode) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java new file mode 100644 index 0000000000000..f503771510e72 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.ibmwatsonx.IbmWatsonxResponseHandler; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequest; +import org.elasticsearch.xpack.inference.external.response.ibmwatsonx.IbmWatsonxRankedResponseEntity; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class IbmWatsonxRerankRequestManager extends IbmWatsonxRequestManager { + private static final Logger logger = LogManager.getLogger(IbmWatsonxRerankRequestManager.class); + private static final ResponseHandler HANDLER = createIbmWatsonxResponseHandler(); + + private static ResponseHandler createIbmWatsonxResponseHandler() { + return new IbmWatsonxResponseHandler( + "ibm watsonx rerank", + (request, response) -> IbmWatsonxRankedResponseEntity.fromResponse(response) + ); + } + + public static IbmWatsonxRerankRequestManager of(IbmWatsonxRerankModel model, ThreadPool threadPool) { + return new IbmWatsonxRerankRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool)); + } + + private final IbmWatsonxRerankModel model; + + public IbmWatsonxRerankRequestManager(IbmWatsonxRerankModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = model; + } + + @Override + public void execute( + InferenceInputs inferenceInputs, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + ActionListener listener + ) { + var rerankInput = 
QueryAndDocsInputs.of(inferenceInputs); + + execute( + new ExecutableInferenceRequest( + requestSender, + logger, + getRerankRequest(rerankInput.getQuery(), rerankInput.getChunks(), model), + HANDLER, + hasRequestCompletedFunction, + listener + ) + ); + } + + protected IbmWatsonxRerankRequest getRerankRequest(String query, List chunks, IbmWatsonxRerankModel model) { + return new IbmWatsonxRerankRequest(query, chunks, model); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java index 6d1d3fb2a4f91..1f28a8cd61026 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java @@ -17,7 +17,6 @@ import static org.elasticsearch.core.Strings.format; public class IbmWatsonxResponseHandler extends BaseResponseHandler { - public IbmWatsonxResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, IbmWatsonxErrorResponseEntity::fromResponse); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java index 7607e5e4ed3a2..99f2a7c31e7dd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java @@ -8,15 +8,26 @@ package org.elasticsearch.xpack.inference.external.openai; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.request.Request; +import java.util.function.Function; + public class OpenAiChatCompletionResponseHandler extends OpenAiResponseHandler { public OpenAiChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, true); } + protected OpenAiChatCompletionResponseHandler( + String requestType, + ResponseParser parseFunction, + Function errorParseFunction + ) { + super(requestType, parseFunction, errorParseFunction, true); + } + @Override protected RetryException buildExceptionHandling429(Request request, HttpResult result) { // We don't retry, if the chat completion input is too large diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index cf867fb1a0ab0..07dfd8300ae5b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -13,6 +13,7 @@ import 
org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.request.Request; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; import java.util.concurrent.Flow; +import java.util.function.Function; import static org.elasticsearch.xpack.inference.external.http.retry.ResponseHandlerUtils.getFirstHeaderOrUnknown; @@ -44,7 +46,16 @@ public class OpenAiResponseHandler extends BaseResponseHandler { private final boolean canHandleStreamingResponses; public OpenAiResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponses) { - super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse); + this(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse, canHandleStreamingResponses); + } + + protected OpenAiResponseHandler( + String requestType, + ResponseParser parseFunction, + Function errorParseFunction, + boolean canHandleStreamingResponses + ) { + super(requestType, parseFunction, errorParseFunction); this.canHandleStreamingResponses = canHandleStreamingResponses; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java index fce2556efc5e0..b2096253bdeb7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java @@ -7,28 +7,171 @@ package org.elasticsearch.xpack.inference.external.openai; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; +import java.util.Locale; +import java.util.Objects; +import java.util.Optional; import java.util.concurrent.Flow; +import static org.elasticsearch.core.Strings.format; + public class 
OpenAiUnifiedChatCompletionResponseHandler extends OpenAiChatCompletionResponseHandler { public OpenAiUnifiedChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { - super(requestType, parseFunction); + super(requestType, parseFunction, OpenAiErrorResponse::fromResponse); } @Override public InferenceServiceResults parseResult(Request request, Flow.Publisher flow) { var serverSentEventProcessor = new ServerSentEventProcessor(new ServerSentEventParser()); - var openAiProcessor = new OpenAiUnifiedStreamingProcessor(); + var openAiProcessor = new OpenAiUnifiedStreamingProcessor((m, e) -> buildMidStreamError(request, m, e)); flow.subscribe(serverSentEventProcessor); serverSentEventProcessor.subscribe(openAiProcessor); return new StreamingUnifiedChatCompletionResults(openAiProcessor); } + + @Override + protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { + assert request.isStreaming() : "Only streaming requests support this format"; + var responseStatusCode = result.response().getStatusLine().getStatusCode(); + if (request.isStreaming()) { + var errorMessage = errorMessage(message, request, result, errorResponse, responseStatusCode); + var restStatus = toRestStatus(responseStatusCode); + return errorResponse instanceof OpenAiErrorResponse oer + ? new UnifiedChatCompletionException(restStatus, errorMessage, oer.type(), oer.code(), oer.param()) + : new UnifiedChatCompletionException( + restStatus, + errorMessage, + errorResponse != null ? errorResponse.getClass().getSimpleName() : "unknown", + restStatus.name().toLowerCase(Locale.ROOT) + ); + } else { + return super.buildError(message, request, result, errorResponse); + } + } + + private static Exception buildMidStreamError(Request request, String message, Exception e) { + var errorResponse = OpenAiErrorResponse.fromString(message); + if (errorResponse instanceof OpenAiErrorResponse oer) { + return new UnifiedChatCompletionException( + RestStatus.INTERNAL_SERVER_ERROR, + format( + "%s for request from inference entity id [%s]. Error message: [%s]", + SERVER_ERROR_OBJECT, + request.getInferenceEntityId(), + errorResponse.getErrorMessage() + ), + oer.type(), + oer.code(), + oer.param() + ); + } else if (e != null) { + return UnifiedChatCompletionException.fromThrowable(e); + } else { + return new UnifiedChatCompletionException( + RestStatus.INTERNAL_SERVER_ERROR, + format("%s for request from inference entity id [%s]", SERVER_ERROR_OBJECT, request.getInferenceEntityId()), + errorResponse != null ? 
errorResponse.getClass().getSimpleName() : "unknown", + "stream_error" + ); + } + } + + private static class OpenAiErrorResponse extends ErrorResponse { + private static final ConstructingObjectParser, Void> ERROR_PARSER = new ConstructingObjectParser<>( + "open_ai_error", + true, + args -> Optional.ofNullable((OpenAiErrorResponse) args[0]) + ); + private static final ConstructingObjectParser ERROR_BODY_PARSER = new ConstructingObjectParser<>( + "open_ai_error", + true, + args -> new OpenAiErrorResponse((String) args[0], (String) args[1], (String) args[2], (String) args[3]) + ); + + static { + ERROR_BODY_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("message")); + ERROR_BODY_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("code")); + ERROR_BODY_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("param")); + ERROR_BODY_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("type")); + + ERROR_PARSER.declareObjectOrNull( + ConstructingObjectParser.optionalConstructorArg(), + ERROR_BODY_PARSER, + null, + new ParseField("error") + ); + } + + private static ErrorResponse fromResponse(HttpResult response) { + try ( + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY, response.body()) + ) { + return ERROR_PARSER.apply(parser, null).orElse(ErrorResponse.UNDEFINED_ERROR); + } catch (Exception e) { + // swallow the error + } + + return ErrorResponse.UNDEFINED_ERROR; + } + + private static ErrorResponse fromString(String response) { + try ( + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY, response) + ) { + return ERROR_PARSER.apply(parser, null).orElse(ErrorResponse.UNDEFINED_ERROR); + } catch (Exception e) { + // swallow the error + } + + return ErrorResponse.UNDEFINED_ERROR; + } + + @Nullable + private final String code; + @Nullable + private final String param; + private final String type; + + OpenAiErrorResponse(String errorMessage, @Nullable String code, @Nullable String param, String type) { + super(errorMessage); + this.code = code; + this.param = param; + this.type = Objects.requireNonNull(type); + } + + @Nullable + public String code() { + return code; + } + + @Nullable + public String param() { + return param; + } + + public String type() { + return type; + } + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedStreamingProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedStreamingProcessor.java index 599d71df3dcfa..bfd4456279a8a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedStreamingProcessor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedStreamingProcessor.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; import org.elasticsearch.xpack.inference.common.DelegatingProcessor; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; +import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventField; import java.io.IOException; import java.util.ArrayDeque; @@ -28,6 +29,7 @@ import java.util.Iterator; import java.util.List; import 
java.util.concurrent.LinkedBlockingDeque; +import java.util.function.BiFunction; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; @@ -57,7 +59,13 @@ public class OpenAiUnifiedStreamingProcessor extends DelegatingProcessor<Deque<ServerSentEvent>, StreamingUnifiedChatCompletionResults.Results> { + private final BiFunction<String, Exception, Exception> errorParser; private final Deque<StreamingUnifiedChatCompletionResults.ChatCompletionChunk> buffer = new LinkedBlockingDeque<>(); + private volatile boolean previousEventWasError = false; + + public OpenAiUnifiedStreamingProcessor(BiFunction<String, Exception, Exception> errorParser) { + this.errorParser = errorParser; + } @Override protected void upstreamRequest(long n) { @@ -71,7 +79,25 @@ protected void next(Deque<ServerSentEvent> item) throws Exception { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); - var results = parseEvent(item, OpenAiUnifiedStreamingProcessor::parse, parserConfig, logger); + + var results = new ArrayDeque<StreamingUnifiedChatCompletionResults.ChatCompletionChunk>(item.size()); + for (var event : item) { + if (ServerSentEventField.EVENT == event.name() && "error".equals(event.value())) { + previousEventWasError = true; + } else if (ServerSentEventField.DATA == event.name() && event.hasValue()) { + if (previousEventWasError) { + throw errorParser.apply(event.value(), null); + } + + try { + var delta = parse(parserConfig, event); + delta.forEachRemaining(results::offer); + } catch (Exception e) { + logger.warn("Failed to parse event from inference provider: {}", event); + throw errorParser.apply(event.value(), e); + } + } + } if (results.isEmpty()) { upstream().request(1); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java index 77ae48e6ccdc2..0ba6b46da05e5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java @@ -23,7 +23,7 @@ public record ElasticInferenceServiceSparseEmbeddingsRequestEntity( List<String> inputs, String modelId, ElasticInferenceServiceUsageContext usageContext ) implements ToXContentObject { private static final String INPUT_FIELD = "input"; - private static final String MODEL_ID_FIELD = "model_id"; + private static final String MODEL_FIELD = "model"; private static final String USAGE_CONTEXT = "usage_context"; public ElasticInferenceServiceSparseEmbeddingsRequestEntity { @@ -42,7 +42,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.endArray(); - builder.field(MODEL_ID_FIELD, modelId); + builder.field(MODEL_FIELD, modelId); // optional field if ((usageContext == ElasticInferenceServiceUsageContext.UNSPECIFIED) == false) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java new file mode 100644 index 0000000000000..cfc1f367be45c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class IbmWatsonxRerankRequest implements IbmWatsonxRequest { + + private final String query; + private final List input; + private final IbmWatsonxRerankTaskSettings taskSettings; + private final IbmWatsonxRerankModel model; + + public IbmWatsonxRerankRequest(String query, List input, IbmWatsonxRerankModel model) { + Objects.requireNonNull(model); + + this.input = Objects.requireNonNull(input); + this.query = Objects.requireNonNull(query); + taskSettings = model.getTaskSettings(); + this.model = model; + } + + @Override + public HttpRequest createHttpRequest() { + URI uri; + + try { + uri = new URI(model.uri().toString()); + } catch (URISyntaxException ex) { + throw new IllegalArgumentException("cannot parse URI pattern"); + } + + HttpPost httpPost = new HttpPost(uri); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString( + new IbmWatsonxRerankRequestEntity( + query, + input, + taskSettings, + model.getServiceSettings().modelId(), + model.getServiceSettings().projectId() + ) + ).getBytes(StandardCharsets.UTF_8) + ); + + httpPost.setEntity(byteEntity); + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + + decorateWithAuth(httpPost); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + public void decorateWithAuth(HttpPost httpPost) { + IbmWatsonxRequest.decorateWithBearerToken(httpPost, model.getSecretSettings(), model.getInferenceEntityId()); + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public URI getURI() { + return model.uri(); + } + + @Override + public Request truncate() { + return this; + } + + public String getQuery() { + return query; + } + + public List getInput() { + return input; + } + + public IbmWatsonxRerankModel getModel() { + return model; + } + + @Override + public boolean[] getTruncationInfo() { + return null; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java new file mode 100644 index 0000000000000..36e5951ebdc15 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V.
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record IbmWatsonxRerankRequestEntity( + String query, + List inputs, + IbmWatsonxRerankTaskSettings taskSettings, + String modelId, + String projectId +) implements ToXContentObject { + + private static final String INPUTS_FIELD = "inputs"; + private static final String QUERY_FIELD = "query"; + private static final String MODEL_ID_FIELD = "model_id"; + private static final String PROJECT_ID_FIELD = "project_id"; + + public IbmWatsonxRerankRequestEntity { + Objects.requireNonNull(query); + Objects.requireNonNull(inputs); + Objects.requireNonNull(modelId); + Objects.requireNonNull(projectId); + Objects.requireNonNull(taskSettings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(MODEL_ID_FIELD, modelId); + builder.field(QUERY_FIELD, query); + builder.startArray(INPUTS_FIELD); + for (String input : inputs) { + builder.startObject(); + builder.field("text", input); + builder.endObject(); + } + builder.endArray(); + builder.field(PROJECT_ID_FIELD, projectId); + + builder.startObject("parameters"); + { + if (taskSettings.getTruncateInputTokens() != null) { + builder.field("truncate_input_tokens", taskSettings.getTruncateInputTokens()); + } + + builder.startObject("return_options"); + { + if (taskSettings.getDoesReturnDocuments() != null) { + builder.field("inputs", taskSettings.getDoesReturnDocuments()); + } + if (taskSettings.getTopNDocumentsOnly() != null) { + builder.field("top_n", taskSettings.getTopNDocumentsOnly()); + } + } + builder.endObject(); + } + builder.endObject(); + + builder.endObject(); + + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java index a506a33385dfb..91679288e5ae3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java @@ -13,6 +13,7 @@ public class IbmWatsonxUtils { public static final String V1 = "v1"; public static final String TEXT = "text"; public static final String EMBEDDINGS = "embeddings"; + public static final String RERANKS = "reranks"; private IbmWatsonxUtils() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceErrorResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceErrorResponseEntity.java index 696be7b2acdd2..29b0903901694 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceErrorResponseEntity.java +++ 
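
Illustrative sketch (not part of the patch): read straight off the toXContent method above, a serialized rerank request body has the shape below. The field names and nesting come from the entity; the concrete values are invented examples only.

public class WatsonxRerankPayloadShape {
    public static void main(String[] args) {
        // Shape produced by IbmWatsonxRerankRequestEntity.toXContent (values are examples only).
        String example = """
            {
              "model_id": "example-rerank-model",
              "query": "database",
              "inputs": [
                { "text": "greenland" },
                { "text": "mysql" }
              ],
              "project_id": "example-project",
              "parameters": {
                "truncate_input_tokens": 512,
                "return_options": {
                  "inputs": true,
                  "top_n": 2
                }
              }
            }""";
        System.out.println(example);
    }
}
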
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceErrorResponseEntity.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -16,6 +17,18 @@ import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; +import java.io.IOException; + +/** + * An example error response would look like + * + * + * { + * "error": "some error" + * } + * + * + */ public class ElasticInferenceServiceErrorResponseEntity extends ErrorResponse { private static final Logger logger = LogManager.getLogger(ElasticInferenceServiceErrorResponseEntity.class); @@ -24,24 +37,18 @@ private ElasticInferenceServiceErrorResponseEntity(String errorMessage) { super(errorMessage); } - /** - * An example error response would look like - * - * - * { - * "error": "some error" - * } - * - * - * @param response The error response - * @return An error entity if the response is JSON with the above structure - * or {@link ErrorResponse#UNDEFINED_ERROR} if the error field wasn't found - */ public static ErrorResponse fromResponse(HttpResult response) { - try ( - XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON) - .createParser(XContentParserConfiguration.EMPTY, response.body()) - ) { + return fromParser( + () -> XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, response.body()) + ); + } + + public static ErrorResponse fromString(String response) { + return fromParser(() -> XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, response)); + } + + private static ErrorResponse fromParser(CheckedSupplier jsonParserFactory) { + try (XContentParser jsonParser = jsonParserFactory.get()) { var responseMap = jsonParser.map(); var error = (String) responseMap.get("error"); if (error != null) { @@ -50,7 +57,6 @@ public static ErrorResponse fromResponse(HttpResult response) { } catch (Exception e) { logger.debug("Failed to parse error response", e); } - return ErrorResponse.UNDEFINED_ERROR; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java new file mode 100644 index 0000000000000..05f369bd8961e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
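
Illustrative sketch (not part of the patch): the error-entity hunk above is a small factoring pattern; fromResponse(bytes) and fromString(string) both funnel into one private method that owns the try-with-resources and the UNDEFINED_ERROR fallback. A trivialized, self-contained rendering follows, where CheckedSupplier and Parser are simplified stand-ins for the real XContent types.

import java.io.IOException;

public class SupplierFactoringSketch {
    interface CheckedSupplier<T, E extends Exception> {
        T get() throws E;
    }

    // Stand-in parser: pretends the whole body is the extracted "error" field.
    record Parser(String error) implements AutoCloseable {
        @Override
        public void close() {}
    }

    static String fromBytes(byte[] body) {
        return fromParser(() -> new Parser(new String(body)));
    }

    static String fromString(String body) {
        return fromParser(() -> new Parser(body));
    }

    // Single owner of resource handling and the fallback, like the private fromParser above.
    private static String fromParser(CheckedSupplier<Parser, IOException> parserFactory) {
        try (Parser parser = parserFactory.get()) {
            return parser.error() != null ? parser.error() : "undefined_error";
        } catch (Exception e) {
            return "undefined_error"; // mirrors the ErrorResponse.UNDEFINED_ERROR fallback
        }
    }

    public static void main(String[] args) {
        System.out.println(fromString("some error")); // prints: some error
    }
}
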
+ * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.inference.external.response.ibmwatsonx; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class IbmWatsonxRankedResponseEntity { + + private static final Logger logger = LogManager.getLogger(IbmWatsonxRankedResponseEntity.class); + + /** + * Parses the Ibm Watsonx ranked response. + * + * For a request like: + * "model": "rerank-english-v2.0", + * "query": "database", + * "return_documents": true, + * "top_n": 3, + * "input": ["greenland", "google","john", "mysql","potter", "grammar"] + *

+ * The response will look like (without whitespace): + * { + * "rerank": [ + * { + * "index": 3, + * "relevance_score": 0.7989932 + * }, + * { + * "index": 5, + * "relevance_score": 0.61281824 + * }, + * { + * "index": 1, + * "relevance_score": 0.5762553 + * }, + * { + * "index": 4, + * "relevance_score": 0.47395563 + * }, + * { + * "index": 0, + * "relevance_score": 0.4338926 + * }, + * { + * "index": 2, + * "relevance_score": 0.42638257 + * } + * ], + * } + * + * @param response the http response from ibm watsonx + * @return the parsed response + * @throws IOException if there is an error parsing the response + */ + public static InferenceServiceResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE); // TODO error message + + token = jsonParser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + return new RankedDocsResults(parseList(jsonParser, IbmWatsonxRankedResponseEntity::parseRankedDocObject)); + } else { + throwUnknownToken(token, jsonParser); + } + + // This should never be reached. The above code should either return successfully or hit the throwUnknownToken + // or throw a parsing exception + throw new IllegalStateException("Reached an invalid state while parsing the Watsonx response"); + } + } + + private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + int index = -1; + float score = -1; + String documentText = null; + parser.nextToken(); + while (parser.currentToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + switch (parser.currentName()) { + case "index": + parser.nextToken(); // move to VALUE_NUMBER + index = parser.intValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "score": + parser.nextToken(); // move to VALUE_NUMBER + score = parser.floatValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "input": + parser.nextToken(); // move to START_OBJECT; document text is wrapped in an object + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + do { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME && parser.currentName().equals("text")) { + parser.nextToken(); // move to VALUE_STRING + documentText = parser.text(); + } + } while (parser.nextToken() != XContentParser.Token.END_OBJECT); + parser.nextToken();// move past END_OBJECT + // parser should now be at the next FIELD_NAME or END_OBJECT + break; + default: + throwUnknownField(parser.currentName(), parser); + } + } else { + parser.nextToken(); + } + } + + if (index == -1) { + logger.warn("Failed to find required field [index] in Watsonx rerank response"); + } + if (score == -1) { + logger.warn("Failed to find required field [relevance_score] in Watsonx rerank response"); + } + // documentText may or may not be present depending on the request parameter + + return new 
RankedDocsResults.RankedDoc(index, score, documentText); + } + + private IbmWatsonxRankedResponseEntity() {} + + static String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Watsonx rerank response"; +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index f24f407fd051d..3bebd8086d792 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -117,6 +117,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final NodeFeature SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX = new NodeFeature( "semantic_text.always_emit_inference_id_fix" ); + public static final NodeFeature SEMANTIC_TEXT_SKIP_INFERENCE_FIELDS = new NodeFeature("semantic_text.skip_inference_fields"); public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java index 57805d5277ffc..9e513a1ed9226 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java @@ -28,6 +28,7 @@ public class SemanticKnnVectorQueryRewriteInterceptor extends SemanticQueryRewri public static final NodeFeature SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED = new NodeFeature( "search.semantic_knn_vector_query_rewrite_interception_supported" ); + public static final NodeFeature SEMANTIC_KNN_FILTER_FIX = new NodeFeature("search.semantic_knn_filter_fix"); public SemanticKnnVectorQueryRewriteInterceptor() {} @@ -147,6 +148,7 @@ private KnnVectorQueryBuilder addIndexFilterToKnnVectorQuery(Collection ); } + copy.addFilterQueries(original.filterQueries()); copy.addFilterQuery(new TermsQueryBuilder(IndexFieldMapper.NAME, indices)); return copy; } @@ -165,8 +167,9 @@ private KnnVectorQueryBuilder buildNewKnnVectorQuery( KnnVectorQueryBuilder original, QueryVectorBuilder queryVectorBuilder ) { + KnnVectorQueryBuilder newQueryBuilder; if (original.queryVectorBuilder() != null) { - return new KnnVectorQueryBuilder( + newQueryBuilder = new KnnVectorQueryBuilder( fieldName, queryVectorBuilder, original.k(), @@ -174,7 +177,7 @@ private KnnVectorQueryBuilder buildNewKnnVectorQuery( original.getVectorSimilarity() ); } else { - return new KnnVectorQueryBuilder( + newQueryBuilder = new KnnVectorQueryBuilder( fieldName, original.queryVector(), original.k(), @@ -183,6 +186,9 @@ private KnnVectorQueryBuilder buildNewKnnVectorQuery( original.getVectorSimilarity() ); } + + newQueryBuilder.addFilterQueries(original.filterQueries()); + return newQueryBuilder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 
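
Illustrative sketch (not part of the patch): the interceptor fix above belongs to a common bug class; when a query builder is reconstructed field by field, collection-valued state such as filterQueries() is silently dropped unless copied across explicitly. A self-contained rendering with a stand-in query type follows.

import java.util.ArrayList;
import java.util.List;

public class CopyFiltersSketch {
    // Stand-in for KnnVectorQueryBuilder: one scalar field plus collection-valued state.
    static class KnnQuery {
        final String field;
        final List<String> filters = new ArrayList<>();

        KnnQuery(String field) {
            this.field = field;
        }
    }

    static KnnQuery rebuild(KnnQuery original, String newField) {
        KnnQuery copy = new KnnQuery(newField); // the constructor only covers scalar state...
        copy.filters.addAll(original.filters);  // ...so filters must be carried over, as in the fix
        return copy;
    }

    public static void main(String[] args) {
        KnnQuery query = new KnnQuery("embedding");
        query.filters.add("{\"term\":{\"lang\":\"en\"}}");
        // Without the addAll line, this would print [] and the filter would be lost.
        System.out.println(rebuild(query, "chunks.embedding").filters);
    }
}
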
a9642a685aec9..2bcb130ddccbd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.engine.VersionConflictEngineException; @@ -61,6 +62,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -111,7 +113,7 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); - defaultConfigIds = new HashMap<>(); + defaultConfigIds = new ConcurrentHashMap<>(); } /** @@ -644,11 +646,32 @@ private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkRes return null; } + public synchronized void removeDefaultConfigs(Set inferenceEntityIds, ActionListener listener) { + if (inferenceEntityIds.isEmpty()) { + listener.onResponse(true); + return; + } + + defaultConfigIds.keySet().removeAll(inferenceEntityIds); + deleteModels(inferenceEntityIds, listener); + } + public void deleteModel(String inferenceEntityId, ActionListener listener) { - if (preventDeletionLock.contains(inferenceEntityId)) { + deleteModels(Set.of(inferenceEntityId), listener); + } + + public void deleteModels(Set inferenceEntityIds, ActionListener listener) { + var lockedInferenceIds = new HashSet<>(inferenceEntityIds); + lockedInferenceIds.retainAll(preventDeletionLock); + + if (lockedInferenceIds.isEmpty() == false) { listener.onFailure( new ElasticsearchStatusException( - "Model is currently being updated, you may delete the model once the update completes", + Strings.format( + "The inference endpoint(s) %s are currently being updated. Please wait until the update completes " + + "before deleting them.", + lockedInferenceIds + ), RestStatus.CONFLICT ) ); @@ -657,7 +680,7 @@ public void deleteModel(String inferenceEntityId, ActionListener listen DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false); request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN); - request.setQuery(documentIdQuery(inferenceEntityId)); + request.setQuery(documentIdsQuery(inferenceEntityIds)); request.setRefresh(true); client.execute(DeleteByQueryAction.INSTANCE, request, listener.delegateFailureAndWrap((l, r) -> l.onResponse(Boolean.TRUE))); @@ -695,6 +718,11 @@ private QueryBuilder documentIdQuery(String inferenceEntityId) { return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId))); } + private QueryBuilder documentIdsQuery(Set inferenceEntityIds) { + var documentIdsArray = inferenceEntityIds.stream().map(Model::documentId).toArray(String[]::new); + return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(documentIdsArray)); + } + static Optional idMatchedDefault( String inferenceId, List defaultConfigIds diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/BaseInferenceAction.java
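
Illustrative sketch (not part of the patch): documentIdsQuery above widens the single-id delete into one ids query covering every endpoint, so a whole batch is removed with a single delete-by-query call. A self-contained rendering of just the id mapping follows; the "model_" prefix is an assumption standing in for whatever Model.documentId() actually produces.

import java.util.Set;

public class BatchDeleteIdsSketch {
    // Stand-in for Model.documentId(); the real prefix is whatever that method uses.
    static String documentId(String inferenceEntityId) {
        return "model_" + inferenceEntityId;
    }

    static String[] toDocumentIds(Set<String> inferenceEntityIds) {
        return inferenceEntityIds.stream()
            .map(BatchDeleteIdsSketch::documentId)
            .toArray(String[]::new); // feeds QueryBuilders.idsQuery().addIds(...) in the patch
    }

    public static void main(String[] args) {
        for (String id : toDocumentIds(Set.of("my-elser", "my-e5"))) {
            System.out.println(id); // model_my-elser, model_my-e5 (order unspecified)
        }
    }
}
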
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/BaseInferenceAction.java index d911158e82296..06a0849b91d4e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/BaseInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/BaseInferenceAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import java.io.IOException; @@ -41,21 +42,22 @@ static TimeValue parseTimeout(RestRequest restRequest) { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { var params = parseParams(restRequest); + var content = restRequest.requiredContent(); + var inferTimeout = parseTimeout(restRequest); - InferenceAction.Request.Builder requestBuilder; - try (var parser = restRequest.contentParser()) { - requestBuilder = InferenceAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), parser); - } + var request = new InferenceActionProxy.Request( + params.taskType(), + params.inferenceEntityId(), + content, + restRequest.getXContentType(), + inferTimeout, + shouldStream() + ); - var inferTimeout = parseTimeout(restRequest); - requestBuilder.setInferenceTimeout(inferTimeout); - var request = prepareInferenceRequest(requestBuilder); - return channel -> client.execute(InferenceAction.INSTANCE, request, listener(channel)); + return channel -> client.execute(InferenceActionProxy.INSTANCE, request, ActionListener.withRef(listener(channel), content)); } - protected InferenceAction.Request prepareInferenceRequest(InferenceAction.Request.Builder builder) { - return builder.build(); - } + protected abstract boolean shouldStream(); protected abstract ActionListener listener(RestChannel channel); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java index 7f43676dfb5f0..b1edec79dfb72 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java @@ -24,22 +24,14 @@ public final class Paths { static final String INFERENCE_SERVICES_PATH = "_inference/_services"; static final String TASK_TYPE_INFERENCE_SERVICES_PATH = "_inference/_services/{" + TASK_TYPE + "}"; - static final String STREAM_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/_stream"; + public static final String STREAM_SUFFIX = "_stream"; + static final String STREAM_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/" + STREAM_SUFFIX; static final String STREAM_TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" - + TASK_TYPE_OR_INFERENCE_ID - + "}/{" - + INFERENCE_ID - + "}/_stream"; - - // TODO remove the _unified path - public static final String UNIFIED_SUFFIX = "_unified"; - static final String UNIFIED_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/" + UNIFIED_SUFFIX; - static final String UNIFIED_TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/{" + INFERENCE_ID + "}/" - + UNIFIED_SUFFIX; + + STREAM_SUFFIX; private Paths() { diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java index 0fbc2f8214cbb..55083dcd4c888 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java @@ -32,6 +32,11 @@ public List routes() { return List.of(new Route(POST, INFERENCE_ID_PATH), new Route(POST, TASK_TYPE_INFERENCE_ID_PATH)); } + @Override + protected boolean shouldStream() { + return false; + } + @Override protected ActionListener listener(RestChannel channel) { return new RestChunkedToXContentListener<>(channel); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java index 518056365d88b..f37f4e9fb1f9b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java @@ -9,17 +9,12 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; -import java.io.IOException; import java.util.List; import java.util.Objects; @@ -46,41 +41,13 @@ public List routes() { return List.of(new Route(POST, STREAM_INFERENCE_ID_PATH), new Route(POST, STREAM_TASK_TYPE_INFERENCE_ID_PATH)); } - @Override - protected InferenceAction.Request prepareInferenceRequest(InferenceAction.Request.Builder builder) { - return builder.setStream(true).build(); - } - @Override protected ActionListener listener(RestChannel channel) { return new ServerSentEventsRestActionListener(channel, threadPool); } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - var params = parseParams(restRequest); - var inferTimeout = parseTimeout(restRequest); - - if (params.taskType() == TaskType.CHAT_COMPLETION) { - UnifiedCompletionAction.Request request; - try (var parser = restRequest.contentParser()) { - request = UnifiedCompletionAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), inferTimeout, parser); - } - - return channel -> client.execute( - UnifiedCompletionAction.INSTANCE, - request, - new ServerSentEventsRestActionListener(channel, threadPool) - ); - } else { - InferenceAction.Request.Builder requestBuilder; - try (var parser = restRequest.contentParser()) { - requestBuilder = InferenceAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), parser); - } - - requestBuilder.setInferenceTimeout(inferTimeout); - var request = prepareInferenceRequest(requestBuilder); - return channel -> client.execute(InferenceAction.INSTANCE, request, listener(channel)); - } + protected boolean shouldStream() { + return true; } } diff 
--git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java deleted file mode 100644 index 51f1bc48c8306..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.rest; - -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.Scope; -import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.xpack.inference.rest.Paths.UNIFIED_INFERENCE_ID_PATH; -import static org.elasticsearch.xpack.inference.rest.Paths.UNIFIED_TASK_TYPE_INFERENCE_ID_PATH; - -@ServerlessScope(Scope.PUBLIC) -public class RestUnifiedCompletionInferenceAction extends BaseRestHandler { - private final SetOnce threadPool; - - public RestUnifiedCompletionInferenceAction(SetOnce threadPool) { - super(); - this.threadPool = Objects.requireNonNull(threadPool); - } - - @Override - public String getName() { - return "unified_inference_action"; - } - - @Override - public List routes() { - return List.of(new Route(POST, UNIFIED_INFERENCE_ID_PATH), new Route(POST, UNIFIED_TASK_TYPE_INFERENCE_ID_PATH)); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - var params = BaseInferenceAction.parseParams(restRequest); - - var inferTimeout = BaseInferenceAction.parseTimeout(restRequest); - - UnifiedCompletionAction.Request request; - try (var parser = restRequest.contentParser()) { - request = UnifiedCompletionAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), inferTimeout, parser); - } - - return channel -> client.execute( - UnifiedCompletionAction.INSTANCE, - request, - new ServerSentEventsRestActionListener(channel, threadPool) - ); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java index 120731a4f8e66..7b3c54c60cdcc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.inference.rest; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; @@ -48,7 +46,8 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient inferenceEntityId = restRequest.param(INFERENCE_ID); taskType = TaskType.fromStringOrStatusException(restRequest.param(TASK_TYPE_OR_INFERENCE_ID)); } else { - throw new ElasticsearchStatusException("Inference ID must be provided in the path", RestStatus.BAD_REQUEST); + inferenceEntityId = restRequest.param(TASK_TYPE_OR_INFERENCE_ID); + taskType = TaskType.ANY; } var content = restRequest.requiredContent(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java index 6991e1325f3bc..cadf3e5f1806b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java @@ -35,15 +35,19 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.XContentFormattedException; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.Iterator; +import java.util.Map; import java.util.Objects; import java.util.concurrent.Flow; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.xpack.core.inference.results.XContentFormattedException.X_CONTENT_PARAM; + /** * A version of {@link org.elasticsearch.rest.action.RestChunkedToXContentListener} that reads from a {@link Flow.Publisher} and encodes * the response in Server-Sent Events. @@ -72,7 +76,7 @@ public ServerSentEventsRestActionListener(RestChannel channel, SetOnce threadPool) { this.channel = channel; - this.params = params; + this.params = new ToXContent.DelegatingMapParams(Map.of(X_CONTENT_PARAM, String.valueOf(channel.detailedErrorsEnabled())), params); this.threadPool = Objects.requireNonNull(threadPool); } @@ -150,6 +154,12 @@ public void onFailure(Exception e) { } private ChunkedToXContent errorChunk(Throwable t) { + // if we've already formatted it, just return that format + if (ExceptionsHelper.unwrapCause(t) instanceof XContentFormattedException xContentFormattedException) { + return xContentFormattedException; + } + + // else, try to parse the format and return something that the ES client knows how to interpret var status = ExceptionsHelper.status(t); Exception e; @@ -158,7 +168,8 @@ private ChunkedToXContent errorChunk(Throwable t) { } else { // if not exception, then error, and we should not let it escape. rethrow on another thread, and inform the user we're stopping. ExceptionsHelper.maybeDieOnAnotherThread(t); - e = new RuntimeException("Fatal error while streaming response", t); + e = new RuntimeException("Fatal error while streaming response. 
Please retry the request."); + logger.error(e.getMessage(), t); } return params -> Iterators.concat( ChunkedToXContentHelper.startObject(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 1ddae3cc8df95..13d641101a1cf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -42,7 +42,7 @@ import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.ENABLED; import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.MAX_NUMBER_OF_ALLOCATIONS; import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.MIN_NUMBER_OF_ALLOCATIONS; -import static org.elasticsearch.xpack.inference.rest.Paths.UNIFIED_SUFFIX; +import static org.elasticsearch.xpack.inference.rest.Paths.STREAM_SUFFIX; import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; public final class ServiceUtils { @@ -796,7 +796,7 @@ public static String useChatCompletionUrlMessage(Model model) { model.getTaskType(), model.getTaskType(), model.getInferenceEntityId(), - UNIFIED_SUFFIX + STREAM_SUFFIX ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java index 0fd0c281d8bc6..589ca1e033f06 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java @@ -409,7 +409,7 @@ public static InferenceServiceConfiguration get() { HTTP_SCHEMA_NAME, new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("") .setLabel("HTTP Schema") - .setRequired(true) + .setRequired(false) .setSensitive(false) .setUpdatable(false) .setType(SettingsConfigurationFieldType.STRING) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index e13c668197a8f..493acd3c0cd1a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -54,6 +54,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -413,6 +414,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The 
number of dimensions the resulting embeddings should have. For more information refer to " + + "https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-embed-text.html." + ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(AmazonBedrockSecretSettings.Configuration.get()); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index 88d5b54398d06..34a5c2b4cc1e9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -53,6 +53,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -441,6 +442,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-embeddings." + ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 5b622d68f2c25..9a77b63337978 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -50,6 +50,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -382,6 +383,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. 
For more information refer to " + + "https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#request-body-1." + ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(AzureOpenAiSecretSettings.Configuration.get()); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 60326a8a34ca3..6c2d3bb96d74d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.SettingsConfiguration; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; @@ -51,6 +52,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -363,6 +365,19 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + configurationMap.put( + MODEL_ID, + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." 
+ ) + .setLabel("Model ID") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.STRING) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 8b8723b54d683..4b53353b95d9e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -68,8 +68,10 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.inference.results.ResultUtils.createInvalidChunkedResultException; +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; @@ -158,10 +160,7 @@ static AuthorizedContent empty() { private void getAuthorization() { try { - ActionListener listener = ActionListener.wrap(result -> { - setAuthorizedContent(result); - authorizationCompletedLatch.countDown(); - }, e -> { + ActionListener listener = ActionListener.wrap(this::setAuthorizedContent, e -> { // we don't need to do anything if there was a failure, everything is disabled by default authorizationCompletedLatch.countDown(); }); @@ -177,18 +176,30 @@ private synchronized void setAuthorizedContent(ElasticInferenceServiceAuthorizat var authorizedTaskTypesAndModels = auth.newLimitedToTaskTypes(EnumSet.copyOf(IMPLEMENTED_TASK_TYPES)); // recalculate which default config ids and models are authorized now - var authorizedDefaultConfigIds = getAuthorizedDefaultConfigIds(auth); - var authorizedDefaultModelObjects = getAuthorizedDefaultModelsObjects(auth); + var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); + + var authorizedDefaultConfigIds = getAuthorizedDefaultConfigIds(authorizedDefaultModelIds, auth); + var authorizedDefaultModelObjects = getAuthorizedDefaultModelsObjects(authorizedDefaultModelIds); authRef.set(new AuthorizedContent(authorizedTaskTypesAndModels, authorizedDefaultConfigIds, authorizedDefaultModelObjects)); configuration = new Configuration(authRef.get().taskTypesAndModels.getAuthorizedTaskTypes()); defaultConfigIds().forEach(modelRegistry::addDefaultIds); + handleRevokedDefaultConfigs(authorizedDefaultModelIds); } - private List getAuthorizedDefaultConfigIds(ElasticInferenceServiceAuthorization auth) { - var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); + private Set getAuthorizedDefaultModelIds(ElasticInferenceServiceAuthorization auth) { + var authorizedModels = auth.getAuthorizedModelIds(); + var authorizedDefaultModelIds = new HashSet<>(defaultModelsConfigs.keySet()); + authorizedDefaultModelIds.retainAll(authorizedModels); + return authorizedDefaultModelIds; + } + + private List 
getAuthorizedDefaultConfigIds( + Set authorizedDefaultModelIds, + ElasticInferenceServiceAuthorization auth + ) { var authorizedConfigIds = new ArrayList(); for (var id : authorizedDefaultModelIds) { var modelConfig = defaultModelsConfigs.get(id); @@ -210,17 +221,7 @@ private List getAuthorizedDefaultConfigIds(ElasticInferenceServ return authorizedConfigIds; } - private Set getAuthorizedDefaultModelIds(ElasticInferenceServiceAuthorization auth) { - var authorizedModels = auth.getAuthorizedModelIds(); - var authorizedDefaultModelIds = new HashSet<>(defaultModelsConfigs.keySet()); - authorizedDefaultModelIds.retainAll(authorizedModels); - - return authorizedDefaultModelIds; - } - - private List getAuthorizedDefaultModelsObjects(ElasticInferenceServiceAuthorization auth) { - var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); - + private List getAuthorizedDefaultModelsObjects(Set authorizedDefaultModelIds) { var authorizedModels = new ArrayList(); for (var id : authorizedDefaultModelIds) { var modelConfig = defaultModelsConfigs.get(id); @@ -232,8 +233,39 @@ private List getAuthorizedDefaultModelsObjects(ElasticInfere return authorizedModels; } - // Default for testing - void waitForAuthorizationToComplete(TimeValue waitTime) { + private void handleRevokedDefaultConfigs(Set authorizedDefaultModelIds) { + // if a model was initially returned in the authorization response but is absent, then we'll assume authorization was revoked + var unauthorizedDefaultModelIds = new HashSet<>(defaultModelsConfigs.keySet()); + unauthorizedDefaultModelIds.removeAll(authorizedDefaultModelIds); + + // get all the default inference endpoint ids for the unauthorized model ids + var unauthorizedDefaultInferenceEndpointIds = unauthorizedDefaultModelIds.stream() + .map(defaultModelsConfigs::get) // get all the model configs + .filter(Objects::nonNull) // limit to only non-null + .map(modelConfig -> modelConfig.model.getInferenceEntityId()) // get the inference ids + .collect(Collectors.toSet()); + + var deleteInferenceEndpointsListener = ActionListener.wrap(result -> { + logger.trace(Strings.format("Successfully revoked access to default inference endpoint IDs: %s", unauthorizedDefaultInferenceEndpointIds)); + authorizationCompletedLatch.countDown(); + }, e -> { + logger.warn( + Strings.format("Failed to revoke access to default inference endpoint IDs: %s, error: %s", unauthorizedDefaultInferenceEndpointIds, e) + ); + authorizationCompletedLatch.countDown(); + }); + + getServiceComponents().threadPool() + .executor(UTILITY_THREAD_POOL_NAME) + .execute(() -> modelRegistry.removeDefaultConfigs(unauthorizedDefaultInferenceEndpointIds, deleteInferenceEndpointsListener)); + } + + /** + * Waits the specified amount of time for the authorization call to complete. This is mainly to make testing easier.
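
Illustrative sketch (not part of the patch): the revocation logic in handleRevokedDefaultConfigs above is plain set arithmetic; start from every known default model id, subtract what the latest authorization response still grants, and treat the remainder as revoked. A self-contained rendering with invented ids follows.

import java.util.HashSet;
import java.util.Set;

public class RevokedDefaultsSketch {
    public static void main(String[] args) {
        Set<String> defaultModelIds = Set.of("model-a", "model-b", "model-c");
        Set<String> authorizedModelIds = Set.of("model-a", "model-c"); // from the auth response

        Set<String> revoked = new HashSet<>(defaultModelIds); // copy before mutating
        revoked.removeAll(authorizedModelIds);

        System.out.println(revoked); // [model-b]: its default endpoint gets deleted
    }
}
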
+ * @param waitTime the max time to wait + * @throws IllegalStateException if the wait time is exceeded or the call receives an {@link InterruptedException} + */ + public void waitForAuthorizationToComplete(TimeValue waitTime) { try { if (authorizationCompletedLatch.await(waitTime.getSeconds(), TimeUnit.SECONDS) == false) { throw new IllegalStateException("The wait time has expired for authorization to complete."); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index 3cc7e0c6c2b53..4591418419ded 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import java.io.IOException; @@ -20,15 +19,6 @@ public CustomElandInternalServiceSettings(ElasticsearchInternalServiceSettings o super(other); } - public CustomElandInternalServiceSettings( - Integer numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); - } - public CustomElandInternalServiceSettings(StreamInput in) throws IOException { super(in); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java index 133be5e2b7623..d5f4143b65d36 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.SimilarityMeasure; @@ -105,33 +106,17 @@ private static CommonFields commonFieldsFromMap(Map map, Validat private final SimilarityMeasure similarityMeasure; private final DenseVectorFieldMapper.ElementType elementType; - public CustomElandInternalTextEmbeddingServiceSettings( - int numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - this( - numAllocations, - numThreads, - modelId, - adaptiveAllocationsSettings, - null, - SimilarityMeasure.COSINE, - DenseVectorFieldMapper.ElementType.FLOAT - ); - } - - public CustomElandInternalTextEmbeddingServiceSettings( + CustomElandInternalTextEmbeddingServiceSettings( int numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings 
adaptiveAllocationsSettings, + @Nullable String deploymentId, Integer dimensions, SimilarityMeasure similarityMeasure, DenseVectorFieldMapper.ElementType elementType ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, deploymentId); this.dimensions = dimensions; this.similarityMeasure = Objects.requireNonNull(similarityMeasure); this.elementType = Objects.requireNonNull(elementType); @@ -159,7 +144,8 @@ private CustomElandInternalTextEmbeddingServiceSettings(CommonFields commonField commonFields.internalServiceSettings.getNumAllocations(), commonFields.internalServiceSettings.getNumThreads(), commonFields.internalServiceSettings.modelId(), - commonFields.internalServiceSettings.getAdaptiveAllocationsSettings() + commonFields.internalServiceSettings.getAdaptiveAllocationsSettings(), + commonFields.internalServiceSettings.getDeploymentId() ); this.dimensions = dimensions; similarityMeasure = commonFields.similarityMeasure; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java index 316dc092e03c7..2b7904e615682 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java @@ -14,21 +14,27 @@ import java.io.IOException; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.RERANKER_ID; + public class ElasticRerankerServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "elastic_reranker_service_settings"; + public static ElasticRerankerServiceSettings defaultEndpointSettings() { + return new ElasticRerankerServiceSettings(null, 1, RERANKER_ID, new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32)); + } + public ElasticRerankerServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public ElasticRerankerServiceSettings( + private ElasticRerankerServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null); } public ElasticRerankerServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 9dfa21a323c33..ddc5e3e1aa36c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.LazyInitializable;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
@@ -33,13 +34,13 @@ import org.elasticsearch.inference.UnifiedCompletionRequest;
 import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
 import org.elasticsearch.xpack.core.inference.results.RankedDocsResults;
 import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
 import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction;
 import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction;
 import org.elasticsearch.xpack.core.ml.action.InferModelAction;
-import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings;
 import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats;
 import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults;
 import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults;
@@ -110,8 +111,11 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalService
     private static final Logger logger = LogManager.getLogger(ElasticsearchInternalService.class);
     private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(ElasticsearchInternalService.class);
 
+    private final Settings settings;
+
     public ElasticsearchInternalService(InferenceServiceExtension.InferenceServiceFactoryContext context) {
         super(context);
+        this.settings = context.settings();
     }
 
     // for testing
@@ -120,6 +124,7 @@ public ElasticsearchInternalService(InferenceServiceExtension.InferenceServiceFactoryContext
         Consumer<ActionListener<PreferredModelVariant>> platformArch
     ) {
         super(context, platformArch);
+        this.settings = context.settings();
     }
 
     @Override
@@ -562,6 +567,7 @@ private static CustomElandEmbeddingModel updateModelWithEmbeddingDetails(CustomElandEmbeddingModel
             model.getServiceSettings().getNumThreads(),
             model.getServiceSettings().modelId(),
             model.getServiceSettings().getAdaptiveAllocationsSettings(),
+            model.getServiceSettings().getDeploymentId(),
             embeddingSize,
             model.getServiceSettings().similarity(),
             model.getServiceSettings().elementType()
@@ -837,18 +843,26 @@ public List<DefaultConfigId> defaultConfigIds() {
 
     @Override
     public void updateModelsWithDynamicFields(List<Model> models, ActionListener<List<Model>> listener) {
-
         if (models.isEmpty()) {
             listener.onResponse(models);
             return;
         }
-        var modelsByDeploymentIds = new HashMap<String, ElasticsearchInternalModel>();
+
+        // if ML is disabled, do not update Deployment Stats (there won't be changes)
+        if (XPackSettings.MACHINE_LEARNING_ENABLED.get(settings) == false) {
+            listener.onResponse(models);
+            return;
+        }
+
+        var modelsByDeploymentIds = new HashMap<String, List<ElasticsearchInternalModel>>();
         for (var model : models) {
             assert model instanceof ElasticsearchInternalModel;
             if (model instanceof ElasticsearchInternalModel esModel) {
-                modelsByDeploymentIds.put(esModel.mlNodeDeploymentId(), esModel);
+                modelsByDeploymentIds.merge(esModel.mlNodeDeploymentId(), new ArrayList<>(List.of(esModel)), (a, b) -> {
+                    a.addAll(b);
+                    return a;
+                });
             } else {
                 listener.onFailure(
                     new ElasticsearchStatusException(
@@ -867,10 +881,13 @@ public void updateModelsWithDynamicFields(List<Model> models, ActionListener<List<Model>> listener) {
             for (var deploymentStats : stats.getStats().results()) {
-                var model = modelsByDeploymentIds.get(deploymentStats.getDeploymentId());
-                model.updateNumAllocations(deploymentStats.getNumberOfAllocations());
+                var modelsForDeploymentId =
modelsByDeploymentIds.get(deploymentStats.getDeploymentId()); + modelsForDeploymentId.forEach(model -> model.updateNumAllocations(deploymentStats.getNumberOfAllocations())); } - listener.onResponse(new ArrayList<>(modelsByDeploymentIds.values())); + var updatedModels = new ArrayList(); + modelsByDeploymentIds.values().forEach(updatedModels::addAll); + + listener.onResponse(updatedModels); }, e -> { logger.warn("Get deployment stats failed, cannot update the endpoint's number of allocations", e); // continue with the original response @@ -903,12 +920,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, NAME, - new ElserInternalServiceSettings( - null, - 1, - useLinuxOptimizedModel ? ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, - new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) - ), + ElserInternalServiceSettings.defaultEndpointSettings(useLinuxOptimizedModel), ElserMlNodeTaskSettings.DEFAULT, ChunkingSettingsBuilder.DEFAULT_SETTINGS ); @@ -916,19 +928,14 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { DEFAULT_E5_ID, TaskType.TEXT_EMBEDDING, NAME, - new MultilingualE5SmallInternalServiceSettings( - null, - 1, - useLinuxOptimizedModel ? MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, - new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) - ), + MultilingualE5SmallInternalServiceSettings.defaultEndpointSettings(useLinuxOptimizedModel), ChunkingSettingsBuilder.DEFAULT_SETTINGS ); var defaultRerank = new ElasticRerankerModel( DEFAULT_RERANK_ID, TaskType.RERANK, NAME, - new ElasticRerankerServiceSettings(null, 1, RERANKER_ID, new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32)), + ElasticRerankerServiceSettings.defaultEndpointSettings(), RerankTaskSettings.DEFAULT_SETTINGS ); return List.of(defaultElser, defaultE5, defaultRerank); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index 244108edc3dd4..98730f33d10f9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.ToXContentObject; @@ -108,25 +109,12 @@ protected static ElasticsearchInternalServiceSettings.Builder fromMap( .setDeploymentId(deploymentId); } - public ElasticsearchInternalServiceSettings( - Integer numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - this.numAllocations = numAllocations; - this.numThreads = numThreads; - this.modelId = Objects.requireNonNull(modelId); - this.adaptiveAllocationsSettings = adaptiveAllocationsSettings; - this.deploymentId = null; - } - public ElasticsearchInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings, - String 
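// The updateModelsWithDynamicFields hunk above now keeps a list of models per
// deployment ID, since several inference endpoints can share one ML deployment.
// A minimal, self-contained sketch of that merge-based grouping, with Endpoint
// as a hypothetical stand-in for ElasticsearchInternalModel:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class GroupByDeployment {
    record Endpoint(String deploymentId, String inferenceId) {}

    static Map<String, List<Endpoint>> group(List<Endpoint> endpoints) {
        var byDeployment = new HashMap<String, List<Endpoint>>();
        for (var endpoint : endpoints) {
            // The first endpoint for a deployment seeds a mutable list; later ones append to it.
            byDeployment.merge(endpoint.deploymentId(), new ArrayList<>(List.of(endpoint)), (existing, incoming) -> {
                existing.addAll(incoming);
                return existing;
            });
        }
        return byDeployment;
    }
}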
deploymentId + @Nullable String deploymentId ) { this.numAllocations = numAllocations; this.numThreads = numThreads; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java index da9164bf3f288..b94b9feb8a049 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java @@ -9,14 +9,14 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import java.io.IOException; -import java.util.Arrays; -import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL_LINUX_X86; public class ElserInternalServiceSettings extends ElasticsearchInternalServiceSettings { @@ -26,37 +26,26 @@ public static MinimalServiceSettings minimalServiceSettings() { return MinimalServiceSettings.sparseEmbedding(); } - public static Builder fromRequestMap(Map map) { - ValidationException validationException = new ValidationException(); - var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException); - - String modelId = baseSettings.getModelId(); - if (modelId != null && ElserModels.isValidModel(modelId) == false) { - var ve = new ValidationException(); - ve.addValidationError( - "Unknown ELSER model ID [" + modelId + "]. Valid models are " + Arrays.toString(ElserModels.VALID_ELSER_MODEL_IDS.toArray()) - ); - throw ve; - } - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - return baseSettings; + public static ElserInternalServiceSettings defaultEndpointSettings(boolean useLinuxOptimizedModel) { + return new ElserInternalServiceSettings( + null, + 1, + useLinuxOptimizedModel ? 
ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) + ); } public ElserInternalServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public ElserInternalServiceSettings( + private ElserInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - this(new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings)); + this(new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null)); } public ElserInternalServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 317cc48172fca..45d52d3c8deaa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -18,6 +18,9 @@ import java.util.Arrays; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86; + public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "multilingual_e5_small_service_settings"; @@ -29,17 +32,26 @@ public static MinimalServiceSettings minimalServiceSettings() { return MinimalServiceSettings.textEmbedding(DIMENSIONS, SIMILARITY, DenseVectorFieldMapper.ElementType.FLOAT); } + public static MultilingualE5SmallInternalServiceSettings defaultEndpointSettings(boolean useLinuxOptimizedModel) { + return new MultilingualE5SmallInternalServiceSettings( + null, + 1, + useLinuxOptimizedModel ? 
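// ELSER, E5 and the reranker now share one shape: the allocation-tuning
// constructor becomes private or package-private, and a defaultEndpointSettings(...)
// factory selects the platform-specific model ID and enables adaptive allocations
// between 0 and 32. A generic sketch of that shape; all names here are
// illustrative, not the plugin's API:

final class DefaultEndpointSettingsSketch {
    private final String modelId;
    private final int minAllocations;
    private final int maxAllocations;

    // Private so callers must go through the factory below.
    private DefaultEndpointSettingsSketch(String modelId, int minAllocations, int maxAllocations) {
        this.modelId = modelId;
        this.minAllocations = minAllocations;
        this.maxAllocations = maxAllocations;
    }

    static DefaultEndpointSettingsSketch defaultEndpointSettings(boolean useLinuxOptimizedModel, String linuxModelId, String portableModelId) {
        return new DefaultEndpointSettingsSketch(useLinuxOptimizedModel ? linuxModelId : portableModelId, 0, 32);
    }
}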
MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) + ); + } + public MultilingualE5SmallInternalServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public MultilingualE5SmallInternalServiceSettings( + MultilingualE5SmallInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null); } public MultilingualE5SmallInternalServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index 55397b2398d39..3e921f669e864 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -327,6 +327,8 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + // TODO whether the model ID is required or not depends on the task type + // For rerank it is optional, for text_embedding it is required configurationMap.put( MODEL_ID, new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("ID of the LLM you're using.") diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java index 4f0b425cdaa51..09706f70e3684 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java @@ -12,6 +12,7 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionVisitor; @@ -38,6 +39,12 @@ public IbmWatsonxModel(IbmWatsonxModel model, ServiceSettings serviceSettings) { rateLimitServiceSettings = model.rateLimitServiceSettings(); } + public IbmWatsonxModel(IbmWatsonxModel model, TaskSettings taskSettings) { + super(model, taskSettings); + + rateLimitServiceSettings = model.rateLimitServiceSettings(); + } + public abstract ExecutableAction accept(IbmWatsonxActionVisitor creator, Map taskSettings, InputType inputType); public IbmWatsonxRateLimitServiceSettings rateLimitServiceSettings() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index 477225f00d22b..3fa423c2dae19 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder; import java.util.EnumSet; @@ -138,6 +139,15 @@ private static IbmWatsonxModel createModel( secretSettings, context ); + case RERANK -> new IbmWatsonxRerankModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java new file mode 100644 index 0000000000000..cb4c509d88c2b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.apache.http.client.utils.URIBuilder; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxModel; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.ML; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.RERANKS; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.TEXT; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.V1; + +public class IbmWatsonxRerankModel extends IbmWatsonxModel { + public static IbmWatsonxRerankModel of(IbmWatsonxRerankModel model, Map taskSettings) { + var requestTaskSettings = IbmWatsonxRerankTaskSettings.fromMap(taskSettings); + return new IbmWatsonxRerankModel(model, IbmWatsonxRerankTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public IbmWatsonxRerankModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets, + ConfigurationParseContext context + ) { + this( + modelId, + taskType, + service, 
+            IbmWatsonxRerankServiceSettings.fromMap(serviceSettings, context),
+            IbmWatsonxRerankTaskSettings.fromMap(taskSettings),
+            DefaultSecretSettings.fromMap(secrets)
+        );
+    }
+
+    // should only be used for testing
+    IbmWatsonxRerankModel(
+        String modelId,
+        TaskType taskType,
+        String service,
+        IbmWatsonxRerankServiceSettings serviceSettings,
+        IbmWatsonxRerankTaskSettings taskSettings,
+        @Nullable DefaultSecretSettings secretSettings
+    ) {
+        super(
+            new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings),
+            new ModelSecrets(secretSettings),
+            serviceSettings
+        );
+    }
+
+    private IbmWatsonxRerankModel(IbmWatsonxRerankModel model, IbmWatsonxRerankTaskSettings taskSettings) {
+        super(model, taskSettings);
+    }
+
+    @Override
+    public IbmWatsonxRerankServiceSettings getServiceSettings() {
+        return (IbmWatsonxRerankServiceSettings) super.getServiceSettings();
+    }
+
+    @Override
+    public IbmWatsonxRerankTaskSettings getTaskSettings() {
+        return (IbmWatsonxRerankTaskSettings) super.getTaskSettings();
+    }
+
+    @Override
+    public DefaultSecretSettings getSecretSettings() {
+        return (DefaultSecretSettings) super.getSecretSettings();
+    }
+
+    public URI uri() {
+        URI uri;
+        try {
+            uri = buildUri(this.getServiceSettings().uri().toString(), this.getServiceSettings().apiVersion());
+        } catch (URISyntaxException e) {
+            throw new RuntimeException(e);
+        }
+
+        return uri;
+    }
+
+    /**
+     * Accepts a visitor to create an executable action. The returned action will not return documents in the response.
+     * @param visitor the visitor that builds the executable rerank action
+     * @param taskSettings the per-request task settings that may override the model's stored task settings
+     * @param inputType ignored for rerank task
+     * @return the rerank action
+     */
+    @Override
+    public ExecutableAction accept(IbmWatsonxActionVisitor visitor, Map<String, Object> taskSettings, InputType inputType) {
+        return visitor.create(this, taskSettings);
+    }
+
+    public static URI buildUri(String uri, String apiVersion) throws URISyntaxException {
+        return new URIBuilder().setScheme("https")
+            .setHost(uri)
+            .setPathSegments(ML, V1, TEXT, RERANKS)
+            .setParameter("version", apiVersion)
+            .build();
+    }
+}
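Assuming the IbmWatsonxUtils constants above are the lowercase path segments their names suggest ("ml", "v1", "text", "reranks"), buildUri produces a URL of the following shape; the host and version value in this sketch are illustrative only:

import org.apache.http.client.utils.URIBuilder;

import java.net.URI;
import java.net.URISyntaxException;

public class WatsonxRerankUriSketch {
    public static void main(String[] args) throws URISyntaxException {
        URI uri = new URIBuilder().setScheme("https")
            .setHost("us-south.ml.cloud.ibm.com")   // hypothetical Watsonx host
            .setPathSegments("ml", "v1", "text", "reranks")
            .setParameter("version", "2024-05-01")  // hypothetical API version
            .build();
        System.out.println(uri); // https://us-south.ml.cloud.ibm.com/ml/v1/text/reranks?version=2024-05-01
    }
}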
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java
new file mode 100644
index 0000000000000..969622f9ba54f
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.ValidationException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.inference.ModelConfigurations;
+import org.elasticsearch.inference.ServiceSettings;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.inference.services.ConfigurationParseContext;
+import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxRateLimitServiceSettings;
+import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxService;
+import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject;
+import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID;
+import static org.elasticsearch.xpack.inference.services.ServiceFields.URL;
+import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri;
+import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri;
+import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString;
+import static org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxServiceFields.API_VERSION;
+import static org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxServiceFields.PROJECT_ID;
+
+public class IbmWatsonxRerankServiceSettings extends FilteredXContentObject implements ServiceSettings, IbmWatsonxRateLimitServiceSettings {
+    public static final String NAME = "ibm_watsonx_rerank_service_settings";
+
+    /**
+     * Rate limits are defined at
+     * Watson Machine Learning plans.
+     * For the Lite plan, you have 120 requests per minute.
+ */ + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120); + + public static IbmWatsonxRerankServiceSettings fromMap(Map map, ConfigurationParseContext context) { + ValidationException validationException = new ValidationException(); + + String url = extractRequiredString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException); + + String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String projectId = extractRequiredString(map, PROJECT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + IbmWatsonxService.NAME, + context + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new IbmWatsonxRerankServiceSettings(uri, apiVersion, modelId, projectId, rateLimitSettings); + } + + private final URI uri; + + private final String apiVersion; + + private final String modelId; + + private final String projectId; + + private final RateLimitSettings rateLimitSettings; + + public IbmWatsonxRerankServiceSettings( + URI uri, + String apiVersion, + String modelId, + String projectId, + @Nullable RateLimitSettings rateLimitSettings + ) { + this.uri = uri; + this.apiVersion = apiVersion; + this.projectId = projectId; + this.modelId = modelId; + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + public IbmWatsonxRerankServiceSettings(StreamInput in) throws IOException { + this.uri = createUri(in.readString()); + this.apiVersion = in.readString(); + this.modelId = in.readString(); + this.projectId = in.readString(); + this.rateLimitSettings = new RateLimitSettings(in); + + } + + public URI uri() { + return uri; + } + + public String apiVersion() { + return apiVersion; + } + + @Override + public String modelId() { + return modelId; + } + + public String projectId() { + return projectId; + } + + @Override + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(URL, uri.toString()); + + builder.field(API_VERSION, apiVersion); + + builder.field(MODEL_ID, modelId); + + builder.field(PROJECT_ID, projectId); + + rateLimitSettings.toXContent(builder, params); + + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_IBM_WATSONX_RERANK_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uri.toString()); + out.writeString(apiVersion); + + out.writeString(modelId); + out.writeString(projectId); + + rateLimitSettings.writeTo(out); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if 
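// fromMap above follows an accumulate-then-throw validation pattern: every
// missing or invalid field records an error on a single ValidationException,
// which is thrown only once all fields have been inspected, so the caller sees
// every problem at once. A standalone sketch of that flow, with a simplified
// stand-in for the real ValidationException:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class AccumulatingValidationSketch {
    static class Validation extends RuntimeException {
        final List<String> errors = new ArrayList<>();
    }

    static String parseUrl(Map<String, Object> serviceSettings) {
        var validation = new Validation();
        Object url = serviceSettings.get("url");
        if (url == null) {
            validation.errors.add("[url] must be provided");
        }
        if (serviceSettings.get("api_version") == null) {
            validation.errors.add("[api_version] must be provided");
        }
        if (validation.errors.isEmpty() == false) {
            throw validation; // reports all problems together, not just the first
        }
        return (String) url;
    }
}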
(object == null || getClass() != object.getClass()) return false; + IbmWatsonxRerankServiceSettings that = (IbmWatsonxRerankServiceSettings) object; + return Objects.equals(uri, that.uri) + && Objects.equals(apiVersion, that.apiVersion) + && Objects.equals(modelId, that.modelId) + && Objects.equals(projectId, that.projectId) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(uri, apiVersion, modelId, projectId, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java new file mode 100644 index 0000000000000..12f4b8f6fa33e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; + +public class IbmWatsonxRerankTaskSettings implements TaskSettings { + + public static final String NAME = "ibm_watsonx_rerank_task_settings"; + public static final String RETURN_DOCUMENTS = "return_documents"; + public static final String TOP_N_DOCS_ONLY = "top_n"; + public static final String TRUNCATE_INPUT_TOKENS = "truncate_input_tokens"; + + static final IbmWatsonxRerankTaskSettings EMPTY_SETTINGS = new IbmWatsonxRerankTaskSettings(null, null, null); + + public static IbmWatsonxRerankTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + if (map == null || map.isEmpty()) { + return EMPTY_SETTINGS; + } + + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, validationException); + Integer topNDocumentsOnly = extractOptionalPositiveInteger( + map, + TOP_N_DOCS_ONLY, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + Integer truncateInputTokens = extractOptionalPositiveInteger( + map, + TRUNCATE_INPUT_TOKENS, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return of(topNDocumentsOnly, returnDocuments, truncateInputTokens); + } + + /** + * Creates a new {@link IbmWatsonxRerankTaskSettings} + * by preferring 
non-null fields from the request settings over the original settings.
+     *
+     * @param originalSettings    the settings stored as part of the inference entity configuration
+     * @param requestTaskSettings the settings passed in within the task_settings field of the request
+     * @return a constructed {@link IbmWatsonxRerankTaskSettings}
+     */
+    public static IbmWatsonxRerankTaskSettings of(
+        IbmWatsonxRerankTaskSettings originalSettings,
+        IbmWatsonxRerankTaskSettings requestTaskSettings
+    ) {
+        return new IbmWatsonxRerankTaskSettings(
+            requestTaskSettings.getTopNDocumentsOnly() != null
+                ? requestTaskSettings.getTopNDocumentsOnly()
+                : originalSettings.getTopNDocumentsOnly(),
+            requestTaskSettings.getReturnDocuments() != null
+                ? requestTaskSettings.getReturnDocuments()
+                : originalSettings.getReturnDocuments(),
+            requestTaskSettings.getTruncateInputTokens() != null
+                ? requestTaskSettings.getTruncateInputTokens()
+                : originalSettings.getTruncateInputTokens()
+        );
+    }
+
+    public static IbmWatsonxRerankTaskSettings of(Integer topNDocumentsOnly, Boolean returnDocuments, Integer truncateInputTokens) {
+        return new IbmWatsonxRerankTaskSettings(topNDocumentsOnly, returnDocuments, truncateInputTokens);
+    }
+
+    private final Integer topNDocumentsOnly;
+    private final Boolean returnDocuments;
+    private final Integer truncateInputTokens;
+
+    public IbmWatsonxRerankTaskSettings(StreamInput in) throws IOException {
+        this(in.readOptionalInt(), in.readOptionalBoolean(), in.readOptionalInt());
+    }
+
+    public IbmWatsonxRerankTaskSettings(
+        @Nullable Integer topNDocumentsOnly,
+        @Nullable Boolean doReturnDocuments,
+        @Nullable Integer truncateInputTokens
+    ) {
+        this.topNDocumentsOnly = topNDocumentsOnly;
+        this.returnDocuments = doReturnDocuments;
+        this.truncateInputTokens = truncateInputTokens;
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return topNDocumentsOnly == null && returnDocuments == null && truncateInputTokens == null;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        if (topNDocumentsOnly != null) {
+            builder.field(TOP_N_DOCS_ONLY, topNDocumentsOnly);
+        }
+        if (returnDocuments != null) {
+            builder.field(RETURN_DOCUMENTS, returnDocuments);
+        }
+        if (truncateInputTokens != null) {
+            builder.field(TRUNCATE_INPUT_TOKENS, truncateInputTokens);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public TransportVersion getMinimalSupportedVersion() {
+        return TransportVersions.ML_INFERENCE_IBM_WATSONX_RERANK_ADDED;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeOptionalInt(topNDocumentsOnly);
+        out.writeOptionalBoolean(returnDocuments);
+        out.writeOptionalInt(truncateInputTokens);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        IbmWatsonxRerankTaskSettings that = (IbmWatsonxRerankTaskSettings) o;
+        return Objects.equals(returnDocuments, that.returnDocuments)
+            && Objects.equals(topNDocumentsOnly, that.topNDocumentsOnly)
+            && Objects.equals(truncateInputTokens, that.truncateInputTokens);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(returnDocuments, topNDocumentsOnly, truncateInputTokens);
+    }
+
+    public static String invalidInputTypeMessage(InputType inputType) {
+        return Strings.format("received invalid input type value [%s]", inputType.toString());
+    }
+
+    public Boolean
getDoesReturnDocuments() { + return returnDocuments; + } + + public Integer getTopNDocumentsOnly() { + return topNDocumentsOnly; + } + + public Boolean getReturnDocuments() { + return returnDocuments; + } + + public Integer getTruncateInputTokens() { + return truncateInputTokens; + } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + IbmWatsonxRerankTaskSettings updatedSettings = IbmWatsonxRerankTaskSettings.fromMap(new HashMap<>(newSettings)); + return IbmWatsonxRerankTaskSettings.of(this, updatedSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java index 7ad70fc88054d..37add1e264704 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.SettingsConfiguration; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; @@ -49,6 +50,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -339,6 +341,33 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + configurationMap.put( + JinaAIServiceSettings.MODEL_ID, + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." + ) + .setLabel("Model ID") + .setRequired(true) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.STRING) + .build() + ); + + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://api.jina.ai/redoc#tag/embeddings/operation/create_embedding_v1_embeddings_post." 
+ ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 0ce5bc801b59f..8a420a62d1bce 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -56,8 +56,8 @@ import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; import static org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator.COMPLETION_ERROR_PREFIX; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; -import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -440,19 +440,16 @@ public static InferenceServiceConfiguration get() { ); configurationMap.put( - URL, - new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES_FOR_SERVICES_API).setDefaultValue( - "https://api.openai.com/v1/chat/completions" + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://platform.openai.com/docs/api-reference/embeddings/create#embeddings-create-dimensions." ) - .setDescription( - "The OpenAI API endpoint URL. For more information on the URL, refer to the " - + "https://platform.openai.com/docs/api-reference." 
- ) - .setLabel("URL") - .setRequired(true) + .setLabel("Dimensions") + .setRequired(false) .setSensitive(false) .setUpdatable(false) - .setType(SettingsConfigurationFieldType.STRING) + .setType(SettingsConfigurationFieldType.INTEGER) .build() ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java index e4de3d6beb800..1c4306c4edd46 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java @@ -29,7 +29,7 @@ public static ModelValidator buildModelValidator(TaskType taskType) { case SPARSE_EMBEDDING, RERANK, ANY -> { return new SimpleModelValidator(new SimpleServiceIntegrationValidator()); } - default -> throw new IllegalArgumentException(Strings.format("Can't validate inference model of for task type %s ", taskType)); + default -> throw new IllegalArgumentException(Strings.format("Can't validate inference model for task type %s", taskType)); } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java index 4fa0a1ec49c74..56966ca40c478 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.inference.action; -import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.node.NodeClient; @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; -import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; import org.junit.Before; @@ -47,9 +47,9 @@ import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.assertArg; -import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -64,7 +64,7 @@ public abstract class BaseTransportInferenceActionTestCase createAction( InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, StreamingTaskManager streamingTaskManager, - InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, NodeClient nodeClient, 
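// The test changes below swap mock(ActionListener.class) for a spy over a real
// no-op listener: completing the listener then runs genuine (empty) methods
// instead of Mockito's default answers, while verify(...) still observes the
// calls. A sketch of that helper, with a generic payload type standing in for
// InferenceAction.Response:

import org.elasticsearch.action.ActionListener;

import static org.mockito.Mockito.spy;

class ListenerSpies {
    static <T> ActionListener<T> noOpSpy() {
        return spy(new ActionListener<T>() {
            @Override
            public void onResponse(T response) {}

            @Override
            public void onFailure(Exception e) {}
        });
    }
}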
ThreadPool threadPool ); @@ -127,8 +127,7 @@ public void testMetricsAfterModelRegistryError() { return null; }).when(modelRegistry).getModelWithSecrets(any(), any()); - var listener = doExecute(taskType); - verify(listener).onFailure(same(expectedException)); + doExecute(taskType); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), nullValue()); @@ -148,7 +147,13 @@ protected ActionListener doExecute(TaskType taskType, when(request.getInferenceEntityId()).thenReturn(inferenceId); when(request.getTaskType()).thenReturn(taskType); when(request.isStreaming()).thenReturn(stream); - ActionListener listener = mock(); + ActionListener listener = spy(new ActionListener<>() { + @Override + public void onResponse(InferenceAction.Response o) {} + + @Override + public void onFailure(Exception e) {} + }); action.doExecute(mock(), request, listener); return listener; } @@ -161,9 +166,9 @@ public void testMetricsAfterMissingService() { var listener = doExecute(taskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(ElasticsearchException.class)); assertThat(e.getMessage(), is("Unknown service [" + serviceId + "] for model [" + inferenceId + "]. ")); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((ElasticsearchException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); @@ -192,7 +197,7 @@ public void testMetricsAfterUnknownTaskType() { var listener = doExecute(requestTaskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(ElasticsearchException.class)); assertThat( e.getMessage(), is( @@ -203,7 +208,7 @@ public void testMetricsAfterUnknownTaskType() { + "]" ) ); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((ElasticsearchException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); @@ -221,7 +226,6 @@ public void testMetricsAfterInferError() { var listener = doExecute(taskType); - verify(listener).onFailure(same(expectedException)); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); assertThat(attributes.get("task_type"), is(taskType.toString())); @@ -239,8 +243,8 @@ public void testMetricsAfterStreamUnsupported() { var listener = doExecute(taskType, true); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); - var ese = (ElasticsearchStatusException) e; + assertThat(e, isA(ElasticsearchException.class)); + var ese = (ElasticsearchException) e; assertThat(ese.getMessage(), is("Streaming is not allowed for service [" + serviceId + "].")); assertThat(ese.status(), is(expectedStatus)); })); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointActionTests.java new file mode 100644 index 0000000000000..a640e64c2022d --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointActionTests.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.MinimalServiceSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.junit.After; +import org.junit.Before; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class TransportDeleteInferenceEndpointActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30); + + private TransportDeleteInferenceEndpointAction action; + private ThreadPool threadPool; + private ModelRegistry modelRegistry; + + @Before + public void setUp() throws Exception { + super.setUp(); + modelRegistry = new ModelRegistry(mock(Client.class)); + threadPool = createThreadPool(inferenceUtilityPool()); + action = new TransportDeleteInferenceEndpointAction( + mock(TransportService.class), + mock(ClusterService.class), + threadPool, + mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), + modelRegistry, + mock(InferenceServiceRegistry.class) + ); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + terminate(threadPool); + } + + public void testFailsToDelete_ADefaultEndpoint() { + modelRegistry.addDefaultIds( + new InferenceService.DefaultConfigId("model-id", MinimalServiceSettings.chatCompletion(), mock(InferenceService.class)) + ); + + var listener = new PlainActionFuture(); + + action.masterOperation( + mock(Task.class), + new DeleteInferenceEndpointAction.Request("model-id", TaskType.CHAT_COMPLETION, true, false), + mock(ClusterState.class), + listener + ); + + var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + is("[model-id] is a reserved inference endpoint. 
" + "Cannot delete a reserved inference endpoint.") + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxyTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxyTests.java new file mode 100644 index 0000000000000..a9e6ec55a6224 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxyTests.java @@ -0,0 +1,191 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; +import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.junit.After; +import org.junit.Before; + +import java.util.Collections; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class TransportInferenceActionProxyTests extends ESTestCase { + private Client client; + private ThreadPool threadPool; + private TransportInferenceActionProxy action; + private ModelRegistry modelRegistry; + + @Before + public void setUp() throws Exception { + super.setUp(); + client = mock(Client.class); + threadPool = new TestThreadPool("test"); + when(client.threadPool()).thenReturn(threadPool); + modelRegistry = mock(ModelRegistry.class); + + action = new TransportInferenceActionProxy(mock(TransportService.class), mock(ActionFilters.class), modelRegistry, client); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + terminate(threadPool); + } + + public void testExecutesAUnifiedCompletionRequest_WhenTaskTypeIsChatCompletion_InRequest() { + String requestJson = """ + { + "model": "gpt-4o", + "messages": [ + { + "role": "user", + "content": [ + { + "text": "some text", + "type": "string" + } + ] + } + ] + } + """; + + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) mock(ActionListener.class); + var request = new InferenceActionProxy.Request( + TaskType.CHAT_COMPLETION, + "id", + new BytesArray(requestJson), + XContentType.JSON, + TimeValue.ONE_MINUTE, + true + ); + + action.doExecute(mock(Task.class), request, listener); + + verify(client, 
times(1)).execute(eq(UnifiedCompletionAction.INSTANCE), any(), any()); + } + + public void testExecutesAUnifiedCompletionRequest_WhenTaskTypeIsChatCompletion_FromStorage() { + String requestJson = """ + { + "model": "gpt-4o", + "messages": [ + { + "role": "user", + "content": [ + { + "text": "some text", + "type": "string" + } + ] + } + ] + } + """; + + doAnswer(invocation -> { + ActionListener listener = invocation.getArgument(1); + listener.onResponse( + new UnparsedModel("id", TaskType.CHAT_COMPLETION, "service", Collections.emptyMap(), Collections.emptyMap()) + ); + + return Void.TYPE; + }).when(modelRegistry).getModelWithSecrets(any(), any()); + + var listener = new PlainActionFuture(); + var request = new InferenceActionProxy.Request( + TaskType.ANY, + "id", + new BytesArray(requestJson), + XContentType.JSON, + TimeValue.ONE_MINUTE, + true + ); + + action.doExecute(mock(Task.class), request, listener); + + verify(client, times(1)).execute(eq(UnifiedCompletionAction.INSTANCE), any(), any()); + } + + public void testExecutesAnInferenceAction_WhenTaskTypeIsCompletion_InRequest() { + String requestJson = """ + { + "input": ["some text"] + } + """; + + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) mock(ActionListener.class); + var request = new InferenceActionProxy.Request( + TaskType.COMPLETION, + "id", + new BytesArray(requestJson), + XContentType.JSON, + TimeValue.ONE_MINUTE, + true + ); + + action.doExecute(mock(Task.class), request, listener); + + verify(client, times(1)).execute(eq(InferenceAction.INSTANCE), any(), any()); + } + + public void testExecutesAnInferenceAction_WhenTaskTypeIsCompletion_FromStorage() { + String requestJson = """ + { + "input": ["some text"] + } + """; + + doAnswer(invocation -> { + ActionListener listener = invocation.getArgument(1); + listener.onResponse(new UnparsedModel("id", TaskType.COMPLETION, "service", Collections.emptyMap(), Collections.emptyMap())); + + return Void.TYPE; + }).when(modelRegistry).getModelWithSecrets(any(), any()); + + var listener = new PlainActionFuture(); + var request = new InferenceActionProxy.Request( + TaskType.ANY, + "id", + new BytesArray(requestJson), + XContentType.JSON, + TimeValue.ONE_MINUTE, + true + ); + + action.doExecute(mock(Task.class), request, listener); + + verify(client, times(1)).execute(eq(InferenceAction.INSTANCE), any(), any()); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java index e71d15dbe0420..3129f0865a249 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; -import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.common.RateLimitAssignment; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -50,7 +50,7 @@ protected 
BaseTransportInferenceAction createAction( InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, StreamingTaskManager streamingTaskManager, - InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, NodeClient nodeClient, ThreadPool threadPool ) { @@ -77,7 +77,7 @@ public void testNoRerouting_WhenTaskTypeNotSupported() { TaskType unsupportedTaskType = TaskType.COMPLETION; mockService(listener -> listener.onResponse(mock())); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, unsupportedTaskType)).thenReturn(false); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, unsupportedTaskType)).thenReturn(false); var listener = doExecute(unsupportedTaskType); @@ -89,8 +89,8 @@ public void testNoRerouting_WhenTaskTypeNotSupported() { public void testNoRerouting_WhenNoGroupingCalculatedYet() { mockService(listener -> listener.onResponse(mock())); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(null); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(null); var listener = doExecute(taskType); @@ -102,8 +102,8 @@ public void testNoRerouting_WhenNoGroupingCalculatedYet() { public void testNoRerouting_WhenEmptyNodeList() { mockService(listener -> listener.onResponse(mock())); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn( + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn( new RateLimitAssignment(List.of()) ); @@ -120,10 +120,10 @@ public void testRerouting_ToOtherNode() { // The local node is different to the "other-node" responsible for serviceId when(nodeClient.getLocalNodeId()).thenReturn("local-node"); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); // Requests for serviceId are always routed to "other-node" var assignment = new RateLimitAssignment(List.of(otherNode)); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); mockService(listener -> listener.onResponse(mock())); var listener = doExecute(taskType); @@ -141,9 +141,9 @@ public void testRerouting_ToLocalNode_WithoutGoingThroughTransportLayerAgain() { // The local node is the only one responsible for serviceId when(nodeClient.getLocalNodeId()).thenReturn(localNodeId); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); var 
assignment = new RateLimitAssignment(List.of(localNode)); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); mockService(listener -> listener.onResponse(mock())); var listener = doExecute(taskType); @@ -158,9 +158,9 @@ public void testRerouting_HandlesTransportException_FromOtherNode() { when(otherNode.getId()).thenReturn("other-node"); when(nodeClient.getLocalNodeId()).thenReturn("local-node"); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); var assignment = new RateLimitAssignment(List.of(otherNode)); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); mockService(listener -> listener.onResponse(mock())); @@ -173,6 +173,10 @@ public void testRerouting_HandlesTransportException_FromOtherNode() { var listener = doExecute(taskType); + // Verify request was rerouted + verify(transportService).sendRequest(same(otherNode), eq(InferenceAction.NAME), any(), any()); + // Verify local execution didn't happen + verify(listener, never()).onResponse(any()); // Verify exception was propagated from "other-node" to "local-node" verify(listener).onFailure(same(expectedException)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java index 4ed69e5abe537..7dac6a1015aae 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.action; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.InferenceServiceRegistry; @@ -17,8 +16,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; -import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -49,7 +49,7 @@ protected BaseTransportInferenceAction createAc InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, StreamingTaskManager streamingTaskManager, - InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + InferenceServiceRateLimitCalculator inferenceServiceRateLimitCalculator, NodeClient nodeClient, ThreadPool threadPool ) 
{ @@ -61,7 +61,7 @@ protected BaseTransportInferenceAction createAc serviceRegistry, inferenceStats, streamingTaskManager, - inferenceServiceNodeLocalRateLimitCalculator, + inferenceServiceRateLimitCalculator, nodeClient, threadPool ); @@ -81,12 +81,12 @@ public void testThrows_IncompatibleTaskTypeException_WhenUsingATextEmbeddingInfe var listener = doExecute(requestTaskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(UnifiedChatCompletionException.class)); assertThat( e.getMessage(), is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]") ); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((UnifiedChatCompletionException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); @@ -106,12 +106,12 @@ public void testThrows_IncompatibleTaskTypeException_WhenUsingRequestIsAny_Model var listener = doExecute(requestTaskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(UnifiedChatCompletionException.class)); assertThat( e.getMessage(), is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]") ); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((UnifiedChatCompletionException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionRequestTests.java new file mode 100644 index 0000000000000..ab1e1d9c4cb23 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionRequestTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; + +import java.io.IOException; + +public class UpdateInferenceModelActionRequestTests extends AbstractWireSerializingTestCase<UpdateInferenceModelAction.Request> { + + @Override + protected Writeable.Reader<UpdateInferenceModelAction.Request> instanceReader() { + return UpdateInferenceModelAction.Request::new; + } + + @Override + protected UpdateInferenceModelAction.Request createTestInstance() { + return new UpdateInferenceModelAction.Request( + randomAlphaOfLength(5), + randomBytesReference(50), + randomFrom(XContentType.values()), + randomFrom(TaskType.values()), + randomTimeValue() + ); + } + + @Override + protected UpdateInferenceModelAction.Request mutateInstance(UpdateInferenceModelAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables()); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionResponseTests.java new file mode 100644 index 0000000000000..02208511d1484 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionResponseTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; +import org.elasticsearch.xpack.inference.ModelConfigurationsTests; + +import java.io.IOException; + +public class UpdateInferenceModelActionResponseTests extends AbstractWireSerializingTestCase<UpdateInferenceModelAction.Response> { + @Override + protected Writeable.Reader<UpdateInferenceModelAction.Response> instanceReader() { + return UpdateInferenceModelAction.Response::new; + } + + @Override + protected UpdateInferenceModelAction.Response createTestInstance() { + return new UpdateInferenceModelAction.Response(ModelConfigurationsTests.createRandomInstance()); + } + + @Override + protected UpdateInferenceModelAction.Response mutateInstance(UpdateInferenceModelAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables()); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java index f6bc7e5981411..569c380953816 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java @@ -10,61 +10,66 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Set; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.DEFAULT_MAX_NODES_PER_GROUPING; import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0) public class InferenceServiceNodeLocalRateLimitCalculatorTests extends ESIntegTestCase { + private static final Integer RATE_LIMIT_ASSIGNMENT_MAX_WAIT_TIME_IN_SECONDS = 15; + public void setUp() throws Exception { super.setUp(); + assumeTrue( + "If inference_cluster_aware_rate_limiting_feature_flag_enabled=false we'll fall back to " + + "NoopNodeLocalRateLimitCalculator, which shouldn't be tested by this class.", + InferenceAPIClusterAwareRateLimitingFeature.INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG.isEnabled() + ); } - public void
testInitialClusterGrouping_Correct() { + public void testInitialClusterGrouping_Correct() throws Exception { // Start with 2-5 nodes var numNodes = randomIntBetween(2, 5); var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - RateLimitAssignment firstAssignment = null; + var firstCalculator = getCalculatorInstance(internalCluster(), nodeNames.get(0)); + waitForRateLimitingAssignments(firstCalculator); - for (String nodeName : nodeNames) { - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeName); - - // Check first node's assignments - if (firstAssignment == null) { - // Get assignment for a specific service (e.g., EIS) - firstAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); - - assertNotNull(firstAssignment); - // Verify there are assignments for this service - assertFalse(firstAssignment.responsibleNodes().isEmpty()); - } else { - // Verify other nodes see the same assignment - var currentAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); - assertEquals(firstAssignment, currentAssignment); - } + RateLimitAssignment firstAssignment = firstCalculator.getRateLimitAssignment( + ElasticInferenceService.NAME, + TaskType.SPARSE_EMBEDDING + ); + + // Verify that all other nodes land on the same assignment + for (String nodeName : nodeNames.subList(1, nodeNames.size())) { + var calculator = getCalculatorInstance(internalCluster(), nodeName); + waitForRateLimitingAssignments(calculator); + var currentAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); + assertEquals(firstAssignment, currentAssignment); } } - public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws IOException { + public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws Exception { // Start with 3-5 nodes var numNodes = randomIntBetween(3, 5); var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - var nodeLeftInCluster = nodeNames.getFirst(); + var nodeLeftInCluster = nodeNames.get(0); var currentNumberOfNodes = numNodes; // Stop all nodes except one @@ -77,7 +82,8 @@ public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws ensureStableCluster(currentNumberOfNodes); } - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeLeftInCluster); + var calculator = getCalculatorInstance(internalCluster(), nodeLeftInCluster); + waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -93,13 +99,14 @@ public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws } } - public void testGrouping_RespectsMaxNodesPerGroupingLimit() { + public void testGrouping_RespectsMaxNodesPerGroupingLimit() throws Exception { // Start with more nodes possible per grouping var numNodes = DEFAULT_MAX_NODES_PER_GROUPING + randomIntBetween(1, 3); var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var calculator = getCalculatorInstance(internalCluster(), nodeNames.get(0)); + waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -111,13 +118,14 @@ public void 
testGrouping_RespectsMaxNodesPerGroupingLimit() { } } - public void testInitialRateLimitsCalculation_Correct() throws IOException { + public void testInitialRateLimitsCalculation_Correct() throws Exception { // Start with max nodes per grouping (=3) int numNodes = DEFAULT_MAX_NODES_PER_GROUPING; var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var calculator = getCalculatorInstance(internalCluster(), nodeNames.get(0)); + waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -129,7 +137,7 @@ public void testInitialRateLimitsCalculation_Correct() throws IOException { if ((service instanceof SenderService senderService)) { var sender = senderService.getSender(); - if (sender instanceof HttpRequestSender httpSender) { + if (sender instanceof HttpRequestSender) { var assignment = calculator.getRateLimitAssignment(service.name(), TaskType.SPARSE_EMBEDDING); assertNotNull(assignment); @@ -141,13 +149,14 @@ public void testInitialRateLimitsCalculation_Correct() throws IOException { } } - public void testRateLimits_Decrease_OnNodeJoin() { + public void testRateLimits_Decrease_OnNodeJoin() throws Exception { // Start with 2 nodes var initialNodes = 2; var nodeNames = internalCluster().startNodes(initialNodes); ensureStableCluster(initialNodes); - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var calculator = getCalculatorInstance(internalCluster(), nodeNames.get(0)); + waitForRateLimitingAssignments(calculator); for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { var configs = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName); @@ -159,6 +168,7 @@ public void testRateLimits_Decrease_OnNodeJoin() { // Add a new node internalCluster().startNode(); ensureStableCluster(initialNodes + 1); + waitForRateLimitingAssignments(calculator); // Get updated assignments var updatedAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); @@ -169,13 +179,14 @@ public void testRateLimits_Decrease_OnNodeJoin() { } } - public void testRateLimits_Increase_OnNodeLeave() throws IOException { + public void testRateLimits_Increase_OnNodeLeave() throws Exception { // Start with max nodes per grouping (=3) int numNodes = DEFAULT_MAX_NODES_PER_GROUPING; var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var calculator = getCalculatorInstance(internalCluster(), nodeNames.get(0)); + waitForRateLimitingAssignments(calculator); for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { var configs = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName); @@ -188,6 +199,7 @@ public void testRateLimits_Increase_OnNodeLeave() throws IOException { var nodeToRemove = nodeNames.get(numNodes - 1); internalCluster().stopNode(nodeToRemove); ensureStableCluster(numNodes - 1); + waitForRateLimitingAssignments(calculator); // Get updated assignments var updatedAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); @@ -202,4 +214,33 @@ public void testRateLimits_Increase_OnNodeLeave() throws IOException { protected Collection> nodePlugins() { return Arrays.asList(LocalStateInferencePlugin.class); } 
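+ // Shared test helpers: getCalculatorInstance(...) resolves the calculator bound on a node and + // asserts it is the node-local implementation under test; waitForRateLimitingAssignments(...) waits + // until that calculator has published a non-empty rate limit assignment for the EIS sparse embedding task.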
+ + private InferenceServiceNodeLocalRateLimitCalculator getCalculatorInstance(InternalTestCluster internalTestCluster, String nodeName) { + InferenceServiceRateLimitCalculator calculatorInstance = internalTestCluster.getInstance( + InferenceServiceRateLimitCalculator.class, + nodeName + ); + assertThat( + "[" + + InferenceServiceNodeLocalRateLimitCalculatorTests.class.getName() + + "] should use [" + + InferenceServiceNodeLocalRateLimitCalculator.class.getName() + + "] as implementation for [" + + InferenceServiceRateLimitCalculator.class.getName() + + "]. Provided implementation was [" + + calculatorInstance.getClass().getName() + + "].", + calculatorInstance, + instanceOf(InferenceServiceNodeLocalRateLimitCalculator.class) + ); + return (InferenceServiceNodeLocalRateLimitCalculator) calculatorInstance; + } + + private void waitForRateLimitingAssignments(InferenceServiceNodeLocalRateLimitCalculator calculator) throws Exception { + assertBusy(() -> { + var assignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); + assertNotNull(assignment); + assertFalse(assignment.responsibleNodes().isEmpty()); + }, RATE_LIMIT_ASSIGNMENT_MAX_WAIT_TIME_IN_SECONDS, TimeUnit.SECONDS); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java index e1d2ee56733e3..28e182aa2d435 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java @@ -124,7 +124,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOExce assertThat(requestMap.get("input"), instanceOf(List.class)); var inputList = (List) requestMap.get("input"); assertThat(inputList, contains("hello world")); - assertThat(requestMap.get("model_id"), is("my-model-id")); + assertThat(requestMap.get("model"), is("my-model-id")); } } @@ -179,7 +179,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx assertThat(requestMap.get("input"), instanceOf(List.class)); var inputList = (List) requestMap.get("input"); assertThat(inputList, contains("hello world")); - assertThat(requestMap.get("model_id"), is("my-model-id")); + assertThat(requestMap.get("model"), is("my-model-id")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java new file mode 100644 index 0000000000000..4853aa8d2c563 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.elasticsearch.ExceptionsHelper.unwrapCause; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isA; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OpenAiUnifiedChatCompletionResponseHandlerTests extends ESTestCase { + private final OpenAiUnifiedChatCompletionResponseHandler responseHandler = new OpenAiUnifiedChatCompletionResponseHandler( + "chat completions", + (a, b) -> mock() + ); + + public void testFailValidationWithAllFields() throws IOException { + var responseJson = """ + { + "error": { + "type": "not_found_error", + "message": "a message", + "code": "ahh", + "param": "model" + } + } + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"code":"ahh","message":"Received a server error status code for request from inference entity id [abc] status [500]. \ + Error message: [a message]","param":"model","type":"not_found_error"}}""")); + } + + public void testFailValidationWithoutOptionalFields() throws IOException { + var responseJson = """ + { + "error": { + "type": "not_found_error", + "message": "a message" + } + } + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"message":"Received a server error status code for request from inference entity id [abc] status [500]. \ + Error message: [a message]","type":"not_found_error"}}""")); + } + + public void testFailValidationWithInvalidJson() throws IOException { + var responseJson = """ + what? 
this isn't a json + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"code":"bad_request","message":"Received a server error status code for request from inference entity id [abc] status\ + [500]","type":"ErrorResponse"}}""")); + } + + private String invalidResponseJson(String responseJson) throws IOException { + var exception = invalidResponse(responseJson); + assertThat(exception, isA(RetryException.class)); + assertThat(unwrapCause(exception), isA(UnifiedChatCompletionException.class)); + return toJson((UnifiedChatCompletionException) unwrapCause(exception)); + } + + private Exception invalidResponse(String responseJson) { + return expectThrows( + RetryException.class, + () -> responseHandler.validateResponse( + mock(), + mock(), + mockRequest(), + new HttpResult(mock500Response(), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + } + + private static Request mockRequest() { + var request = mock(Request.class); + when(request.getInferenceEntityId()).thenReturn("abc"); + when(request.isStreaming()).thenReturn(true); + return request; + } + + private static HttpResponse mock500Response() { + int statusCode = 500; + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(statusCode); + + var response = mock(HttpResponse.class); + when(response.getStatusLine()).thenReturn(statusLine); + + return response; + } + + private String toJson(UnifiedChatCompletionException e) throws IOException { + try (var builder = XContentFactory.jsonBuilder()) { + e.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + return XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java index c0ebaf8668c5c..f81f6e58964f0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java @@ -31,7 +31,7 @@ public void testToXContent_SingleInput_UnspecifiedUsageContext() throws IOExcept assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { "input": ["abc"], - "model_id": "my-model-id" + "model": "my-model-id" }""")); } @@ -48,7 +48,7 @@ public void testToXContent_MultipleInputs_UnspecifiedUsageContext() throws IOExc "abc", "def" ], - "model_id": "my-model-id" + "model": "my-model-id" } """)); } @@ -63,7 +63,7 @@ public void testToXContent_MultipleInputs_SearchUsageContext() throws IOExceptio assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { "input": ["abc"], - "model_id": "my-model-id", + "model": "my-model-id", "usage_context": "search" } """)); @@ -79,7 +79,7 @@ public void testToXContent_MultipleInputs_IngestUsageContext() throws IOExceptio assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { "input": ["abc"], - "model_id": "my-model-id", + "model": "my-model-id", "usage_context": "ingest" } 
""")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java index abcc94640981c..9211b55236b10 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java @@ -46,7 +46,7 @@ public void testCreateHttpRequest_UsageContextSearch() throws IOException { var requestMap = entityAsMap(httpPost.getEntity().getContent()); assertThat(requestMap.size(), equalTo(3)); assertThat(requestMap.get("input"), is(List.of(input))); - assertThat(requestMap.get("model_id"), is(modelId)); + assertThat(requestMap.get("model"), is(modelId)); assertThat(requestMap.get("usage_context"), equalTo("search")); } @@ -83,7 +83,7 @@ public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { var requestMap = entityAsMap(httpPost.getEntity().getContent()); assertThat(requestMap, aMapWithSize(2)); assertThat(requestMap.get("input"), is(List.of("ab"))); - assertThat(requestMap.get("model_id"), is(modelId)); + assertThat(requestMap.get("model"), is(modelId)); } public void testIsTruncated_ReturnsTrue() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java new file mode 100644 index 0000000000000..8278b76a1cee4 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx.rerank; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequestEntity; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.MatchersUtils.equalToIgnoringWhitespaceInJsonString; + +public class IbmWatsonxRerankRequestEntityTests extends ESTestCase { + public void testXContent_Request() throws IOException { + IbmWatsonxRerankTaskSettings taskSettings = new IbmWatsonxRerankTaskSettings(5, true, 100); + var entity = new IbmWatsonxRerankRequestEntity( + "database", + List.of("greenland", "google", "john", "mysql", "potter", "grammar"), + taskSettings, + "model", + "project_id" + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + {"model_id":"model", + "query":"database", + "inputs":[ + {"text":"greenland"}, + {"text":"google"}, + {"text":"john"}, + {"text":"mysql"}, + {"text":"potter"}, + {"text":"grammar"} + ], + "project_id":"project_id", + "parameters":{ + "truncate_input_tokens":100, + "return_options":{ + "inputs":true, + "top_n":5 + } + } + } + """)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java new file mode 100644 index 0000000000000..8c95a01bc3230 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx.rerank; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequest; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModelTests; + +import java.io.IOException; +import java.net.URI; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class IbmWatsonxRerankRequestTests extends ESTestCase { + private static final String AUTH_HEADER_VALUE = "foo"; + + public void testCreateRequest() throws IOException { + var model = "model"; + var projectId = "project_id"; + URI uri = null; + try { + uri = new URI("http://abc.com"); + } catch (Exception ignored) {} + var apiVersion = "2023-05-04"; + var apiKey = "api_key"; + var query = "database"; + List<String> input = List.of("greenland", "google", "john", "mysql", "potter", "grammar"); + + var request = createRequest(model, projectId, uri, apiVersion, apiKey, query, input); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), endsWith(Strings.format("%s=%s", "version", apiVersion))); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(5)); + assertThat( + requestMap, + is( + Map.of( + "project_id", + "project_id", + "model_id", + "model", + "inputs", + List.of( + Map.of("text", "greenland"), + Map.of("text", "google"), + Map.of("text", "john"), + Map.of("text", "mysql"), + Map.of("text", "potter"), + Map.of("text", "grammar") + ), + "query", + "database", + "parameters", + Map.of("return_options", Map.of("top_n", 2, "inputs", true), "truncate_input_tokens", 100) + ) + ) + ); + } + + public static IbmWatsonxRerankRequest createRequest( + String model, + String projectId, + URI uri, + String apiVersion, + String apiKey, + String query, + List<String> input + ) { + var embeddingsModel = IbmWatsonxRerankModelTests.createModel(model, projectId, uri, apiVersion, apiKey); + + return new IbmWatsonxRerankWithoutAuthRequest(query, input, embeddingsModel); + } + + private static class IbmWatsonxRerankWithoutAuthRequest extends IbmWatsonxRerankRequest { + IbmWatsonxRerankWithoutAuthRequest(String query, List<String> input, IbmWatsonxRerankModel model) { + super(query, input, model); + } + + @Override + public void decorateWithAuth(HttpPost httpPost) { + httpPost.setHeader(HttpHeaders.AUTHORIZATION, AUTH_HEADER_VALUE); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java new file
mode 100644 index 0000000000000..6b59f25896a48 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java @@ -0,0 +1,166 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.inference.external.response.ibmwatsonx; + +import org.apache.http.HttpResponse; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class IbmWatsonxRankedResponseEntityTests extends ESTestCase { + + public void testResponseLiteral() throws IOException { + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteral.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + List<RankedDocsResults.RankedDoc> expected = responseLiteralDocs(); + for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) { + assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index()); + } + } + + public void testGeneratedResponse() throws IOException { + int numDocs = randomIntBetween(1, 10); + + List<RankedDocsResults.RankedDoc> expected = new ArrayList<>(numDocs); + StringBuilder responseBuilder = new StringBuilder(); + + responseBuilder.append("{"); + responseBuilder.append("\"results\": ["); + List<Integer> indices = linear(numDocs); + List<Float> scores = linearFloats(numDocs); + for (int i = 0; i < numDocs; i++) { + int index = indices.remove(randomInt(indices.size() - 1)); + + responseBuilder.append("{"); + responseBuilder.append("\"index\":").append(index).append(","); + responseBuilder.append("\"score\":").append(scores.get(i).toString()).append("}"); + expected.add(new RankedDocsResults.RankedDoc(index, scores.get(i), null)); + if (i < numDocs - 1) { + responseBuilder.append(","); + } + } + responseBuilder.append("],"); + // append one extra, unknown numeric field (the field name is arbitrary) that the parser is expected to skip, + // keeping the generated body valid JSON + responseBuilder.append("\"extra_metadata\":").append(randomIntBetween(1, 10)).append("}"); + + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseBuilder.toString().getBytes(StandardCharsets.UTF_8)) + ); + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) { + assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index()); + } + } + + private ArrayList<RankedDocsResults.RankedDoc> responseLiteralDocs() { + var list = new ArrayList<RankedDocsResults.RankedDoc>(); + + list.add(new RankedDocsResults.RankedDoc(2, 0.98005307F, null)); + list.add(new RankedDocsResults.RankedDoc(3, 0.27904198F, null)); + list.add(new RankedDocsResults.RankedDoc(0, 0.10194652F, null)); + return list; + } + + private final String responseLiteral = """ + { + "results": [ + { + "index": 2, + "score": 0.98005307 + },
{ + "index": 3, + "score": 0.27904198 + }, + { + "index": 0, + "score": 0.10194652 + } + ] + } + """; + + public void testResponseLiteralWithDocuments() throws IOException { + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteralWithDocuments.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + MatcherAssert.assertThat(((RankedDocsResults) parsedResults).getRankedDocs(), is(responseLiteralDocsWithText)); + } + + private final String responseLiteralWithDocuments = """ + { + "results": [ + { + "input": { + "text": "Washington, D.C.." + }, + "index": 2, + "score": 0.98005307 + }, + { + "input": { + "text": "Capital punishment has existed in the United States since before the United States was a country. " + }, + "index": 3, + "score": 0.27904198 + }, + { + "input": { + "text": "Carson City is the capital city of the American state of Nevada." + }, + "index": 0, + "score": 0.10194652 + } + ] + } + """; + + private final List<RankedDocsResults.RankedDoc> responseLiteralDocsWithText = List.of( + new RankedDocsResults.RankedDoc(2, 0.98005307F, "Washington, D.C.."), + new RankedDocsResults.RankedDoc( + 3, + 0.27904198F, + "Capital punishment has existed in the United States since before the United States was a country. " + ), + new RankedDocsResults.RankedDoc(0, 0.10194652F, "Carson City is the capital city of the American state of Nevada.") + ); + + private ArrayList<Integer> linear(int n) { + ArrayList<Integer> list = new ArrayList<>(); + for (int i = 0; i <= n; i++) { + list.add(i); + } + return list; + } + + // creates a list of floats of monotonically decreasing magnitude + private ArrayList<Float> linearFloats(int n) { + ArrayList<Float> list = new ArrayList<>(); + float startValue = 1.0f; + // divide by (n + 1), parenthesized, so every generated score stays positive and strictly decreases + float decrement = startValue / (n + 1); + for (int i = 0; i <= n; i++) { + list.add(startValue - (i * decrement)); + } + return list; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java index 57d71a48a4aeb..f877b0182d888 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java @@ -55,6 +55,24 @@ public void testIsEnabled() { assertTrue(InferenceMetadataFieldsMapper.isEnabled(settings)); } + public void testIsEnabledByDefault() { + var settings = Settings.builder() + .put( + IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), + IndexVersionUtils.getPreviousVersion(InferenceMetadataFieldsMapper.USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT) + ) + .build(); + assertFalse(InferenceMetadataFieldsMapper.isEnabled(settings)); + + settings = Settings.builder() + .put( + IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), + InferenceMetadataFieldsMapper.USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT + ) + .build(); + assertTrue(InferenceMetadataFieldsMapper.isEnabled(settings)); + } + @Override public void testFieldHasValue() { assertTrue( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 162bcc8f09713..65e4d049ef58b
100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchResponseUtils; @@ -41,6 +42,7 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; @@ -52,6 +54,8 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class ModelRegistryTests extends ESTestCase { @@ -295,6 +299,37 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { ); } + public void testRemoveDefaultConfigs_DoesNotCallClient_WhenPassedAnEmptySet() { + var client = mock(Client.class); + + var registry = new ModelRegistry(client); + var listener = new PlainActionFuture(); + + registry.removeDefaultConfigs(Set.of(), listener); + + assertTrue(listener.actionGet(TIMEOUT)); + verify(client, times(0)).execute(any(), any(), any()); + } + + public void testDeleteModels_Returns_ConflictException_WhenModelIsBeingAdded() { + var client = mockClient(); + + var registry = new ModelRegistry(client); + var model = TestModel.createRandomInstance(); + var newModel = TestModel.createRandomInstance(); + registry.updateModelTransaction(newModel, model, new PlainActionFuture<>()); + + var listener = new PlainActionFuture(); + + registry.deleteModels(Set.of(newModel.getInferenceEntityId()), listener); + var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + containsString("are currently being updated, please wait until after they are finished updating to delete.") + ); + assertThat(exception.status(), is(RestStatus.CONFLICT)); + } + public void testIdMatchedDefault() { var defaultConfigIds = new ArrayList(); defaultConfigIds.add( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/BaseInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/BaseInferenceActionTests.java index 5528c80066b0a..4961778a03726 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/BaseInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/BaseInferenceActionTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.junit.Before; @@ -42,6 +43,11 @@ public class BaseInferenceActionTests extends RestActionTestCase { @Before public void setUpAction() { controller().registerHandler(new BaseInferenceAction() { + @Override + protected 
boolean shouldStream() { + return false; + } + @Override protected ActionListener listener(RestChannel channel) { return new RestChunkedToXContentListener<>(channel); @@ -102,10 +108,10 @@ public void testParseTimeout_ReturnsDefaultTimeout() { public void testUsesDefaultTimeout() { SetOnce executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(InferenceAction.Request.class)); + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); - var request = (InferenceAction.Request) actionRequest; - assertThat(request.getInferenceTimeout(), is(InferenceAction.Request.DEFAULT_TIMEOUT)); + var request = (InferenceActionProxy.Request) actionRequest; + assertThat(request.getTimeout(), is(InferenceAction.Request.DEFAULT_TIMEOUT)); executeCalled.set(true); return createResponse(); @@ -122,10 +128,10 @@ public void testUsesDefaultTimeout() { public void testUses3SecondTimeoutFromParams() { SetOnce executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(InferenceAction.Request.class)); + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); - var request = (InferenceAction.Request) actionRequest; - assertThat(request.getInferenceTimeout(), is(TimeValue.timeValueSeconds(3))); + var request = (InferenceActionProxy.Request) actionRequest; + assertThat(request.getTimeout(), is(TimeValue.timeValueSeconds(3))); executeCalled.set(true); return createResponse(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionProxyTests.java similarity index 90% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionProxyTests.java index 1b0df1b4a20da..433e33fe15210 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionProxyTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import org.junit.Before; import static org.elasticsearch.xpack.inference.rest.BaseInferenceActionTests.createResponse; @@ -21,7 +21,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -public class RestInferenceActionTests extends RestActionTestCase { +public class RestInferenceActionProxyTests extends RestActionTestCase { @Before public void setUpAction() { @@ -31,9 +31,9 @@ public void setUpAction() { public void testStreamIsFalse() { SetOnce executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(InferenceAction.Request.class)); + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); - var request = (InferenceAction.Request) actionRequest; + var request = (InferenceActionProxy.Request) actionRequest; 
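+ // the non-streaming inference endpoint is expected to build its proxy request with the stream flag disabled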
assertThat(request.isStreaming(), is(false)); executeCalled.set(true); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java index f67680ef6b625..e69dd3fda6240 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java @@ -9,13 +9,18 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.rest.AbstractRestChannel; +import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import org.junit.After; import org.junit.Before; @@ -42,9 +47,9 @@ public void tearDownAction() { public void testStreamIsTrue() { SetOnce executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(InferenceAction.Request.class)); + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); - var request = (InferenceAction.Request) actionRequest; + var request = (InferenceActionProxy.Request) actionRequest; assertThat(request.isStreaming(), is(true)); executeCalled.set(true); @@ -58,4 +63,50 @@ public void testStreamIsTrue() { dispatchRequest(inferenceRequest); assertThat(executeCalled.get(), equalTo(true)); } + + public void testStreamIsTrue_ChatCompletion() { + SetOnce executeCalled = new SetOnce<>(); + verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); + + var request = (InferenceActionProxy.Request) actionRequest; + assertThat(request.isStreaming(), is(true)); + + executeCalled.set(true); + return createResponse(); + })); + + var requestBody = """ + { + "messages": [ + { + "content": "abc", + "role": "user" + } + ] + } + """; + + RestRequest inferenceRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) + .withPath("_inference/chat_completion/test/_stream") + .withContent(new BytesArray(requestBody), XContentType.JSON) + .build(); + + final SetOnce responseSetOnce = new SetOnce<>(); + dispatchRequest(inferenceRequest, new AbstractRestChannel(inferenceRequest, true) { + @Override + public void sendResponse(RestResponse response) { + responseSetOnce.set(response); + } + }); + + // the response content will be null when there is no error + assertNull(responseSetOnce.get().content()); + assertThat(executeCalled.get(), equalTo(true)); + } + + private void dispatchRequest(final RestRequest request, final RestChannel channel) { + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + controller().dispatchRequest(request, channel, threadContext); + } } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java deleted file mode 100644 index 9dc23c890c14d..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.rest; - -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.rest.AbstractRestChannel; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; -import org.junit.After; -import org.junit.Before; - -import static org.elasticsearch.xpack.inference.rest.BaseInferenceActionTests.createResponse; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; - -public class RestUnifiedCompletionInferenceActionTests extends RestActionTestCase { - private final SetOnce threadPool = new SetOnce<>(); - - @Before - public void setUpAction() { - threadPool.set(new TestThreadPool(getTestName())); - controller().registerHandler(new RestUnifiedCompletionInferenceAction(threadPool)); - } - - @After - public void tearDownAction() { - terminate(threadPool.get()); - } - - public void testStreamIsTrue() { - SetOnce executeCalled = new SetOnce<>(); - verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(UnifiedCompletionAction.Request.class)); - - var request = (UnifiedCompletionAction.Request) actionRequest; - assertThat(request.isStreaming(), is(true)); - - executeCalled.set(true); - return createResponse(); - })); - - var requestBody = """ - { - "messages": [ - { - "content": "abc", - "role": "user" - } - ] - } - """; - - RestRequest inferenceRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withPath("_inference/completion/test/_unified") - .withContent(new BytesArray(requestBody), XContentType.JSON) - .build(); - - final SetOnce responseSetOnce = new SetOnce<>(); - dispatchRequest(inferenceRequest, new AbstractRestChannel(inferenceRequest, true) { - @Override - public void sendResponse(RestResponse response) { - responseSetOnce.set(response); - } - }); - - // the response content will be null when there is no error - assertNull(responseSetOnce.get().content()); - assertThat(executeCalled.get(), equalTo(true)); - } - - private void dispatchRequest(final RestRequest request, final RestChannel channel) { - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - 
controller().dispatchRequest(request, channel, threadContext); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java index 7cfd231be39f3..637ae726572a4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.XContentFactory; import org.hamcrest.MatcherAssert; @@ -85,6 +86,16 @@ public InferenceEventsAssertion hasErrorContaining(String message) { return this; } + public InferenceEventsAssertion hasErrorMatching(CheckedConsumer<Throwable, Exception> matcher) { + hasError(); + try { + matcher.accept(error); + } catch (Exception e) { + fail(e); + } + return this; + } + public InferenceEventsAssertion hasEvents(String... events) { Arrays.stream(events).forEach(this::hasEvent); return this; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java index 92544d5535acb..1ca50d1887ee1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java @@ -490,7 +490,7 @@ public void testGetConfiguration() throws Exception { "http_schema": { "description": "", "label": "HTTP Schema", - "required": true, + "required": false, "sensitive": false, "updatable": false, "type": "str", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index c11d4b4c7923d..ec41388684df1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -154,69 +154,80 @@ public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOExcepti @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createAmazonBedrockService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "amazonbedrock", - "name": "Amazon Bedrock", - "task_types": ["text_embedding", "completion"], - "configurations": { - "secret_key": { - "description": "A valid AWS secret key that is paired with the access_key.", - "label": "Secret Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "provider": { - "description": "The model provider for
your deployment.", - "label": "Provider", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "access_key": { - "description": "A valid AWS access key that has permissions to use Amazon Bedrock.", - "label": "Access Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "model": { - "description": "The base model ID or an ARN to a custom model based on a foundational model.", - "label": "Model", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "rate_limit.requests_per_minute": { - "description": "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.", - "label": "Rate Limit", - "required": false, - "sensitive": false, - "updatable": false, - "type": "int", - "supported_task_types": ["text_embedding", "completion"] - }, - "region": { - "description": "The region that your model or ARN is deployed in.", - "label": "Region", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] + String content = XContentHelper.stripWhitespace( + """ + { + "service": "amazonbedrock", + "name": "Amazon Bedrock", + "task_types": ["text_embedding", "completion"], + "configurations": { + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-embed-text.html.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "secret_key": { + "description": "A valid AWS secret key that is paired with the access_key.", + "label": "Secret Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "provider": { + "description": "The model provider for your deployment.", + "label": "Provider", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "access_key": { + "description": "A valid AWS access key that has permissions to use Amazon Bedrock.", + "label": "Access Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "model": { + "description": "The base model ID or an ARN to a custom model based on a foundational model.", + "label": "Model", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "rate_limit.requests_per_minute": { + "description": "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "completion"] + }, + "region": { + "description": "The region that your model or ARN is deployed in.", + "label": "Region", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + } } } - } - """); + """ + ); 
InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index d2e4652b96488..77ed889fc7361 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -1389,60 +1389,71 @@ public void testInfer_StreamRequest_ErrorResponse() throws Exception { @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "azureaistudio", - "name": "Azure AI Studio", - "task_types": ["text_embedding", "completion"], - "configurations": { - "endpoint_type": { - "description": "Specifies the type of endpoint that is used in your model deployment.", - "label": "Endpoint Type", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "provider": { - "description": "The model provider for your deployment.", - "label": "Provider", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "api_key": { - "description": "API Key for the provider you're connecting to.", - "label": "API Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "rate_limit.requests_per_minute": { - "description": "Minimize the number of rate limit errors.", - "label": "Rate Limit", - "required": false, - "sensitive": false, - "updatable": false, - "type": "int", - "supported_task_types": ["text_embedding", "completion"] - }, - "target": { - "description": "The target URL of your Azure AI Studio model deployment.", - "label": "Target", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] + String content = XContentHelper.stripWhitespace( + """ + { + "service": "azureaistudio", + "name": "Azure AI Studio", + "task_types": ["text_embedding", "completion"], + "configurations": { + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. 
For more information refer to https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-embeddings.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "endpoint_type": { + "description": "Specifies the type of endpoint that is used in your model deployment.", + "label": "Endpoint Type", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "provider": { + "description": "The model provider for your deployment.", + "label": "Provider", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "api_key": { + "description": "API Key for the provider you're connecting to.", + "label": "API Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "rate_limit.requests_per_minute": { + "description": "Minimize the number of rate limit errors.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "completion"] + }, + "target": { + "description": "The target URL of your Azure AI Studio model deployment.", + "label": "Target", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + } } } - } - """); + """ + ); InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 52527d74aad19..3500f11b199af 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -1473,6 +1473,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "completion"] }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. 
For more information refer to https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#request-body-1.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, "entra_id": { "description": "You must provide either an API key or an Entra ID.", "label": "Entra ID", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 86b3edc4130da..b1c5e02fb6f51 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -1648,6 +1648,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "rerank", "completion"] }, + "model_id": { + "description": "The name of the model to use for the inference task.", + "label": "Model ID", + "required": false, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "rerank", "completion"] + }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", "label": "Rate Limit", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index b46fd4941e6f6..f4ec3f97cf0f7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -27,14 +27,17 @@ import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnifiedCompletionRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -44,11 +47,15 @@ import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.services.InferenceEventsAssertion; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorization; import 
org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationTests; +import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionModel; +import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.After; @@ -61,8 +68,10 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.getModelListenerForException; import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; @@ -76,6 +85,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.isA; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -356,6 +366,14 @@ public void testInfer_ThrowsErrorWhenModelIsNotAValidModel() throws IOException private ModelRegistry mockModelRegistry() { var client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); + + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener<Boolean>) invocationOnMock.getArgument(2); + listener.onResponse(true); + + return Void.TYPE; + }).when(client).execute(any(), any(), any()); return new ModelRegistry(client); } @@ -426,7 +444,7 @@ public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid_ChatCompletion() throws "Inference entity [model_id] does not support task type [chat_completion] " + "for inference, the task type must be one of [sparse_embedding]. " + "The task type for the inference entity is chat_completion, " - + "please use the _inference/chat_completion/model_id/_unified URL." + + "please use the _inference/chat_completion/model_id/_stream URL."
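// Editorial note, not part of the patch: the expected message changes here because the
// unified completion route was renamed. The old route, POST
// _inference/completion/{inference_id}/_unified (exercised by the
// RestUnifiedCompletionInferenceActionTests file deleted earlier in this diff), is
// replaced by POST _inference/chat_completion/{inference_id}/_stream, which the new
// testStreamIsTrue_ChatCompletion case in RestStreamInferenceActionTests drives.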
) ); @@ -486,7 +504,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), Matchers.equalTo(XContentType.JSON.mediaType())); var requestMap = entityAsMap(request.getBody()); - assertThat(requestMap, is(Map.of("input", List.of("input text"), "model_id", "my-model-id", "usage_context", "search"))); + assertThat(requestMap, is(Map.of("input", List.of("input text"), "model", "my-model-id", "usage_context", "search"))); } } @@ -544,7 +562,7 @@ public void testChunkedInfer_PassesThrough() throws IOException { ); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap, is(Map.of("input", List.of("input text"), "model_id", "my-model-id", "usage_context", "ingest"))); + assertThat(requestMap, is(Map.of("input", List.of("input text"), "model", "my-model-id", "usage_context", "ingest"))); } } @@ -949,6 +967,92 @@ public void testDefaultConfigs_Returns_DefaultChatCompletion_V1_WhenTaskTypeIsCo } } + public void testUnifiedCompletionError() throws Exception { + testUnifiedStreamError(404, """ + { + "error": "The model `rainbow-sprinkles` does not exist or you do not have access to it." + }""", """ + {\ + "error":{\ + "code":"not_found",\ + "message":"Received an unsuccessful status code for request from inference entity id [id] status \ + [404]. Error message: [The model `rainbow-sprinkles` does not exist or you do not have access to it.]",\ + "type":"error"\ + }}"""); + } + + public void testUnifiedCompletionErrorMidStream() throws Exception { + testUnifiedStreamError(200, """ + data: { "error": "some error" } + + """, """ + {\ + "error":{\ + "code":"stream_error",\ + "message":"Received an error response for request from inference entity id [id]. Error message: [some error]",\ + "type":"error"\ + }}"""); + } + + public void testUnifiedCompletionMalformedError() throws Exception { + testUnifiedStreamError(200, """ + data: { i am not json } + + """, """ + {\ + "error":{\ + "code":"bad_request",\ + "message":"[1:3] Unexpected character ('i' (code 105)): was expecting double-quote to start field name\\n\ + at [Source: (String)\\"{ i am not json }\\"; line: 1, column: 3]",\ + "type":"x_content_parse_exception"\ + }}"""); + } + + private void testUnifiedStreamError(int responseCode, String responseJson, String expectedJson) throws Exception { + var eisGatewayUrl = getUrl(webServer); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createService(senderFactory, eisGatewayUrl)) { + webServer.enqueue(new MockResponse().setResponseCode(responseCode).setBody(responseJson)); + var model = new ElasticInferenceServiceCompletionModel( + "id", + TaskType.COMPLETION, + "elastic", + new ElasticInferenceServiceCompletionServiceSettings("model_id", new RateLimitSettings(100)), + EmptyTaskSettings.INSTANCE, + EmptySecretSettings.INSTANCE, + new ElasticInferenceServiceComponents(eisGatewayUrl) + ); + PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>(); + service.unifiedCompletionInfer( + model, + UnifiedCompletionRequest.of( + List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("hello"), "user", null, null)) + ), + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var result = listener.actionGet(TIMEOUT); + + InferenceEventsAssertion.assertThat(result).hasFinishedStream().hasNoEvents().hasErrorMatching(e -> { + e = unwrapCause(e); + assertThat(e, isA(UnifiedChatCompletionException.class)); + try (var
builder = XContentFactory.jsonBuilder()) { + ((UnifiedChatCompletionException) e).toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + var json = XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + + assertThat(json, is(expectedJson)); + } + }); + } + } + private ElasticInferenceService createServiceWithMockSender() { return createServiceWithMockSender(ElasticInferenceServiceAuthorizationTests.createEnabledAuth()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java index ebb9c964e4c9a..3b8ce3a7cc64c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java @@ -48,6 +48,7 @@ public static CustomElandInternalTextEmbeddingServiceSettings createRandom() { numThreads, modelId, null, + null, dims, similarityMeasure, elementType @@ -86,6 +87,7 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { modelId, null, null, + null, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -111,6 +113,7 @@ public void testFromMap_Request_DoesNotDefaultSimilarityElementType() { modelId, null, null, + null, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -152,6 +155,7 @@ public void testFromMap_Request_IgnoresDimensions() { modelId, null, null, + null, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -192,6 +196,7 @@ public void testFromMap_Persistent_CreatesSettingsCorrectly() { numThreads, modelId, null, + null, 1, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT @@ -206,6 +211,7 @@ public void testToXContent_WritesAllValues() throws IOException { 1, "model_id", null, + null, 100, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.BYTE diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java index 0db0a7669c8aa..4ec575420613f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java @@ -103,7 +103,7 @@ public void testFromMap() { ) ) ).build(); - assertEquals(new ElasticsearchInternalServiceSettings(1, 4, ".elser_model_1", null), serviceSettings); + assertEquals(new ElasticsearchInternalServiceSettings(1, 4, ".elser_model_1", null, null), serviceSettings); } public void testFromMapMissingOptions() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 3b634f45dc751..d1ce79b863c61 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; @@ -46,12 +47,14 @@ import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; +import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; @@ -67,13 +70,16 @@ import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.chunking.WordBoundaryChunkingSettings; import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; @@ -81,12 +87,14 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction.Response.RESULTS_FIELD; import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86; @@ -101,6 +109,8 @@ import static org.mockito.ArgumentMatchers.same; import static 
org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class ElasticsearchInternalServiceTests extends ESTestCase { @@ -351,7 +361,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -381,7 +393,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); String criticalWarning = "Putting elasticsearch service inference endpoints (including elser service) without a model_id field is" @@ -450,7 +464,9 @@ public void testParseRequestConfig_elser() { ); config.put(ModelConfigurations.CHUNKING_SETTINGS, createRandomChunkingSettingsMap()); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -486,7 +502,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -742,7 +760,16 @@ public void testParsePersistedConfig() { TaskType.TEXT_EMBEDDING, settings ); - var elandServiceSettings = new CustomElandInternalTextEmbeddingServiceSettings(1, 4, "invalid", null); + var elandServiceSettings = new CustomElandInternalTextEmbeddingServiceSettings( + 1, + 4, + "invalid", + null, + null, + null, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ); assertEquals( new CustomElandEmbeddingModel( randomInferenceEntityId, @@ -933,7 +960,7 @@ private void testChunkInfer_Sparse(ChunkingSettings chunkingSettings) throws Int "foo", TaskType.SPARSE_EMBEDDING, "elasticsearch", - new ElasticsearchInternalServiceSettings(1, 1, "model-id", null), + new ElasticsearchInternalServiceSettings(1, 1, "model-id", null, null), chunkingSettings ); var service = createService(client); @@ -1003,7 +1030,7 @@ private void testChunkInfer_Elser(ChunkingSettings chunkingSettings) throws Inte "foo", TaskType.SPARSE_EMBEDDING, "elasticsearch", - new ElserInternalServiceSettings(1, 1, "model-id", null), + new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, "model-id", null, null)), new ElserMlNodeTaskSettings(), chunkingSettings ); @@ -1328,11 +1355,20 @@ private CustomElandModel getCustomElandModel(TaskType taskType) { randomInferenceEntityId, taskType, ElasticsearchInternalService.NAME, - new CustomElandInternalServiceSettings(1, 4, "custom-model", null), + new CustomElandInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 4, "custom-model", null, null)), 
RerankTaskSettings.DEFAULT_SETTINGS ); } else if (taskType == TaskType.TEXT_EMBEDDING) { - var serviceSettings = new CustomElandInternalTextEmbeddingServiceSettings(1, 4, "custom-model", null); + var serviceSettings = new CustomElandInternalTextEmbeddingServiceSettings( + 1, + 4, + "custom-model", + null, + null, + null, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ); expectedModel = new CustomElandEmbeddingModel( randomInferenceEntityId, @@ -1346,7 +1382,7 @@ private CustomElandModel getCustomElandModel(TaskType taskType) { randomInferenceEntityId, taskType, ElasticsearchInternalService.NAME, - new CustomElandInternalServiceSettings(1, 4, "custom-model", null), + new CustomElandInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 4, "custom-model", null, null)), (ChunkingSettings) null ); } @@ -1438,6 +1474,7 @@ public void testParseRequestConfigEland_SetsDimensionsToOne() { 4, "custom-model", null, + null, 1, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT @@ -1463,6 +1500,7 @@ public void testParseRequestConfigEland_SetsDimensionsToOne() { "custom-model", null, null, + null, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT ), @@ -1511,7 +1549,7 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { EmbeddingRequestChunker.EmbeddingType.SPARSE, ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.SPARSE_EMBEDDING, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertEquals( @@ -1526,7 +1564,7 @@ ElasticsearchStatusException.class, () -> ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.COMPLETION, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertThat(e1.getMessage(), containsString("Chunking is not supported for task type [completion]")); @@ -1535,7 +1573,7 @@ ElasticsearchStatusException.class, () -> ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.RERANK, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertThat(e2.getMessage(), containsString("Chunking is not supported for task type [rerank]")); @@ -1604,6 +1642,209 @@ public void testGetConfiguration() throws Exception { } } + + public void testUpdateModelsWithDynamicFields_NoModelsToUpdate() throws Exception { + ActionListener<List<Model>> resultsListener = ActionListener.<List<Model>>wrap( + updatedModels -> assertEquals(Collections.emptyList(), updatedModels), + e -> fail("Unexpected exception: " + e) + ); + + try (var service = createService(mock(Client.class))) { + service.updateModelsWithDynamicFields(List.of(), resultsListener); + } + } + + public void testUpdateModelsWithDynamicFields_InvalidModelProvided() throws IOException { + ActionListener<List<Model>> resultsListener = ActionListener.wrap( + updatedModels -> fail("Expected invalid model assertion error to be thrown"), + e -> fail("Expected invalid model assertion error to be thrown") + ); + + try (var service = createService(mock(Client.class))) { + assertThrows( + AssertionError.class, + () -> { service.updateModelsWithDynamicFields(List.of(mock(Model.class)), resultsListener); } + ); + } + } + + @SuppressWarnings("unchecked") + public void
testUpdateModelsWithDynamicFields_FailsToRetrieveDeployments() throws IOException { + var deploymentId = randomAlphaOfLength(10); + var model = mock(ElasticsearchInternalModel.class); + when(model.mlNodeDeploymentId()).thenReturn(deploymentId); + when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + ActionListener<List<Model>> resultsListener = ActionListener.wrap(updatedModels -> { + assertEquals(updatedModels.size(), 1); + verify(model).mlNodeDeploymentId(); + verifyNoMoreInteractions(model); + }, e -> fail("Expected original models to be returned")); + + var client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + doAnswer(invocation -> { + var listener = (ActionListener<GetDeploymentStatsAction.Response>) invocation.getArguments()[2]; + listener.onFailure(new RuntimeException(randomAlphaOfLength(10))); + return null; + }).when(client).execute(eq(GetDeploymentStatsAction.INSTANCE), any(), any()); + + try (var service = createService(client)) { + service.updateModelsWithDynamicFields(List.of(model), resultsListener); + } + } + + public void testUpdateModelsWithDynamicFields_SingleModelToUpdate() throws IOException { + var deploymentId = randomAlphaOfLength(10); + var model = mock(ElasticsearchInternalModel.class); + when(model.mlNodeDeploymentId()).thenReturn(deploymentId); + when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + var modelsByDeploymentId = new HashMap<String, List<Model>>(); + modelsByDeploymentId.put(deploymentId, List.of(model)); + + testUpdateModelsWithDynamicFields(modelsByDeploymentId); + } + + public void testUpdateModelsWithDynamicFields_MultipleModelsWithDifferentDeploymentsToUpdate() throws IOException { + var deploymentId1 = randomAlphaOfLength(10); + var model1 = mock(ElasticsearchInternalModel.class); + when(model1.mlNodeDeploymentId()).thenReturn(deploymentId1); + when(model1.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + var deploymentId2 = randomAlphaOfLength(10); + var model2 = mock(ElasticsearchInternalModel.class); + when(model2.mlNodeDeploymentId()).thenReturn(deploymentId2); + when(model2.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + var modelsByDeploymentId = new HashMap<String, List<Model>>(); + modelsByDeploymentId.put(deploymentId1, List.of(model1)); + modelsByDeploymentId.put(deploymentId2, List.of(model2)); + + testUpdateModelsWithDynamicFields(modelsByDeploymentId); + } + + public void testUpdateModelsWithDynamicFields_MultipleModelsWithSameDeploymentsToUpdate() throws IOException { + var deploymentId = randomAlphaOfLength(10); + var model1 = mock(ElasticsearchInternalModel.class); + when(model1.mlNodeDeploymentId()).thenReturn(deploymentId); + when(model1.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + var model2 = mock(ElasticsearchInternalModel.class); + when(model2.mlNodeDeploymentId()).thenReturn(deploymentId); + when(model2.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + var modelsByDeploymentId = new HashMap<String, List<Model>>(); + modelsByDeploymentId.put(deploymentId, List.of(model1, model2)); + + testUpdateModelsWithDynamicFields(modelsByDeploymentId); + } + + @SuppressWarnings("unchecked") + private void testUpdateModelsWithDynamicFields(Map<String, List<Model>> modelsByDeploymentId) throws IOException { + var modelsToUpdate = new ArrayList<Model>(); + modelsByDeploymentId.values().forEach(modelsToUpdate::addAll); + + var updatedNumberOfAllocations = new HashMap<String, Integer>(); + modelsByDeploymentId.keySet().forEach(deploymentId -> updatedNumberOfAllocations.put(deploymentId, randomIntBetween(1, 10))); + + ActionListener<List<Model>> resultsListener = ActionListener.wrap(updatedModels -> {
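// Editorial note, not part of the patch: per the doAnswer stub below, the service is
// expected to resolve allocation counts with a single GetDeploymentStatsAction request
// covering every deployment id, then apply each deployment's reported
// numberOfAllocations to every model sharing that deployment; the verify calls that
// follow assert exactly that fan-out.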
assertEquals(updatedModels.size(), modelsToUpdate.size()); + modelsByDeploymentId.forEach((deploymentId, models) -> { + var expectedNumberOfAllocations = updatedNumberOfAllocations.get(deploymentId); + models.forEach(model -> { + verify((ElasticsearchInternalModel) model).updateNumAllocations(expectedNumberOfAllocations); + verify((ElasticsearchInternalModel) model).mlNodeDeploymentId(); + verifyNoMoreInteractions(model); + }); + }); + }, e -> fail("Unexpected exception: " + e)); + + var client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + doAnswer(invocation -> { + var listener = (ActionListener<GetDeploymentStatsAction.Response>) invocation.getArguments()[2]; + var mockAssignmentStats = new ArrayList<AssignmentStats>(); + modelsByDeploymentId.keySet().forEach(deploymentId -> { + var mockAssignmentStatsForDeploymentId = mock(AssignmentStats.class); + when(mockAssignmentStatsForDeploymentId.getDeploymentId()).thenReturn(deploymentId); + when(mockAssignmentStatsForDeploymentId.getNumberOfAllocations()).thenReturn(updatedNumberOfAllocations.get(deploymentId)); + mockAssignmentStats.add(mockAssignmentStatsForDeploymentId); + }); + listener.onResponse( + new GetDeploymentStatsAction.Response( + Collections.emptyList(), + Collections.emptyList(), + mockAssignmentStats, + mockAssignmentStats.size() + ) + ); + return null; + }).when(client).execute(eq(GetDeploymentStatsAction.INSTANCE), any(), any()); + + try (var service = createService(client)) { + service.updateModelsWithDynamicFields(modelsToUpdate, resultsListener); + } + } + + public void testUpdateWithoutMlEnabled() throws IOException, InterruptedException { + var cs = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); + when(cs.getClusterSettings()).thenReturn(cSettings); + var context = new InferenceServiceExtension.InferenceServiceFactoryContext( + mock(), + threadPool, + cs, + Settings.builder().put("xpack.ml.enabled", false).build() + ); + try (var service = new ElasticsearchInternalService(context)) { + var models = List.of(mock(Model.class)); + var latch = new CountDownLatch(1); + service.updateModelsWithDynamicFields(models, ActionTestUtils.assertNoFailureListener(r -> { + latch.countDown(); + assertThat(r, Matchers.sameInstance(models)); + })); + assertTrue(latch.await(30, TimeUnit.SECONDS)); + } + } + + public void testUpdateWithMlEnabled() throws IOException, InterruptedException { + var deploymentId = "deploymentId"; + var model = mock(ElasticsearchInternalModel.class); + when(model.mlNodeDeploymentId()).thenReturn(deploymentId); + + AssignmentStats stats = mock(); + when(stats.getDeploymentId()).thenReturn(deploymentId); + when(stats.getNumberOfAllocations()).thenReturn(3); + + var client = mock(Client.class); + doAnswer(ans -> { + QueryPage<AssignmentStats> queryPage = new QueryPage<>(List.of(stats), 1, RESULTS_FIELD); + + GetDeploymentStatsAction.Response response = mock(); + when(response.getStats()).thenReturn(queryPage); + + ActionListener<GetDeploymentStatsAction.Response> listener = ans.getArgument(2); + listener.onResponse(response); + return null; + }).when(client).execute(eq(GetDeploymentStatsAction.INSTANCE), any(), any()); + when(client.threadPool()).thenReturn(threadPool); + + var cs = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); + when(cs.getClusterSettings()).thenReturn(cSettings); + var context = new InferenceServiceExtension.InferenceServiceFactoryContext( + client, + threadPool, + cs,
Settings.builder().put("xpack.ml.enabled", true).build() + ); + try (var service = new ElasticsearchInternalService(context)) { + List<Model> models = List.of(model); + var latch = new CountDownLatch(1); + service.updateModelsWithDynamicFields(models, ActionTestUtils.assertNoFailureListener(r -> latch.countDown())); + assertTrue(latch.await(30, TimeUnit.SECONDS)); + verify(model).updateNumAllocations(3); + } + } + private ElasticsearchInternalService createService(Client client) { var cs = mock(ClusterService.class); var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java index 96cd42efa42f5..5b21717ac03e4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java @@ -16,7 +16,7 @@ public void testUpdateNumAllocation() { "foo", TaskType.SPARSE_EMBEDDING, ElasticsearchInternalService.NAME, - new ElserInternalServiceSettings(null, 1, "elser", null), + new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(null, 1, "elser", null, null)), new ElserMlNodeTaskSettings(), null ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java index f4e97b2c2e5e0..dd4513db0d50a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java @@ -24,12 +24,12 @@ public static ElserInternalServiceSettings createRandom() { public void testBwcWrite() throws IOException { { - var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null)); + var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null, null)); var copy = copyInstance(settings, TransportVersions.V_8_12_0); assertEquals(settings, copy); } { - var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null)); + var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null, null)); var copy = copyInstance(settings, TransportVersions.V_8_11_X); assertEquals(settings, copy); } @@ -53,6 +53,7 @@ protected ElserInternalServiceSettings mutateInstance(ElserInternalServiceSettin instance.getNumAllocations() == null ?
1 : instance.getNumAllocations() + 1, instance.getNumThreads(), instance.modelId(), + null, null ) ); @@ -61,6 +62,7 @@ protected ElserInternalServiceSettings mutateInstance(ElserInternalServiceSettin instance.getNumAllocations(), instance.getNumThreads() + 1, instance.modelId(), + null, null ) ); @@ -72,6 +74,7 @@ yield new ElserInternalServiceSettings( instance.getNumAllocations(), instance.getNumThreads(), versions.iterator().next(), + null, null ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index ff99101fc4ee5..99b7b3868b7f4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; @@ -154,6 +155,42 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO } } + public void testParseRequestConfig_CreatesAIbmWatsonxRerankModel() throws IOException { + try (var service = createIbmWatsonxService()) { + ActionListener<Model> modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(IbmWatsonxRerankModel.class)); + + var rerankModel = (IbmWatsonxRerankModel) model; + assertThat(rerankModel.getServiceSettings().modelId(), is(modelId)); + assertThat(rerankModel.getServiceSettings().projectId(), is(projectId)); + assertThat(rerankModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(rerankModel.getSecretSettings().apiKey().toString(), is(apiKey)); + }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.RERANK, + getRequestConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + new HashMap<>(Map.of()), + getSecretSettingsMap(apiKey) + ), + modelListener + ); + } + } + + public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + ActionListener<Model> modelListener = ActionListener.wrap(model -> { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java new file mode 100644 index 0000000000000..0138952c11e07 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V.
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.net.URI; + +public class IbmWatsonxRerankModelTests extends ESTestCase { + public static IbmWatsonxRerankModel createModel(String model, String projectId, URI uri, String apiVersion, String apiKey) { + return new IbmWatsonxRerankModel( + "id", + TaskType.RERANK, + "service", + new IbmWatsonxRerankServiceSettings(uri, apiVersion, model, projectId, null), + new IbmWatsonxRerankTaskSettings(2, true, 100), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java index 5fa14da4ba733..2aeb0447f9c78 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java @@ -1831,33 +1831,53 @@ public void testDefaultSimilarity() { @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createJinaAIService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "jinaai", - "name": "Jina AI", - "task_types": ["text_embedding", "rerank"], - "configurations": { - "api_key": { - "description": "API Key for the provider you're connecting to.", - "label": "API Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "rerank"] - }, - "rate_limit.requests_per_minute": { - "description": "Minimize the number of rate limit errors.", - "label": "Rate Limit", - "required": false, - "sensitive": false, - "updatable": false, - "type": "int", - "supported_task_types": ["text_embedding", "rerank"] + String content = XContentHelper.stripWhitespace( + """ + { + "service": "jinaai", + "name": "Jina AI", + "task_types": ["text_embedding", "rerank"], + "configurations": { + "api_key": { + "description": "API Key for the provider you're connecting to.", + "label": "API Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] + }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. 
For more information refer to https://api.jina.ai/redoc#tag/embeddings/operation/create_embedding_v1_embeddings_post.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "model_id": { + "description": "The name of the model to use for the inference task.", + "label": "Model ID", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] + }, + "rate_limit.requests_per_minute": { + "description": "Minimize the number of rate limit errors.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "rerank"] + } } } - } - """); + """ + ); InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 6fddbf4450283..13aff10e3148a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -33,9 +33,11 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -61,8 +63,10 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.getRequestConfigMap; @@ -84,6 +88,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.isA; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -931,7 +936,7 @@ public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid_ChatCompletion() throws "Inference entity [model_id] does not support task type [chat_completion] " + "for inference, the task type must be one of [text_embedding, completion]. " + "The task type for the inference entity is chat_completion, " - + "please use the _inference/chat_completion/model_id/_unified URL." 
+ + "please use the _inference/chat_completion/model_id/_stream URL." ) ); @@ -1061,6 +1066,94 @@ public void testUnifiedCompletionInfer() throws Exception { } } + public void testUnifiedCompletionError() throws Exception { + String responseJson = """ + { + "error": { + "message": "The model `gpt-4awero` does not exist or you do not have access to it.", + "type": "invalid_request_error", + "param": null, + "code": "model_not_found" + } + }"""; + webServer.enqueue(new MockResponse().setResponseCode(404).setBody(responseJson)); + testStreamError(""" + {\ + "error":{\ + "code":"model_not_found",\ + "message":"Received an unsuccessful status code for request from inference entity id [id] status \ + [404]. Error message: [The model `gpt-4awero` does not exist or you do not have access to it.]",\ + "type":"invalid_request_error"\ + }}"""); + } + + private void testStreamError(String expectedResponse) throws Exception { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { + var model = OpenAiChatCompletionModelTests.createChatCompletionModel(getUrl(webServer), "org", "secret", "model", "user"); + PlainActionFuture listener = new PlainActionFuture<>(); + service.unifiedCompletionInfer( + model, + UnifiedCompletionRequest.of( + List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("hello"), "user", null, null)) + ), + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var result = listener.actionGet(TIMEOUT); + + InferenceEventsAssertion.assertThat(result).hasFinishedStream().hasNoEvents().hasErrorMatching(e -> { + e = unwrapCause(e); + assertThat(e, isA(UnifiedChatCompletionException.class)); + try (var builder = XContentFactory.jsonBuilder()) { + ((UnifiedChatCompletionException) e).toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + var json = XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + + assertThat(json, is(expectedResponse)); + } + }); + } + } + + public void testMidStreamUnifiedCompletionError() throws Exception { + String responseJson = """ + event: error + data: { "error": { "message": "Timed out waiting for more data", "type": "timeout" } } + + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + testStreamError(""" + {\ + "error":{\ + "message":"Received an error response for request from inference entity id [id]. 
Error message: \ + [Timed out waiting for more data]",\ + "type":"timeout"\ + }}"""); + } + + public void testUnifiedCompletionMalformedError() throws Exception { + String responseJson = """ + data: { invalid json } + + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + testStreamError(""" + {\ + "error":{\ + "code":"bad_request",\ + "message":"[1:3] Unexpected character ('i' (code 105)): was expecting double-quote to start field name\\n\ + at [Source: (String)\\"{ invalid json }\\"; line: 1, column: 3]",\ + "type":"x_content_parse_exception"\ + }}"""); + } + public void testInfer_StreamRequest() throws Exception { String responseJson = """ data: {\ @@ -1752,6 +1845,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "completion", "chat_completion"] }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://platform.openai.com/docs/api-reference/embeddings/create#embeddings-create-dimensions.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, "organization_id": { "description": "The unique identifier of your organization.", "label": "Organization ID", @@ -1778,16 +1880,6 @@ public void testGetConfiguration() throws Exception { "updatable": false, "type": "str", "supported_task_types": ["text_embedding", "completion", "chat_completion"] - }, - "url": { - "default_value": "https://api.openai.com/v1/chat/completions", - "description": "The OpenAI API endpoint URL. For more information on the URL, refer to the https://platform.openai.com/docs/api-reference.", - "label": "URL", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion", "chat_completion"] } } } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference_bwc.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference_bwc.yml index 07341273151bc..5f87942b2c710 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference_bwc.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference_bwc.yml @@ -650,3 +650,28 @@ setup: - match: { hits.total.value: 1 } - match: { hits.total.relation: eq } - match: { hits.hits.0._source.dense_field.text: "updated text" } + +--- +"Skip fetching _inference_fields": + - requires: + cluster_features: semantic_text.skip_inference_fields + reason: Skip _inference_fields when search is performed on legacy semantic_text format. 
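+      # Indexing into an index that still uses the legacy semantic_text format: the search
+      # below requests _inference_fields and must not get inference metadata back in _source.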
+ + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "test value" + refresh: true + + - do: + search: + index: test-index + body: + fields: [ _inference_fields ] + query: + match_all: { } + + - match: { hits.total.value: 1 } + - not_exists: hits.hits.0._source._inference_fields diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml index dec4e127e501c..64ecb0f2d882c 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml @@ -43,6 +43,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: semantic_text inference_id: dense-inference-id @@ -53,6 +55,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: semantic_text inference_id: dense-inference-id-2 @@ -63,6 +67,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: dense_vector dims: 10 @@ -74,6 +80,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: dense_vector dims: 3 @@ -84,6 +92,7 @@ setup: index: test-semantic-text-index id: doc_1 body: + keyword_field: "foo" inference_field: [ "inference test", "another inference test" ] refresh: true @@ -92,6 +101,7 @@ setup: index: test-semantic-text-index-2 id: doc_2 body: + keyword_field: "bar" inference_field: [ "inference test", "another inference test" ] refresh: true @@ -100,6 +110,7 @@ setup: index: test-dense-vector-index id: doc_3 body: + keyword_field: "baz" inference_field: [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] refresh: true @@ -108,6 +119,7 @@ setup: index: test-incompatible-dense-vector-index id: doc_4 body: + keyword_field: "qux" inference_field: [ 1, 2, 3 ] refresh: true @@ -311,6 +323,34 @@ setup: - match: { hits.total.value: 2 } +--- +"knn query respects filters": + - requires: + cluster_features: "search.semantic_knn_filter_fix" + reason: filters fixed in 8.18.0 + + - do: + search: + index: + - test-semantic-text-index + - test-semantic-text-index-2 + body: + query: + knn: + field: inference_field + k: 10 + num_candidates: 100 + query_vector_builder: + text_embedding: + model_text: test + filter: + term: + keyword_field: "foo" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + + --- "knn query against multiple semantic_text fields with multiple inference IDs specified in semantic_text fields with smaller k returns k for each index": diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java index 78d59c0af0d06..038e553046cad 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java @@ -235,7 +235,7 @@ public void testLogsdbRouteOnSortFields() throws IOException { var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); assertEquals("logsdb", settings.get("index.mode")); assertEquals(SourceFieldMapper.Mode.STORED.toString(), 
settings.get("index.mapping.source.mode")); - assertEquals("true", settings.get(IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.getKey())); - assertEquals(List.of("host.name", "message"), settings.get(IndexMetadata.INDEX_ROUTING_PATH.getKey())); + assertEquals("false", settings.get(IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.getKey())); + assertNull(settings.get(IndexMetadata.INDEX_ROUTING_PATH.getKey())); } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index 675b1baad7c2c..69c1496cf479f 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; public class LogsdbRestIT extends ESRestTestCase { @@ -66,9 +67,15 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { List> features = (List>) response.get("features"); logger.info("response's features: {}", features); assertThat(features, Matchers.not(Matchers.empty())); - Map feature = features.stream().filter(map -> "mappings".equals(map.get("family"))).findFirst().get(); - assertThat(feature.get("name"), equalTo("synthetic-source")); - assertThat(feature.get("license_level"), equalTo("enterprise")); + boolean found = false; + for (var feature : features) { + if (feature.get("family") != null) { + assertThat(feature.get("name"), anyOf(equalTo("synthetic-source"), equalTo("logsdb-routing-on-sort-fields"))); + assertThat(feature.get("license_level"), equalTo("enterprise")); + found = true; + } + } + assertTrue(found); var indexResponse = (Map) getIndexSettings("test-index", true).get("test-index"); logger.info("indexResponse: {}", indexResponse); diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbSnapshotRestoreIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbSnapshotRestoreIT.java new file mode 100644 index 0000000000000..0b57d0ed8c4f0 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbSnapshotRestoreIT.java @@ -0,0 +1,372 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb; + +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentType; +import org.junit.After; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.net.InetAddress; +import java.time.Instant; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasSize; + +public class LogsdbSnapshotRestoreIT extends ESRestTestCase { + + private static TemporaryFolder repoDirectory = new TemporaryFolder(); + + private static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("path.repo", () -> getRepoPath()) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + // TODO: remove when initializing / serializing default SourceFieldMapper instance have been fixed: + // (SFM's mode attribute often gets initialized, even when mode attribute isn't set) + .jvmArg("-da:org.elasticsearch.index.mapper.DocumentMapper") + .jvmArg("-da:org.elasticsearch.index.mapper.MapperService") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + static final String LOGS_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 1000, + "template": { + "settings": { + "index": { + "mapping": { + "source":{ + "mode": "{{source_mode}}" + } + } + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "host": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "pid": { + "type": "integer" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip_address": { + "type": "ip" + }, + "my_object_array": { + "type": "{{array_type}}" + } + } + } + } + }"""; + + static final String DOC_TEMPLATE = """ + { + "@timestamp": "%s", + "host": { "name": "%s"}, + "pid": %d, + "method": "%s", + "message": "%s", + "ip_address": "%s", + "memory_usage_bytes": "%d", + "my_object_array": [ + { + "field_1": "a", + "field_2": "b" + }, + { + "field_1": "c", + "field_2": "d" + } + ] + } + """; + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testSnapshotRestore() throws Exception { + snapshotAndRestore("synthetic", "object", false); + } + + public void 
testSnapshotRestoreWithSourceOnlyRepository() throws Exception { + snapshotAndFail("object"); + } + + public void testSnapshotRestoreNested() throws Exception { + snapshotAndRestore("synthetic", "nested", false); + } + + public void testSnapshotRestoreNestedWithSourceOnlyRepository() throws Exception { + snapshotAndFail("nested"); + } + + public void testSnapshotRestoreStoredSource() throws Exception { + snapshotAndRestore("stored", "object", false); + } + + public void testSnapshotRestoreStoredSourceWithSourceOnlyRepository() throws Exception { + snapshotAndRestore("stored", "object", true); + } + + public void testSnapshotRestoreStoredSourceNested() throws Exception { + snapshotAndRestore("stored", "nested", false); + } + + public void testSnapshotRestoreStoredSourceNestedWithSourceOnlyRepository() throws Exception { + snapshotAndRestore("stored", "nested", true); + } + + @After + public void cleanup() throws Exception { + deleteSnapshot("my-repository", "my-snapshot", true); + deleteRepository("my-repository"); + deleteDataStream("logs-my-test"); + } + + static void snapshotAndRestore(String sourceMode, String arrayType, boolean sourceOnly) throws IOException { + String dataStreamName = "logs-my-test"; + String repositoryName = "my-repository"; + if (sourceOnly) { + var repositorySettings = Settings.builder().put("delegate_type", "fs").put("location", getRepoPath()).build(); + registerRepository(repositoryName, "source", true, repositorySettings); + } else { + var repositorySettings = Settings.builder().put("location", getRepoPath()).build(); + registerRepository(repositoryName, FsRepository.TYPE, true, repositorySettings); + } + + putTemplate("my-template", LOGS_TEMPLATE.replace("{{source_mode}}", sourceMode).replace("{{array_type}}", arrayType)); + String[] docs = new String[100]; + for (int i = 0; i < 100; i++) { + docs[i] = document( + Instant.now(), + String.format(Locale.ROOT, "host-%03d", i), + randomNonNegativeInt(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) + ); + indexDocument(dataStreamName, docs[i]); + } + refresh(dataStreamName); + assertDocCount(client(), dataStreamName, 100); + assertSource(dataStreamName, docs); + assertDataStream(dataStreamName, sourceMode); + + String snapshotName = "my-snapshot"; + var snapshotResponse = performSnapshot(repositoryName, dataStreamName, snapshotName, true); + assertOK(snapshotResponse); + var snapshotResponseBody = entityAsMap(snapshotResponse); + Map snapshotItem = (Map) snapshotResponseBody.get("snapshot"); + List failures = (List) snapshotItem.get("failures"); + assertThat(failures, empty()); + deleteDataStream(dataStreamName); + assertDocCount(dataStreamName, 0); + + restoreSnapshot(repositoryName, snapshotName, true); + assertDataStream(dataStreamName, sourceMode); + assertDocCount(dataStreamName, 100); + assertSource(dataStreamName, docs); + } + + static void snapshotAndFail(String arrayType) throws IOException { + String dataStreamName = "logs-my-test"; + String repositoryName = "my-repository"; + var repositorySettings = Settings.builder().put("delegate_type", "fs").put("location", getRepoPath()).build(); + registerRepository(repositoryName, "source", true, repositorySettings); + + putTemplate("my-template", LOGS_TEMPLATE.replace("{{source_mode}}", "synthetic").replace("{{array_type}}", arrayType)); + for (int i = 0; i < 100; i++) { + indexDocument( + dataStreamName, + document( + Instant.now(), + randomAlphaOfLength(10), + 
randomNonNegativeLong(),
+                    randomFrom("PUT", "POST", "GET"),
+                    randomAlphaOfLength(32),
+                    randomIp(randomBoolean()),
+                    randomIntBetween(1_000_000, 2_000_000)
+                )
+            );
+        }
+        refresh(dataStreamName);
+        assertDocCount(client(), dataStreamName, 100);
+        assertDataStream(dataStreamName, "synthetic");
+
+        String snapshotName = "my-snapshot";
+        var snapshotResponse = performSnapshot(repositoryName, dataStreamName, snapshotName, true);
+        assertOK(snapshotResponse);
+        var snapshotResponseBody = entityAsMap(snapshotResponse);
+        Map<?, ?> snapshotItem = (Map<?, ?>) snapshotResponseBody.get("snapshot");
+        List<?> failures = (List<?>) snapshotItem.get("failures");
+        assertThat(failures, hasSize(1));
+        Map<?, ?> failure = (Map<?, ?>) failures.get(0);
+        assertThat(
+            (String) failure.get("reason"),
+            containsString(
+                "Can't snapshot _source only on an index that has incomplete source ie. has _source disabled or filters the source"
+            )
+        );
+    }
+
+    static void deleteDataStream(String dataStreamName) throws IOException {
+        assertOK(client().performRequest(new Request("DELETE", "/_data_stream/" + dataStreamName)));
+    }
+
+    static void putTemplate(String templateName, String template) throws IOException {
+        final Request request = new Request("PUT", "/_index_template/" + templateName);
+        request.setJsonEntity(template);
+        assertOK(client().performRequest(request));
+    }
+
+    static void indexDocument(String indexOrDataStream, String doc) throws IOException {
+        final Request request = new Request("POST", "/" + indexOrDataStream + "/_doc?refresh=true");
+        request.setJsonEntity(doc);
+        final Response response = client().performRequest(request);
+        assertOK(response);
+        assertThat(entityAsMap(response).get("result"), equalTo("created"));
+    }
+
+    static String document(
+        final Instant timestamp,
+        final String hostname,
+        long pid,
+        final String method,
+        final String message,
+        final InetAddress ipAddress,
+        long memoryUsageBytes
+    ) {
+        return String.format(
+            Locale.ROOT,
+            DOC_TEMPLATE,
+            DateFormatter.forPattern(FormatNames.DATE_TIME.getName()).format(timestamp),
+            hostname,
+            pid,
+            method,
+            message,
+            InetAddresses.toAddrString(ipAddress),
+            memoryUsageBytes
+        );
+    }
+
+    static Response performSnapshot(String repository, String dataStreamName, String snapshot, boolean waitForCompletion)
+        throws IOException {
+        final Request request = new Request(HttpPut.METHOD_NAME, "_snapshot/" + repository + '/' + snapshot);
+        request.setJsonEntity("""
+            {
+                "indices": "{{dataStreamName}}"
+            }
+            """.replace("{{dataStreamName}}", dataStreamName));
+        request.addParameter("wait_for_completion", Boolean.toString(waitForCompletion));
+
+        return client().performRequest(request);
+    }
+
+    static void assertDataStream(String dataStreamName, final String sourceMode) throws IOException {
+        String indexName = getWriteBackingIndex(dataStreamName, 0);
+        var flatSettings = (Map<String, String>) ((Map<?, ?>) getIndexSettings(indexName).get(indexName)).get("settings");
+        assertThat(flatSettings, hasEntry("index.mode", "logsdb"));
+        assertThat(flatSettings, hasEntry("index.mapping.source.mode", sourceMode));
+    }
+
+    static String getWriteBackingIndex(String dataStreamName, int backingIndex) throws IOException {
+        final Request request = new Request("GET", "_data_stream/" + dataStreamName);
+        final List<?> dataStreams = (List<?>) entityAsMap(client().performRequest(request)).get("data_streams");
+        final Map<?, ?> dataStream = (Map<?, ?>) dataStreams.get(0);
+        final List<?> backingIndices = (List<?>) dataStream.get("indices");
+        return (String) ((Map<?, ?>) backingIndices.get(backingIndex)).get("index_name");
+    }
+
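+    // assertDocCount uses _count with ignore_unavailable=true, so a deleted data stream
+    // reports 0 documents instead of a 404; snapshotAndRestore relies on this after deleteDataStream.
+    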
static void assertDocCount(String indexName, long docCount) throws IOException { + Request countReq = new Request("GET", "/" + indexName + "/_count"); + countReq.addParameter("ignore_unavailable", "true"); + ObjectPath resp = ObjectPath.createFromResponse(client().performRequest(countReq)); + assertEquals( + "expected " + docCount + " documents but it was a different number", + docCount, + Long.parseLong(resp.evaluate("count").toString()) + ); + } + + static void assertSource(String indexName, String[] docs) throws IOException { + Request searchReq = new Request("GET", "/" + indexName + "/_search"); + searchReq.addParameter("size", String.valueOf(docs.length)); + var response = client().performRequest(searchReq); + assertOK(response); + var responseBody = entityAsMap(response); + List hits = (List) ((Map) responseBody.get("hits")).get("hits"); + assertThat(hits, hasSize(docs.length)); + for (Object hit : hits) { + Map actualSource = (Map) ((Map) hit).get("_source"); + String actualHost = (String) ((Map) actualSource.get("host")).get("name"); + Map expectedSource = null; + for (String doc : docs) { + expectedSource = XContentHelper.convertToMap(XContentType.JSON.xContent(), doc, false); + String expectedHost = (String) ((Map) expectedSource.get("host")).get("name"); + if (expectedHost.equals(actualHost)) { + break; + } + } + + assertMap(actualSource, matchesMap(expectedSource)); + } + } + + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + private static String getRepoPath() { + return repoDirectory.getRoot().getPath(); + } + +} diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 455e707cc0d2c..20f114389fe4e 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -40,12 +40,12 @@ import java.util.function.Predicate; import java.util.function.Supplier; -import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseService.FALLBACK_SETTING; +import static org.elasticsearch.xpack.logsdb.LogsdbLicenseService.FALLBACK_SETTING; public class LogsDBPlugin extends Plugin implements ActionPlugin { private final Settings settings; - private final SyntheticSourceLicenseService licenseService; + private final LogsdbLicenseService licenseService; private static final Setting LOGSDB_PRIOR_LOGS_USAGE = Setting.boolSetting( "logsdb.prior_logs_usage", false, @@ -63,7 +63,7 @@ public class LogsDBPlugin extends Plugin implements ActionPlugin { public LogsDBPlugin(Settings settings) { this.settings = settings; - this.licenseService = new SyntheticSourceLicenseService(settings); + this.licenseService = new LogsdbLicenseService(settings); this.logsdbIndexModeSettingsProvider = new LogsdbIndexModeSettingsProvider(licenseService, settings); } @@ -82,7 +82,6 @@ public Collection createComponents(PluginServices services) { CLUSTER_LOGSDB_ENABLED, logsdbIndexModeSettingsProvider::updateClusterIndexModeLogsdbEnabled ); - // Nothing to share here: return super.createComponents(services); } @@ -95,6 +94,7 @@ public Collection getAdditionalIndexSettingProviders(Index IndexVersion.current(), parameters.clusterService().state().nodes().getMaxDataNodeCompatibleIndexVersion() ), + DiscoveryNode.isStateless(settings) == false, DiscoveryNode.isStateless(settings) == false ); return 
List.of(logsdbIndexModeSettingsProvider);
diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java
index de2b509eb0195..8fede5eca3479 100644
--- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java
+++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java
@@ -64,7 +64,7 @@ protected void masterOperation(
             }
         }
         final boolean enabled = LogsDBPlugin.CLUSTER_LOGSDB_ENABLED.get(clusterService.getSettings());
-        final boolean hasCustomCutoffDate = System.getProperty(SyntheticSourceLicenseService.CUTOFF_DATE_SYS_PROP_NAME) != null;
+        final boolean hasCustomCutoffDate = System.getProperty(LogsdbLicenseService.CUTOFF_DATE_SYS_PROP_NAME) != null;
         final DiscoveryNode[] nodes = state.nodes().getDataNodes().values().toArray(DiscoveryNode[]::new);
         final var statsRequest = new IndexModeStatsActionType.StatsRequest(nodes);
         final int finalNumIndices = numIndices;
diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java
index ac19c96f31b5c..01f989e8062fc 100644
--- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java
+++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java
@@ -49,15 +49,16 @@ final class LogsdbIndexModeSettingsProvider implements IndexSettingProvider {
     static final String LOGS_PATTERN = "logs-*-*";
     private static final Set<String> MAPPING_INCLUDES = Set.of("_doc._source.*", "_doc.properties.host**", "_doc.subobjects");
 
-    private final SyntheticSourceLicenseService syntheticSourceLicenseService;
+    private final LogsdbLicenseService licenseService;
     private final SetOnce<CheckedFunction<IndexMetadata, MapperService, IOException>> mapperServiceFactory = new SetOnce<>();
     private final SetOnce<Supplier<IndexVersion>> createdIndexVersion = new SetOnce<>();
     private final SetOnce<Boolean> supportFallbackToStoredSource = new SetOnce<>();
+    private final SetOnce<Boolean> supportFallbackLogsdbRouting = new SetOnce<>();
     private volatile boolean isLogsdbEnabled;
 
-    LogsdbIndexModeSettingsProvider(SyntheticSourceLicenseService syntheticSourceLicenseService, final Settings settings) {
-        this.syntheticSourceLicenseService = syntheticSourceLicenseService;
+    LogsdbIndexModeSettingsProvider(LogsdbLicenseService licenseService, final Settings settings) {
+        this.licenseService = licenseService;
         this.isLogsdbEnabled = CLUSTER_LOGSDB_ENABLED.get(settings);
     }
 
@@ -68,11 +69,13 @@ void updateClusterIndexModeLogsdbEnabled(boolean isLogsdbEnabled) {
     void init(
         CheckedFunction<IndexMetadata, MapperService, IOException> factory,
         Supplier<IndexVersion> indexVersion,
-        boolean supportFallbackToStoredSource
+        boolean supportFallbackToStoredSource,
+        boolean supportFallbackLogsdbRouting
     ) {
         this.mapperServiceFactory.set(factory);
         this.createdIndexVersion.set(indexVersion);
         this.supportFallbackToStoredSource.set(supportFallbackToStoredSource);
+        this.supportFallbackLogsdbRouting.set(supportFallbackLogsdbRouting);
     }
 
@@ -93,6 +96,7 @@ public Settings getAdditionalIndexSettings(
     ) {
         Settings.Builder settingsBuilder = null;
         boolean isLogsDB = templateIndexMode == IndexMode.LOGSDB;
+        boolean isTemplateValidation = "validate-index-name".equals(indexName);
 
         // Inject logsdb index mode, based on the logs pattern.
if (isLogsdbEnabled @@ -110,76 +114,74 @@ && matchesLogsPattern(dataStreamName)) { if (mappingHints.hasSyntheticSourceUsage && supportFallbackToStoredSource.get()) { // This index name is used when validating component and index templates, we should skip this check in that case. // (See MetadataIndexTemplateService#validateIndexTemplateV2(...) method) - boolean isTemplateValidation = "validate-index-name".equals(indexName); boolean legacyLicensedUsageOfSyntheticSourceAllowed = isLegacyLicensedUsageOfSyntheticSourceAllowed( templateIndexMode, indexName, dataStreamName ); - if (syntheticSourceLicenseService.fallbackToStoredSource(isTemplateValidation, legacyLicensedUsageOfSyntheticSourceAllowed)) { + if (licenseService.fallbackToStoredSource(isTemplateValidation, legacyLicensedUsageOfSyntheticSourceAllowed)) { LOGGER.debug("creation of index [{}] with synthetic source without it being allowed", indexName); - if (settingsBuilder == null) { - settingsBuilder = Settings.builder(); - } - settingsBuilder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.toString()); + settingsBuilder = getBuilder(settingsBuilder).put( + IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), + SourceFieldMapper.Mode.STORED.toString() + ); } } if (isLogsDB) { // Inject sorting on [host.name], in addition to [@timestamp]. if (mappingHints.sortOnHostName) { - if (settingsBuilder == null) { - settingsBuilder = Settings.builder(); - } if (mappingHints.addHostNameField) { // Inject keyword field [host.name] too. - settingsBuilder.put(IndexSettings.LOGSDB_ADD_HOST_NAME_FIELD.getKey(), true); + settingsBuilder = getBuilder(settingsBuilder).put(IndexSettings.LOGSDB_ADD_HOST_NAME_FIELD.getKey(), true); } - settingsBuilder.put(IndexSettings.LOGSDB_SORT_ON_HOST_NAME.getKey(), true); + settingsBuilder = getBuilder(settingsBuilder).put(IndexSettings.LOGSDB_SORT_ON_HOST_NAME.getKey(), true); } // Inject routing path matching sort fields. 
if (settings.getAsBoolean(IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.getKey(), false)) { - List sortFields = new ArrayList<>(settings.getAsList(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey())); - sortFields.removeIf(s -> s.equals(DataStreamTimestampFieldMapper.DEFAULT_PATH)); - if (sortFields.size() < 2) { - throw new IllegalStateException( - String.format( - Locale.ROOT, - "data stream [%s] in logsdb mode and with [%s] index setting has only %d sort fields " - + "(excluding timestamp), needs at least 2", - dataStreamName, - IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.getKey(), - sortFields.size() - ) - ); - } - if (settings.hasValue(IndexMetadata.INDEX_ROUTING_PATH.getKey())) { - List routingPaths = settings.getAsList(IndexMetadata.INDEX_ROUTING_PATH.getKey()); - if (routingPaths.equals(sortFields) == false) { + if (supportFallbackLogsdbRouting.get() == false || licenseService.allowLogsdbRoutingOnSortField(isTemplateValidation)) { + List sortFields = new ArrayList<>(settings.getAsList(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey())); + sortFields.removeIf(s -> s.equals(DataStreamTimestampFieldMapper.DEFAULT_PATH)); + if (sortFields.size() < 2) { throw new IllegalStateException( String.format( Locale.ROOT, - "data stream [%s] in logsdb mode and with [%s] index setting has mismatching sort " - + "and routing fields, [index.routing_path:%s], [index.sort.fields:%s]", + "data stream [%s] in logsdb mode and with [%s] index setting has only %d sort fields " + + "(excluding timestamp), needs at least 2", dataStreamName, IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.getKey(), - routingPaths, - sortFields + sortFields.size() ) ); } - } else { - if (settingsBuilder == null) { - settingsBuilder = Settings.builder(); + if (settings.hasValue(IndexMetadata.INDEX_ROUTING_PATH.getKey())) { + List routingPaths = settings.getAsList(IndexMetadata.INDEX_ROUTING_PATH.getKey()); + if (routingPaths.equals(sortFields) == false) { + throw new IllegalStateException( + String.format( + Locale.ROOT, + "data stream [%s] in logsdb mode and with [%s] index setting has mismatching sort " + + "and routing fields, [index.routing_path:%s], [index.sort.fields:%s]", + dataStreamName, + IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.getKey(), + routingPaths, + sortFields + ) + ); + } + } else { + settingsBuilder = getBuilder(settingsBuilder).putList(INDEX_ROUTING_PATH.getKey(), sortFields); } - settingsBuilder.putList(INDEX_ROUTING_PATH.getKey(), sortFields); + } else { + // Routing on sort fields is not allowed, reset the corresponding index setting. + LOGGER.debug("creation of index [{}] with logsdb mode and routing on sort fields without it being allowed", indexName); + settingsBuilder = getBuilder(settingsBuilder).put(IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.getKey(), false); } } } return settingsBuilder == null ? Settings.EMPTY : settingsBuilder.build(); - } record MappingHints(boolean hasSyntheticSourceUsage, boolean sortOnHostName, boolean addHostNameField) { @@ -194,6 +196,14 @@ private static IndexMode resolveIndexMode(final String mode) { return mode != null ? Enum.valueOf(IndexMode.class, mode.toUpperCase(Locale.ROOT)) : null; } + // Returned value needs to be reassigned to the passed arg, to track the created builder. 
+ private static Settings.Builder getBuilder(Settings.Builder builder) { + if (builder == null) { + return Settings.builder(); + } + return builder; + } + MappingHints getMappingHints( String indexName, IndexMode templateIndexMode, @@ -260,8 +270,8 @@ MappingHints getMappingHints( || mapperService.mappingLookup().getMapping().getRoot().subobjects() == ObjectMapper.Subobjects.DISABLED)); boolean sortOnHostName = IndexSettings.LOGSDB_SORT_ON_HOST_NAME.get(indexTemplateAndCreateRequestSettings) || addHostNameField - || ((hostName instanceof NumberFieldMapper nfm && nfm.fieldType().hasDocValues()) - || (hostName instanceof KeywordFieldMapper kfm && kfm.fieldType().hasDocValues())); + || (hostName instanceof NumberFieldMapper nfm && nfm.fieldType().hasDocValues()) + || (hostName instanceof KeywordFieldMapper kfm && kfm.fieldType().hasDocValues()); return new MappingHints(hasSyntheticSourceUsage, sortOnHostName, addHostNameField); } } catch (AssertionError | Exception e) { diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbLicenseService.java similarity index 86% rename from x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java rename to x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbLicenseService.java index f7f228859fb6d..d3487e205b33e 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbLicenseService.java @@ -23,12 +23,12 @@ /** * Determines based on license and fallback setting whether synthetic source usages should fallback to stored source. */ -final class SyntheticSourceLicenseService { +final class LogsdbLicenseService { static final String MAPPINGS_FEATURE_FAMILY = "mappings"; // You can only override this property if you received explicit approval from Elastic. 
static final String CUTOFF_DATE_SYS_PROP_NAME = "es.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override"; - private static final Logger LOGGER = LogManager.getLogger(SyntheticSourceLicenseService.class); + private static final Logger LOGGER = LogManager.getLogger(LogsdbLicenseService.class); static final long DEFAULT_CUTOFF_DATE = LocalDateTime.of(2025, 2, 4, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); /** @@ -53,16 +53,22 @@ final class SyntheticSourceLicenseService { License.OperationMode.GOLD ); + static final LicensedFeature.Momentary LOGSDB_ROUTING_ON_SORT_FIELDS_FEATURE = LicensedFeature.momentary( + MAPPINGS_FEATURE_FAMILY, + "logsdb-routing-on-sort-fields", + License.OperationMode.ENTERPRISE + ); + private final long cutoffDate; private LicenseService licenseService; private XPackLicenseState licenseState; private volatile boolean syntheticSourceFallback; - SyntheticSourceLicenseService(Settings settings) { + LogsdbLicenseService(Settings settings) { this(settings, System.getProperty(CUTOFF_DATE_SYS_PROP_NAME)); } - SyntheticSourceLicenseService(Settings settings, String cutoffDate) { + LogsdbLicenseService(Settings settings, String cutoffDate) { this.syntheticSourceFallback = FALLBACK_SETTING.get(settings); this.cutoffDate = getCutoffDate(cutoffDate); } @@ -97,6 +103,13 @@ && checkFeature(SYNTHETIC_SOURCE_FEATURE_LEGACY, licenseStateSnapshot, isTemplat return true; } + /** + * @return whether indexes in logsdb mode can use routing on sort fields. + */ + public boolean allowLogsdbRoutingOnSortField(boolean isTemplateValidation) { + return checkFeature(LOGSDB_ROUTING_ON_SORT_FIELDS_FEATURE, licenseState.copyCurrentLicenseState(), isTemplateValidation); + } + private static boolean checkFeature( LicensedFeature.Momentary licensedFeature, XPackLicenseState licenseStateSnapshot, diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java index b2a533f6b76c9..22462a2b22bd4 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java @@ -31,8 +31,8 @@ import java.util.List; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; -import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseServiceTests.createEnterpriseLicense; -import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseServiceTests.createGoldOrPlatinumLicense; +import static org.elasticsearch.xpack.logsdb.LogsdbLicenseServiceTests.createEnterpriseLicense; +import static org.elasticsearch.xpack.logsdb.LogsdbLicenseServiceTests.createGoldOrPlatinumLicense; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -57,15 +57,15 @@ public void setup() throws Exception { public void testSyntheticSourceUsageDisallowed() { createIndexWithSyntheticSourceAndAssertExpectedType("test", "STORED"); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue()); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue()); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue()); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue()); } public void 
testSyntheticSourceUsageWithLegacyLicense() { createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-stacktraces", "synthetic"); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, not(nullValue())); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue()); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, not(nullValue())); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue()); } public void testSyntheticSourceUsageWithLegacyLicensePastCutoff() throws Exception { @@ -75,8 +75,8 @@ public void testSyntheticSourceUsageWithLegacyLicensePastCutoff() throws Excepti ensureGreen(); createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-stacktraces", "STORED"); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue()); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue()); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue()); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue()); } public void testSyntheticSourceUsageWithEnterpriseLicensePastCutoff() throws Exception { @@ -87,8 +87,8 @@ public void testSyntheticSourceUsageWithEnterpriseLicensePastCutoff() throws Exc createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-traces", "synthetic"); // also supports non-exceptional indices createIndexWithSyntheticSourceAndAssertExpectedType("test", "synthetic"); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue()); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, not(nullValue())); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue()); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE, not(nullValue())); } public void testSyntheticSourceUsageTracksBothLegacyAndRegularFeature() throws Exception { @@ -99,8 +99,8 @@ public void testSyntheticSourceUsageTracksBothLegacyAndRegularFeature() throws E createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-traces-v2", "synthetic"); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, not(nullValue())); - assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, not(nullValue())); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, not(nullValue())); + assertFeatureUsage(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE, not(nullValue())); } private void createIndexWithSyntheticSourceAndAssertExpectedType(String indexName, String expectedType) { @@ -119,7 +119,7 @@ private List getFeatureUsageInfo() { private void assertFeatureUsage(LicensedFeature.Momentary syntheticSourceFeature, Matcher matcher) { GetFeatureUsageResponse.FeatureUsageInfo featureUsage = getFeatureUsageInfo().stream() - .filter(f -> f.getFamily().equals(SyntheticSourceLicenseService.MAPPINGS_FEATURE_FAMILY)) + .filter(f -> f.getFamily().equals(LogsdbLicenseService.MAPPINGS_FEATURE_FAMILY)) .filter(f -> f.getName().equals(syntheticSourceFeature.getName())) .findAny() .orElse(null); diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java index 5220b5eba4567..09086b75f9384 100644 --- 
a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java @@ -37,10 +37,12 @@ import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.common.settings.Settings.builder; -import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseServiceTests.createEnterpriseLicense; +import static org.elasticsearch.xpack.logsdb.LogsdbLicenseServiceTests.createEnterpriseLicense; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -65,33 +67,31 @@ public class LogsdbIndexModeSettingsProviderTests extends ESTestCase { } """; - private SyntheticSourceLicenseService syntheticSourceLicenseService; + private LogsdbLicenseService logsdbLicenseService; private final AtomicInteger newMapperServiceCounter = new AtomicInteger(); @Before public void setup() throws Exception { MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.isAllowed(any())).thenReturn(true); - var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); - licenseService.setLicenseState(licenseState); var mockLicenseService = mock(LicenseService.class); License license = createEnterpriseLicense(); when(mockLicenseService.getLicense()).thenReturn(license); - syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY); - syntheticSourceLicenseService.setLicenseState(licenseState); - syntheticSourceLicenseService.setLicenseService(mockLicenseService); + logsdbLicenseService = new LogsdbLicenseService(Settings.EMPTY); + logsdbLicenseService.setLicenseState(licenseState); + logsdbLicenseService.setLicenseService(mockLicenseService); } private LogsdbIndexModeSettingsProvider withSyntheticSourceDemotionSupport(boolean enabled) { newMapperServiceCounter.set(0); var provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", enabled).build() ); provider.init(im -> { newMapperServiceCounter.incrementAndGet(); return MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()); - }, IndexVersion::current, true); + }, IndexVersion::current, true, true); return provider; } @@ -102,13 +102,13 @@ private Settings generateLogsdbSettings(Settings settings) throws IOException { private Settings generateLogsdbSettings(Settings settings, String mapping) throws IOException { Metadata metadata = Metadata.EMPTY_METADATA; var provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); provider.init(im -> { newMapperServiceCounter.incrementAndGet(); return MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()); - }, IndexVersion::current, true); + }, IndexVersion::current, true, true); var result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(DATA_STREAM_NAME, 0), DATA_STREAM_NAME, @@ -123,7 +123,7 @@ private Settings generateLogsdbSettings(Settings settings, String mapping) throw public void testDisabled() throws 
IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", false).build() ); @@ -142,7 +142,7 @@ public void testDisabled() throws IOException { public void testOnIndexCreation() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -161,7 +161,7 @@ public void testOnIndexCreation() throws IOException { public void testOnExplicitStandardIndex() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -180,7 +180,7 @@ public void testOnExplicitStandardIndex() throws IOException { public void testOnExplicitTimeSeriesIndex() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -199,7 +199,7 @@ public void testOnExplicitTimeSeriesIndex() throws IOException { public void testNonLogsDataStream() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -218,7 +218,7 @@ public void testNonLogsDataStream() throws IOException { public void testWithoutLogsComponentTemplate() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -237,7 +237,7 @@ public void testWithoutLogsComponentTemplate() throws IOException { public void testWithLogsComponentTemplate() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -256,7 +256,7 @@ public void testWithLogsComponentTemplate() throws IOException { public void testWithMultipleComponentTemplates() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -275,7 +275,7 @@ public void testWithMultipleComponentTemplates() throws IOException { public void testWithCustomComponentTemplatesOnly() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -294,7 +294,7 @@ public void testWithCustomComponentTemplatesOnly() throws IOException { public void testNonMatchingTemplateIndexPattern() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -313,7 +313,7 @@ public void testNonMatchingTemplateIndexPattern() throws IOException { public void 
testCaseSensitivity() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -332,7 +332,7 @@ public void testCaseSensitivity() throws IOException { public void testMultipleHyphensInDataStreamName() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", true).build() ); @@ -351,7 +351,7 @@ public void testMultipleHyphensInDataStreamName() throws IOException { public void testBeforeAndAFterSettingUpdate() throws IOException { final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( - syntheticSourceLicenseService, + logsdbLicenseService, Settings.builder().put("cluster.logsdb.enabled", false).build() ); @@ -583,7 +583,7 @@ public void testNewIndexHasSyntheticSourceUsageTimeSeries() throws IOException { } } - public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOException { + public void testNewIndexHasSyntheticSourceUsageInvalidSettings() throws IOException { String dataStreamName = DATA_STREAM_NAME; String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0); Settings settings = Settings.builder().put("index.soft_deletes.enabled", false).build(); @@ -655,7 +655,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws assertThat(result.size(), equalTo(0)); assertThat(newMapperServiceCounter.get(), equalTo(1)); - syntheticSourceLicenseService.setSyntheticSourceFallback(true); + logsdbLicenseService.setSyntheticSourceFallback(true); result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 2), dataStreamName, @@ -699,7 +699,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws } public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch() throws IOException { - syntheticSourceLicenseService.setSyntheticSourceFallback(true); + logsdbLicenseService.setSyntheticSourceFallback(true); LogsdbIndexModeSettingsProvider provider = withSyntheticSourceDemotionSupport(true); final Settings settings = Settings.EMPTY; @@ -810,7 +810,7 @@ public void testExplicitRoutingPathMatchesSortFields() throws Exception { assertTrue(result.isEmpty()); } - public void testExplicitRoutingPathDoesNotMatchSortFields() throws Exception { + public void testExplicitRoutingPathDoesNotMatchSortFields() { var settings = Settings.builder() .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host,message,@timestamp") .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "host,message,foo") @@ -829,6 +829,22 @@ public void testExplicitRoutingPathDoesNotMatchSortFields() throws Exception { ); } + public void testExplicitRoutingPathNotAllowedByLicense() throws Exception { + MockLicenseState licenseState = MockLicenseState.createMock(); + when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); + when(licenseState.isAllowed(same(LogsdbLicenseService.LOGSDB_ROUTING_ON_SORT_FIELDS_FEATURE))).thenReturn(false); + logsdbLicenseService = new LogsdbLicenseService(Settings.EMPTY); + logsdbLicenseService.setLicenseState(licenseState); + + var settings = Settings.builder() + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host,message") + .put(IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.getKey(), true) + 
.build(); + Settings result = generateLogsdbSettings(settings); + assertFalse(IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS.get(result)); + assertThat(IndexMetadata.INDEX_ROUTING_PATH.get(result), empty()); + } + public void testSortAndHostNamePropagateValue() throws Exception { var settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB) diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexSettingsProviderLegacyLicenseTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexSettingsProviderLegacyLicenseTests.java index 7fa2f11880f40..540f5ad162a41 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexSettingsProviderLegacyLicenseTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexSettingsProviderLegacyLicenseTests.java @@ -25,7 +25,7 @@ import java.time.ZoneOffset; import java.util.List; -import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseServiceTests.createGoldOrPlatinumLicense; +import static org.elasticsearch.xpack.logsdb.LogsdbLicenseServiceTests.createGoldOrPlatinumLicense; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -40,12 +40,12 @@ public void setup() throws Exception { License license = createGoldOrPlatinumLicense(); var licenseState = new XPackLicenseState(() -> time, new XPackLicenseStatus(license.operationMode(), true, null)); - var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + var licenseService = new LogsdbLicenseService(Settings.EMPTY); licenseService.setLicenseState(licenseState); var mockLicenseService = mock(LicenseService.class); when(mockLicenseService.getLicense()).thenReturn(license); - SyntheticSourceLicenseService syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + LogsdbLicenseService syntheticSourceLicenseService = new LogsdbLicenseService(Settings.EMPTY); syntheticSourceLicenseService.setLicenseState(licenseState); syntheticSourceLicenseService.setLicenseService(mockLicenseService); @@ -53,6 +53,7 @@ public void setup() throws Exception { provider.init( im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), IndexVersion::current, + true, true ); } @@ -102,12 +103,12 @@ public void testGetAdditionalIndexSettingsTsdbAfterCutoffDate() throws Exception long time = LocalDateTime.of(2024, 12, 31, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); var licenseState = new XPackLicenseState(() -> time, new XPackLicenseStatus(license.operationMode(), true, null)); - var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + var licenseService = new LogsdbLicenseService(Settings.EMPTY); licenseService.setLicenseState(licenseState); var mockLicenseService = mock(LicenseService.class); when(mockLicenseService.getLicense()).thenReturn(license); - SyntheticSourceLicenseService syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + LogsdbLicenseService syntheticSourceLicenseService = new LogsdbLicenseService(Settings.EMPTY); syntheticSourceLicenseService.setLicenseState(licenseState); syntheticSourceLicenseService.setLicenseService(mockLicenseService); @@ -115,6 +116,7 @@ public void testGetAdditionalIndexSettingsTsdbAfterCutoffDate() throws Exception provider.init( im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), 
im.getSettings(), im.getIndex().getName()), IndexVersion::current, + true, true ); diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbLicenseServiceTests.java similarity index 75% rename from x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java rename to x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbLicenseServiceTests.java index 0eb0d21ff2e78..60701e9402e6c 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbLicenseServiceTests.java @@ -26,23 +26,43 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class SyntheticSourceLicenseServiceTests extends ESTestCase { +public class LogsdbLicenseServiceTests extends ESTestCase { private LicenseService mockLicenseService; - private SyntheticSourceLicenseService licenseService; + private LogsdbLicenseService licenseService; @Before public void setup() throws Exception { mockLicenseService = mock(LicenseService.class); License license = createEnterpriseLicense(); when(mockLicenseService.getLicense()).thenReturn(license); - licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + licenseService = new LogsdbLicenseService(Settings.EMPTY); + } + + public void testAllowRoutingOnSortFields() { + MockLicenseState licenseState = MockLicenseState.createMock(); + when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); + when(licenseState.isAllowed(same(LogsdbLicenseService.LOGSDB_ROUTING_ON_SORT_FIELDS_FEATURE))).thenReturn(true); + licenseService.setLicenseState(licenseState); + licenseService.setLicenseService(mockLicenseService); + assertTrue(licenseService.allowLogsdbRoutingOnSortField(false)); + Mockito.verify(licenseState, Mockito.times(1)).featureUsed(any()); + } + + public void testAllowRoutingOnSortFieldsTemplateValidation() { + MockLicenseState licenseState = MockLicenseState.createMock(); + when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); + when(licenseState.isAllowed(same(LogsdbLicenseService.LOGSDB_ROUTING_ON_SORT_FIELDS_FEATURE))).thenReturn(true); + licenseService.setLicenseState(licenseState); + licenseService.setLicenseService(mockLicenseService); + assertTrue(licenseService.allowLogsdbRoutingOnSortField(true)); + Mockito.verify(licenseState, Mockito.never()).featureUsed(any()); } public void testLicenseAllowsSyntheticSource() { MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true); licenseService.setLicenseState(licenseState); licenseService.setLicenseService(mockLicenseService); assertFalse( @@ -55,7 +75,7 @@ public void testLicenseAllowsSyntheticSource() { public void testLicenseAllowsSyntheticSourceTemplateValidation() { MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true); + 
when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true); licenseService.setLicenseState(licenseState); licenseService.setLicenseService(mockLicenseService); assertFalse( @@ -65,10 +85,10 @@ public void testLicenseAllowsSyntheticSourceTemplateValidation() { Mockito.verify(licenseState, Mockito.never()).featureUsed(any()); } - public void testDefaultDisallow() { + public void testDefaultDisallowSyntheticSource() { MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false); licenseService.setLicenseState(licenseState); licenseService.setLicenseService(mockLicenseService); assertTrue( @@ -78,10 +98,10 @@ public void testDefaultDisallow() { Mockito.verify(licenseState, Mockito.never()).featureUsed(any()); } - public void testFallback() { + public void testFallbackSyntheticSource() { MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true); licenseService.setLicenseState(licenseState); licenseService.setLicenseService(mockLicenseService); licenseService.setSyntheticSourceFallback(true); @@ -101,15 +121,15 @@ public void testGoldOrPlatinumLicense() throws Exception { MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); when(licenseState.getOperationMode()).thenReturn(license.operationMode()); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY))).thenReturn(true); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY))).thenReturn(true); licenseService.setLicenseState(licenseState); licenseService.setLicenseService(mockLicenseService); assertFalse( "legacy licensed usage is allowed, so not fallback to stored source", licenseService.fallbackToStoredSource(false, true) ); - Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE)); - Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY)); + Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE)); + Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY)); Mockito.verify(licenseState, Mockito.times(1)).featureUsed(any()); } @@ -121,7 +141,7 @@ public void testGoldOrPlatinumLicenseLegacyLicenseNotAllowed() throws Exception MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); when(licenseState.getOperationMode()).thenReturn(license.operationMode()); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false); licenseService.setLicenseState(licenseState); licenseService.setLicenseService(mockLicenseService); 
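[Editor's note] The license tests in this file all follow one arrange-assert pattern: stub the licensed feature on a MockLicenseState, then check both the decision and whether feature usage was recorded. A minimal sketch of that pattern, assuming Mockito's usual static imports (when/same/any) and the names introduced by this diff:

    MockLicenseState licenseState = MockLicenseState.createMock();
    when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState);
    when(licenseState.isAllowed(same(LogsdbLicenseService.LOGSDB_ROUTING_ON_SORT_FIELDS_FEATURE))).thenReturn(true);
    licenseService.setLicenseState(licenseState);
    // Real usage (isTemplateValidation == false) must record feature usage; a dry-run template validation must not:
    assertTrue(licenseService.allowLogsdbRoutingOnSortField(false));
    Mockito.verify(licenseState, Mockito.times(1)).featureUsed(any());

Note the distinction the two new routing tests encode: passing true (template validation) yields the same decision but is never counted as feature usage.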
assertTrue( @@ -129,7 +149,7 @@ public void testGoldOrPlatinumLicenseLegacyLicenseNotAllowed() throws Exception licenseService.fallbackToStoredSource(false, false) ); Mockito.verify(licenseState, Mockito.never()).featureUsed(any()); - Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE)); + Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE)); } public void testGoldOrPlatinumLicenseBeyondCutoffDate() throws Exception { @@ -141,17 +161,17 @@ public void testGoldOrPlatinumLicenseBeyondCutoffDate() throws Exception { MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); when(licenseState.getOperationMode()).thenReturn(license.operationMode()); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY))).thenReturn(true); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY))).thenReturn(true); licenseService.setLicenseState(licenseState); licenseService.setLicenseService(mockLicenseService); assertTrue("beyond cutoff date, so fallback to stored source", licenseService.fallbackToStoredSource(false, true)); Mockito.verify(licenseState, Mockito.never()).featureUsed(any()); - Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE)); + Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE)); } public void testGoldOrPlatinumLicenseCustomCutoffDate() throws Exception { - licenseService = new SyntheticSourceLicenseService(Settings.EMPTY, "2025-01-02T00:00"); + licenseService = new LogsdbLicenseService(Settings.EMPTY, "2025-01-02T00:00"); long start = LocalDateTime.of(2025, 1, 3, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); License license = createGoldOrPlatinumLicense(start); @@ -161,12 +181,12 @@ public void testGoldOrPlatinumLicenseCustomCutoffDate() throws Exception { MockLicenseState licenseState = MockLicenseState.createMock(); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); when(licenseState.getOperationMode()).thenReturn(license.operationMode()); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false); - when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY))).thenReturn(true); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false); + when(licenseState.isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY))).thenReturn(true); licenseService.setLicenseState(licenseState); licenseService.setLicenseService(mockLicenseService); assertTrue("custom cutoff date, so fallback to stored source", licenseService.fallbackToStoredSource(false, true)); - Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE)); + Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(LogsdbLicenseService.SYNTHETIC_SOURCE_FEATURE)); Mockito.verify(licenseState, Mockito.never()).featureUsed(any()); } diff --git 
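[Editor's note] The cutoff-date tests above hinge on a single comparison: a Gold/Platinum license whose start date falls after the (default or custom) cutoff no longer qualifies for the legacy synthetic-source feature, so the service falls back to stored source. A hypothetical sketch of just that date arithmetic, using the values from testGoldOrPlatinumLicenseCustomCutoffDate (the exact comparison operator is an assumption, not taken from this diff):

    LocalDateTime cutoff = LocalDateTime.parse("2025-01-02T00:00");
    long licenseStartMillis = LocalDateTime.of(2025, 1, 3, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
    boolean beyondCutoff = licenseStartMillis >= cutoff.toInstant(ZoneOffset.UTC).toEpochMilli();
    assert beyondCutoff; // hence fallbackToStoredSource(false, true) returns true in the test above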
a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricMapperPlugin.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricMapperPlugin.java index fea55e793d638..9320df583b4c5 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricMapperPlugin.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricMapperPlugin.java @@ -17,7 +17,7 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.xpack.aggregatemetric.aggregations.metrics.AggregateMetricsAggregatorsRegistrar; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; @@ -32,7 +32,7 @@ public class AggregateMetricMapperPlugin extends Plugin implements MapperPlugin, @Override public Map getMappers() { - return singletonMap(AggregateDoubleMetricFieldMapper.CONTENT_TYPE, AggregateDoubleMetricFieldMapper.PARSER); + return singletonMap(AggregateMetricDoubleFieldMapper.CONTENT_TYPE, AggregateMetricDoubleFieldMapper.PARSER); } @Override diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregator.java index add4fb3e5d2db..a12d476af3ac1 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregator.java @@ -24,14 +24,14 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Map; class AggregateMetricBackedAvgAggregator extends NumericMetricsAggregator.SingleValue { - final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; LongArray counts; DoubleArray sums; @@ -47,7 +47,7 @@ class AggregateMetricBackedAvgAggregator extends NumericMetricsAggregator.Single ) throws IOException { super(name, context, parent, metadata); assert valuesSourceConfig.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) valuesSourceConfig.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) valuesSourceConfig.getValuesSource(); final BigArrays bigArrays = context.bigArrays(); counts = bigArrays.newLongArray(1, true); sums = bigArrays.newDoubleArray(1, true); diff --git 
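[Editor's note] AggregateMetricBackedAvgAggregator never sees raw values; per bucket it accumulates the pre-aggregated sum and value_count subfields (the counts and sums arrays above) and divides at the end. A standalone sketch of that arithmetic (plain Java, not the Elasticsearch API):

    double[] docSums = { 10.0, 2.5 };   // values of the field's .sum subfield, one per document
    long[] docCounts = { 4, 1 };        // values of the field's .value_count subfield
    double totalSum = 0;
    long totalCount = 0;
    for (int i = 0; i < docSums.length; i++) {
        totalSum += docSums[i];
        totalCount += docCounts[i];
    }
    double avg = totalSum / totalCount; // 2.5, identical to an avg over the original raw values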
a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregator.java index dd485ec218371..a007f334a69e2 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregator.java @@ -24,14 +24,14 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Map; class AggregateMetricBackedMaxAggregator extends NumericMetricsAggregator.SingleValue { - private final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + private final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; final DocValueFormat formatter; DoubleArray maxes; @@ -44,7 +44,7 @@ class AggregateMetricBackedMaxAggregator extends NumericMetricsAggregator.Single ) throws IOException { super(name, context, parent, metadata); assert config.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) config.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) config.getValuesSource(); maxes = context.bigArrays().newDoubleArray(1, false); maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY); this.formatter = config.format(); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregator.java index 5a70801b8ae76..3b024c512aa82 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregator.java @@ -24,14 +24,14 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Map; class AggregateMetricBackedMinAggregator extends NumericMetricsAggregator.SingleValue { - private final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + private final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; final DocValueFormat format; DoubleArray mins; @@ -44,7 +44,7 @@ class AggregateMetricBackedMinAggregator 
extends NumericMetricsAggregator.Single ) throws IOException { super(name, context, parent, metadata); assert config.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) config.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) config.getValuesSource(); mins = context.bigArrays().newDoubleArray(1, false); mins.fill(0, mins.size(), Double.POSITIVE_INFINITY); this.format = config.format(); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregator.java index f4c28d7381214..480590b359bd3 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregator.java @@ -23,14 +23,14 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Map; class AggregateMetricBackedSumAggregator extends NumericMetricsAggregator.SingleValue { - private final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + private final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; private final DocValueFormat format; private DoubleArray sums; @@ -45,7 +45,7 @@ class AggregateMetricBackedSumAggregator extends NumericMetricsAggregator.Single ) throws IOException { super(name, context, parent, metadata); assert valuesSourceConfig.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) valuesSourceConfig.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) valuesSourceConfig.getValuesSource(); sums = context.bigArrays().newDoubleArray(1, true); compensations = context.bigArrays().newDoubleArray(1, true); this.format = valuesSourceConfig.format(); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregator.java index 065a5411b0bcb..49b3fd8846f9c 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregator.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import 
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; import java.io.IOException; import java.util.Map; @@ -32,7 +32,7 @@ */ class AggregateMetricBackedValueCountAggregator extends NumericMetricsAggregator.SingleValue { - private final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + private final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; // a count per bucket LongArray counts; @@ -46,7 +46,7 @@ class AggregateMetricBackedValueCountAggregator extends NumericMetricsAggregator ) throws IOException { super(name, aggregationContext, parent, metadata); assert valuesSourceConfig.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) valuesSourceConfig.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) valuesSourceConfig.getValuesSource(); counts = bigArrays().newLongArray(1, true); } @@ -55,7 +55,7 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, final BigArrays bigArrays = bigArrays(); final SortedNumericDoubleValues values = valuesSource.getAggregateMetricValues( aggCtx.getLeafReaderContext(), - AggregateDoubleMetricFieldMapper.Metric.value_count + AggregateMetricDoubleFieldMapper.Metric.value_count ); return new LeafBucketCollectorBase(sub, values) { diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSource.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSource.java index a964573dbb5d9..c4a9c37fcf380 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSource.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSource.java @@ -13,23 +13,23 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.support.AggregationContext; -import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateDoubleMetricFieldData; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateMetricDoubleFieldData; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.function.Function; public class AggregateMetricsValuesSource { - public abstract static class AggregateDoubleMetric extends org.elasticsearch.search.aggregations.support.ValuesSource { + public abstract static class AggregateMetricDouble extends org.elasticsearch.search.aggregations.support.ValuesSource { public abstract SortedNumericDoubleValues getAggregateMetricValues(LeafReaderContext context, Metric metric) throws IOException; - public static class Fielddata extends AggregateDoubleMetric { + public static class Fielddata extends AggregateMetricDouble { - protected final IndexAggregateDoubleMetricFieldData indexFieldData; 
+ protected final IndexAggregateMetricDoubleFieldData indexFieldData; - public Fielddata(IndexAggregateDoubleMetricFieldData indexFieldData) { + public Fielddata(IndexAggregateMetricDoubleFieldData indexFieldData) { this.indexFieldData = indexFieldData; } @@ -51,7 +51,7 @@ public boolean advanceExact(int doc) throws IOException { @Override protected Function roundingPreparer(AggregationContext context) throws IOException { - throw AggregationErrors.unsupportedRounding(AggregateDoubleMetricFieldMapper.CONTENT_TYPE); + throw AggregationErrors.unsupportedRounding(AggregateMetricDoubleFieldMapper.CONTENT_TYPE); } public SortedNumericDoubleValues getAggregateMetricValues(LeafReaderContext context, Metric metric) throws IOException { diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java index bcac3f12fd131..e47275ed4b756 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java @@ -15,7 +15,7 @@ import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateDoubleMetricFieldData; +import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateMetricDoubleFieldData; import java.util.Locale; import java.util.function.LongSupplier; @@ -43,7 +43,7 @@ public ValuesSource getScript(AggregationScript.LeafFactory script, ValueType sc public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script) { final IndexFieldData indexFieldData = fieldContext.indexFieldData(); - if ((indexFieldData instanceof IndexAggregateDoubleMetricFieldData) == false) { + if ((indexFieldData instanceof IndexAggregateMetricDoubleFieldData) == false) { throw new IllegalArgumentException( "Expected aggregate_metric_double type on field [" + fieldContext.field() @@ -52,7 +52,7 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa + "]" ); } - return new AggregateMetricsValuesSource.AggregateDoubleMetric.Fielddata((IndexAggregateDoubleMetricFieldData) indexFieldData); + return new AggregateMetricsValuesSource.AggregateMetricDouble.Fielddata((IndexAggregateMetricDoubleFieldData) indexFieldData); } @Override diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateDoubleMetricFieldData.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateMetricDoubleFieldData.java similarity index 83% rename from x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateDoubleMetricFieldData.java rename to x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateMetricDoubleFieldData.java index eb07f9c641efb..a98b6eb4c04a4 100644 --- 
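[Editor's note] getField in AggregateMetricsValuesSourceType guards its cast with Elasticsearch's house style of `instanceof ... == false` rather than `!(... instanceof ...)`. Reduced to its essentials (illustrative only; toValuesSource is not a method in this diff):

    static AggregateMetricsValuesSource.AggregateMetricDouble.Fielddata toValuesSource(IndexFieldData<?> fieldData, String field) {
        if ((fieldData instanceof IndexAggregateMetricDoubleFieldData) == false) {
            throw new IllegalArgumentException("Expected aggregate_metric_double type on field [" + field + "]");
        }
        return new AggregateMetricsValuesSource.AggregateMetricDouble.Fielddata((IndexAggregateMetricDoubleFieldData) fieldData);
    }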
a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateDoubleMetricFieldData.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateMetricDoubleFieldData.java @@ -13,12 +13,12 @@ /** * Specialization of {@link IndexFieldData} for aggregate_metric. */ -public abstract class IndexAggregateDoubleMetricFieldData implements IndexFieldData { +public abstract class IndexAggregateMetricDoubleFieldData implements IndexFieldData { protected final String fieldName; protected final ValuesSourceType valuesSourceType; - public IndexAggregateDoubleMetricFieldData(String fieldName, ValuesSourceType valuesSourceType) { + public IndexAggregateMetricDoubleFieldData(String fieldName, ValuesSourceType valuesSourceType) { this.fieldName = fieldName; this.valuesSourceType = valuesSourceType; } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateDoubleMetricFieldData.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateMetricDoubleFieldData.java similarity index 72% rename from x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateDoubleMetricFieldData.java rename to x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateMetricDoubleFieldData.java index c8a89456be5e5..c11ccd5d4ff2e 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateDoubleMetricFieldData.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateMetricDoubleFieldData.java @@ -8,12 +8,12 @@ import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; /** - * {@link LeafFieldData} specialization for aggregate_double_metric data. + * {@link LeafFieldData} specialization for aggregate_metric_double data. 
*/ -public interface LeafAggregateDoubleMetricFieldData extends LeafFieldData { +public interface LeafAggregateMetricDoubleFieldData extends LeafFieldData { /** * Return aggregate_metric of double values for a given metric diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java similarity index 96% rename from x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java rename to x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java index a58f8dae8cc73..3ab49126ecb0d 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java @@ -59,8 +59,8 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentSubParser; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateDoubleMetricFieldData; -import org.elasticsearch.xpack.aggregatemetric.fielddata.LeafAggregateDoubleMetricFieldData; +import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateMetricDoubleFieldData; +import org.elasticsearch.xpack.aggregatemetric.fielddata.LeafAggregateMetricDoubleFieldData; import java.io.IOException; import java.time.ZoneId; @@ -78,15 +78,15 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** A {@link FieldMapper} for a field containing aggregate metrics such as min/max/value_count etc. */ -public class AggregateDoubleMetricFieldMapper extends FieldMapper { +public class AggregateMetricDoubleFieldMapper extends FieldMapper { - private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(AggregateDoubleMetricFieldMapper.class); + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(AggregateMetricDoubleFieldMapper.class); public static final String CONTENT_TYPE = "aggregate_metric_double"; public static final String SUBFIELD_SEPARATOR = "."; - private static AggregateDoubleMetricFieldMapper toType(FieldMapper in) { - return (AggregateDoubleMetricFieldMapper) in; + private static AggregateMetricDoubleFieldMapper toType(FieldMapper in) { + return (AggregateMetricDoubleFieldMapper) in; } /** @@ -97,7 +97,7 @@ private static AggregateDoubleMetricFieldMapper toType(FieldMapper in) { * @return the name of the subfield */ public static String subfieldName(String fieldName, Metric metric) { - return fieldName + AggregateDoubleMetricFieldMapper.SUBFIELD_SEPARATOR + metric.name(); + return fieldName + AggregateMetricDoubleFieldMapper.SUBFIELD_SEPARATOR + metric.name(); } /** @@ -150,7 +150,7 @@ public static final class Builder extends FieldMapper.Builder { /** * Parameter that marks this field as a time series metric defining its time series metric type. 
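[Editor's note] subfieldName above is the whole addressing scheme for this field type: every metric lives in an ordinary numeric subfield named <field> + SUBFIELD_SEPARATOR + <metric>. A quick illustration (plain Java; "response_time" is an invented field name):

    String field = "response_time";
    for (String metric : new String[] { "min", "max", "sum", "value_count" }) {
        System.out.println(field + "." + metric);
    }
    // prints response_time.min, response_time.max, response_time.sum, response_time.value_count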
- * For {@link AggregateDoubleMetricFieldMapper} fields gauge, counter and summary metric types are + * For {@link AggregateMetricDoubleFieldMapper} fields gauge, counter and summary metric types are * supported. */ private final Parameter timeSeriesMetric; @@ -194,7 +194,7 @@ public Builder metric(MetricType metric) { } @Override - public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { + public AggregateMetricDoubleFieldMapper build(MapperBuilderContext context) { if (multiFieldsBuilder.hasMultiFields()) { DEPRECATION_LOGGER.warn( DeprecationCategory.MAPPINGS, @@ -261,7 +261,7 @@ public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { throw new IllegalArgumentException("Duplicate keys " + l + "and " + r + "."); }, () -> new EnumMap<>(Metric.class))); - AggregateDoubleMetricFieldType metricFieldType = new AggregateDoubleMetricFieldType( + AggregateMetricDoubleFieldType metricFieldType = new AggregateMetricDoubleFieldType( context.buildFullName(leafName()), meta.getValue(), timeSeriesMetric.getValue() @@ -269,7 +269,7 @@ public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { metricFieldType.setMetricFields(metricFields); metricFieldType.setDefaultMetric(defaultMetric.getValue()); - return new AggregateDoubleMetricFieldMapper(leafName(), metricFieldType, metricMappers, builderParams(this, context), this); + return new AggregateMetricDoubleFieldMapper(leafName(), metricFieldType, metricMappers, builderParams(this, context), this); } } @@ -278,7 +278,7 @@ public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { notInMultiFields(CONTENT_TYPE) ); - public static final class AggregateDoubleMetricFieldType extends SimpleMappedFieldType { + public static final class AggregateMetricDoubleFieldType extends SimpleMappedFieldType { private EnumMap metricFields; @@ -286,11 +286,11 @@ public static final class AggregateDoubleMetricFieldType extends SimpleMappedFie private final MetricType metricType; - public AggregateDoubleMetricFieldType(String name) { + public AggregateMetricDoubleFieldType(String name) { this(name, Collections.emptyMap(), null); } - public AggregateDoubleMetricFieldType(String name, Map meta, MetricType metricType) { + public AggregateMetricDoubleFieldType(String name, Map meta, MetricType metricType) { super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.metricType = metricType; } @@ -326,7 +326,7 @@ public Map getMetricFields() { public void addMetricField(Metric m, NumberFieldMapper.NumberFieldType subfield) { if (metricFields == null) { - metricFields = new EnumMap<>(AggregateDoubleMetricFieldMapper.Metric.class); + metricFields = new EnumMap<>(AggregateMetricDoubleFieldMapper.Metric.class); } if (name() == null) { @@ -408,13 +408,13 @@ public boolean isAggregatable() { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { - return (cache, breakerService) -> new IndexAggregateDoubleMetricFieldData( + return (cache, breakerService) -> new IndexAggregateMetricDoubleFieldData( name(), AggregateMetricsValuesSourceType.AGGREGATE_METRIC ) { @Override - public LeafAggregateDoubleMetricFieldData load(LeafReaderContext context) { - return new LeafAggregateDoubleMetricFieldData() { + public LeafAggregateMetricDoubleFieldData load(LeafReaderContext context) { + return new LeafAggregateMetricDoubleFieldData() { @Override public SortedNumericDoubleValues getAggregateMetricValues(final Metric metric) { try { @@ -476,7 +476,7 @@ public 
long ramBytesUsed() { } @Override - public LeafAggregateDoubleMetricFieldData loadDirect(LeafReaderContext context) { + public LeafAggregateMetricDoubleFieldData loadDirect(LeafReaderContext context) { return load(context); } @@ -677,7 +677,7 @@ public MetricType getMetricType() { private final IndexMode indexMode; - private AggregateDoubleMetricFieldMapper( + private AggregateMetricDoubleFieldMapper( String simpleName, MappedFieldType mappedFieldType, EnumMap metricFieldMappers, @@ -705,8 +705,8 @@ Metric defaultMetric() { } @Override - public AggregateDoubleMetricFieldType fieldType() { - return (AggregateDoubleMetricFieldType) super.fieldType(); + public AggregateMetricDoubleFieldType fieldType() { + return (AggregateMetricDoubleFieldType) super.fieldType(); } @Override diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java index 8378f99b2d7b2..fade3f68376d0 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedAvgAggregatorTests extends AggregatorTestCase { @@ -116,8 +116,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java 
b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java index 9cbafff116b4c..33e9151773fc2 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedMaxAggregatorTests extends AggregatorTestCase { @@ -116,8 +116,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.min, Metric.max)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java index fb4ea5785fbce..0f655b90a2358 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import 
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedMinAggregatorTests extends AggregatorTestCase { @@ -116,8 +116,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.min, Metric.max)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java index 91a34b4643456..e0e421189497c 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedSumAggregatorTests extends AggregatorTestCase { @@ -116,8 +116,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new 
AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java index faff3c2d7cb30..dbae604b8f725 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedValueCountAggregatorTests extends AggregatorTestCase { @@ -115,8 +115,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapperTests.java similarity index 94% rename from x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java rename to x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapperTests.java index 0d62e7a9c1fd2..3674043a72766 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java +++ 
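[Editor's note] Each renamed aggregator test builds its field type the same way: instantiate AggregateMetricDoubleFieldType, then register only the metric subfields that aggregator consumes (value_count/sum for avg, sum and value_count; min/max for min and max). A condensed sketch, assuming NumberFieldMapper's simple (name, type) test constructor:

    AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType("metric_field");
    for (Metric m : List.of(Metric.value_count, Metric.sum)) {
        String sub = subfieldName("metric_field", m);
        fieldType.addMetricField(m, new NumberFieldMapper.NumberFieldType(sub, NumberFieldMapper.NumberType.DOUBLE));
    }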
b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapperTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import org.hamcrest.Matchers; import org.junit.AssumptionViolatedException; @@ -38,18 +38,18 @@ import java.util.Map; import java.util.function.Function; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.IGNORE_MALFORMED; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.METRICS; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Names.IGNORE_MALFORMED; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Names.METRICS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.IsInstanceOf.instanceOf; -public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase { +public class AggregateMetricDoubleFieldMapperTests extends MapperTestCase { public static final String METRICS_FIELD = METRICS; - public static final String CONTENT_TYPE = AggregateDoubleMetricFieldMapper.CONTENT_TYPE; - public static final String DEFAULT_METRIC = AggregateDoubleMetricFieldMapper.Names.DEFAULT_METRIC; + public static final String CONTENT_TYPE = AggregateMetricDoubleFieldMapper.CONTENT_TYPE; + public static final String DEFAULT_METRIC = AggregateMetricDoubleFieldMapper.Names.DEFAULT_METRIC; @Override protected Collection getPlugins() { @@ -109,7 +109,7 @@ public void testParseValue() throws Exception { assertEquals("DoubleField ", doc.rootDoc().getField("field.min").toString()); Mapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); } /** @@ -325,8 +325,8 @@ public void testExplicitDefaultMetric() throws Exception { ); Mapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); - assertEquals(Metric.sum, ((AggregateDoubleMetricFieldMapper) fieldMapper).defaultMetric()); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); + assertEquals(Metric.sum, ((AggregateMetricDoubleFieldMapper) fieldMapper).defaultMetric()); } /** @@ -338,8 +338,8 @@ public void testImplicitDefaultMetricSingleMetric() throws Exception { ); Mapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); - assertEquals(Metric.value_count, ((AggregateDoubleMetricFieldMapper) fieldMapper).defaultMetric); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); + assertEquals(Metric.value_count, ((AggregateMetricDoubleFieldMapper) fieldMapper).defaultMetric); } /** @@ -348,8 +348,8 @@ public void testImplicitDefaultMetricSingleMetric() throws Exception { public void testImplicitDefaultMetric() throws Exception { DocumentMapper mapper = 
createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); - assertEquals(Metric.max, ((AggregateDoubleMetricFieldMapper) fieldMapper).defaultMetric); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); + assertEquals(Metric.max, ((AggregateMetricDoubleFieldMapper) fieldMapper).defaultMetric); } /** @@ -418,7 +418,7 @@ public void testParseNestedValue() throws Exception { ); Mapper fieldMapper = mapper.mappers().getMapper("field.subfield"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); ParsedDocument doc = mapper.parse( source( b -> b.startObject("field") @@ -462,7 +462,7 @@ public void testFieldCaps() throws IOException { protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { assertThat(query, Matchers.instanceOf(FieldExistsQuery.class)); FieldExistsQuery fieldExistsQuery = (FieldExistsQuery) query; - String defaultMetric = ((AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType) fieldType).getDefaultMetric().name(); + String defaultMetric = ((AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType) fieldType).getDefaultMetric().name(); assertEquals("field." + defaultMetric, fieldExistsQuery.getField()); assertNoFieldNamesField(fields); } @@ -488,10 +488,10 @@ public void testCannotBeUsedInMultifields() { public void testMetricType() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType ft = - (AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType) mapperService.fieldType("field"); + AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType ft = + (AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType) mapperService.fieldType("field"); assertNull(ft.getMetricType()); - assertMetricType("gauge", AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType::getMetricType); + assertMetricType("gauge", AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType::getMetricType); { // Test invalid metric type for this field type @@ -519,7 +519,7 @@ public void testMetricType() throws IOException { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - return new AggregateDoubleMetricSyntheticSourceSupport(ignoreMalformed); + return new AggregateMetricDoubleSyntheticSourceSupport(ignoreMalformed); } @Override @@ -564,11 +564,11 @@ public void testArrayValueSyntheticSource() throws Exception { assertEquals(Strings.toString(expected), syntheticSource); } - protected final class AggregateDoubleMetricSyntheticSourceSupport implements SyntheticSourceSupport { + protected final class AggregateMetricDoubleSyntheticSourceSupport implements SyntheticSourceSupport { private final boolean malformedExample; private final EnumSet storedMetrics; - public AggregateDoubleMetricSyntheticSourceSupport(boolean malformedExample) { + public AggregateMetricDoubleSyntheticSourceSupport(boolean malformedExample) { this.malformedExample = malformedExample; this.storedMetrics = EnumSet.copyOf(randomNonEmptySubsetOf(Arrays.asList(Metric.values()))); } diff --git 
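[Editor's note] The default-metric tests above distinguish three cases: an explicit default_metric wins; a single configured metric becomes the implicit default; otherwise the mapper picks one itself (max, in the minimal mapping used here). For reference, an aggregate_metric_double mapping exercising the explicit case (standard mapping syntax; the field name is invented):

    String mapping = """
        { "properties": { "agg_metric": {
            "type": "aggregate_metric_double",
            "metrics": [ "min", "max", "sum", "value_count" ],
            "default_metric": "sum"
        } } }
        """;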
a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldTypeTests.java similarity index 91% rename from x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java rename to x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldTypeTests.java index 89c2799d8327d..55ecfc13b1f3e 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldTypeTests.java @@ -27,8 +27,8 @@ import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Collections; @@ -36,20 +36,20 @@ import java.util.Map; import static java.util.Arrays.asList; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class AggregateDoubleMetricFieldTypeTests extends FieldTypeTestCase { +public class AggregateMetricDoubleFieldTypeTests extends FieldTypeTestCase { - protected AggregateDoubleMetricFieldType createDefaultFieldType(String name, Map meta, Metric defaultMetric) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(name, meta, null); - for (AggregateDoubleMetricFieldMapper.Metric m : List.of( - AggregateDoubleMetricFieldMapper.Metric.min, - AggregateDoubleMetricFieldMapper.Metric.max + protected AggregateMetricDoubleFieldType createDefaultFieldType(String name, Map meta, Metric defaultMetric) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(name, meta, null); + for (AggregateMetricDoubleFieldMapper.Metric m : List.of( + AggregateMetricDoubleFieldMapper.Metric.min, + AggregateMetricDoubleFieldMapper.Metric.max )) { String subfieldName = subfieldName(fieldType.name(), m); NumberFieldMapper.NumberFieldType subfield = new NumberFieldMapper.NumberFieldType( diff --git a/x-pack/plugin/migrate/build.gradle b/x-pack/plugin/migrate/build.gradle index 283362a637e78..796263846859d 100644 --- a/x-pack/plugin/migrate/build.gradle +++ b/x-pack/plugin/migrate/build.gradle @@ -17,8 +17,10 @@ dependencies { compileOnly project(path: xpackModule('core')) testImplementation(testArtifact(project(xpackModule('core')))) testImplementation project(xpackModule('ccr')) + 
testImplementation project(xpackModule('ilm')) testImplementation project(':modules:data-streams') testImplementation project(path: ':modules:reindex') + testImplementation project(path: ':modules:ingest-common') } addQaCheckDependencies(project) diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataTransportActionIT.java new file mode 100644 index 0000000000000..f104b66b0e40a --- /dev/null +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataTransportActionIT.java @@ -0,0 +1,290 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.datastreams.CreateDataStreamAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.LifecycleExecutionState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.datastreams.DataStreamsPlugin; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; +import org.elasticsearch.xpack.core.ilm.LifecycleSettings; +import org.elasticsearch.xpack.core.ilm.OperationMode; +import org.elasticsearch.xpack.core.ilm.Phase; +import org.elasticsearch.xpack.core.ilm.StartILMRequest; +import org.elasticsearch.xpack.core.ilm.StopILMRequest; +import org.elasticsearch.xpack.core.ilm.action.GetStatusAction; +import org.elasticsearch.xpack.core.ilm.action.ILMActions; +import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; +import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.elasticsearch.xpack.migrate.MigratePlugin; + +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +public class CopyLifecycleIndexMetadataTransportActionIT extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return List.of( + LocalStateCompositeXPackPlugin.class, + MigratePlugin.class, + DataStreamsPlugin.class, + IngestCommonPlugin.class, + IndexLifecycle.class + ); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(LifecycleSettings.LIFECYCLE_POLL_INTERVAL, "1s") + // This just generates less churn and makes it easier to read the log file if needed + .put(LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED, false) + .build(); + } + + public void testCreationDate() { + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); + + // so creation date is different + safeSleep(2); + + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(destIndex))); + + // verify source and dest date are actually different before copying + var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(sourceIndex, destIndex)).actionGet(); + var indexToSettings = settingsResponse.getIndexToSettings(); + var sourceDate = indexToSettings.get(sourceIndex).getAsLong(IndexMetadata.SETTING_CREATION_DATE, 0L); + { + var destDate = indexToSettings.get(destIndex).getAsLong(IndexMetadata.SETTING_CREATION_DATE, 0L); + assertTrue(sourceDate > 0); + assertTrue(destDate > 0); + assertNotEquals(sourceDate, destDate); + } + + // copy over the metadata + copyMetadata(sourceIndex, destIndex); + + var destDate = indicesAdmin().getSettings(new GetSettingsRequest().indices(sourceIndex, destIndex)) + .actionGet() + .getIndexToSettings() + .get(destIndex) + .getAsLong(IndexMetadata.SETTING_CREATION_DATE, 0L); + assertEquals(sourceDate, destDate); + } + + public void testILMState() throws Exception { + + Map<String, Phase> phases = Map.of( + "hot", + new Phase( + "hot", + TimeValue.ZERO, + Map.of( + "rollover", + new org.elasticsearch.xpack.core.ilm.RolloverAction(null, null, null, 1L, null, null, null, null, null, null) + ) + ) + ); + + var policyName = "my-policy"; + LifecyclePolicy policy = new LifecyclePolicy(policyName, phases); + PutLifecycleRequest putLifecycleRequest = new PutLifecycleRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, policy); + assertAcked(client().execute(ILMActions.PUT, putLifecycleRequest).actionGet()); + + // create data stream with a document and wait for ILM to roll it over + var dataStream = createDataStream(policyName); + createDocument(dataStream); + assertAcked(safeGet(client().execute(ILMActions.START, new StartILMRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)))); + assertBusy(() -> { + var getIndexResponse = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(dataStream))); + assertTrue(getIndexResponse.indices().length > 1); + }); + // stop ILM so source does not change after copying metadata + assertAcked(safeGet(client().execute(ILMActions.STOP, new StopILMRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)))); + assertBusy(() -> { + var statusResponse = safeGet( + client().execute(GetStatusAction.INSTANCE, new AcknowledgedRequest.Plain(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)) + ); + assertEquals(OperationMode.STOPPED, statusResponse.getMode()); + }); + + var
getIndexResponse = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(dataStream))); + for (var backingIndex : getIndexResponse.indices()) { + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(destIndex))); + + IndexMetadata destBefore = getClusterMetadata(destIndex).index(destIndex); + assertNull(destBefore.getCustomData(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY)); + + // copy over the metadata + copyMetadata(backingIndex, destIndex); + + var metadataAfter = getClusterMetadata(backingIndex, destIndex); + IndexMetadata sourceAfter = metadataAfter.index(backingIndex); + IndexMetadata destAfter = metadataAfter.index(destIndex); + assertNotNull(destAfter.getCustomData(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY)); + assertEquals( + sourceAfter.getCustomData(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY), + destAfter.getCustomData(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY) + ); + + } + } + + public void testRolloverInfos() throws Exception { + var dataStream = createDataStream(null); + + // rollover a few times + createDocument(dataStream); + rollover(dataStream); + createDocument(dataStream); + rollover(dataStream); + createDocument(dataStream); + var writeIndex = rollover(dataStream); + + var getIndexResponse = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(dataStream))); + for (var backingIndex : getIndexResponse.indices()) { + + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(destIndex))); + + var metadataBefore = getClusterMetadata(backingIndex, destIndex); + IndexMetadata source = metadataBefore.index(backingIndex); + IndexMetadata destBefore = metadataBefore.index(destIndex); + + // sanity check not equal before the copy + if (backingIndex.equals(writeIndex)) { + assertTrue(source.getRolloverInfos().isEmpty()); + assertTrue(destBefore.getRolloverInfos().isEmpty()); + } else { + assertNotEquals(source.getRolloverInfos(), destBefore.getRolloverInfos()); + } + + // copy over the metadata + copyMetadata(backingIndex, destIndex); + + // now rollover info should be equal + IndexMetadata destAfter = getClusterMetadata(destIndex).index(destIndex); + assertEquals(source.getRolloverInfos(), destAfter.getRolloverInfos()); + } + } + + private String createDataStream(String ilmPolicy) throws Exception { + String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.getDefault()); + + Settings settings = ilmPolicy != null ? 
Settings.builder().put(IndexMetadata.LIFECYCLE_NAME, ilmPolicy).build() : null; + + String mapping = """ + { + "properties": { + "@timestamp": { + "type":"date" + }, + "data":{ + "type":"keyword" + } + } + } + """; + Template idxTemplate = new Template(settings, new CompressedXContent(mapping), null); + + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName + "*")) + .template(idxTemplate) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); + + assertAcked( + client().execute( + TransportPutComposableIndexTemplateAction.TYPE, + new TransportPutComposableIndexTemplateAction.Request(dataStreamName + "_template").indexTemplate(template) + ) + ); + assertAcked( + client().execute( + CreateDataStreamAction.INSTANCE, + new CreateDataStreamAction.Request(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, dataStreamName) + ) + ); + return dataStreamName; + } + + private long createDocument(String dataStreamName) throws Exception { + // Get some randomized but reasonable timestamps on the data since not all of it is guaranteed to arrive in order. + long timeSeed = System.currentTimeMillis(); + long timestamp = randomLongBetween(timeSeed - TimeUnit.HOURS.toMillis(5), timeSeed); + safeGet( + client().index( + new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE) + .source( + JsonXContent.contentBuilder() + .startObject() + .field("@timestamp", timestamp) + .field("data", randomAlphaOfLength(25)) + .endObject() + ) + ) + ); + safeGet( + indicesAdmin().refresh( + new RefreshRequest(".ds-" + dataStreamName + "*").indicesOptions(IndicesOptions.lenientExpandOpenHidden()) + ) + ); + return timestamp; + } + + private void copyMetadata(String sourceIndex, String destIndex) { + assertAcked( + client().execute( + CopyLifecycleIndexMetadataAction.INSTANCE, + new CopyLifecycleIndexMetadataAction.Request(TEST_REQUEST_TIMEOUT, sourceIndex, destIndex) + ) + ); + } + + private String rollover(String dataStream) { + var rolloverResponse = safeGet(indicesAdmin().rolloverIndex(new RolloverRequest(dataStream, null))); + assertTrue(rolloverResponse.isAcknowledged()); + return rolloverResponse.getNewIndex(); + } + + private Metadata getClusterMetadata(String... 
indices) { + return safeGet(clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT).indices(indices))).getState().metadata(); + } +} diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java index 5220e17618a34..fb491132147a7 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java @@ -32,6 +32,8 @@ import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.not; public class CreateIndexFromSourceActionIT extends ESIntegTestCase { @@ -206,7 +208,7 @@ public void testSettingsNullOverride() throws Exception { assertTrue(destSettings.getAsBoolean(IndexMetadata.SETTING_BLOCKS_READ, false)); // override null removed - assertNull(destSettings.get(IndexMetadata.SETTING_BLOCKS_WRITE)); + assertThat(destSettings.keySet(), not(hasItem(IndexMetadata.SETTING_BLOCKS_WRITE))); } public void testRemoveIndexBlocksByDefault() throws Exception { @@ -236,9 +238,9 @@ public void testRemoveIndexBlocksByDefault() throws Exception { var destSettings = settingsResponse.getIndexToSettings().get(destIndex); // remove block settings override both source settings and override settings - assertNull(destSettings.get(IndexMetadata.SETTING_BLOCKS_WRITE)); - assertNull(destSettings.get(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE)); - assertNull(destSettings.get(IndexMetadata.SETTING_BLOCKS_READ)); + assertThat(destSettings.keySet(), not(hasItem(IndexMetadata.SETTING_BLOCKS_WRITE))); + assertThat(destSettings.keySet(), not(hasItem(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE))); + assertThat(destSettings.keySet(), not(hasItem(IndexMetadata.SETTING_BLOCKS_READ))); } public void testMappingsOverridden() { diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java index 1c9d85af8d5bd..e3b73d0aaa5cb 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; @@ -23,11 +24,15 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; +import 
org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; @@ -36,12 +41,15 @@ import org.elasticsearch.datastreams.DataStreamsPlugin; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.migrate.MigratePlugin; +import org.elasticsearch.xpack.migrate.MigrateTemplateRegistry; +import org.junit.Before; import java.io.IOException; import java.time.Instant; @@ -50,22 +58,29 @@ import java.util.Locale; import java.util.Map; +import static java.lang.Boolean.parseBoolean; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { + @Before + private void setup() throws Exception { + deletePipeline(MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME); + assertBusy(() -> { assertTrue(getPipelines(MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME).isFound()); }); + } + private static final String MAPPING = """ { "_doc":{ "dynamic":"strict", "properties":{ - "foo1":{ - "type":"text" - } + "foo1": {"type":"text"}, + "@timestamp": {"type":"date"} } } } @@ -73,23 +88,142 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return List.of(MigratePlugin.class, ReindexPlugin.class, MockTransportService.TestPlugin.class, DataStreamsPlugin.class); + return List.of( + MigratePlugin.class, + ReindexPlugin.class, + MockTransportService.TestPlugin.class, + DataStreamsPlugin.class, + IngestCommonPlugin.class + ); + } + + private static String DATA_STREAM_MAPPING = """ + { + "dynamic": true, + "_data_stream_timestamp": { + "enabled": true + }, + "properties": { + "@timestamp": {"type":"date"} + } + } + """; + + public void testTimestamp0AddedIfMissing() { + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); + + // add doc without timestamp + addDoc(sourceIndex, "{\"foo\":\"baz\"}"); + + // wait until doc is written to all shards before adding mapping + ensureHealth(sourceIndex); + + // add timestamp to source mapping +
indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING, XContentType.JSON).get(); + + // call reindex + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); + + assertResponse(prepareSearch(destIndex), response -> { + Map<String, Object> sourceAsMap = response.getHits().getAt(0).getSourceAsMap(); + assertEquals(Integer.valueOf(0), sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD)); + }); + } + + public void testTimestampNotAddedIfExists() { + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); + + // add doc with timestamp + String time = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis()); + var doc = String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, time); + addDoc(sourceIndex, doc); + + // wait until doc is written to all shards before adding mapping + ensureHealth(sourceIndex); + + // add timestamp to source mapping + indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING, XContentType.JSON).get(); + + // call reindex + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); + + assertResponse(prepareSearch(destIndex), response -> { + Map<String, Object> sourceAsMap = response.getHits().getAt(0).getSourceAsMap(); + assertEquals(time, sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD)); + }); + } + + public void testCustomReindexPipeline() { + String customPipeline = """ + { + "processors": [ + { + "set": { + "field": "cheese", + "value": "gorgonzola" + } + } + ], + "version": 1000 + } + """; + + PutPipelineRequest putRequest = new PutPipelineRequest( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME, + new BytesArray(customPipeline), + XContentType.JSON + ); + + safeGet(clusterAdmin().execute(PutPipelineTransportAction.TYPE, putRequest)); + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); + + // add doc with timestamp + String time = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis()); + var doc = String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, time); + addDoc(sourceIndex, doc); + + // wait until doc is written to all shards before adding mapping + ensureHealth(sourceIndex); + + // add timestamp to source mapping + indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING, XContentType.JSON).get(); + + String destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); + + assertResponse(prepareSearch(destIndex), response -> { + Map<String, Object> sourceAsMap = response.getHits().getAt(0).getSourceAsMap(); + assertEquals("gorgonzola", sourceAsMap.get("cheese")); + assertEquals(time, sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD)); + }); } public void testDestIndexDeletedIfExists() throws Exception { // empty source index var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // dest index with docs var destIndex = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); -
indicesAdmin().create(new CreateIndexRequest(destIndex)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(destIndex))); indexDocs(destIndex, 10); - indicesAdmin().refresh(new RefreshRequest(destIndex)).actionGet(); + safeGet(indicesAdmin().refresh(new RefreshRequest(destIndex))); assertHitCount(prepareSearch(destIndex).setSize(0), 10); // call reindex - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); + safeGet(client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))); // verify that dest still exists, but is now empty assertTrue(indexExists(destIndex)); @@ -98,11 +232,12 @@ public void testDestIndexDeletedIfExists() throws Exception { public void testDestIndexNameSet_noDotPrefix() throws Exception { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // call reindex - var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); var expectedDestIndexName = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); assertEquals(expectedDestIndexName, response.getDestIndex()); @@ -111,11 +246,12 @@ public void testDestIndexNameSet_noDotPrefix() throws Exception { public void testDestIndexNameSet_withDotPrefix() throws Exception { var sourceIndex = "." + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // call reindex - var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); var expectedDestIndexName = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); assertEquals(expectedDestIndexName, response.getDestIndex()); @@ -125,13 +261,14 @@ public void testDestIndexContainsDocs() throws Exception { // source index with docs var numDocs = randomIntBetween(1, 100); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); indexDocs(sourceIndex, numDocs); // call reindex - var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); - indicesAdmin().refresh(new RefreshRequest(response.getDestIndex())).actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + safeGet(indicesAdmin().refresh(new RefreshRequest(response.getDestIndex()))); // verify that dest contains docs assertHitCount(prepareSearch(response.getDestIndex()).setSize(0), numDocs); @@ -142,32 +279,38 @@ public void testSetSourceToBlockWrites() throws Exception { // empty source index var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new 
CreateIndexRequest(sourceIndex, settings)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); // call reindex - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); + safeGet(client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))); + + // Assert that source index is now read-only but not verified read-only + GetSettingsResponse getSettingsResponse = safeGet(admin().indices().getSettings(new GetSettingsRequest().indices(sourceIndex))); + assertTrue(parseBoolean(getSettingsResponse.getSetting(sourceIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); + assertFalse( + parseBoolean(getSettingsResponse.getSetting(sourceIndex, MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey())) + ); // assert that write to source fails var indexReq = new IndexRequest(sourceIndex).source(jsonBuilder().startObject().field("field", "1").endObject()); - assertThrows(ClusterBlockException.class, () -> client().index(indexReq).actionGet()); + expectThrows(ClusterBlockException.class, client().index(indexReq)); assertHitCount(prepareSearch(sourceIndex).setSize(0), 0); } public void testMissingSourceIndex() { var nonExistentSourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - assertThrows( + expectThrows( ResourceNotFoundException.class, - () -> client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(nonExistentSourceIndex)) - .actionGet() + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(nonExistentSourceIndex)) ); } - public void testSettingsAddedBeforeReindex() throws Exception { + public void testSettingsAddedBeforeReindex() { // start with a static setting var numShards = randomIntBetween(1, 10); var staticSettings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards).build(); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, staticSettings)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, staticSettings))); // update with a dynamic setting var numReplicas = randomIntBetween(0, 10); @@ -176,31 +319,32 @@ public void testSettingsAddedBeforeReindex() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) .build(); - indicesAdmin().updateSettings(new UpdateSettingsRequest(dynamicSettings, sourceIndex)).actionGet(); + safeGet(indicesAdmin().updateSettings(new UpdateSettingsRequest(dynamicSettings, sourceIndex))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); // assert both static and dynamic settings set on dest index - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); assertEquals(numReplicas, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS))); assertEquals(numShards, Integer.parseInt(settingsResponse.getSetting(destIndex, 
IndexMetadata.SETTING_NUMBER_OF_SHARDS))); assertEquals(refreshInterval, settingsResponse.getSetting(destIndex, IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey())); } - public void testMappingsAddedToDestIndex() throws Exception { + public void testMappingsAddedToDestIndex() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(MAPPING)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(MAPPING))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) - .actionGet(); + var mappingsResponse = safeGet( + indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + ); Map<String, MappingMetadata> mappings = mappingsResponse.mappings(); var destMappings = mappings.get(destIndex).sourceAsMap(); var sourceMappings = mappings.get(sourceIndex).sourceAsMap(); @@ -213,13 +357,13 @@ public void testFailIfMetadataBlockSet() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_METADATA, true).build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); - try { - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); - } catch (ElasticsearchException e) { - assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); - } + ElasticsearchException e = expectThrows( + ElasticsearchException.class, + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); cleanupMetadataBlocks(sourceIndex); } @@ -227,13 +371,13 @@ public void testFailIfReadBlockSet() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_READ, true).build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); - try { - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); - } catch (ElasticsearchException e) { - assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); - } + ElasticsearchException e = expectThrows( + ElasticsearchException.class, + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); cleanupMetadataBlocks(sourceIndex); } @@ -245,17 +389,17 @@ public void
testReadOnlyBlocksNotAddedBack() { .put(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, randomBoolean()) .put(IndexMetadata.SETTING_BLOCKS_WRITE, randomBoolean()) .build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); - assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY))); - assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE))); - assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); + assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY))); + assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE))); + assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); cleanupMetadataBlocks(sourceIndex); cleanupMetadataBlocks(destIndex); @@ -274,11 +418,11 @@ public void testUpdateSettingsDefaultsRestored() { assertAcked(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(sourceIndex, destIndex)).actionGet(); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(sourceIndex, destIndex))); var destSettings = settingsResponse.getIndexToSettings().get(destIndex); assertEquals( @@ -307,33 +451,34 @@ public void testSettingsAndMappingsFromTemplate() throws IOException { .build(); var request = new TransportPutComposableIndexTemplateAction.Request("logs-template"); request.indexTemplate(template); - client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); + safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request)); var sourceIndex = "logs-" + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); { var indexRequest = new IndexRequest(sourceIndex); indexRequest.source("{ \"foo1\": \"cheese\" }", XContentType.JSON); - client().index(indexRequest).actionGet(); + safeGet(client().index(indexRequest)); } // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new 
ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); // verify settings from templates copied to dest index { - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); assertEquals(numReplicas, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS))); assertEquals(numShards, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS))); } // verify mappings from templates copied to dest index { - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) - .actionGet(); + var mappingsResponse = safeGet( + indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + ); var destMappings = mappingsResponse.mappings().get(destIndex).sourceAsMap(); var sourceMappings = mappingsResponse.mappings().get(sourceIndex).sourceAsMap(); assertEquals(sourceMappings, destMappings); @@ -394,7 +539,7 @@ public void testTsdbStartEndSet() throws Exception { .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) .build() ); - client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); + safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request)); // index doc Instant time = Instant.now(); @@ -402,12 +547,11 @@ public void testTsdbStartEndSet() throws Exception { { var indexRequest = new IndexRequest("k8s").opType(DocWriteRequest.OpType.CREATE); indexRequest.source(TSDB_DOC.replace("$time", formatInstant(time)), XContentType.JSON); - var indexResponse = client().index(indexRequest).actionGet(); + var indexResponse = safeGet(client().index(indexRequest)); backingIndexName = indexResponse.getIndex(); } - var sourceSettings = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndexName)) - .actionGet() + var sourceSettings = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndexName))) .getSettings() .get(backingIndexName); Instant startTime = IndexSettings.TIME_SERIES_START_TIME.get(sourceSettings); @@ -420,17 +564,15 @@ public void testTsdbStartEndSet() throws Exception { // force a rollover so can call reindex and delete var rolloverRequest = new RolloverRequest("k8s", null); - var rolloverResponse = indicesAdmin().rolloverIndex(rolloverRequest).actionGet(); + var rolloverResponse = safeGet(indicesAdmin().rolloverIndex(rolloverRequest)); rolloverResponse.getNewIndex(); // call reindex on the original backing index - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(backingIndexName)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(backingIndexName)) + ).getDestIndex(); - var destSettings = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(destIndex)) - .actionGet() - .getSettings() + var destSettings = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(destIndex))).getSettings() .get(destIndex); var destStart = IndexSettings.TIME_SERIES_START_TIME.get(destSettings); var destEnd = IndexSettings.TIME_SERIES_END_TIME.get(destSettings); @@ -448,7 +590,7 @@ private static void 
cleanupMetadataBlocks(String index) { .putNull(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE) .putNull(IndexMetadata.SETTING_BLOCKS_METADATA) .build(); - assertAcked(indicesAdmin().updateSettings(new UpdateSettingsRequest(settings, index)).actionGet()); + safeGet(indicesAdmin().updateSettings(new UpdateSettingsRequest(settings, index))); } private static void indexDocs(String index, int numDocs) { @@ -461,7 +603,7 @@ private static void indexDocs(String index, int numDocs) { .source(String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, value), XContentType.JSON) ); } - BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); + BulkResponse bulkResponse = safeGet(client().bulk(bulkRequest)); assertThat(bulkResponse.getItems().length, equalTo(numDocs)); } @@ -469,12 +611,17 @@ private static String formatInstant(Instant instant) { return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); } - private static String getIndexUUID(String index) { - return indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index)) - .actionGet() - .getSettings() - .get(index) - .get(IndexMetadata.SETTING_INDEX_UUID); + void addDoc(String index, String doc) { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(index).opType(DocWriteRequest.OpType.CREATE).source(doc, XContentType.JSON)); + safeGet(client().bulk(bulkRequest)); } + private void ensureHealth(String index) { + if (cluster().numDataNodes() > 1) { + ensureGreen(index); + } else { + ensureYellow(index); + } + } } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java index f5f8beba26d8f..0c2f7e561294e 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java @@ -36,6 +36,8 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction; import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamTransportAction; +import org.elasticsearch.xpack.migrate.action.CopyLifecycleIndexMetadataAction; +import org.elasticsearch.xpack.migrate.action.CopyLifecycleIndexMetadataTransportAction; import org.elasticsearch.xpack.migrate.action.CreateIndexFromSourceAction; import org.elasticsearch.xpack.migrate.action.CreateIndexFromSourceTransportAction; import org.elasticsearch.xpack.migrate.action.GetMigrationReindexStatusAction; @@ -55,6 +57,7 @@ import org.elasticsearch.xpack.migrate.task.ReindexDataStreamTaskParams; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.function.Predicate; import java.util.function.Supplier; @@ -64,6 +67,18 @@ import static org.elasticsearch.xpack.migrate.task.ReindexDataStreamPersistentTaskExecutor.MAX_CONCURRENT_INDICES_REINDEXED_PER_DATA_STREAM_SETTING; public class MigratePlugin extends Plugin implements ActionPlugin, PersistentTaskPlugin { + @Override + public Collection<?> createComponents(PluginServices services) { + var registry = new MigrateTemplateRegistry( + services.environment().settings(), + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() + ); + registry.initialize(); + return List.of(registry); + } @Override public List<RestHandler> getRestHandlers( @@ -93,6 +108,7 @@ public List<RestHandler>
getRestHandlers( actions.add(new ActionHandler<>(CancelReindexDataStreamAction.INSTANCE, CancelReindexDataStreamTransportAction.class)); actions.add(new ActionHandler<>(ReindexDataStreamIndexAction.INSTANCE, ReindexDataStreamIndexTransportAction.class)); actions.add(new ActionHandler<>(CreateIndexFromSourceAction.INSTANCE, CreateIndexFromSourceTransportAction.class)); + actions.add(new ActionHandler<>(CopyLifecycleIndexMetadataAction.INSTANCE, CopyLifecycleIndexMetadataTransportAction.class)); return actions; } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java new file mode 100644 index 0000000000000..2a9dc97e16352 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.migrate; + +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; +import org.elasticsearch.xpack.core.template.IngestPipelineConfig; +import org.elasticsearch.xpack.core.template.JsonIngestPipelineConfig; + +import java.util.List; + +public class MigrateTemplateRegistry extends IndexTemplateRegistry { + + // This number must be incremented when we make changes to the built-in pipeline. + // If a specific user pipeline is needed instead, its version should be set to a value higher than the REGISTRY_VERSION. + static final int REGISTRY_VERSION = 1; + public static final String REINDEX_DATA_STREAM_PIPELINE_NAME = "reindex-data-stream-pipeline"; + private static final String TEMPLATE_VERSION_VARIABLE = "xpack.migrate.reindex.pipeline.version"; + + public MigrateTemplateRegistry( + Settings nodeSettings, + ClusterService clusterService, + ThreadPool threadPool, + Client client, + NamedXContentRegistry xContentRegistry + ) { + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); + } + + @Override + protected List<IngestPipelineConfig> getIngestPipelines() { + return List.of( + new JsonIngestPipelineConfig( + REINDEX_DATA_STREAM_PIPELINE_NAME, + "/" + REINDEX_DATA_STREAM_PIPELINE_NAME + ".json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ) + ); + } + + @Override + protected String getOrigin() { + return ClientHelper.STACK_ORIGIN; + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataAction.java new file mode 100644 index 0000000000000..d2acca1484b0c --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataAction.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class CopyLifecycleIndexMetadataAction extends ActionType<AcknowledgedResponse> { + + public static final String NAME = "indices:admin/index/copy_lifecycle_index_metadata"; + + public static final ActionType<AcknowledgedResponse> INSTANCE = new CopyLifecycleIndexMetadataAction(); + + private CopyLifecycleIndexMetadataAction() { + super(NAME); + } + + public static class Request extends AcknowledgedRequest<Request> implements IndicesRequest { + private final String sourceIndex; + private final String destIndex; + + public Request(TimeValue masterNodeTimeout, String sourceIndex, String destIndex) { + super(masterNodeTimeout, DEFAULT_ACK_TIMEOUT); + this.sourceIndex = sourceIndex; + this.destIndex = destIndex; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.sourceIndex = in.readString(); + this.destIndex = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(sourceIndex); + out.writeString(destIndex); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public String sourceIndex() { + return sourceIndex; + } + + public String destIndex() { + return destIndex; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(sourceIndex, request.sourceIndex) && Objects.equals(destIndex, request.destIndex); + } + + @Override + public int hashCode() { + return Objects.hash(sourceIndex, destIndex); + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return "copying lifecycle metadata for index " + sourceIndex; + } + + @Override + public String[] indices() { + return new String[] { sourceIndex, destIndex }; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataTransportAction.java new file mode 100644 index 0000000000000..ef263fdda2db3 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataTransportAction.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V.
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.migrate.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.AckedBatchedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateAckListener; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.SimpleBatchedAckListenerTaskExecutor; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.LifecycleExecutionState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterServiceTaskQueue; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.HashMap; + +public class CopyLifecycleIndexMetadataTransportAction extends TransportMasterNodeAction< + CopyLifecycleIndexMetadataAction.Request, + AcknowledgedResponse> { + private static final Logger logger = LogManager.getLogger(CopyLifecycleIndexMetadataTransportAction.class); + private final ClusterStateTaskExecutor<UpdateIndexMetadataTask> executor; + private final MasterServiceTaskQueue<UpdateIndexMetadataTask> taskQueue; + + @Inject + public CopyLifecycleIndexMetadataTransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters + ) { + super( + CopyLifecycleIndexMetadataAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + CopyLifecycleIndexMetadataAction.Request::new, + AcknowledgedResponse::readFrom, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.executor = new SimpleBatchedAckListenerTaskExecutor<>() { + @Override + public Tuple<ClusterState, ClusterStateAckListener> executeTask(UpdateIndexMetadataTask task, ClusterState clusterState) { + return new Tuple<>(applyUpdate(clusterState, task), task); + } + }; + this.taskQueue = clusterService.createTaskQueue("migrate-copy-index-metadata", Priority.NORMAL, this.executor); + } + + @Override + protected void masterOperation( + Task task, + CopyLifecycleIndexMetadataAction.Request request, + ClusterState state, + ActionListener<AcknowledgedResponse> listener + ) { + taskQueue.submitTask( + "migrate-copy-index-metadata", + new UpdateIndexMetadataTask(request.sourceIndex(), request.destIndex(), request.ackTimeout(), listener), + request.masterNodeTimeout() + ); + } + + @Override + protected ClusterBlockException checkBlock(CopyLifecycleIndexMetadataAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
+ } + + private static ClusterState applyUpdate(ClusterState state, UpdateIndexMetadataTask updateTask) { + + IndexMetadata sourceMetadata = state.metadata().index(updateTask.sourceIndex); + if (sourceMetadata == null) { + throw new IndexNotFoundException(updateTask.sourceIndex); + } + IndexMetadata destMetadata = state.metadata().index(updateTask.destIndex); + if (destMetadata == null) { + throw new IndexNotFoundException(updateTask.destIndex); + } + + IndexMetadata.Builder newDestMetadata = IndexMetadata.builder(destMetadata); + + var sourceILM = sourceMetadata.getCustomData(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY); + if (sourceILM != null) { + newDestMetadata.putCustom(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY, sourceILM); + } + + newDestMetadata.putRolloverInfos(sourceMetadata.getRolloverInfos()) + // creation date is required for ILM to function + .creationDate(sourceMetadata.getCreationDate()) + // creation date updates settings so must increment settings version + .settingsVersion(destMetadata.getSettingsVersion() + 1); + + var indices = new HashMap<>(state.metadata().indices()); + indices.put(updateTask.destIndex, newDestMetadata.build()); + + Metadata newMetadata = Metadata.builder(state.metadata()).indices(indices).build(); + return ClusterState.builder(state).metadata(newMetadata).build(); + } + + static class UpdateIndexMetadataTask extends AckedBatchedClusterStateUpdateTask { + private final String sourceIndex; + private final String destIndex; + + UpdateIndexMetadataTask(String sourceIndex, String destIndex, TimeValue ackTimeout, ActionListener<AcknowledgedResponse> listener) { + super(ackTimeout, listener); + this.sourceIndex = sourceIndex; + this.destIndex = destIndex; + } + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java index 14e5e8cccd910..5ab009decd381 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java @@ -15,6 +15,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -191,5 +194,15 @@ public String[] indices() { public IndicesOptions indicesOptions() { return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return "creating index " + destIndex + " from " + sourceIndex; + } } } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java index 81edb0e716f51..a12316129a4b5 100644 ---
a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java @@ -46,13 +46,13 @@ public class CreateIndexFromSourceTransportAction extends HandledTransportAction private final ClusterService clusterService; private final Client client; private final IndexScopedSettings indexScopedSettings; - private static final Settings REMOVE_INDEX_BLOCKS_SETTING_OVERRIDE = Settings.builder() - .putNull(IndexMetadata.SETTING_READ_ONLY) - .putNull(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE) - .putNull(IndexMetadata.SETTING_BLOCKS_WRITE) - .putNull(IndexMetadata.SETTING_BLOCKS_METADATA) - .putNull(IndexMetadata.SETTING_BLOCKS_READ) - .build(); + private static final Set INDEX_BLOCK_SETTINGS = Set.of( + IndexMetadata.SETTING_READ_ONLY, + IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, + IndexMetadata.SETTING_BLOCKS_WRITE, + IndexMetadata.SETTING_BLOCKS_METADATA, + IndexMetadata.SETTING_BLOCKS_READ + ); @Inject public CreateIndexFromSourceTransportAction( @@ -89,12 +89,15 @@ protected void doExecute(Task task, CreateIndexFromSourceAction.Request request, Settings.Builder settings = Settings.builder() // first settings from source index - .put(filterSettings(sourceIndex)) - // then override with request settings - .put(request.settingsOverride()); + .put(filterSettings(sourceIndex)); + + if (request.settingsOverride().isEmpty() == false) { + applyOverrides(settings, request.settingsOverride()); + } + if (request.removeIndexBlocks()) { // lastly, override with settings to remove index blocks if requested - settings.put(REMOVE_INDEX_BLOCKS_SETTING_OVERRIDE); + INDEX_BLOCK_SETTINGS.forEach(settings::remove); } Map mergeMappings; @@ -114,6 +117,16 @@ protected void doExecute(Task task, CreateIndexFromSourceAction.Request request, client.admin().indices().create(createIndexRequest, listener.map(response -> response)); } + private void applyOverrides(Settings.Builder settings, Settings overrides) { + overrides.keySet().forEach(key -> { + if (overrides.get(key) != null) { + settings.put(key, overrides.get(key)); + } else { + settings.remove(key); + } + }); + } + private static Map toMap(@Nullable MappingMetadata sourceMapping) { return Optional.ofNullable(sourceMapping) .map(MappingMetadata::source) @@ -122,10 +135,14 @@ private static Map toMap(@Nullable MappingMetadata sourceMapping .orElse(Map.of()); } + @SuppressWarnings("unchecked") private static Map mergeMappings(@Nullable MappingMetadata sourceMapping, Map mappingAddition) throws IOException { Map combinedMappingMap = new HashMap<>(toMap(sourceMapping)); XContentHelper.update(combinedMappingMap, mappingAddition, true); + if (sourceMapping != null && combinedMappingMap.size() == 1 && combinedMappingMap.containsKey(sourceMapping.type())) { + combinedMappingMap = (Map) combinedMappingMap.get(sourceMapping.type()); + } return combinedMappingMap; } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java index faf8982b79bf0..5ebd2040fbcb1 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java @@ -16,6 +16,9 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -24,6 +27,7 @@ import java.io.IOException; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.function.Predicate; @@ -144,5 +148,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return "reindexing data stream " + sourceDataStream; + } } } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java index 2e3fd1b76ed32..dec3cf2901fcc 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java @@ -14,8 +14,12 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; +import java.util.Map; import java.util.Objects; public class ReindexDataStreamIndexAction extends ActionType { @@ -78,6 +82,16 @@ public String[] indices() { public IndicesOptions indicesOptions() { return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return "reindexing data stream index " + sourceIndex; + } } public static class Response extends ActionResponse { diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index 8c12011ca4bb1..31fdcbe074c13 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -44,6 +45,7 @@ 
import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequest; +import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; @@ -51,9 +53,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate; +import org.elasticsearch.xpack.migrate.MigrateTemplateRegistry; import java.util.Locale; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; @@ -159,6 +163,7 @@ protected void doExecute( .andThen(l -> createIndex(sourceIndex, destIndexName, l, taskId)) .andThen(l -> reindex(sourceIndexName, destIndexName, l, taskId)) .andThen(l -> copyOldSourceSettingsToDest(settingsBefore, destIndexName, l, taskId)) + .andThen(l -> copyIndexMetadataToDest(sourceIndexName, destIndexName, l, taskId)) .andThen(l -> sanityCheck(sourceIndexName, destIndexName, l, taskId)) .andThen(l -> closeIndexIfWasClosed(destIndexName, wasClosed, l, taskId)) .andThenApply(ignored -> new ReindexDataStreamIndexAction.Response(destIndexName)) @@ -268,13 +273,41 @@ void reindex(String sourceIndexName, String destIndexName, ActionListener checkForFailuresListener = ActionListener.wrap(bulkByScrollResponse -> { + if (bulkByScrollResponse.getSearchFailures().isEmpty() == false) { + ScrollableHitSource.SearchFailure firstSearchFailure = bulkByScrollResponse.getSearchFailures().get(0); + listener.onFailure( + new ElasticsearchException( + "Failure reading data from {} caused by {}", + firstSearchFailure.getReason(), + sourceIndexName, + firstSearchFailure.getReason().getMessage() + ) + ); + } else if (bulkByScrollResponse.getBulkFailures().isEmpty() == false) { + BulkItemResponse.Failure firstBulkFailure = bulkByScrollResponse.getBulkFailures().get(0); + listener.onFailure( + new ElasticsearchException( + "Failure loading data from {} into {} caused by {}", + firstBulkFailure.getCause(), + sourceIndexName, + destIndexName, + firstBulkFailure.getCause().getMessage() + ) + ); + } else { + listener.onResponse(bulkByScrollResponse); + } + }, listener::onFailure); + client.execute(ReindexAction.INSTANCE, reindexRequest, checkForFailuresListener); } private void updateSettings( @@ -302,6 +335,24 @@ private void copyOldSourceSettingsToDest( updateSettings(destIndexName, settings, listener, parentTaskId); } + private void copyIndexMetadataToDest( + String sourceIndexName, + String destIndexName, + ActionListener listener, + TaskId parentTaskId + ) { + logger.debug("Copying index metadata to destination index [{}] from source index [{}]", destIndexName, sourceIndexName); + var request = new CopyLifecycleIndexMetadataAction.Request(TimeValue.MAX_VALUE, sourceIndexName, destIndexName); + request.setParentTask(parentTaskId); + var errorMessage = String.format( + Locale.ROOT, + "Failed to acknowledge copying index metadata from source [%s] to dest [%s]", + sourceIndexName, + destIndexName + ); + client.execute(CopyLifecycleIndexMetadataAction.INSTANCE, request, failIfNotAcknowledged(listener, errorMessage)); + } + private static void copySettingOrUnset(Settings settingsBefore, Settings.Builder builder, String setting) { // if setting was explicitly added to the source index if (settingsBefore.get(setting) != 
null) { @@ -341,6 +392,7 @@ private void addBlockToIndex( TaskId parentTaskId ) { AddIndexBlockRequest addIndexBlockRequest = new AddIndexBlockRequest(block, index); + addIndexBlockRequest.markVerified(false); addIndexBlockRequest.setParentTask(parentTaskId); client.admin().indices().execute(TransportAddIndexBlockAction.TYPE, addIndexBlockRequest, listener); } @@ -366,26 +418,24 @@ private void sanityCheck( ) { if (Assertions.ENABLED) { logger.debug("Comparing source [{}] and dest [{}] doc counts", sourceIndexName, destIndexName); - client.execute( - RefreshAction.INSTANCE, - new RefreshRequest(destIndexName), - listener.delegateFailureAndWrap((delegate, ignored) -> { - getIndexDocCount(sourceIndexName, parentTaskId, delegate.delegateFailureAndWrap((delegate1, sourceCount) -> { - getIndexDocCount(destIndexName, parentTaskId, delegate1.delegateFailureAndWrap((delegate2, destCount) -> { - assert sourceCount == destCount - : String.format( - Locale.ROOT, - "source index [%s] has %d docs and dest [%s] has %d docs", - sourceIndexName, - sourceCount, - destIndexName, - destCount - ); - delegate2.onResponse(null); - })); + RefreshRequest refreshRequest = new RefreshRequest(destIndexName); + refreshRequest.setParentTask(parentTaskId); + client.execute(RefreshAction.INSTANCE, refreshRequest, listener.delegateFailureAndWrap((delegate, ignored) -> { + getIndexDocCount(sourceIndexName, parentTaskId, delegate.delegateFailureAndWrap((delegate1, sourceCount) -> { + getIndexDocCount(destIndexName, parentTaskId, delegate1.delegateFailureAndWrap((delegate2, destCount) -> { + assert Objects.equals(sourceCount, destCount) + : String.format( + Locale.ROOT, + "source index [%s] has %d docs and dest [%s] has %d docs", + sourceIndexName, + sourceCount, + destIndexName, + destCount + ); + delegate2.onResponse(null); })); - }) - ); + })); + })); } else { listener.onResponse(null); } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java index 1f6a871388350..38ab0275f62c1 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java @@ -226,6 +226,7 @@ private void maybeProcessNextIndex( }, e -> { reindexDataStreamTask.reindexFailed(index.getName(), e); listener.onResponse(null); + maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, sourceDataStream, listener, parentTaskId); })); } diff --git a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportActionTests.java b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportActionTests.java index 99e1031dec3a2..55e4da30cdf11 100644 --- a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportActionTests.java +++ b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportActionTests.java @@ -30,6 +30,7 @@ import java.util.Collections; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.when; @@ -111,7 +112,7 @@ public void testReindexIncludesRateLimit() { ) 
); - doNothing().when(client).execute(eq(ReindexAction.INSTANCE), request.capture(), eq(listener)); + doNothing().when(client).execute(eq(ReindexAction.INSTANCE), request.capture(), any()); action.reindex(sourceIndex, destIndex, listener, taskId); @@ -136,7 +137,7 @@ public void testReindexIncludesInfiniteRateLimit() { Collections.singleton(ReindexDataStreamIndexTransportAction.REINDEX_MAX_REQUESTS_PER_SECOND_SETTING) ) ); - doNothing().when(client).execute(eq(ReindexAction.INSTANCE), request.capture(), eq(listener)); + doNothing().when(client).execute(eq(ReindexAction.INSTANCE), request.capture(), any()); action.reindex(sourceIndex, destIndex, listener, taskId); diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index a63d911e9d40d..e33fe677179d8 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -109,7 +109,7 @@ public List getBootstrapChecks() { @Override public BootstrapCheckResult check(BootstrapContext context) { try { - validateModelRepository(MODEL_REPOSITORY.get(context.settings()), context.environment().configFile()); + validateModelRepository(MODEL_REPOSITORY.get(context.settings()), context.environment().configDir()); } catch (Exception e) { return BootstrapCheckResult.failure( "Found an invalid configuration for xpack.ml.model_repository. " diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 0869ae394d3de..d18f6da13cad2 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -258,4 +258,5 @@ testClusters.configureEach { user username: "no_ml", password: "x-pack-test-password", role: "minimal" setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' + systemProperty 'es.queryable_built_in_roles_enabled', 'false' } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java index c90c461fe8b1a..231d165c599c0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java @@ -124,7 +124,7 @@ protected StopDataFrameAnalyticsAction.Response forceStopAnalytics(String id) { } protected void waitUntilAnalyticsIsStopped(String id) throws Exception { - waitUntilAnalyticsIsStopped(id, TimeValue.timeValueSeconds(60)); + waitUntilAnalyticsIsStopped(id, TimeValue.timeValueSeconds(90)); } protected void waitUntilAnalyticsIsStopped(String id, TimeValue waitTime) throws Exception { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java 
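// [editor's note] Two things worth connecting from the hunks above. First, in
// ReindexDataStreamIndexTransportAction, a BulkByScrollResponse can complete
// "successfully" while still carrying per-document search or bulk failures, so the
// new checkForFailuresListener promotes the first such failure to a hard error
// instead of silently finishing a partial reindex. Second, the Mockito change from
// eq(listener) to any() follows directly: reindex() now hands client.execute a
// wrapping listener, so an exact-instance matcher can no longer match. A
// self-contained sketch of the failure-promotion logic (the Response type is a
// simplified stand-in, not the Elasticsearch class):

import java.util.List;

final class FailureCheckSketch {
    record Response(List<String> searchFailures, List<String> bulkFailures) {}

    // Mirror the listener's decision tree: search failures first, then bulk
    // failures; only a clean response is propagated as success.
    static void onResponse(Response r) {
        if (r.searchFailures().isEmpty() == false) {
            throw new IllegalStateException("failure reading data: " + r.searchFailures().get(0));
        } else if (r.bulkFailures().isEmpty() == false) {
            throw new IllegalStateException("failure loading data: " + r.bulkFailures().get(0));
        }
        System.out.println("reindex completed with no partial failures");
    }

    public static void main(String[] args) {
        onResponse(new Response(List.of(), List.of())); // succeeds
    }
}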
index 5e24393be0a22..6e377770ed0ef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java @@ -149,7 +149,7 @@ private void createNativeProcess( ProcessPipes processPipes ) { AnalyticsBuilder analyticsBuilder = new AnalyticsBuilder( - env::tmpFile, + env::tmpDir, nativeController, processPipes, analyticsProcessConfig, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeMemoryUsageEstimationProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeMemoryUsageEstimationProcessFactory.java index 8d106be13882d..99adf6b6d506d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeMemoryUsageEstimationProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeMemoryUsageEstimationProcessFactory.java @@ -116,7 +116,7 @@ private void createNativeProcess( ProcessPipes processPipes ) { AnalyticsBuilder analyticsBuilder = new AnalyticsBuilder( - env::tmpFile, + env::tmpDir, nativeController, processPipes, analyticsProcessConfig, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 23b12a23e7495..9da7187746174 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -125,6 +125,7 @@ import org.elasticsearch.xpack.core.ml.stats.ForecastStats; import org.elasticsearch.xpack.core.ml.stats.StatsAccumulator; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; import org.elasticsearch.xpack.core.security.support.Exceptions; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.categorization.GrokPatternCreator; @@ -306,11 +307,15 @@ public void createJobResultIndex(Job job, ClusterState state, final ActionListen String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(job.getId()); String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(job.getId()); String tempIndexName = job.getInitialResultsIndexName(); + // Find all indices starting with this name and pick the latest one + String[] concreteIndices = resolver.concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), tempIndexName + "*"); + if (concreteIndices.length > 0) { + tempIndexName = MlIndexAndAlias.latestIndex(concreteIndices); + } // Our read/write aliases should point to the concrete index // If the initial index is NOT an alias, either it is already a concrete index, or it does not exist yet if (state.getMetadata().hasAlias(tempIndexName)) { - String[] concreteIndices = resolver.concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), tempIndexName); // SHOULD NOT be closed as in typical call flow checkForLeftOverDocuments already verified this // if it is closed, we bailout and return an error @@ -324,8 +329,8 @@ public void createJobResultIndex(Job job, ClusterState state, final ActionListen ); return; } - tempIndexName = concreteIndices[0]; } + final String indexName = tempIndexName; ActionListener indexAndMappingsListener = 
ActionListener.wrap(success -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java index 2d4ea308a6693..9c6d953cee5fb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java @@ -209,9 +209,9 @@ public static Path writeNormalizerInitState(String jobId, String state, Environm // createTempFile has a race condition where it may return the same // temporary file name to different threads if called simultaneously // from multiple threads, hence add the thread ID to avoid this - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); Path stateFile = Files.createTempFile( - env.tmpFile(), + env.tmpDir(), jobId + "_quantiles_" + Thread.currentThread().getId(), QUANTILES_FILE_EXTENSION ); @@ -227,8 +227,8 @@ private void buildScheduledEventsConfig(List command) throws IOException if (scheduledEvents.isEmpty()) { return; } - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); - Path eventsConfigFile = Files.createTempFile(env.tmpFile(), "eventsConfig", JSON_EXTENSION); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); + Path eventsConfigFile = Files.createTempFile(env.tmpDir(), "eventsConfig", JSON_EXTENSION); filesToDelete.add(eventsConfigFile); List scheduledEventToRuleWriters = scheduledEvents.stream() @@ -252,8 +252,8 @@ private void buildScheduledEventsConfig(List command) throws IOException } private void buildJobConfig(List command) throws IOException { - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); - Path configFile = Files.createTempFile(env.tmpFile(), "config", JSON_EXTENSION); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); + Path configFile = Files.createTempFile(env.tmpDir(), "config", JSON_EXTENSION); filesToDelete.add(configFile); try ( OutputStreamWriter osw = new OutputStreamWriter(Files.newOutputStream(configFile), StandardCharsets.UTF_8); @@ -271,8 +271,8 @@ private void buildFiltersConfig(List command) throws IOException { if (referencedFilters.isEmpty()) { return; } - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); - Path filtersConfigFile = Files.createTempFile(env.tmpFile(), "filtersConfig", JSON_EXTENSION); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); + Path filtersConfigFile = Files.createTempFile(env.tmpDir(), "filtersConfig", JSON_EXTENSION); filesToDelete.add(filtersConfigFile); try ( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java index d69acab30451a..d42eb8f748b51 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java @@ -12,19 +12,27 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; import 
org.elasticsearch.client.internal.Client; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskState; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -44,9 +52,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; import java.util.concurrent.TimeoutException; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -153,6 +159,55 @@ synchronized void start() { executor.execute(); } + private void removeDuplicateModelSnapshotDoc(Consumer runAfter) { + String snapshotDocId = jobId + "_model_snapshot_" + snapshotId; + client.prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPattern()) + .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(snapshotDocId))) + .setSize(2) + .addSort(ModelSnapshot.MIN_VERSION.getPreferredName(), org.elasticsearch.search.sort.SortOrder.ASC) + .execute(ActionListener.wrap(searchResponse -> { + if (searchResponse.getHits().getTotalHits().value() > 1) { + deleteOlderSnapshotDoc(searchResponse, runAfter); + } else { + onFinish.accept(null); + } + }, e -> { + logger.warn(() -> format("[%s] [%s] error during search for model snapshot documents", jobId, snapshotId), e); + onFinish.accept(null); + })); + } + + private void deleteOlderSnapshotDoc(SearchResponse searchResponse, Consumer runAfter) { + SearchHit firstHit = searchResponse.getHits().getAt(0); + logger.debug(() -> format("[%s] deleting duplicate model snapshot doc [%s]", jobId, firstHit.getId())); + client.prepareDelete() + .setIndex(firstHit.getIndex()) + .setId(firstHit.getId()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .execute(ActionListener.runAfter(ActionListener.wrap(deleteResponse -> { + if ((deleteResponse.getResult() == DocWriteResponse.Result.DELETED) == false) { + logger.warn( + () -> format( + "[%s] [%s] failed to delete old snapshot [%s] result document, document not found", + jobId, + snapshotId, + ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName() + ) + ); + } + }, e -> { + logger.warn( + () -> format( + "[%s] [%s] failed to delete old snapshot [%s] result document", + jobId, + snapshotId, + ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName() + ), + e + 
); + }), () -> runAfter.accept(null))); + } + void setTaskToFailed(String reason, ActionListener> listener) { SnapshotUpgradeTaskState taskState = new SnapshotUpgradeTaskState(SnapshotUpgradeState.FAILED, task.getAllocationId(), reason); task.updatePersistentTaskState(taskState, ActionListener.wrap(listener::onResponse, f -> { @@ -259,7 +314,7 @@ void restoreState() { logger.error(() -> format("[%s] [%s] failed to write old state", jobId, snapshotId), e); setTaskToFailed( "Failed to write old state due to: " + e.getMessage(), - ActionListener.wrap(t -> shutdown(e), f -> shutdown(e)) + ActionListener.running(() -> shutdownWithFailure(e)) ); return; } @@ -273,7 +328,7 @@ void restoreState() { logger.error(() -> format("[%s] [%s] failed to flush after writing old state", jobId, snapshotId), e); nextStep = () -> setTaskToFailed( "Failed to flush after writing old state due to: " + e.getMessage(), - ActionListener.wrap(t -> shutdown(e), f -> shutdown(e)) + ActionListener.running(() -> shutdownWithFailure(e)) ); } else { logger.debug( @@ -295,7 +350,7 @@ private void requestStateWrite() { new SnapshotUpgradeTaskState(SnapshotUpgradeState.SAVING_NEW_STATE, task.getAllocationId(), ""), ActionListener.wrap(readingNewState -> { if (continueRunning.get() == false) { - shutdown(null); + shutdownWithFailure(null); return; } submitOperation(() -> { @@ -310,12 +365,12 @@ private void requestStateWrite() { // Execute callback in the UTILITY thread pool, as the current thread in the callback will be one in the // autodetectWorkerExecutor. Trying to run the callback in that executor will cause a dead lock as that // executor has a single processing queue. - (aVoid, e) -> threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> shutdown(e)) + (aVoid, e) -> threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> handlePersistingState(e)) ); logger.debug("[{}] [{}] asked for state to be persisted", jobId, snapshotId); }, f -> { logger.error(() -> format("[%s] [%s] failed to update snapshot upgrader task to started", jobId, snapshotId), f); - shutdown( + shutdownWithFailure( new ElasticsearchStatusException( "Failed to start snapshot upgrade [{}] for job [{}]", RestStatus.INTERNAL_SERVER_ERROR, @@ -378,17 +433,45 @@ private void checkResultsProcessorIsAlive() { } } - void shutdown(Exception e) { + private void handlePersistingState(@Nullable Exception exception) { + assert Thread.currentThread().getName().contains(UTILITY_THREAD_POOL_NAME); + + if (exception != null) { + shutdownWithFailure(exception); + } else { + stopProcess((aVoid, e) -> { + threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> { + autodetectWorkerExecutor.shutdownNow(); + // If there are two snapshot documents in the results indices with the same snapshot id, + // remove the old one. This can happen when the result index has been rolled over and + // the write alias is pointing to the new index. 
+ removeDuplicateModelSnapshotDoc(onFinish); + }); + + }); + } + } + + void shutdownWithFailure(Exception e) { + stopProcess((aVoid, ignored) -> { + threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> { + onFinish.accept(e); + autodetectWorkerExecutor.shutdownNow(); + }); + }); + } + + private void stopProcess(BiConsumer, Exception> runNext) { logger.debug("[{}] [{}] shutdown initiated", jobId, snapshotId); // No point in sending an action to the executor if the process has died if (process.isProcessAlive() == false) { logger.debug("[{}] [{}] process is dead, no need to shutdown", jobId, snapshotId); - onFinish.accept(e); - autodetectWorkerExecutor.shutdownNow(); stateStreamer.cancel(); + runNext.accept(null, null); return; } - Future future = autodetectWorkerExecutor.submit(() -> { + + submitOperation(() -> { try { logger.debug("[{}] [{}] shutdown is now occurring", jobId, snapshotId); if (process.isReady()) { @@ -401,24 +484,10 @@ void shutdown(Exception e) { processor.awaitCompletion(); } catch (IOException | TimeoutException exc) { logger.warn(() -> format("[%s] [%s] failed to shutdown process", jobId, snapshotId), exc); - } finally { - onFinish.accept(e); } logger.debug("[{}] [{}] connection for upgrade has been closed, process is shutdown", jobId, snapshotId); - }); - try { - future.get(); - autodetectWorkerExecutor.shutdownNow(); - } catch (InterruptedException interrupt) { - Thread.currentThread().interrupt(); - } catch (ExecutionException executionException) { - if (processor.isProcessKilled()) { - // In this case the original exception is spurious and highly misleading - throw ExceptionsHelper.conflictStatusException("close snapshot upgrade interrupted by kill request"); - } else { - throw FutureUtils.rethrowExecutionException(executionException); - } - } + return Void.TYPE; + }, runNext); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java index 213d3851b3b98..99b03c2725411 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java @@ -47,7 +47,8 @@ protected AbstractMlAuditor( clusterService.getNodeName(), messageFactory, clusterService, - indexNameExpressionResolver + indexNameExpressionResolver, + clusterService.threadPool().generic() ); clusterService.addListener(event -> { if (event.metadataChanged()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java index df97b39d2e397..594f72398bc9d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java @@ -52,7 +52,7 @@ public NativeStorageProvider(Environment environment, ByteSizeValue minDiskSpace */ public void cleanupLocalTmpStorageInCaseOfUncleanShutdown() { try { - for (Path p : environment.dataFiles()) { + for (Path p : environment.dataDirs()) { IOUtils.rm(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER)); } } catch (Exception e) { @@ -79,7 +79,7 @@ public Path tryGetLocalTmpStorage(String uniqueIdentifier, ByteSizeValue request } private Path tryAllocateStorage(String uniqueIdentifier, ByteSizeValue 
requestedSize) { - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { try { if (getUsableSpace(path) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes()) { Path tmpDirectory = path.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER).resolve(uniqueIdentifier); @@ -97,7 +97,7 @@ private Path tryAllocateStorage(String uniqueIdentifier, ByteSizeValue requested public boolean localTmpStorageHasEnoughSpace(Path path, ByteSizeValue requestedSize) { Path realPath = path.toAbsolutePath(); - for (Path p : environment.dataFiles()) { + for (Path p : environment.dataDirs()) { try { if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) { return getUsableSpace(p) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes(); @@ -122,7 +122,7 @@ public void cleanupLocalTmpStorage(String uniqueIdentifier) throws IOException { if (path != null) { // do not allow to breakout from the tmp storage provided Path realPath = path.toAbsolutePath(); - for (Path p : environment.dataFiles()) { + for (Path p : environment.dataDirs()) { if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) { IOUtils.rm(path); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java index 6b09e38b02ea6..9f8378a5b0087 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java @@ -94,7 +94,7 @@ public ProcessPipes( ) { this.namedPipeHelper = namedPipeHelper; this.jobId = jobId; - this.tempDir = env.tmpFile(); + this.tempDir = env.tmpDir(); this.timeout = timeout; // The way the pipe names are formed MUST match what is done in the controller main() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java index 6a5e328d7530a..84b00aca81f71 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java @@ -78,7 +78,7 @@ public String getDefaultPipeDirectoryPrefix(Environment env) { // All these factors need to align for everything to work in production. If any changes // are made here then CNamedPipeFactory::defaultPath() in the C++ code will probably // also need to be changed. 
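// [editor's note] The tmpFile()/dataFiles()/configFile() to tmpDir()/dataDirs()/configDir()
// substitutions running through this stretch are a mechanical rename: those Environment
// accessors have always returned directories, and the new names say so. The
// NativeStorageProvider hunk above also shows the loop those accessors feed: walk the
// data directories and take the first one with enough usable space. A runnable sketch
// of that selection loop, with illustrative subfolder names and sizes rather than the
// plugin's actual constants:

import java.io.IOException;
import java.nio.file.FileStore;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Optional;

final class TmpStorageSketch {
    // Return a tmp path under the first data directory that can hold
    // requestedBytes plus a safety margin, in the spirit of tryAllocateStorage.
    static Optional<Path> pickDataDir(List<Path> dataDirs, long requestedBytes, long minFreeBytes) {
        for (Path dir : dataDirs) {
            try {
                FileStore store = Files.getFileStore(dir);
                if (store.getUsableSpace() >= requestedBytes + minFreeBytes) {
                    return Optional.of(dir.resolve("ml-local-data").resolve("tmp"));
                }
            } catch (IOException e) {
                // an unreadable data path is skipped, not fatal
            }
        }
        return Optional.empty();
    }

    public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempDirectory("sketch");
        System.out.println(pickDataDir(List.of(tmp), 1024, 4096));
    }
}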
- return env.tmpFile().toString() + PathUtils.getDefaultFileSystem().getSeparator(); + return env.tmpDir().toString() + PathUtils.getDefaultFileSystem().getSeparator(); } /** diff --git a/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..ff8f2a8f73eac --- /dev/null +++ b/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,2 @@ +org.elasticsearch.ml: + - manage_threads diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java index f2a4add8444bb..22a6ff630f2bc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java @@ -123,7 +123,7 @@ public void testTmpStorageCleanupOnStart() throws IOException { private NativeStorageProvider createNativeStorageProvider(Map paths) throws IOException { Environment environment = mock(Environment.class); - when(environment.dataFiles()).thenReturn(paths.keySet().toArray(new Path[paths.size()])); + when(environment.dataDirs()).thenReturn(paths.keySet().toArray(new Path[paths.size()])); NativeStorageProvider storageProvider = spy(new NativeStorageProvider(environment, ByteSizeValue.ofGb(5))); doAnswer( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java index d09f0cbb59c1b..fc1b5abc04fbb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java @@ -67,7 +67,7 @@ public void testOpenForInputGivenPipeIsRegularFile() throws IOException { Environment env = TestEnvironment.newEnvironment( Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() ); - Path tempFile = Files.createTempFile(env.tmpFile(), "not a named pipe", null); + Path tempFile = Files.createTempFile(env.tmpDir(), "not a named pipe", null); IOException ioe = ESTestCase.expectThrows( IOException.class, @@ -83,7 +83,7 @@ public void testOpenForOutputGivenPipeIsRegularFile() throws IOException { Environment env = TestEnvironment.newEnvironment( Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() ); - Path tempFile = Files.createTempFile(env.tmpFile(), "not a named pipe", null); + Path tempFile = Files.createTempFile(env.tmpDir(), "not a named pipe", null); IOException ioe = ESTestCase.expectThrows( IOException.class, diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java index 0605177b2c2e5..6a76d6749489a 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @@ -77,7 +77,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { * writes monitoring data in ECS format as of 8.0. 
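 * [editor's note] The registry version bump just below (8_00_00_99 + 20 becomes
 * 8_00_00_99 + 21) is what triggers re-installation of these templates: template
 * registries in this codebase broadly install only when their bundled version
 * exceeds the one already in the cluster, along the lines of this illustrative,
 * hypothetical gate:
 *
 *     int installed = currentInstalledVersion();   // hypothetical helper
 *     if (installed < STACK_MONITORING_REGISTRY_VERSION) {
 *         putTemplates();                          // hypothetical helper
 *     }
 *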
These templates define the ECS schema as well as alias fields for the old monitoring * mappings that point to the corresponding ECS fields. */ - public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 20; + public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 21; private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version"; private static final String STACK_TEMPLATE_VERSION = "8"; private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version"; diff --git a/x-pack/plugin/redact/src/main/java/org/elasticsearch/xpack/redact/RedactProcessor.java b/x-pack/plugin/redact/src/main/java/org/elasticsearch/xpack/redact/RedactProcessor.java index 187126fb31e3e..c378b822ce0b0 100644 --- a/x-pack/plugin/redact/src/main/java/org/elasticsearch/xpack/redact/RedactProcessor.java +++ b/x-pack/plugin/redact/src/main/java/org/elasticsearch/xpack/redact/RedactProcessor.java @@ -294,9 +294,13 @@ public void extract(byte[] utf8Bytes, int offset, Region region) { */ String redactMatches(byte[] utf8Bytes, String redactStartToken, String redactEndToken) { var merged = mergeOverlappingReplacements(replacementPositions); - int longestPatternName = merged.stream().mapToInt(r -> r.patternName.getBytes(StandardCharsets.UTF_8).length).max().getAsInt(); + int maxPatternNameLength = merged.stream() + .mapToInt(r -> r.patternName.getBytes(StandardCharsets.UTF_8).length) + .max() + .getAsInt(); - int maxPossibleLength = longestPatternName * merged.size() + utf8Bytes.length; + int maxPossibleLength = (redactStartToken.length() + maxPatternNameLength + redactEndToken.length()) * merged.size() + + utf8Bytes.length; byte[] redact = new byte[maxPossibleLength]; int readOffset = 0; diff --git a/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java b/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java index 76bf99d170a8f..bf287735d9fc3 100644 --- a/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java +++ b/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java @@ -108,6 +108,18 @@ public void testMatchRedact() throws Exception { var redacted = RedactProcessor.matchRedact(input, List.of(grok)); assertEquals(" ", redacted); } + { + var config = new HashMap(); + config.put("field", "to_redact"); + config.put("patterns", List.of("%{NUMBER:NUMBER}")); + config.put("pattern_definitions", Map.of("NUMBER", "\\d{4}")); + var processor = new RedactProcessor.Factory(mockLicenseState(), MatcherWatchdog.noop()).create(null, "t", "d", config); + var grok = processor.getGroks().get(0); + + String input = "1001"; + var redacted = RedactProcessor.matchRedact(input, List.of(grok), "_prefix_", "_suffix_"); + assertEquals("_prefix_NUMBER_suffix_", redacted); + } } public void testMatchRedactMultipleGroks() throws Exception { diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index 25617028fe544..c207ea1fde1ea 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ 
b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -820,7 +820,7 @@ public void testSnapshotOfSearchableSnapshotIncludesNoDataButCanBeRestored() thr final String tmpRepositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createRepositoryNoVerify(tmpRepositoryName, "fs"); final Path repoPath = internalCluster().getCurrentMasterNodeInstance(Environment.class) - .resolveRepoFile( + .resolveRepoDir( clusterAdmin().prepareGetRepositories(TEST_REQUEST_TIMEOUT, tmpRepositoryName) .get() .repositories() diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java index c955457b78d60..3534988b25ce7 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java @@ -145,7 +145,7 @@ public void testConcurrentPrewarming() throws Exception { docsPerIndex.put(indexName, nbDocs); } - final Path repositoryPath = node().getEnvironment().resolveRepoFile(randomAlphaOfLength(10)); + final Path repositoryPath = node().getEnvironment().resolveRepoDir(randomAlphaOfLength(10)); final Settings.Builder repositorySettings = Settings.builder().put("location", repositoryPath); if (randomBoolean()) { repositorySettings.put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java index 53ea908ad8801..3d4d7f768c1b3 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java @@ -98,7 +98,7 @@ public void testRandomReads() throws IOException { .put("path.home", createTempDir()) .build(); final Environment environment = TestEnvironment.newEnvironment(settings); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { Files.createDirectories(path); } SnapshotId snapshotId = new SnapshotId("_name", "_uuid"); diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java index 3994fb50c7fc6..dbe0e0b0e9577 100644 --- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java @@ -163,7 +163,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce final boolean inEnrollmentMode = options.has(enrollmentTokenParam); // skipping security auto-configuration because node considered as restarting. 
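// [editor's note] The loop below is the restart heuristic for security
// auto-configuration: any non-empty data directory means the node is presumed to
// be restarting or rejoining a cluster, so auto-setup must not rewrite its config.
// A runnable sketch of the same check:

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

final class RestartCheckSketch {
    // True when every data directory is missing or empty, i.e. a genuine
    // first boot where auto-configuration is safe to run.
    static boolean looksLikeFirstBoot(List<Path> dataDirs) throws IOException {
        for (Path dir : dataDirs) {
            if (Files.isDirectory(dir)) {
                try (DirectoryStream<Path> contents = Files.newDirectoryStream(dir)) {
                    if (contents.iterator().hasNext()) {
                        return false; // existing data => treat as restart, skip auto-config
                    }
                }
            }
        }
        return true;
    }

    public static void main(String[] args) throws IOException {
        System.out.println(looksLikeFirstBoot(List.of(Files.createTempDirectory("data"))));
    }
}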
- for (Path dataPath : env.dataFiles()) { + for (Path dataPath : env.dataDirs()) { if (Files.isDirectory(dataPath) && false == isDirEmpty(dataPath)) { final String msg = "Skipping security auto configuration because it appears that the node is not starting up for the " + "first time. The node might already be part of a cluster and this auto setup utility is designed to configure " @@ -173,7 +173,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } // pre-flight checks for the files that are going to be changed - final Path ymlPath = env.configFile().resolve("elasticsearch.yml"); + final Path ymlPath = env.configDir().resolve("elasticsearch.yml"); // it is odd for the `elasticsearch.yml` file to be missing or not be a regular (the node won't start) // but auto configuration should not be concerned with fixing it (by creating the file) and let the node startup fail if (false == Files.exists(ymlPath) || false == Files.isRegularFile(ymlPath, LinkOption.NOFOLLOW_LINKS)) { @@ -194,7 +194,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce ); notifyOfFailure(inEnrollmentMode, terminal, Terminal.Verbosity.NORMAL, ExitCodes.NOOP, msg); } - final Path keystorePath = KeyStoreWrapper.keystorePath(env.configFile()); + final Path keystorePath = KeyStoreWrapper.keystorePath(env.configDir()); // Inform that auto-configuration will not run if keystore cannot be read. if (Files.exists(keystorePath) && (false == Files.isRegularFile(keystorePath, LinkOption.NOFOLLOW_LINKS) || false == Files.isReadable(keystorePath))) { @@ -218,7 +218,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce checkExistingConfiguration(env.settings(), inEnrollmentMode, terminal); final ZonedDateTime autoConfigDate = ZonedDateTime.now(ZoneOffset.UTC); - final Path tempGeneratedTlsCertsDir = env.configFile() + final Path tempGeneratedTlsCertsDir = env.configDir() .resolve(String.format(Locale.ROOT, TLS_GENERATED_CERTS_DIR_NAME + ".%d.tmp", autoConfigDate.toInstant().getEpochSecond())); try { // it is useful to pre-create the sub-config dir in order to check that the config dir is writable and that file owners match @@ -247,12 +247,12 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce // If the node process works OK given the owner of the config dir, it should also tolerate the auto-created config dir, // provided that they both have the same owner and permissions. final UserPrincipal newFileOwner = Files.getOwner(tempGeneratedTlsCertsDir, LinkOption.NOFOLLOW_LINKS); - if (false == newFileOwner.equals(Files.getOwner(env.configFile(), LinkOption.NOFOLLOW_LINKS))) { + if (false == newFileOwner.equals(Files.getOwner(env.configDir(), LinkOption.NOFOLLOW_LINKS))) { // the following is only printed once, if the node starts successfully UserException userException = new UserException( ExitCodes.CONFIG, "Aborting auto configuration because of config dir ownership mismatch. 
Config dir is owned by " - + Files.getOwner(env.configFile(), LinkOption.NOFOLLOW_LINKS).getName() + + Files.getOwner(env.configDir(), LinkOption.NOFOLLOW_LINKS).getName() + " but auto-configuration directory would be owned by " + newFileOwner.getName() ); @@ -496,7 +496,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } // save the existing keystore before replacing - final Path keystoreBackupPath = env.configFile() + final Path keystoreBackupPath = env.configDir() .resolve( String.format(Locale.ROOT, KeyStoreWrapper.KEYSTORE_FILENAME + ".%d.orig", autoConfigDate.toInstant().getEpochSecond()) ); @@ -514,7 +514,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } final SetOnce nodeKeystorePassword = new SetOnce<>(); - try (KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configFile(), () -> { + try (KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configDir(), () -> { nodeKeystorePassword.set(new SecureString(terminal.readSecret(""))); return nodeKeystorePassword.get().clone(); })) { @@ -581,7 +581,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce nodeKeystore.setString("xpack.security.http.ssl.keystore.secure_password", httpKeystorePassword.getChars()); } // finally overwrites the node keystore (if the keystores have been successfully written) - nodeKeystore.save(env.configFile(), nodeKeystorePassword.get() == null ? new char[0] : nodeKeystorePassword.get().getChars()); + nodeKeystore.save(env.configDir(), nodeKeystorePassword.get() == null ? new char[0] : nodeKeystorePassword.get().getChars()); } catch (Throwable t) { // restore keystore to revert possible keystore bootstrap try { @@ -614,10 +614,10 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce try { // all certs and keys have been generated in the temp certs dir, therefore: // 1. backup (move) any previously existing tls certs dir (this backup is NOT removed when auto-conf finishes) - if (Files.exists(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { + if (Files.exists(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { moveDirectory( - env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME), - env.configFile() + env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME), + env.configDir() .resolve( String.format( Locale.ROOT, @@ -628,7 +628,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce ); } // 2. 
move the newly populated temp certs dir to its permanent static dir name
-            moveDirectory(tempGeneratedTlsCertsDir, env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME));
+            moveDirectory(tempGeneratedTlsCertsDir, env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME));
         } catch (Throwable t) {
             // restore keystore to revert possible keystore bootstrap
             try {
@@ -649,7 +649,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce
                 // revert any previously existing TLS certs
                 try {
                     if (Files.exists(
-                        env.configFile()
+                        env.configDir()
                             .resolve(
                                 String.format(
                                     Locale.ROOT,
@@ -659,7 +659,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce
                             )
                     )) {
                         moveDirectory(
-                            env.configFile()
+                            env.configDir()
                                 .resolve(
                                     String.format(
                                         Locale.ROOT,
@@ -667,7 +667,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce
                                         autoConfigDate.toInstant().getEpochSecond()
                                     )
                                 ),
-                            env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)
+                            env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)
                         );
                     }
                 } catch (Exception ex) {
@@ -686,7 +686,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce
         final Environment localFinalEnv = env;
         final DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss", Locale.ROOT);
         List existingConfigLines = Files.readAllLines(ymlPath, StandardCharsets.UTF_8);
-        fullyWriteFile(env.configFile(), "elasticsearch.yml", true, stream -> {
+        fullyWriteFile(env.configDir(), "elasticsearch.yml", true, stream -> {
             try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8))) {
                 // start with the existing config lines
                 for (String line : existingConfigLines) {
@@ -827,16 +827,16 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce
             }
             try {
                 // this removes a statically named directory, so it is potentially dangerous
-                deleteDirectory(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME));
+                deleteDirectory(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME));
             } catch (Exception ex) {
                 t.addSuppressed(ex);
             }
-            Path backupCertsDir = env.configFile()
+            Path backupCertsDir = env.configDir()
                 .resolve(
                     String.format(Locale.ROOT, TLS_GENERATED_CERTS_DIR_NAME + ".%d.orig", autoConfigDate.toInstant().getEpochSecond())
                 );
             if (Files.exists(backupCertsDir)) {
-                moveDirectory(backupCertsDir, env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME));
+                moveDirectory(backupCertsDir, env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME));
             }
             throw t;
         }
@@ -887,14 +887,14 @@ private Environment possiblyReconfigureNode(Environment env, Terminal terminal,
         // with --enrolment-token token, in the first place.
         final List existingConfigLines;
         try {
-            existingConfigLines = Files.readAllLines(env.configFile().resolve("elasticsearch.yml"), StandardCharsets.UTF_8);
+            existingConfigLines = Files.readAllLines(env.configDir().resolve("elasticsearch.yml"), StandardCharsets.UTF_8);
         } catch (IOException e) {
            // This shouldn't happen, we would have failed earlier but we need to catch the exception
            throw new UserException(ExitCodes.IO_ERROR, "Aborting enrolling to cluster. Unable to read elasticsearch.yml.", e);
         }
         final List existingConfigWithoutAutoconfiguration = removePreviousAutoconfiguration(existingConfigLines);
         if (false == existingConfigLines.equals(existingConfigWithoutAutoconfiguration)
-            && Files.exists(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME))) {
+            && Files.exists(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME))) {
             terminal.println("");
             terminal.println("This node will be reconfigured to join an existing cluster, using the enrollment token that you provided.");
             terminal.println("This operation will overwrite the existing configuration. Specifically: ");
@@ -907,7 +907,7 @@ private Environment possiblyReconfigureNode(Environment env, Terminal terminal,
             }
             removeAutoConfigurationFromKeystore(env, terminal);
             try {
-                fullyWriteFile(env.configFile(), "elasticsearch.yml", true, stream -> {
+                fullyWriteFile(env.configDir(), "elasticsearch.yml", true, stream -> {
                     try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8))) {
                         for (String l : existingConfigWithoutAutoconfiguration) {
                             bw.write(l);
@@ -915,7 +915,7 @@ private Environment possiblyReconfigureNode(Environment env, Terminal terminal,
                         }
                     }
                 });
-                deleteDirectory(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME));
+                deleteDirectory(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME));
             } catch (Throwable t) {
                 throw new UserException(
                     ExitCodes.IO_ERROR,
@@ -1262,9 +1262,9 @@ static List removePreviousAutoconfiguration(List existingConfigL
     }

     private static void removeAutoConfigurationFromKeystore(Environment env, Terminal terminal) throws UserException {
-        if (Files.exists(KeyStoreWrapper.keystorePath(env.configFile()))) {
+        if (Files.exists(KeyStoreWrapper.keystorePath(env.configDir()))) {
             try (
-                KeyStoreWrapper existingKeystore = KeyStoreWrapper.load(env.configFile());
+                KeyStoreWrapper existingKeystore = KeyStoreWrapper.load(env.configDir());
                 SecureString keystorePassword = existingKeystore.hasPassword()
                     ? new SecureString(terminal.readSecret("Enter password for the elasticsearch keystore: "))
                     : new SecureString(new char[0]);
@@ -1288,7 +1288,7 @@ private static void removeAutoConfigurationFromKeystore(Environment env, Termina
                 }
                 existingKeystore.remove(setting);
             }
-            existingKeystore.save(env.configFile(), keystorePassword.getChars());
+            existingKeystore.save(env.configDir(), keystorePassword.getChars());
         } catch (Exception e) {
             terminal.errorPrintln(Terminal.Verbosity.VERBOSE, "");
             terminal.errorPrintln(Terminal.Verbosity.VERBOSE, ExceptionsHelper.stackTrace(e));
diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java
index b67bb9898991f..0e96911405b30 100644
--- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java
+++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java
@@ -508,7 +508,7 @@ private static Map buildSubstitutions(Environment env, Map runAutoConfigAndReturnCertificat
         SecureString httpKeystorePassword = nodeKeystore.getString("xpack.security.http.ssl.keystore.secure_password");
         SecureString transportKeystorePassword = nodeKeystore.getString("xpack.security.transport.ssl.keystore.secure_password");

-        final Settings newSettings = Settings.builder().loadFromPath(env.configFile().resolve("elasticsearch.yml")).build();
+        final Settings newSettings = Settings.builder().loadFromPath(env.configDir().resolve("elasticsearch.yml")).build();
         final String httpKeystorePath = newSettings.get("xpack.security.http.ssl.keystore.path");
         final String transportKeystorePath = newSettings.get("xpack.security.transport.ssl.keystore.path");
diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
index 5d3b13b9d451a..6ea522a4276a9 100644
--- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
+++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.security.operator;

 import org.elasticsearch.cluster.metadata.DataStream;
-import org.elasticsearch.common.util.FeatureFlag;

 import java.util.Objects;
 import java.util.Set;
@@ -327,6 +326,8 @@ public class Constants {
         "cluster:admin/xpack/watcher/settings/update",
         "cluster:admin/xpack/watcher/watch/put",
         "cluster:internal/remote_cluster/nodes",
+        "cluster:internal/xpack/inference",
+        "cluster:internal/xpack/inference/unified",
         "cluster:internal/xpack/ml/coordinatedinference",
         "cluster:internal/xpack/ml/datafeed/isolate",
         "cluster:internal/xpack/ml/datafeed/running_state",
@@ -386,9 +387,8 @@ public class Constants {
         "cluster:monitor/xpack/enrich/stats",
         "cluster:monitor/xpack/eql/stats/dist",
         "cluster:monitor/xpack/esql/stats/dist",
-        "cluster:monitor/xpack/inference",
+        "cluster:monitor/xpack/inference/post",
         "cluster:monitor/xpack/inference/get",
-        "cluster:monitor/xpack/inference/unified",
         "cluster:monitor/xpack/inference/diagnostics/get",
         "cluster:monitor/xpack/inference/services/get",
         "cluster:monitor/xpack/info",
@@ -637,11 +637,12 @@ public class Constants {
         "internal:gateway/local/started_shards",
         "internal:admin/indices/prevalidate_shard_path",
         "internal:index/metadata/migration_version/update",
-        new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/migration/reindex_status" : null,
-        new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/data_stream/index/reindex" : null,
-        new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/data_stream/reindex" : null,
-        new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/data_stream/reindex_cancel" : null,
-        new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/index/create_from_source" : null,
+        "indices:admin/migration/reindex_status",
+        "indices:admin/data_stream/index/reindex",
+        "indices:admin/data_stream/reindex",
+        "indices:admin/data_stream/reindex_cancel",
+        "indices:admin/index/create_from_source",
+        "indices:admin/index/copy_lifecycle_index_metadata",
         "internal:admin/repository/verify",
         "internal:admin/repository/verify/coordinate"
     ).filter(Objects::nonNull).collect(Collectors.toUnmodifiableSet());
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java
index 07bdd83c9a144..061ce6f3fc654 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java
@@ -7,9 +7,16 @@
 package org.elasticsearch.test;

 import org.apache.http.HttpHost;
+import org.elasticsearch.ResourceAlreadyExistsException;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
+import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
+import org.elasticsearch.action.support.ActiveShardCount;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestClientBuilder;
@@ -27,6 +34,7 @@
 import org.elasticsearch.license.LicenseSettings;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.xpack.core.security.authc.support.Hasher;
+import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
 import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices;
 import org.elasticsearch.xpack.security.LocalStateSecurity;
 import org.elasticsearch.xpack.security.support.SecurityMigrations;
@@ -45,9 +53,12 @@
 import java.util.stream.Collectors;

 import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
 import static org.elasticsearch.xpack.security.support.SecurityIndexManager.getMigrationVersionFromIndexMetadata;
+import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS;
 import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.is;

 /**
  * A test that starts a single node with security enabled. This test case allows for customization
@@ -82,6 +93,13 @@ public static void destroyDefaultSettings() {
         tearDownRestClient();
     }

+    @Override
+    public void setUp() throws Exception {
+        super.setUp();
+        deleteSecurityIndexIfExists();
+        createSecurityIndexWithWaitForActiveShards();
+    }
+
     @Override
     public void tearDown() throws Exception {
         awaitSecurityMigration();
@@ -100,7 +118,7 @@ private boolean isMigrationComplete(ClusterState state) {
         return getMigrationVersionFromIndexMetadata(indexMetadata) == SecurityMigrations.MIGRATIONS_BY_VERSION.lastKey();
     }

-    private void awaitSecurityMigration() {
+    protected void awaitSecurityMigration() {
         final var latch = new CountDownLatch(1);
         ClusterService clusterService = getInstanceFromNode(ClusterService.class);
         clusterService.addListener((event) -> {
@@ -362,4 +380,40 @@ private static RestClient createRestClient(
         }
         return builder.build();
     }
+
+    protected void deleteSecurityIndexIfExists() {
+        // delete the security index, if it exists
+        GetIndexRequest getIndexRequest = new GetIndexRequest(TEST_REQUEST_TIMEOUT);
+        getIndexRequest.indices(SECURITY_MAIN_ALIAS);
+        getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen());
+        GetIndexResponse getIndexResponse = client().admin().indices().getIndex(getIndexRequest).actionGet();
+        if (getIndexResponse.getIndices().length > 0) {
+            assertThat(getIndexResponse.getIndices().length, is(1));
+            assertThat(getIndexResponse.getIndices()[0], is(TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7));
+
+            // Security migration needs to finish before deleting the index
+            awaitSecurityMigration();
+            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(getIndexResponse.getIndices());
+            assertAcked(client().admin().indices().delete(deleteIndexRequest).actionGet());
+        }
+    }
+
+    protected void createSecurityIndexWithWaitForActiveShards() {
+        final Client client = client().filterWithHeader(
+            Collections.singletonMap(
+                "Authorization",
+                UsernamePasswordToken.basicAuthHeaderValue(
+                    SecuritySettingsSource.ES_TEST_ROOT_USER,
+                    SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING
+                )
+            )
+        );
+        CreateIndexRequest createIndexRequest = new CreateIndexRequest(SECURITY_MAIN_ALIAS).waitForActiveShards(ActiveShardCount.ALL)
+            .masterNodeTimeout(TEST_REQUEST_TIMEOUT);
+        try {
+            client.admin().indices().create(createIndexRequest).actionGet();
+        } catch (ResourceAlreadyExistsException e) {
+            logger.info("Security index already exists, ignoring.", e);
+        }
+    }
 }
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/UnregisteredSettingsIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/UnregisteredSettingsIntegTests.java
index c714aa352fd41..5a76b81a9f3fc 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/UnregisteredSettingsIntegTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/UnregisteredSettingsIntegTests.java
@@ -11,6 +11,8 @@
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.SecurityIntegTestCase;

+import java.io.IOException;
+
 import static org.elasticsearch.test.NodeRoles.dataOnlyNode;
 import static org.elasticsearch.test.NodeRoles.masterNode;
 import static org.hamcrest.Matchers.containsString;
@@ -18,15 +20,19 @@
 @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false)
 public class UnregisteredSettingsIntegTests extends SecurityIntegTestCase {

-    public void testIncludeReservedRolesSettingNotRegistered() {
+    public void testIncludeReservedRolesSettingNotRegistered() throws IOException {
         internalCluster().setBootstrapMasterNodeIndex(0);

         final Settings.Builder builder = Settings.builder()
             .put(randomBoolean() ? masterNode() : dataOnlyNode())
             .putList("xpack.security.reserved_roles.include", "superuser");

-        final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> internalCluster().startNode(builder));
-        assertThat(e.getMessage(), containsString("unknown setting [xpack.security.reserved_roles.include]"));
+        try {
+            final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> internalCluster().startNode(builder));
+            assertThat(e.getMessage(), containsString("unknown setting [xpack.security.reserved_roles.include]"));
+        } finally {
+            internalCluster().close();
+        }
     }

     public void testSamlExcludeRolesSettingNotRegistered() throws Exception {
@@ -36,7 +42,11 @@ public void testSamlExcludeRolesSettingNotRegistered() throws Exception {
             .put(randomBoolean() ? masterNode() : dataOnlyNode())
             .putList("xpack.security.authc.realms.saml.saml1.exclude_roles", "superuser");

-        final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> internalCluster().startNode(builder));
-        assertThat(e.getMessage(), containsString("unknown setting [xpack.security.authc.realms.saml.saml1.exclude_roles]"));
+        try {
+            final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> internalCluster().startNode(builder));
+            assertThat(e.getMessage(), containsString("unknown setting [xpack.security.authc.realms.saml.saml1.exclude_roles]"));
+        } finally {
+            internalCluster().close();
+        }
     }
 }
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java
index 1d7e1da66a91f..8b8e6fa6b8ea8 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java
@@ -8,17 +8,10 @@
 package org.elasticsearch.xpack.security.authc.esnative;

 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
-import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
-import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
-import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.ResponseException;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.Metadata;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.MockSecureSettings;
 import org.elasticsearch.common.settings.SecureString;
@@ -29,14 +22,9 @@
 import org.elasticsearch.xpack.core.security.action.user.PutUserRequest;
 import org.elasticsearch.xpack.core.security.authc.support.Hasher;
 import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
-import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices;
 import org.junit.BeforeClass;

-import java.util.concurrent.CountDownLatch;
-
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_KEY;
-import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS;
 import static org.hamcrest.Matchers.is;

 public class ReservedRealmElasticAutoconfigIntegTests extends SecuritySingleNodeTestCase {
@@ -70,46 +58,10 @@ protected SecureString getBootstrapPassword() {
         return null; // no bootstrap password for this test
     }

-    private boolean isMigrationComplete(ClusterState state) {
-        IndexMetadata indexMetadata = state.metadata().getIndices().get(TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7);
-        return indexMetadata != null && indexMetadata.getCustomData(MIGRATION_VERSION_CUSTOM_KEY) != null;
-    }
-
-    private void awaitSecurityMigrationRanOnce() {
-        final var latch = new CountDownLatch(1);
-        ClusterService clusterService = getInstanceFromNode(ClusterService.class);
-        clusterService.addListener((event) -> {
-            if (isMigrationComplete(event.state())) {
-                latch.countDown();
-            }
-        });
-        if (isMigrationComplete(clusterService.state())) {
-            latch.countDown();
-        }
-        safeAwait(latch);
-    }
-
-    private void deleteSecurityIndex() {
-        // delete the security index, if it exist
-        GetIndexRequest getIndexRequest = new GetIndexRequest(TEST_REQUEST_TIMEOUT);
-        getIndexRequest.indices(SECURITY_MAIN_ALIAS);
-        getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen());
-        GetIndexResponse getIndexResponse = client().admin().indices().getIndex(getIndexRequest).actionGet();
-        if (getIndexResponse.getIndices().length > 0) {
-            assertThat(getIndexResponse.getIndices().length, is(1));
-            assertThat(getIndexResponse.getIndices()[0], is(TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7));
-
-            // Security migration needs to finish before deleting the index
-            awaitSecurityMigrationRanOnce();
-            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(getIndexResponse.getIndices());
-            assertAcked(client().admin().indices().delete(deleteIndexRequest).actionGet());
-        }
-    }
-
     public void testAutoconfigFailedPasswordPromotion() throws Exception {
         try {
             // .security index is created automatically on node startup so delete the security index first
-            deleteSecurityIndex();
+            deleteSecurityIndexIfExists();
             // prevents the .security index from being created automatically (after elastic user authentication)
             ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(
                 TEST_REQUEST_TIMEOUT,
@@ -176,7 +128,7 @@ public void testAutoconfigSucceedsAfterPromotionFailure() throws Exception {
         putUserRequest.roles(Strings.EMPTY_ARRAY);
         client().execute(PutUserAction.INSTANCE, putUserRequest).get();
         // Security migration needs to finish before making the cluster read only
-        awaitSecurityMigrationRanOnce();
+        awaitSecurityMigration();

         // but then make the cluster read-only
         ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(
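The duplicated test helpers removed above were hoisted into the shared base class, and both relied on the same wait idiom. A minimal standalone sketch of that idiom, under assumed helper names (this is not the PR's code, which lives in `awaitSecurityMigration`):

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.service.ClusterService;

// Sketch of the latch-plus-listener pattern: count down once a cluster-state
// predicate holds, and also test the current state so a condition that is
// already satisfied before the listener registers cannot hang the test.
final class ClusterStateWaits {
    static void await(ClusterService clusterService, Predicate<ClusterState> done) throws InterruptedException {
        final CountDownLatch latch = new CountDownLatch(1);
        clusterService.addListener(event -> {
            if (done.test(event.state())) {
                latch.countDown();
            }
        });
        if (done.test(clusterService.state())) {
            latch.countDown(); // condition may already hold; don't wait for a new event
        }
        if (latch.await(30, TimeUnit.SECONDS) == false) {
            throw new AssertionError("cluster state condition was not met within 30s");
        }
    }
}
```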
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java
index 65f6f4f1a5b0a..4aaa2c4ee34e2 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java
@@ -99,13 +99,28 @@
 public class ProfileIntegTests extends AbstractProfileIntegTestCase {

+    protected static final String ANONYMOUS_ROLE = "anonymous_role";
+
+    @Override
+    protected String configRoles() {
+        return super.configRoles()
+            + "\n"
+            + ANONYMOUS_ROLE
+            + ":\n"
+            + "  cluster:\n"
+            + "    - 'manage_own_api_key'\n"
+            + "    - 'manage_token'\n"
+            + "    - 'manage_service_account'\n"
+            + "    - 'monitor'\n";
+    }
+
     @Override
     protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
         final Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings));
         // This setting tests that the setting is registered
         builder.put("xpack.security.authc.domains.my_domain.realms", "file");
         // enable anonymous
-        builder.putList(AnonymousUser.ROLES_SETTING.getKey(), RAC_ROLE);
+        builder.putList(AnonymousUser.ROLES_SETTING.getKey(), ANONYMOUS_ROLE);
         return builder.build();
     }
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java
index 44f7a6d47e361..b1fda5f6c4e6e 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java
@@ -55,7 +55,7 @@ public Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
         Environment tmpEnv = TestEnvironment.newEnvironment(settings);

         // For each node, copy the original testnode.jks into each node's config directory.
-        Path nodeKeystorePath = tmpEnv.configFile().resolve("testnode.jks");
+        Path nodeKeystorePath = tmpEnv.configDir().resolve("testnode.jks");
         try {
             Path goodKeystorePath = getDataPath(goodKeyStoreFilePath);
             Files.copy(goodKeystorePath, nodeKeystorePath, StandardCopyOption.REPLACE_EXISTING);
@@ -93,7 +93,7 @@ public void testReloadDuringStartup() throws Exception {
         final Environment env = internalCluster().getInstance(Environment.class, nodeName);
         final CountDownLatch beforeKeystoreFix = new CountDownLatch(2); // SYNC: Cert update & ES restart
         final CountDownLatch afterKeystoreFix = new CountDownLatch(1); // SYNC: Verify cluster after cert update
-        final Path nodeKeystorePath = env.configFile().resolve("testnode.jks"); // all nodes have good keystore
+        final Path nodeKeystorePath = env.configDir().resolve("testnode.jks"); // all nodes have good keystore
         final Path badKeystorePath = getDataPath(badKeyStoreFilePath); // stop a node, and apply this bad keystore
         final Path goodKeystorePath = getDataPath(goodKeyStoreFilePath); // start the node, and apply this good keystore
         assertTrue(Files.exists(nodeKeystorePath));
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 804610f8dd341..794dd443dfc42 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -726,9 +726,9 @@ protected List getReloadableSecurityComponents() {
      * ES has already checked the file is actually in the config directory
      */
     public static Path resolveSecuredConfigFile(Environment env, String file) {
-        Path config = env.configFile().resolve(file);
+        Path config = env.configDir().resolve(file);
         if (doPrivileged((PrivilegedAction) () -> Files.exists(config)) == false) {
-            Path legacyConfig = env.configFile().resolve("x-pack").resolve(file);
+            Path legacyConfig = env.configDir().resolve("x-pack").resolve(file);
             if (doPrivileged((PrivilegedAction) () -> Files.exists(legacyConfig))) {
                 DeprecationLogger.getLogger(XPackPlugin.class)
                     .warn(
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java
index 0718742d362cb..f04c670eb1ea7 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java
@@ -43,7 +43,7 @@ class ResetPasswordTool extends BaseRunAsSuperuserCommand {
     private final OptionSpec usernameOption;

     ResetPasswordTool() {
-        this(CommandLineHttpClient::new, environment -> KeyStoreWrapper.load(environment.configFile()));
+        this(CommandLineHttpClient::new, environment -> KeyStoreWrapper.load(environment.configDir()));
     }

     protected ResetPasswordTool(
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java
index 91c75c076881e..3c7fa029d4514 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java
@@ -95,11 +95,11 @@ class SetupPasswordTool extends MultiCommand {

     SetupPasswordTool() {
         this(environment -> new CommandLineHttpClient(environment), environment -> {
-            KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(environment.configFile());
+            KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(environment.configDir());
             if (keyStoreWrapper == null) {
                 throw new UserException(
                     ExitCodes.CONFIG,
-                    "Elasticsearch keystore file is missing [" + KeyStoreWrapper.keystorePath(environment.configFile()) + "]"
+                    "Elasticsearch keystore file is missing [" + KeyStoreWrapper.keystorePath(environment.configDir()) + "]"
                 );
             }
             return keyStoreWrapper;
@@ -142,7 +142,7 @@ class AutoSetup extends SetupCommand {

         @Override
         public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception {
-            terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configFile());
+            terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configDir());
             setupOptions(terminal, options, env);
             checkElasticKeystorePasswordValid(terminal, env);
             checkClusterHealth(terminal);
@@ -198,7 +198,7 @@ class InteractiveSetup extends SetupCommand {

         @Override
         public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception {
-            terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configFile());
+            terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configDir());
             setupOptions(terminal, options, env);
             checkElasticKeystorePasswordValid(terminal, env);
             checkClusterHealth(terminal);
@@ -298,7 +298,7 @@ void setupOptions(Terminal terminal, OptionSet options, Environment env) throws
             Settings settings = settingsBuilder.build();
             elasticUserPassword = ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.get(settings);

-            final Environment newEnv = new Environment(settings, env.configFile());
+            final Environment newEnv = new Environment(settings, env.configDir());
             Environment.assertEquivalent(newEnv, env);

             client = clientFunction.apply(newEnv);
@@ -354,7 +354,7 @@ void checkElasticKeystorePasswordValid(Terminal terminal, Environment env) throw
                 terminal.errorPrintln("Possible causes include:");
                 terminal.errorPrintln(" * The password for the '" + elasticUser + "' user has already been changed on this cluster");
                 terminal.errorPrintln(" * Your elasticsearch node is running against a different keystore");
-                terminal.errorPrintln("   This tool used the keystore at " + KeyStoreWrapper.keystorePath(env.configFile()));
+                terminal.errorPrintln("   This tool used the keystore at " + KeyStoreWrapper.keystorePath(env.configDir()));
                 terminal.errorPrintln("");
                 terminal.errorPrintln(
                     "You can use the `elasticsearch-reset-password` CLI tool to reset the password of the '" + elasticUser + "' user"
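The `resolveSecuredConfigFile` hunk in `Security.java` above preserves a fallback to the legacy `x-pack` subdirectory while switching to `configDir()`. A self-contained sketch of that fallback shape, with names of my own choosing rather than the PR's:

```java
import java.nio.file.Files;
import java.nio.file.Path;

// Illustrative sketch (not the PR's code): prefer <config>/<file>, fall back
// to the legacy <config>/x-pack/<file> location, and warn when the deprecated
// location is still being used.
final class ConfigFileResolver {
    static Path resolve(Path configDir, String file) {
        Path preferred = configDir.resolve(file);
        if (Files.exists(preferred)) {
            return preferred;
        }
        Path legacy = configDir.resolve("x-pack").resolve(file);
        if (Files.exists(legacy)) {
            System.err.println("[deprecation] found [" + file + "] in the legacy x-pack directory; move it to " + preferred);
            return legacy;
        }
        return preferred; // missing-file handling is left to the caller
    }
}
```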
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java
index 8b3f8ec09675a..0fafd6b63c03f 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java
@@ -338,7 +338,7 @@ public void cancelled() {
     }

     public static Path resolvePath(final Environment environment, final String jwkSetPath) {
-        final Path directoryPath = environment.configFile();
+        final Path directoryPath = environment.configDir();
         return directoryPath.resolve(jwkSetPath);
     }

diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
index d5ef90f7f1664..65e72568cacf8 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
@@ -101,7 +101,7 @@ public KerberosRealm(final RealmConfig config, final UserRoleMapper userRoleMapp
         }
         this.kerberosTicketValidator = kerberosTicketValidator;
         this.threadPool = threadPool;
-        this.keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH));
+        this.keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH));

         validateKeytab(this.keytabPath);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java
index aa1946f445670..65d2492e3b6b8 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java
@@ -365,7 +365,7 @@ private void validateAccessToken(AccessToken accessToken, JWT idToken) {
      * @throws IOException if the file cannot be read
      */
     private JWKSet readJwkSetFromFile(String jwkSetPath) throws IOException, ParseException {
-        final Path path = realmConfig.env().configFile().resolve(jwkSetPath);
+        final Path path = realmConfig.env().configDir().resolve(jwkSetPath);
         // avoid using JWKSet.loadFile() as it does not close FileInputStream internally
         try {
             String jwkSet = AccessController.doPrivileged(
@@ -814,7 +814,7 @@ IDTokenValidator createIdTokenValidator(boolean addFileWatcherIfRequired) {
     }

     private void setMetadataFileWatcher(String jwkSetPath) throws IOException {
-        final Path path = realmConfig.env().configFile().resolve(jwkSetPath);
+        final Path path = realmConfig.env().configDir().resolve(jwkSetPath);
         FileWatcher watcher = new PrivilegedFileWatcher(path);
         watcher.addListener(new FileListener(LOGGER, () -> this.idTokenValidator.set(createIdTokenValidator(false))));
         watcherService.add(watcher, ResourceWatcherService.Frequency.MEDIUM);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java
index 106b550a1e23c..1d69050d7ab25 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java
@@ -93,7 +93,7 @@ class SamlMetadataCommand extends KeyStoreAwareCommand {

     SamlMetadataCommand() {
         this((environment) -> {
-            KeyStoreWrapper ksWrapper = KeyStoreWrapper.load(environment.configFile());
+            KeyStoreWrapper ksWrapper = KeyStoreWrapper.load(environment.configDir());
             return ksWrapper;
         });
     }
@@ -458,7 +458,7 @@ private RealmConfig findRealm(Terminal terminal, OptionSet options, Environment
         final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier(SamlRealmSettings.TYPE, name);
         final Settings realmSettings = realms.get(identifier);
         if (realmSettings == null) {
-            throw new UserException(ExitCodes.CONFIG, "No such realm '" + name + "' defined in " + env.configFile());
+            throw new UserException(ExitCodes.CONFIG, "No such realm '" + name + "' defined in " + env.configDir());
         }
         if (isSamlRealm(identifier)) {
             return buildRealm(identifier, env, settings);
@@ -471,10 +471,10 @@ private RealmConfig findRealm(Terminal terminal, OptionSet options, Environment
             .filter(entry -> isSamlRealm(entry.getKey()))
             .toList();
         if (saml.isEmpty()) {
-            throw new UserException(ExitCodes.CONFIG, "There is no SAML realm configured in " + env.configFile());
+            throw new UserException(ExitCodes.CONFIG, "There is no SAML realm configured in " + env.configDir());
         }
         if (saml.size() > 1) {
-            terminal.errorPrintln("Using configuration in " + env.configFile());
+            terminal.errorPrintln("Using configuration in " + env.configDir());
             terminal.errorPrintln(
                 "Found multiple SAML realms: "
                     + saml.stream().map(Map.Entry::getKey).map(Object::toString).collect(Collectors.joining(", "))
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java
index 9adfd15e23207..d82be264b2248 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java
@@ -792,7 +792,7 @@ private static Tuple
-                KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configFile(), () -> new SecureString(new char[0]))
+                KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configDir(), () -> new SecureString(new char[0]))
             ) {
                 nodeKeystore.setString(AUTOCONFIG_ELASTIC_PASSWORD_HASH.getKey(), hasher.hash(elasticPassword));
-                nodeKeystore.save(env.configFile(), new char[0]);
+                nodeKeystore.save(env.configDir(), new char[0]);
                 terminal.print(Terminal.Verbosity.NORMAL, elasticPassword.toString());
             } catch (Exception e) {
                 throw new UserException(ExitCodes.CANT_CREATE, "Failed to generate a password for the elastic user", e);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java
index 919f4531734fb..8f5fc96761cc9 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java
@@ -36,7 +36,7 @@ class CreateEnrollmentTokenTool extends BaseRunAsSuperuserCommand {
     CreateEnrollmentTokenTool() {
         this(
             environment -> new CommandLineHttpClient(environment),
-            environment -> KeyStoreWrapper.load(environment.configFile()),
+            environment -> KeyStoreWrapper.load(environment.configDir()),
             environment -> new ExternalEnrollmentTokenGenerator(environment)
         );
     }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java
index 2f45bafe493bb..542bbbe086cc5 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java
@@ -93,7 +93,7 @@ public final void execute(Terminal terminal, OptionSet options, Environment env,
                 settingsBuilder.setSecureSettings(keyStoreWrapper);
             }
             settings = settingsBuilder.build();
-            newEnv = new Environment(settings, env.configFile());
+            newEnv = new Environment(settings, env.configDir());
         } else {
             newEnv = env;
             settings = env.settings();
diff --git a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml
index 636627240bf4c..90367da4cbceb 100644
--- a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml
+++ b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml
@@ -1,12 +1,30 @@
 org.elasticsearch.security:
   - set_https_connection_properties # for CommandLineHttpClient
 io.netty.transport:
+  - manage_threads
   - inbound_network
   - outbound_network
 io.netty.common:
+  - manage_threads
   - inbound_network
   - outbound_network
+  - files:
+      - path: "/etc/os-release"
+        mode: "read"
+      - path: "/usr/lib/os-release"
+        mode: "read"
 org.opensaml.xmlsec.impl:
   - write_system_properties:
       properties:
         - org.apache.xml.security.ignoreLineBreaks
+org.opensaml.saml.impl:
+  - files:
+      - relative_path: idp-docs-metadata.xml
+        relative_to: config
+        mode: read
+      - relative_path: idp-metadata.xml
+        relative_to: config
+        mode: read
+      - relative_path: saml-metadata.xml
+        relative_to: config
+        mode: read
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java
index 0ed6d92fd551d..3ad55d5f64698 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java
@@ -69,7 +69,7 @@ public void setup() throws Exception {
         final Path tempDir = createTempDir();
         final Path httpCaPath = tempDir.resolve("httpCa.p12");
         Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/httpCa.p12"), httpCaPath);
-        when(env.configFile()).thenReturn(tempDir);
+        when(env.configDir()).thenReturn(tempDir);
         final MockSecureSettings secureSettings = new MockSecureSettings();
         secureSettings.setString("keystore.secure_password", "password");
         final Settings settings = Settings.builder().put("keystore.path", httpCaPath).setSecureSettings(secureSettings).build();
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java
index c85684a60e449..62af3d74410cc 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java
@@ -78,7 +78,7 @@ public void testDoExecute() throws Exception {
         Path transportPath = tempDir.resolve("transport.p12");
         Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/httpCa.p12"), httpCaPath);
         Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/transport.p12"), transportPath);
-        when(env.configFile()).thenReturn(tempDir);
+        when(env.configDir()).thenReturn(tempDir);
         final SSLService sslService = mock(SSLService.class);
         final MockSecureSettings secureSettings = new MockSecureSettings();
         secureSettings.setString("keystore.secure_password", "password");
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java
index 185669a6a203b..c7632943b63b1 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java
@@ -13,7 +13,6 @@
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
@@ -3008,48 +3007,6 @@ public void testGetApiKeyMetadata() throws IOException {
         assertThat(e.getMessage(), containsString("authentication realm must be [_es_api_key]"));
     }

-    public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() {
-        final String apiKeyId = randomAlphaOfLengthBetween(5, 8);
-        final Set userRoleDescriptors = Set.copyOf(
-            randomList(
-                2,
-                5,
-                () -> RoleDescriptorTestHelper.builder()
-                    .allowReservedMetadata(randomBoolean())
-                    .allowRemoteIndices(randomBoolean())
-                    .allowRestriction(randomBoolean())
-                    .allowRemoteClusters(false)
-                    .build()
-            )
-        );
-
-        // Selecting random unsupported version.
-        final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween(
-            random(),
-            TransportVersions.MINIMUM_COMPATIBLE,
-            TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)
-        );
-
-        final Set result = ApiKeyService.maybeRemoveRemotePrivileges(userRoleDescriptors, minTransportVersion, apiKeyId);
-        assertThat(result.stream().anyMatch(RoleDescriptor::hasRemoteIndicesPrivileges), equalTo(false));
-        assertThat(result.size(), equalTo(userRoleDescriptors.size()));
-
-        // Roles for which warning headers are added.
-        final List userRoleNamesWithRemoteIndicesPrivileges = userRoleDescriptors.stream()
-            .filter(RoleDescriptor::hasRemoteIndicesPrivileges)
-            .map(RoleDescriptor::getName)
-            .sorted()
-            .toList();
-
-        if (false == userRoleNamesWithRemoteIndicesPrivileges.isEmpty()) {
-            assertWarnings(
-                "Removed API key's remote indices privileges from role(s) "
-                    + userRoleNamesWithRemoteIndicesPrivileges
-                    + ". Remote indices are not supported by all nodes in the cluster. "
-            );
-        }
-    }
-
     public void testMaybeRemoveRemoteClusterPrivilegesWithUnsupportedVersion() {
         final String apiKeyId = randomAlphaOfLengthBetween(5, 8);
         final Set userRoleDescriptors = Set.copyOf(
@@ -3124,52 +3081,6 @@ public void testBuildDelimitedStringWithLimit() {
         assertThat(e.getMessage(), equalTo("limit must be positive number"));
     }

-    public void testCreateCrossClusterApiKeyMinVersionConstraint() {
-        final Authentication authentication = randomValueOtherThanMany(
-            Authentication::isApiKey,
-            () -> AuthenticationTestHelper.builder().build()
-        );
-        final AbstractCreateApiKeyRequest request = mock(AbstractCreateApiKeyRequest.class);
-        when(request.getType()).thenReturn(ApiKey.Type.CROSS_CLUSTER);
-
-        final ClusterService clusterService = mock(ClusterService.class);
-        when(clusterService.getClusterSettings()).thenReturn(
-            new ClusterSettings(Settings.EMPTY, Set.of(ApiKeyService.DELETE_RETENTION_PERIOD, ApiKeyService.DELETE_INTERVAL))
-        );
-        final ClusterState clusterState = mock(ClusterState.class);
-        when(clusterService.state()).thenReturn(clusterState);
-        final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween(
-            random(),
-            TransportVersions.MINIMUM_COMPATIBLE,
-            TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)
-        );
-        when(clusterState.getMinTransportVersion()).thenReturn(minTransportVersion);
-
-        final ApiKeyService service = new ApiKeyService(
-            Settings.EMPTY,
-            clock,
-            client,
-            securityIndex,
-            clusterService,
-            cacheInvalidatorRegistry,
-            threadPool,
-            MeterRegistry.NOOP
-        );
-
-        final PlainActionFuture future = new PlainActionFuture<>();
-        service.createApiKey(authentication, request, Set.of(), future);
-        final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, future::actionGet);
-
-        assertThat(
-            e.getMessage(),
-            containsString(
-                "all nodes must have version ["
-                    + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion()
-                    + "] or higher to support creating cross cluster API keys"
-            )
-        );
-    }
-
     public void testAuthenticationFailureWithApiKeyTypeMismatch() throws Exception {
         final Settings settings = Settings.builder().put(XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.getKey(), true).build();
         final ApiKeyService service = spy(createApiKeyService(settings));
@@ -3268,73 +3179,6 @@ public void testValidateApiKeyTypeAndExpiration() throws IOException {
         assertThat(auth3.getMetadata(), hasEntry(API_KEY_TYPE_KEY, apiKeyDoc3.type.value()));
     }

-    public void testCreateOrUpdateApiKeyWithWorkflowsRestrictionForUnsupportedVersion() {
-        final Authentication authentication = AuthenticationTestHelper.builder().build();
-        final ClusterService clusterService = mock(ClusterService.class);
-        when(clusterService.getClusterSettings()).thenReturn(
-            new ClusterSettings(Settings.EMPTY, Set.of(ApiKeyService.DELETE_RETENTION_PERIOD, ApiKeyService.DELETE_INTERVAL))
-        );
-        final ClusterState clusterState = mock(ClusterState.class);
-        when(clusterService.state()).thenReturn(clusterState);
-        final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween(
-            random(),
-            TransportVersions.MINIMUM_COMPATIBLE,
-            TransportVersionUtils.getPreviousVersion(WORKFLOWS_RESTRICTION_VERSION)
-        );
-        when(clusterState.getMinTransportVersion()).thenReturn(minTransportVersion);
-
-        final ApiKeyService service = new ApiKeyService(
-            Settings.EMPTY,
-            clock,
-            client,
-            securityIndex,
-            clusterService,
-            cacheInvalidatorRegistry,
-            threadPool,
-            MeterRegistry.NOOP
-        );
-
-        final List roleDescriptorsWithWorkflowsRestriction = randomList(
-            1,
-            3,
-            () -> randomRoleDescriptorWithWorkflowsRestriction()
-        );
-
-        final AbstractCreateApiKeyRequest createRequest = mock(AbstractCreateApiKeyRequest.class);
-        when(createRequest.getType()).thenReturn(ApiKey.Type.REST);
-        when(createRequest.getRoleDescriptors()).thenReturn(roleDescriptorsWithWorkflowsRestriction);
-
-        final PlainActionFuture createFuture = new PlainActionFuture<>();
-        service.createApiKey(authentication, createRequest, Set.of(), createFuture);
-        final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, createFuture::actionGet);
-        assertThat(
-            e1.getMessage(),
-            containsString(
-                "all nodes must have version ["
-                    + WORKFLOWS_RESTRICTION_VERSION.toReleaseVersion()
-                    + "] or higher to support restrictions for API keys"
-            )
-        );
-
-        final BulkUpdateApiKeyRequest updateRequest = new BulkUpdateApiKeyRequest(
-            randomList(1, 3, () -> randomAlphaOfLengthBetween(3, 5)),
-            roleDescriptorsWithWorkflowsRestriction,
-            Map.of(),
-            ApiKeyTests.randomFutureExpirationTime()
-        );
-        final PlainActionFuture updateFuture = new PlainActionFuture<>();
-        service.updateApiKeys(authentication, updateRequest, Set.of(), updateFuture);
-        final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, createFuture::actionGet);
-        assertThat(
-            e2.getMessage(),
-            containsString(
-                "all nodes must have version ["
-                    + WORKFLOWS_RESTRICTION_VERSION.toReleaseVersion()
-                    + "] or higher to support restrictions for API keys"
-            )
-        );
-    }
-
     public void testValidateOwnerUserRoleDescriptorsWithWorkflowsRestriction() {
         final Authentication authentication = AuthenticationTestHelper.builder().build();
         final ClusterService clusterService = mock(ClusterService.class);
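The tests deleted above all exercised the same backwards-compatibility guard: a request is rejected while any node in the cluster is below the transport version that introduced the feature. A hedged sketch of that guard's general shape (the class and method here are mine; only `TransportVersion#before` and `toReleaseVersion` are real APIs used by the deleted tests themselves):

```java
import org.elasticsearch.TransportVersion;

// Sketch of the minimum-transport-version gate the removed tests covered:
// once all supported BWC versions include the feature, the gate (and its
// tests) can be deleted, which appears to be what this diff does.
final class VersionGate {
    static void ensureSupported(TransportVersion minClusterVersion, TransportVersion required, String feature) {
        if (minClusterVersion.before(required)) {
            throw new IllegalArgumentException(
                "all nodes must have version [" + required.toReleaseVersion() + "] or higher to support " + feature
            );
        }
    }
}
```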
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java
index aed39b24f217d..31c6d6f0c2341 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java
@@ -9,7 +9,6 @@
 import org.elasticsearch.ElasticsearchSecurityException;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.cluster.service.ClusterService;
@@ -17,7 +16,6 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.TransportVersionUtils;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.xpack.core.security.action.apikey.ApiKey;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
@@ -36,7 +34,6 @@
 import java.io.IOException;
 import java.util.concurrent.ExecutionException;

-import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
@@ -75,55 +72,6 @@ public void init() throws Exception {
         );
     }

-    public void testAuthenticateThrowsOnUnsupportedMinVersions() throws IOException {
-        when(clusterService.state().getMinTransportVersion()).thenReturn(
-            TransportVersionUtils.randomVersionBetween(
-                random(),
-                TransportVersions.MINIMUM_COMPATIBLE,
-                TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)
-            )
-        );
-        final var authcContext = mock(Authenticator.Context.class, Mockito.RETURNS_DEEP_STUBS);
-        when(authcContext.getThreadContext()).thenReturn(threadContext);
-        final var crossClusterAccessHeaders = new CrossClusterAccessHeaders(
-            CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(),
-            AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo()
-        );
-        crossClusterAccessHeaders.writeToContext(threadContext);
-        final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class);
-        when(authcContext.getRequest()).thenReturn(auditableRequest);
-        when(auditableRequest.exceptionProcessingRequest(any(), any())).thenAnswer(
-            i -> new ElasticsearchSecurityException("potato", (Exception) i.getArguments()[0])
-        );
-        doAnswer(
-            invocationOnMock -> new Authenticator.Context(
-                threadContext,
-                auditableRequest,
-                mock(Realms.class),
-                (AuthenticationToken) invocationOnMock.getArguments()[2]
-            )
-        ).when(authenticationService).newContext(anyString(), any(), any());
-
-        final PlainActionFuture future = new PlainActionFuture<>();
-        crossClusterAccessAuthenticationService.authenticate("action", mock(TransportRequest.class), future);
-        final ExecutionException actual = expectThrows(ExecutionException.class, future::get);
-
-        assertThat(actual.getCause().getCause(), instanceOf(IllegalArgumentException.class));
-        assertThat(
-            actual.getCause().getCause().getMessage(),
-            equalTo(
-                "all nodes must have version ["
-                    + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion()
-                    + "] or higher to support cross cluster requests through the dedicated remote cluster port"
-            )
-        );
-        verify(auditableRequest).exceptionProcessingRequest(
-            any(Exception.class),
-            credentialsArgMatches(crossClusterAccessHeaders.credentials())
-        );
-        verifyNoMoreInteractions(auditableRequest);
-    }
-
     public void testAuthenticationSuccessOnSuccessfulAuthentication() throws IOException, ExecutionException, InterruptedException {
         final var crossClusterAccessHeaders = new CrossClusterAccessHeaders(
             CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(),
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java
index b84282bd40660..417725d908b41 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java
@@ -72,7 +72,7 @@ public void shutdown() {
     }

     public void testStore_ConfiguredWithUnreadableFile() throws Exception {
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         Files.createDirectories(configDir);
         Path file = configDir.resolve("users");

@@ -88,7 +88,7 @@ public void testStore_ConfiguredWithUnreadableFile() throws Exception {

     public void testStore_AutoReload() throws Exception {
         Path users = getDataPath("users");
-        Path configDir = env.configFile();
+        Path configDir = env.configDir();
         Files.createDirectories(configDir);
         Path file = configDir.resolve("users");
         Files.copy(users, file, StandardCopyOption.REPLACE_EXISTING);
@@ -149,7 +149,7 @@ private RealmConfig getRealmConfig() {

     public void testStore_AutoReload_WithParseFailures() throws Exception {
         Path users = getDataPath("users");
-        Path confDir = env.configFile();
+        Path confDir = env.configDir();
         Files.createDirectories(confDir);
         Path testUsers = confDir.resolve("users");
         Files.copy(users, testUsers, StandardCopyOption.REPLACE_EXISTING);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java
index 258770b10c743..759f57a4e0174 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java
@@ -285,7 +285,7 @@ private Path writeUsersRoles(String input) throws Exception {
     }

     private Path getUsersRolesPath() throws IOException {
-        Path xpackConf = env.configFile();
+        Path xpackConf = env.configDir();
         Files.createDirectories(xpackConf);
         return xpackConf.resolve("users_roles");
     }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java
index 9800cb73faf6a..3d05b7540596a 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java
@@ -41,7 +41,7 @@ public void testConcurrentReloadWillBeQueuedAndShareTheResults() throws IOExcept
         final RealmConfig realmConfig = mock(RealmConfig.class);
         when(realmConfig.getSetting(JwtRealmSettings.PKC_JWKSET_PATH)).thenReturn("jwkset.json");
         final Environment env = mock(Environment.class);
-        when(env.configFile()).thenReturn(tempDir);
+        when(env.configDir()).thenReturn(tempDir);
         when(realmConfig.env()).thenReturn(env);

         final JwkSetLoader jwkSetLoader = spy(new JwkSetLoader(realmConfig, List.of(), null));
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java
index f1927876eba5f..2c9e57df60e26 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java
@@ -59,7 +59,7 @@ public void setup() throws Exception {
         final RealmConfig realmConfig = mock(RealmConfig.class);
         when(realmConfig.getSetting(JwtRealmSettings.PKC_JWKSET_PATH)).thenReturn("jwkset.json");
         final Environment env = mock(Environment.class);
-        when(env.configFile()).thenReturn(tempDir);
+        when(env.configDir()).thenReturn(tempDir);
         when(realmConfig.env()).thenReturn(env);

         validateSignatureAttemptCounter = new AtomicInteger();
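Many of the test hunks above change only the stubbed accessor on a mocked `Environment`. A minimal sketch of that recurring pattern, extracted for readability (the helper method name is mine; the Mockito calls are exactly what the hunks use):

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.nio.file.Path;

import org.elasticsearch.env.Environment;

// Stub the renamed accessor so code under test resolves config-relative
// files against a per-test temp directory.
final class TestEnvironments {
    static Environment withConfigDir(Path tempDir) {
        Environment env = mock(Environment.class);
        when(env.configDir()).thenReturn(tempDir); // was env.configFile() before this rename
        return env;
    }
}
```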
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java @@ -63,7 +63,7 @@ public void testAuthenticateDifferentFailureScenarios() throws LoginException, G final boolean throwExceptionForInvalidTicket = validTicket ? false : randomBoolean(); final boolean throwLoginException = randomBoolean(); final byte[] decodedTicket = randomByteArrayOfLength(5); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); if (validTicket) { mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); @@ -144,7 +144,7 @@ public void testDelegatedAuthorizationFailedToResolve() throws Exception { settings = Settings.builder().put(settings).putList("authorization_realms", "other_realm").build(); final KerberosRealm kerberosRealm = createKerberosRealm(Collections.singletonList(otherRealm), username); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java index b1ddb631a8dd2..c6431a8d81685 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java @@ -48,7 +48,7 @@ public void testAuthenticateWithCache() throws LoginException, GSSException { metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[0]), null, null, metadata, true); final byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); @@ -78,7 +78,7 @@ public void testCacheInvalidationScenarios() throws LoginException, GSSException final String authNUsername = randomFrom(userNames); final 
byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(authNUsername, outToken), null); final String expectedUsername = maybeRemoveRealmName(authNUsername); @@ -137,7 +137,7 @@ public void testAuthenticateWithValidTicketSucessAuthnWithUserDetailsWhenCacheDi metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[0]), null, null, metadata, true); final byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java index 177507ce6d792..e4718f3e95019 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java @@ -82,7 +82,7 @@ public void testAuthenticateWithValidTicketSucessAuthnWithUserDetails() throws L metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, metadata, true); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); @@ -106,7 +106,7 @@ public void testFailedAuthorization() throws LoginException, GSSException { final String username = randomPrincipalName(); final KerberosRealm kerberosRealm = createKerberosRealm(username); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); 
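The security-test churn through this stretch is one mechanical rename: Environment#configFile() became Environment#configDir() (and, further down, modulesFile()/pluginsFile() became modulesDir()/pluginsDir()), with the return type still java.nio.file.Path, so only the method name changes at each call site. A minimal sketch of the post-rename call-site shape; the helper class and method names here are invented for illustration, not part of the PR:

    import java.nio.file.Path;
    import org.elasticsearch.env.Environment;

    final class ConfigPaths {
        private ConfigPaths() {}

        // Resolves a file that must live under the node's config directory.
        // Before this change the same call read: env.configFile().resolve(fileName)
        static Path underConfigDir(Environment env, String fileName) {
            return env.configDir().resolve(fileName);
        }
    }
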
mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>("does-not-exist@REALM", "out-token"), null); @@ -236,7 +236,7 @@ public void testDelegatedAuthorization() throws Exception { final KerberosRealm kerberosRealm = createKerberosRealm(Collections.singletonList(otherRealm), username); final User expectedUser = lookupUser; final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java index 00b55e5b48337..0f2a720660afd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java @@ -128,7 +128,7 @@ public void testParseFileNotExists() throws IllegalAccessException, IOException public void testAutoReload() throws Exception { Path serviceTokensSourceFile = getDataPath("service_tokens"); - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path targetFile = configDir.resolve("service_tokens"); Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING); @@ -225,7 +225,7 @@ public void testAutoReload() throws Exception { public void testFindTokensFor() throws IOException { Path serviceTokensSourceFile = getDataPath("service_tokens"); - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path targetFile = configDir.resolve("service_tokens"); Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java index 6332e63ca5958..ee025fe64ff9a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java @@ -76,8 +76,8 @@ public class DnRoleMapperTests extends ESTestCase { public void init() throws IOException { settings = Settings.builder().put("resource.reload.interval.high", "100ms").put("path.home", createTempDir()).build(); env = TestEnvironment.newEnvironment(settings); - if (Files.exists(env.configFile()) == false) { - Files.createDirectory(env.configFile()); + if (Files.exists(env.configDir()) == false) { + Files.createDirectory(env.configDir()); } threadPool = new TestThreadPool("test"); } @@ -100,7 +100,7 @@ public void testMapper_ConfiguredWithUnreadableFile() throws Exception { public 
void testMapper_AutoReload() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); final CountDownLatch latch = new CountDownLatch(1); @@ -144,7 +144,7 @@ public void testMapper_AutoReload() throws Exception { public void testMapper_AutoReload_WithParseFailures() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); final CountDownLatch latch = new CountDownLatch(1); @@ -171,7 +171,7 @@ public void testMapper_AutoReload_WithParseFailures() throws Exception { public void testMapperAutoReloadWithoutListener() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 2b8a77d63588a..89f32e59f6bad 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BulkRequest; @@ -51,7 +50,6 @@ import org.elasticsearch.license.TestUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -64,8 +62,6 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; @@ -85,20 +81,17 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_FORMAT_SETTING; 
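The DnRoleMapperTests changes above keep the same auto-reload pattern throughout: copy a mapping file under env.configDir(), then wait (via a CountDownLatch) for the watcher to notice a modification. Elasticsearch's ResourceWatcherService polls on an interval (these tests set resource.reload.interval.high to 100ms); the sketch below only illustrates the same watch-and-react idea using the JDK's WatchService, a standard-library analogue rather than the API these tests use:

    import java.nio.file.Path;
    import java.nio.file.WatchEvent;
    import java.nio.file.WatchKey;
    import java.nio.file.WatchService;
    import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY;

    public class WatchConfigDir {
        public static void main(String[] args) throws Exception {
            Path configDir = Path.of(args[0]);
            try (WatchService watcher = configDir.getFileSystem().newWatchService()) {
                configDir.register(watcher, ENTRY_MODIFY);
                WatchKey key = watcher.take(); // blocks until something in the directory changes
                for (WatchEvent<?> event : key.pollEvents()) {
                    System.out.println("modified: " + event.context()); // e.g. test_role_mapping.yml
                }
            }
        }
    }
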
import static org.elasticsearch.indices.SystemIndexDescriptor.VERSION_META_KEY; -import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomClusterPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; -import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -465,101 +458,6 @@ public void testPutOfRoleWithFlsDlsUnlicensed() throws IOException { assertThat(e.getMessage(), containsString("field and document level security")); } - public void testPutRoleWithRemotePrivsUnsupportedMinNodeVersion() throws IOException { - // Init for validation - new ReservedRolesStore(Set.of("superuser")); - enum TEST_MODE { - REMOTE_INDICES_PRIVS, - REMOTE_CLUSTER_PRIVS, - REMOTE_INDICES_AND_CLUSTER_PRIVS - } - for (TEST_MODE testMode : TEST_MODE.values()) { - // default to both remote indices and cluster privileges and use the switch below to remove one or the other - TransportVersion transportVersionBeforeAdvancedRemoteClusterSecurity = TransportVersionUtils.getPreviousVersion( - TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY - ); - RoleDescriptor.RemoteIndicesPrivileges[] remoteIndicesPrivileges = new RoleDescriptor.RemoteIndicesPrivileges[] { - RoleDescriptor.RemoteIndicesPrivileges.builder("remote").privileges("read").indices("index").build() }; - RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions().addGroup( - new RemoteClusterPermissionGroup( - RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), - new String[] { "remote" } - ) - ); - switch (testMode) { - case REMOTE_CLUSTER_PRIVS -> { - transportVersionBeforeAdvancedRemoteClusterSecurity = TransportVersionUtils.getPreviousVersion( - ROLE_REMOTE_CLUSTER_PRIVS - ); - remoteIndicesPrivileges = null; - } - case REMOTE_INDICES_PRIVS -> remoteClusterPermissions = null; - } - final Client client = mock(Client.class); - - final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - transportVersionBeforeAdvancedRemoteClusterSecurity - ); - final ClusterService clusterService = mockClusterServiceWithMinNodeVersion(minTransportVersion); - - final XPackLicenseState licenseState = mock(XPackLicenseState.class); - - final SecuritySystemIndices systemIndices = new SecuritySystemIndices(clusterService.getSettings()); - final FeatureService featureService = mock(FeatureService.class); - systemIndices.init(client, featureService, clusterService); - final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); - - final NativeRolesStore rolesStore = new NativeRolesStore( - Settings.EMPTY, - client, - licenseState, - securityIndex, - clusterService, - mock(FeatureService.class), - 
mock(ReservedRoleNameChecker.class), - mock(NamedXContentRegistry.class) - ); - // setup the roles store so the security index exists - securityIndex.clusterChanged(new ClusterChangedEvent("source", getClusterStateWithSecurityIndex(), getEmptyClusterState())); - - RoleDescriptor remoteIndicesRole = new RoleDescriptor( - "remote", - null, - null, - null, - null, - null, - null, - null, - remoteIndicesPrivileges, - remoteClusterPermissions, - null, - null - ); - PlainActionFuture<Boolean> future = new PlainActionFuture<>(); - putRole(rolesStore, remoteIndicesRole, future); - IllegalStateException e = expectThrows( - IllegalStateException.class, - String.format(Locale.ROOT, "expected IllegalStateException, but not thrown for mode [%s]", testMode), - future::actionGet - ); - assertThat( - e.getMessage(), - containsString( - "all nodes must have version [" - + (TEST_MODE.REMOTE_CLUSTER_PRIVS.equals(testMode) - ? ROLE_REMOTE_CLUSTER_PRIVS.toReleaseVersion() - : TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion()) - + "] or higher to support remote " - + (remoteIndicesPrivileges != null ? "indices" : "cluster") - + " privileges" - ) - ); - } - } - public void testGetRoleWhenDisabled() throws Exception { final Settings settings = Settings.builder().put(NativeRolesStore.NATIVE_ROLES_ENABLED, "false").build(); NativeRolesStore store = createRoleStoreForTest(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java index 34cfde8dc862f..972c00b59b1f2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java @@ -484,7 +484,7 @@ public void testParseInvalidConfig() throws IOException { } private Path getOperatorUsersPath() throws IOException { - Path xpackConf = env.configFile(); + Path xpackConf = env.configDir(); Files.createDirectories(xpackConf); return xpackConf.resolve("operator_users.yml"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java index 9bb0c8af6f481..2ac2d4ebf0c32 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java @@ -375,7 +375,7 @@ private void checkBlockedResource( + " [" + fileName + "] because access to read the file is blocked; SSL resources should be placed in the [" - + env.configFile().toAbsolutePath().toString() + + env.configDir().toAbsolutePath().toString() + "] directory"; Throwable exception = expectFailure(settings); @@ -477,7 +477,7 @@ private Settings.Builder configureWorkingKeystore(String prefix, Settings.Builde private ElasticsearchException expectFailure(Settings.Builder settings) { return expectThrows( ElasticsearchException.class, - () -> new SSLService(new Environment(buildEnvSettings(settings.build()), env.configFile())) + () -> new SSLService(new Environment(buildEnvSettings(settings.build()), env.configDir())) ); } diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index d1b179f09e403..fd0e43fb23c3c 100644 ---
a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -20,11 +20,20 @@ dependencies { testImplementation project(':modules:rest-root') } -tasks.named("compileJava").configure { +java { targetCompatibility = JavaVersion.VERSION_1_8 sourceCompatibility = JavaVersion.VERSION_1_8 } +tasks.named("compileTestJava").configure { + targetCompatibility = buildParams.getMinimumRuntimeVersion() + sourceCompatibility = buildParams.getMinimumRuntimeVersion() +} + +tasks.named("test").configure { + // reset the unit test classpath as using the shadow jar won't work due to relocated packages + classpath = sourceSets.test.runtimeClasspath +} tasks.named("shadowJar").configure { relocate 'com.fasterxml', 'shadow.fasterxml' @@ -34,7 +43,3 @@ tasks.named("shadowJar").configure { } } -tasks.named("test").configure { - // reset the unit test classpath as using the shadow jar won't work due to relocated packages - classpath = sourceSets.test.runtimeClasspath -} diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec index 2fa82c05cc1aa..0bdd3fbc1b450 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec @@ -3353,7 +3353,7 @@ Alejandro Amabile Anoosh Basil -Brendon +Cristinel // end::filterToday ; diff --git a/x-pack/plugin/sql/sql-client/build.gradle b/x-pack/plugin/sql/sql-client/build.gradle index 4a20e00666ea4..f52e4b9ed96db 100644 --- a/x-pack/plugin/sql/sql-client/build.gradle +++ b/x-pack/plugin/sql/sql-client/build.gradle @@ -12,11 +12,16 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) } -tasks.named("compileJava").configure { +java { targetCompatibility = JavaVersion.VERSION_1_8 sourceCompatibility = JavaVersion.VERSION_1_8 } +tasks.named("compileTestJava").configure { + targetCompatibility = buildParams.getMinimumRuntimeVersion() + sourceCompatibility = buildParams.getMinimumRuntimeVersion() +} + tasks.named('forbiddenApisMain').configure { // does not depend on core, so only jdk and http signatures should be checked replaceSignatureFiles 'jdk-signatures' diff --git a/x-pack/plugin/sql/sql-proto/build.gradle b/x-pack/plugin/sql/sql-proto/build.gradle index 2cb1cfa89f033..0a34afdbc2504 100644 --- a/x-pack/plugin/sql/sql-proto/build.gradle +++ b/x-pack/plugin/sql/sql-proto/build.gradle @@ -16,11 +16,16 @@ dependencies { } } -tasks.named("compileJava").configure { +java { targetCompatibility = JavaVersion.VERSION_1_8 sourceCompatibility = JavaVersion.VERSION_1_8 } +tasks.named("compileTestJava").configure { + targetCompatibility = buildParams.getMinimumRuntimeVersion() + sourceCompatibility = buildParams.getMinimumRuntimeVersion() +} + tasks.named('forbiddenApisMain').configure { //sql does not depend on server, so only jdk signatures should be checked replaceSignatureFiles 'jdk-signatures' diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDeleteIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDeleteIT.java index bb68c7b84da5d..b5064c46c95ae 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDeleteIT.java +++ 
b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDeleteIT.java @@ -110,7 +110,7 @@ public void testDeleteWithParamDeletesAutoCreatedDestinationIndex() throws Excep deleteTransform(transformId, false, true); assertFalse(indexExists(transformDest)); - assertFalse(aliasExists(transformDest)); + assertFalse(aliasExists(transformDestAlias)); } public void testDeleteWithParamDeletesManuallyCreatedDestinationIndex() throws Exception { @@ -139,7 +139,7 @@ public void testDeleteWithParamDeletesManuallyCreatedDestinationIndex() throws E assertFalse(aliasExists(transformDestAlias)); } - public void testDeleteWithParamDoesNotDeleteManuallySetUpAlias() throws Exception { + public void testDeleteWithManuallyCreatedIndexAndManuallyCreatedAlias() throws Exception { String transformId = "transform-4"; String transformDest = transformId + "_idx"; String transformDestAlias = transformId + "_alias"; @@ -158,31 +158,106 @@ public void testDeleteWithParamDoesNotDeleteManuallySetUpAlias() throws Exceptio assertTrue(indexExists(transformDest)); assertTrue(aliasExists(transformDestAlias)); + deleteTransform(transformId, false, true); + assertFalse(indexExists(transformDest)); + assertFalse(aliasExists(transformDestAlias)); + } + + public void testDeleteDestinationIndexIsNoOpWhenNoDestinationIndexExists() throws Exception { + String transformId = "transform-5"; + String transformDest = transformId + "_idx"; + String transformDestAlias = transformId + "_alias"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest, transformDestAlias); + + createTransform(transformId, transformDest, transformDestAlias); + assertFalse(indexExists(transformDest)); + assertFalse(aliasExists(transformDestAlias)); + + deleteTransform(transformId, false, true); + assertFalse(indexExists(transformDest)); + assertFalse(aliasExists(transformDestAlias)); + } + + public void testDeleteWithAliasPointingToManyIndices() throws Exception { + var transformId = "transform-6"; + var transformDest = transformId + "_idx"; + var otherIndex = "some-other-index-6"; + String transformDestAlias = transformId + "_alias"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest, otherIndex, transformDestAlias); + + createIndex(transformDest, null, null, "\"" + transformDestAlias + "\": { \"is_write_index\": true }"); + createIndex(otherIndex, null, null, "\"" + transformDestAlias + "\": {}"); + + assertTrue(indexExists(transformDest)); + assertTrue(indexExists(otherIndex)); + assertTrue(aliasExists(transformDestAlias)); + + createTransform(transformId, transformDestAlias, null); + + startTransform(transformId); + waitForTransformCheckpoint(transformId, 1); + + stopTransform(transformId, false); + + assertTrue(indexExists(transformDest)); + assertTrue(indexExists(otherIndex)); + assertTrue(aliasExists(transformDestAlias)); + + deleteTransform(transformId, false, true); + + assertFalse(indexExists(transformDest)); + assertTrue(indexExists(otherIndex)); + assertTrue(aliasExists(transformDestAlias)); + } + + public void testDeleteWithNoWriteIndexThrowsException() throws Exception { + var transformId = "transform-7"; + var transformDest = transformId + "_idx"; + var otherIndex = "some-other-index-7"; + String transformDestAlias = transformId + "_alias"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest, otherIndex, transformDestAlias); + + createIndex(transformDest, null, null, "\"" + transformDestAlias + "\": {}"); + + 
assertTrue(indexExists(transformDest)); + assertTrue(aliasExists(transformDestAlias)); + + createTransform(transformId, transformDestAlias, null); + + createIndex(otherIndex, null, null, "\"" + transformDestAlias + "\": {}"); + assertTrue(indexExists(otherIndex)); + ResponseException e = expectThrows(ResponseException.class, () -> deleteTransform(transformId, false, true)); assertThat( e.getMessage(), containsString( Strings.format( - "The provided expression [%s] matches an alias, specify the corresponding concrete indices instead.", + "Cannot disambiguate destination index alias [%s]. Alias points to many indices with no clear write alias." + + " Retry with delete_dest_index=false and manually clean up destination index.", transformDestAlias ) ) ); } - public void testDeleteDestinationIndexIsNoOpWhenNoDestinationIndexExists() throws Exception { - String transformId = "transform-5"; - String transformDest = transformId + "_idx"; - String transformDestAlias = transformId + "_alias"; - setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest, transformDestAlias); + public void testDeleteWithAlreadyDeletedIndex() throws Exception { + var transformId = "transform-8"; + var transformDest = transformId + "_idx"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest); + + createIndex(transformDest); + + assertTrue(indexExists(transformDest)); + + createTransform(transformId, transformDest, null); + + deleteIndex(transformDest); - createTransform(transformId, transformDest, transformDestAlias); assertFalse(indexExists(transformDest)); - assertFalse(aliasExists(transformDestAlias)); deleteTransform(transformId, false, true); + assertFalse(indexExists(transformDest)); - assertFalse(aliasExists(transformDestAlias)); } private void createTransform(String transformId, String destIndex, String destAlias) throws IOException { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 537f50a30b5dd..20ec649f74811 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -412,7 +412,7 @@ protected void updateTransform(String transformId, String update, boolean deferV } updateTransformRequest.setJsonEntity(update); - client().performRequest(updateTransformRequest); + assertOKAndConsume(client().performRequest(updateTransformRequest)); } protected void startTransform(String transformId) throws IOException { diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java index e4e577299d0d7..4ce17b46805e3 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java @@ -12,6 +12,8 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.util.SetOnce; import 
org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateAction; +import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -61,6 +63,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformStats; import org.elasticsearch.xpack.core.transform.transforms.latest.LatestConfig; import org.elasticsearch.xpack.transform.LocalStateTransform; +import org.junit.After; import org.junit.Before; import java.io.IOException; @@ -136,6 +139,11 @@ public void setUpNamedXContentRegistryAndIndices() throws Exception { remoteNewDocs = createIndexAndIndexDocs(REMOTE_CLUSTER, "remote_new_index", newRemoteNumShards, timestamp, randomBoolean()); } + @After + public void cleanup() { + client().execute(ResetFeatureStateAction.INSTANCE, new ResetFeatureStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); + } + private int createIndexAndIndexDocs(String cluster, String index, int numberOfShards, long timestamp, boolean exposeTimestamp) throws Exception { Client client = client(cluster); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java index 41b683a7965ca..619e72581cb51 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java @@ -10,9 +10,13 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; import org.elasticsearch.client.internal.Client; @@ -27,6 +31,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; @@ -42,6 +47,8 @@ import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; import org.elasticsearch.xpack.transform.transforms.TransformTask; +import java.util.Objects; + import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.executeWithHeadersAsync; @@ -146,20 +153,31 @@ private void deleteDestinationIndex( TimeValue timeout, ActionListener<AcknowledgedResponse>
listener ) { - // <3> Check if the error is "index not found" error. If so, just move on. The index is already deleted. - ActionListener<AcknowledgedResponse> deleteDestIndexListener = ActionListener.wrap(listener::onResponse, e -> { - if (e instanceof IndexNotFoundException) { - listener.onResponse(AcknowledgedResponse.TRUE); - } else { - listener.onFailure(e); - } - }); + getTransformConfig(transformId).andThen((l, r) -> deleteDestinationIndex(r.v1(), parentTaskId, timeout, l)) + .addListener(listener.delegateResponse((l, e) -> { + if (e instanceof IndexNotFoundException) { + l.onResponse(AcknowledgedResponse.TRUE); + } else { + l.onFailure(e); + } + })); + } - // <2> Delete destination index - ActionListener<Tuple<TransformConfig, SeqNoPrimaryTermAndIndex>> getTransformConfigurationListener = ActionListener.wrap( - transformConfigAndVersion -> { - TransformConfig config = transformConfigAndVersion.v1(); - String destIndex = config.getDestination().getIndex(); + private SubscribableListener<Tuple<TransformConfig, SeqNoPrimaryTermAndIndex>> getTransformConfig(String transformId) { + return SubscribableListener.newForked(l -> transformConfigManager.getTransformConfigurationForUpdate(transformId, l)); + } + + /** + * Delete the destination index. If the Transform is configured to write to an alias, then follow that alias to the concrete index. + */ + private void deleteDestinationIndex( + TransformConfig config, + TaskId parentTaskId, + TimeValue timeout, + ActionListener<AcknowledgedResponse> listener + ) { + SubscribableListener.newForked(l -> resolveDestinationIndex(config, parentTaskId, timeout, l)) + .andThen((l, destIndex) -> { DeleteIndexRequest deleteDestIndexRequest = new DeleteIndexRequest(destIndex); deleteDestIndexRequest.ackTimeout(timeout); deleteDestIndexRequest.setParentTask(parentTaskId); @@ -169,14 +187,57 @@ private void deleteDestinationIndex( client, TransportDeleteIndexAction.TYPE, deleteDestIndexRequest, - deleteDestIndexListener + l ); - }, - listener::onFailure - ); + }) + .addListener(listener); + } + + private void resolveDestinationIndex(TransformConfig config, TaskId parentTaskId, TimeValue timeout, ActionListener<String> listener) { + var destIndex = config.getDestination().getIndex(); + var responseListener = ActionListener.wrap(r -> findDestinationIndexInAliases(r, destIndex, listener), e -> { + if (e instanceof AliasesNotFoundException) { + // no alias == the destIndex is our concrete index + listener.onResponse(destIndex); + } else { + listener.onFailure(e); + } + }); + + GetAliasesRequest request = new GetAliasesRequest(timeout, destIndex); + request.setParentTask(parentTaskId); + executeWithHeadersAsync(config.getHeaders(), TRANSFORM_ORIGIN, client, GetAliasesAction.INSTANCE, request, responseListener); + } - // <1> Fetch transform configuration - transformConfigManager.getTransformConfigurationForUpdate(transformId, getTransformConfigurationListener); + private static void findDestinationIndexInAliases(GetAliasesResponse aliases, String destIndex, ActionListener<String> listener) { + var indexToAliases = aliases.getAliases(); + if (indexToAliases.isEmpty()) { + // if the alias list is empty, that means the index is a concrete index + listener.onResponse(destIndex); + } else if (indexToAliases.size() == 1) { + // if there is one value, the alias will treat it as the write index, so it's our destination index + listener.onResponse(indexToAliases.keySet().iterator().next()); + } else { + // if there is more than one index, there may be more than one alias for each index + // we have to search for the alias that matches our destination index name AND is declared the write index for that alias +
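The rewrite above trades nested ActionListener.wrap callbacks for a SubscribableListener chain: newForked starts the async pipeline, each andThen step runs only once the previous step has succeeded, and the terminal addListener receives either the final value or the first failure, which delegateResponse can translate (here the PR maps IndexNotFoundException to an acknowledged response). A stripped-down sketch of that composition pattern; fetchDestIndex and deleteIndex are invented placeholders, not the PR's methods:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.support.SubscribableListener;

    class DeleteFlowSketch {
        // Stand-ins for the real async steps (config fetch, index delete).
        static void fetchDestIndex(ActionListener<String> l) {
            l.onResponse("my-dest-index");
        }

        static void deleteIndex(String index, ActionListener<Boolean> l) {
            l.onResponse(true);
        }

        static void run(ActionListener<Boolean> listener) {
            SubscribableListener.<String>newForked(DeleteFlowSketch::fetchDestIndex) // step 1 forks the chain
                .<Boolean>andThen((l, destIndex) -> deleteIndex(destIndex, l))       // step 2 sees step 1's result
                .addListener(listener.delegateResponse((l, e) -> l.onResponse(false))); // map expected failures
        }
    }
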
indexToAliases.entrySet().stream().map(entry -> { + if (entry.getValue().stream().anyMatch(md -> destIndex.equals(md.getAlias()) && Boolean.TRUE.equals(md.writeIndex()))) { + return entry.getKey(); + } else { + return null; + } + }).filter(Objects::nonNull).findFirst().ifPresentOrElse(listener::onResponse, () -> { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot disambiguate destination index alias [" + + destIndex + + "]. Alias points to many indices with no clear write alias. Retry with delete_dest_index=false and manually" + + " clean up destination index.", + RestStatus.CONFLICT + ) + ); + }); + } } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java index 51e679ff9fe6c..402a8cbe12bd5 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java @@ -52,7 +52,8 @@ public TransformAuditor( nodeName, TransformAuditMessage::new, clusterService, - indexNameExpressionResolver + indexNameExpressionResolver, + clusterService.threadPool().generic() ); clusterService.addListener(event -> { if (event.metadataChanged()) { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java index b9d91287ce45f..3231d705f389c 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; @@ -66,7 +65,6 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Mockito.mock; public class TransformUpdaterTests extends ESTestCase { @@ -77,8 +75,7 @@ public class TransformUpdaterTests extends ESTestCase { private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); private TestThreadPool threadPool; private Client client; - private ClusterService clusterService = mock(ClusterService.class); - private TransformAuditor auditor = new MockTransformAuditor(clusterService, mock(IndexNameExpressionResolver.class)); + private TransformAuditor auditor; private final Settings settings = Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build(); private final Settings destIndexSettings = new DefaultTransformExtension().getTransformDestinationIndexSettings(); @@ -124,8 +121,7 @@ public void setupClient() { } threadPool = createThreadPool(); client = new MyMockClient(threadPool); - clusterService = mock(ClusterService.class); - auditor = new MockTransformAuditor(clusterService, mock(IndexNameExpressionResolver.class)); + auditor = 
MockTransformAuditor.createMockAuditor(); } @After diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java index 1dffd8c20abbf..4eb255b69cfd3 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java @@ -16,6 +16,8 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.transform.notifications.TransformAuditMessage; @@ -51,13 +53,16 @@ public static MockTransformAuditor createMockAuditor() { when(state.getMetadata()).thenReturn(metadata); ClusterService clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(state); + ThreadPool threadPool = mock(); + when(threadPool.generic()).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); + when(clusterService.threadPool()).thenReturn(threadPool); return new MockTransformAuditor(clusterService, mock(IndexNameExpressionResolver.class)); } private final List<AuditExpectation> expectations; - public MockTransformAuditor(ClusterService clusterService, IndexNameExpressionResolver indexNameResolver) { + private MockTransformAuditor(ClusterService clusterService, IndexNameExpressionResolver indexNameResolver) { super(mock(Client.class), MOCK_NODE_NAME, clusterService, indexNameResolver, true); expectations = new CopyOnWriteArrayList<>(); } diff --git a/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..2eb0d0dbd9881 --- /dev/null +++ b/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,2 @@ +ALL-UNNAMED: + - manage_threads diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 337fc00cc7caf..093959978b0d1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -112,8 +112,8 @@ public static void main(String[] args) throws Exception { Node node = new Node( internalNodeEnv, PluginsLoader.createPluginsLoader( - PluginsLoader.loadModulesBundles(internalNodeEnv.modulesFile()), - PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsFile()), + PluginsLoader.loadModulesBundles(internalNodeEnv.modulesDir()), + PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsDir()), Map.of() ) ).start() diff --git a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java index 0b40828b8e86c..94eeee5ed9298 100644 ---
a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java +++ b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java @@ -46,6 +46,7 @@ public class CoreWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTest .setting("xpack.ml.enabled", "false") .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.autoconfiguration.enabled", "false") + .systemProperty("es.queryable_built_in_roles_enabled", "false") .user(USER, PASS) .feature(FeatureFlag.TIME_SERIES_MODE) .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) diff --git a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java index 261bc567d5c91..74cb057278c4a 100644 --- a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java +++ b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java @@ -152,7 +152,7 @@ public void tearDownMiniKdc() throws IOException, PrivilegedActionException { protected Path getKeytabPath(Environment env) { final Setting<String> setting = KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.getConcreteSettingForNamespace(REALM_NAME); - return env.configFile().resolve(setting.get(settings)); + return env.configDir().resolve(setting.get(settings)); } /** diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index e45571fd7056e..4edf4b0c6277a 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -47,7 +47,6 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> testDistribution = "DEFAULT" versions = [oldVersion, project.version] numberOfNodes = 3 - systemProperty 'es.queryable_built_in_roles_enabled', 'true' systemProperty 'ingest.geoip.downloader.enabled.default', 'true' //we don't want to hit real service from each test systemProperty 'ingest.geoip.downloader.endpoint.default', 'http://invalid.endpoint' diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 2d229d7ffece5..a156e571b6ceb 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -8,12 +8,15 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.Build; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.client.Node; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.common.settings.SecureString; @@ -183,24 +186,108 @@ public void testDataStreamValidationDoesNotBreakUpgrade() throws Exception { } public void testUpgradeDataStream() throws Exception { + /* + * This test covers upgrading a "normal" data
stream (dataStreamName), and upgrading a data stream that was originally just an + * ordinary index that was converted to a data stream (dataStreamFromNonDataStreamIndices). + */ String dataStreamName = "reindex_test_data_stream"; + String dataStreamFromNonDataStreamIndices = "index_first_reindex_test_data_stream"; int numRollovers = randomIntBetween(0, 5); if (CLUSTER_TYPE == ClusterType.OLD) { createAndRolloverDataStream(dataStreamName, numRollovers); + createDataStreamFromNonDataStreamIndices(dataStreamFromNonDataStreamIndices); } else if (CLUSTER_TYPE == ClusterType.UPGRADED) { - upgradeDataStream(dataStreamName, numRollovers); + Map<String, Map<String, Object>> oldIndicesMetadata = getIndicesMetadata(dataStreamName); + upgradeDataStream(dataStreamName, numRollovers, numRollovers + 1, 0); + upgradeDataStream(dataStreamFromNonDataStreamIndices, 0, 1, 0); + Map<String, Map<String, Object>> upgradedIndicesMetadata = getIndicesMetadata(dataStreamName); + compareIndexMetadata(oldIndicesMetadata, upgradedIndicesMetadata); + } + } + + private void compareIndexMetadata( + Map<String, Map<String, Object>> oldIndicesMetadata, + Map<String, Map<String, Object>> upgradedIndicesMetadata + ) { + String oldWriteIndex = getWriteIndexFromDataStreamIndexMetadata(oldIndicesMetadata); + for (Map.Entry<String, Map<String, Object>> upgradedIndexEntry : upgradedIndicesMetadata.entrySet()) { + String upgradedIndexName = upgradedIndexEntry.getKey(); + if (upgradedIndexName.startsWith(".migrated-")) { + String oldIndexName = "." + upgradedIndexName.substring(".migrated-".length()); + Map<String, Object> oldIndexMetadata = oldIndicesMetadata.get(oldIndexName); + Map<String, Object> upgradedIndexMetadata = upgradedIndexEntry.getValue(); + compareSettings(oldIndexMetadata, upgradedIndexMetadata); + assertThat("Mappings did not match", upgradedIndexMetadata.get("mappings"), equalTo(oldIndexMetadata.get("mappings"))); + assertThat("ILM states did not match", upgradedIndexMetadata.get("ilm"), equalTo(oldIndexMetadata.get("ilm"))); + if (oldIndexName.equals(oldWriteIndex) == false) { // the old write index will have been rolled over by upgrade + assertThat( + "Rollover info did not match", + upgradedIndexMetadata.get("rollover_info"), + equalTo(oldIndexMetadata.get("rollover_info")) + ); + } + assertThat(upgradedIndexMetadata.get("system"), equalTo(oldIndexMetadata.get("system"))); + } + } + } + + private String getWriteIndexFromDataStreamIndexMetadata(Map<String, Map<String, Object>> indexMetadataForDataStream) { + return indexMetadataForDataStream.entrySet() + .stream() + .sorted((o1, o2) -> Long.compare(getCreationDate(o2.getValue()), getCreationDate(o1.getValue()))) + .map(Map.Entry::getKey) + .findFirst() + .get(); + } + + @SuppressWarnings("unchecked") + long getCreationDate(Map<String, Object> indexMetadata) { + return Long.parseLong( + (String) ((Map<String, Map<String, Object>>) indexMetadata.get("settings")).get("index").get("creation_date") + ); + } + + private void compareSettings(Map<String, Object> oldIndexMetadata, Map<String, Object> upgradedIndexMetadata) { + Map<String, Object> oldIndexSettings = getIndexSettingsFromIndexMetadata(oldIndexMetadata); + Map<String, Object> upgradedIndexSettings = getIndexSettingsFromIndexMetadata(upgradedIndexMetadata); + final Set<String> SETTINGS_TO_CHECK = Set.of( + "lifecycle", + "mode", + "routing", + "hidden", + "number_of_shards", + "creation_date", + "number_of_replicas" + ); + for (String setting : SETTINGS_TO_CHECK) { + assertThat( + "Unexpected value for setting " + setting, + upgradedIndexSettings.get(setting), + equalTo(oldIndexSettings.get(setting)) + ); } } - private static void createAndRolloverDataStream(String dataStreamName, int numRollovers) throws IOException { + @SuppressWarnings("unchecked") + private Map<String, Object> getIndexSettingsFromIndexMetadata(Map<String, Object> indexMetadata)
{ + return (Map<String, Object>) ((Map<String, Object>) indexMetadata.get("settings")).get("index"); + } + + private void createAndRolloverDataStream(String dataStreamName, int numRollovers) throws IOException { + boolean useIlm = randomBoolean(); + if (useIlm) { + createIlmPolicy(); + } // We want to create a data stream and roll it over several times so that we have several indices to upgrade - final String template = """ + String template = """ { "settings":{ "index": { + $ILM_SETTING "mode": "time_series" } }, + $DSL_TEMPLATE "mappings":{ "dynamic_templates": [ { @@ -246,6 +333,19 @@ private static void createAndRolloverDataStream(String dataStreamName, int numRo } } """; + if (useIlm) { + template = template.replace("$ILM_SETTING", """ + "lifecycle.name": "test-lifecycle-policy", + """); + template = template.replace("$DSL_TEMPLATE", ""); + } else { + template = template.replace("$ILM_SETTING", ""); + template = template.replace("$DSL_TEMPLATE", """ + "lifecycle": { + "data_retention": "7d" + }, + """); + } final String indexTemplate = """ { "index_patterns": ["$PATTERN"], @@ -266,7 +366,181 @@ private static void createAndRolloverDataStream(String dataStreamName, int numRo } } - private void upgradeDataStream(String dataStreamName, int numRolloversOnOldCluster) throws Exception { + private static void createIlmPolicy() throws IOException { + String ilmPolicy = """ + { + "policy": { + "phases": { + "hot": { + "actions": { + "rollover": { + "max_primary_shard_size": "50kb" + } + } + }, + "warm": { + "min_age": "30d", + "actions": { + "shrink": { + "number_of_shards": 1 + }, + "forcemerge": { + "max_num_segments": 1 + } + } + } + } + } + }"""; + Request putIlmPolicyRequest = new Request("PUT", "_ilm/policy/test-lifecycle-policy"); + putIlmPolicyRequest.setJsonEntity(ilmPolicy); + assertOK(client().performRequest(putIlmPolicyRequest)); + } + + /* + * This returns a Map of index metadata for each index in the data stream, as retrieved from the cluster state. + */ + @SuppressWarnings("unchecked") + private Map<String, Map<String, Object>> getIndicesMetadata(String dataStreamName) throws IOException { + Request getClusterStateRequest = new Request("GET", "/_cluster/state/metadata/" + dataStreamName); + Response clusterStateResponse = client().performRequest(getClusterStateRequest); + Map<String, Object> clusterState = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + clusterStateResponse.getEntity().getContent(), + false + ); + return ((Map<String, Map<String, Map<String, Object>>>) clusterState.get("metadata")).get("indices"); + } + + private void createDataStreamFromNonDataStreamIndices(String dataStreamFromNonDataStreamIndices) throws IOException { + /* + * This method creates an index, creates an alias to that index, and then converts the aliased index into a data stream. This is + similar to the path that many indices (including system indices) took in versions 7/8.
+ */ + // First, we create an ordinary index with no @timestamp mapping: + final String templateWithNoTimestamp = """ + { + "mappings":{ + "properties": { + "message": { + "type": "text" + } + } + } + } + """; + // Note that this is not a data stream template: + final String indexTemplate = """ + { + "index_patterns": ["$PATTERN"], + "template": $TEMPLATE + }"""; + var putIndexTemplateRequest = new Request("POST", "/_index_template/reindex_test_data_stream_index_template"); + putIndexTemplateRequest.setJsonEntity( + indexTemplate.replace("$TEMPLATE", templateWithNoTimestamp).replace("$PATTERN", dataStreamFromNonDataStreamIndices + "-*") + ); + String indexName = dataStreamFromNonDataStreamIndices + "-01"; + if (minimumTransportVersion().before(TransportVersions.V_8_0_0)) { + /* + * It is not possible to create a 7.x index template with a type. And you can't create an empty index with a type. But you can + * create the index with a type by posting a document to an index with a type. We do that here so that we test that the type is + * removed when we reindex into 8.x. + */ + String typeName = "test-type"; + Request createIndexRequest = new Request("POST", indexName + "/" + typeName); + createIndexRequest.setJsonEntity(""" + { + "@timestamp": "2099-11-15T13:12:00", + "message": "GET /search HTTP/1.1 200 1070000", + "user": { + "id": "kimchy" + } + }"""); + createIndexRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build()); + assertOK(client().performRequest(createIndexRequest)); + } + assertOK(client().performRequest(putIndexTemplateRequest)); + bulkLoadDataMissingTimestamp(indexName); + /* + * Next, we will change the index's mapping to include a @timestamp field since we are going to convert it to a data stream. But + * first we have to flush the translog to disk because adding a @timestamp field will cause errors if it is done before the translog + * is flushed: + */ + assertOK(client().performRequest(new Request("POST", indexName + "/_flush"))); + ensureHealth(indexName, (request -> { + request.addParameter("wait_for_nodes", "3"); + request.addParameter("wait_for_status", "green"); + request.addParameter("timeout", "70s"); + request.addParameter("level", "shards"); + })); + + // Updating the mapping to include @timestamp: + Request updateIndexMappingRequest = new Request("PUT", indexName + "/_mapping"); + updateIndexMappingRequest.setJsonEntity(""" + { + "properties": { + "@timestamp" : { + "type": "date" + }, + "message": { + "type": "text" + } + } + }"""); + assertOK(client().performRequest(updateIndexMappingRequest)); + + // Creating an alias with the same name that the data stream will have: + Request createAliasRequest = new Request("POST", "/_aliases"); + String aliasRequestBody = """ + { + "actions": [ + { + "add": { + "index": "$index", + "alias": "$alias" + } + } + ] + }"""; + createAliasRequest.setJsonEntity( + aliasRequestBody.replace("$index", indexName).replace("$alias", dataStreamFromNonDataStreamIndices) + ); + assertOK(client().performRequest(createAliasRequest)); + + // This is now just an aliased index. 
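Condensed, the conversion that follows is a short REST sequence: install a data-stream-enabled index template whose pattern matches the alias name, then call the data stream migrate API on the alias. A sketch using the low-level REST client, with an invented alias name ("logs") and assuming the alias already points at its backing index:

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.RestClient;

    class MigrateAliasSketch {
        static void migrate(RestClient client) throws Exception {
            // 1. Install a data-stream-enabled template matching the alias name.
            Request template = new Request("POST", "/_index_template/logs-template");
            template.setJsonEntity("""
                {
                  "index_patterns": ["logs"],
                  "template": { "mappings": { "properties": { "@timestamp": { "type": "date" } } } },
                  "data_stream": {}
                }""");
            client.performRequest(template);

            // 2. Convert the alias and its backing indices into a data stream of the same name.
            client.performRequest(new Request("POST", "/_data_stream/_migrate/logs"));
        }
    }
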
We'll convert it into a data stream + final String templateWithTimestamp = """ + { + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "message": { + "type": "text" + } + } + } + } + """; + final String dataStreamTemplate = """ + { + "index_patterns": ["$PATTERN"], + "template": $TEMPLATE, + "data_stream": { + } + }"""; + var putDataStreamTemplateRequest = new Request("POST", "/_index_template/reindex_test_data_stream_data_stream_template"); + putDataStreamTemplateRequest.setJsonEntity( + dataStreamTemplate.replace("$TEMPLATE", templateWithTimestamp).replace("$PATTERN", dataStreamFromNonDataStreamIndices) + ); + assertOK(client().performRequest(putDataStreamTemplateRequest)); + Request migrateToDataStreamRequest = new Request("POST", "/_data_stream/_migrate/" + dataStreamFromNonDataStreamIndices); + assertOK(client().performRequest(migrateToDataStreamRequest)); + } + + @SuppressWarnings("unchecked") + private void upgradeDataStream(String dataStreamName, int numRolloversOnOldCluster, int expectedSuccessesCount, int expectedErrorCount) + throws Exception { Set<String> indicesNeedingUpgrade = getDataStreamIndices(dataStreamName); final int explicitRolloverOnNewClusterCount = randomIntBetween(0, 2); for (int i = 0; i < explicitRolloverOnNewClusterCount; i++) { @@ -334,16 +608,19 @@ private void upgradeDataStream(String dataStreamName, int numRolloversOnOldClust statusResponseMap.get("total_indices_requiring_upgrade"), equalTo(originalWriteIndex + numRolloversOnOldCluster) ); - assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1)); + assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(expectedSuccessesCount)); // We expect all the original indices to have been deleted - for (String oldIndex : indicesNeedingUpgrade) { - assertThat(statusResponseString, indexExists(oldIndex), equalTo(false)); + if (expectedErrorCount == 0) { + for (String oldIndex : indicesNeedingUpgrade) { + assertThat(statusResponseString, indexExists(oldIndex), equalTo(false)); + } } assertThat( statusResponseString, getDataStreamIndices(dataStreamName).size(), equalTo(expectedTotalIndicesInDataStream) ); + assertThat(statusResponseString, ((List<Object>) statusResponseMap.get("errors")).size(), equalTo(expectedErrorCount)); } }, 60, TimeUnit.SECONDS); Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel"); @@ -399,6 +676,26 @@ private static void bulkLoadData(String dataStreamName) throws IOException { assertOK(response); } + /* + * This bulk-loads data, where some documents have no @timestamp field and some do.
+        Request migrateToDataStreamRequest = new Request("POST", "/_data_stream/_migrate/" + dataStreamFromNonDataStreamIndices);
+        assertOK(client().performRequest(migrateToDataStreamRequest));
+    }
+
+    @SuppressWarnings("unchecked")
+    private void upgradeDataStream(String dataStreamName, int numRolloversOnOldCluster, int expectedSuccessesCount, int expectedErrorCount)
+        throws Exception {
         Set<String> indicesNeedingUpgrade = getDataStreamIndices(dataStreamName);
         final int explicitRolloverOnNewClusterCount = randomIntBetween(0, 2);
         for (int i = 0; i < explicitRolloverOnNewClusterCount; i++) {
@@ -334,16 +608,19 @@ private void upgradeDataStream(String dataStreamName, int numRolloversOnOldClust
                     statusResponseMap.get("total_indices_requiring_upgrade"),
                     equalTo(originalWriteIndex + numRolloversOnOldCluster)
                 );
-                assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1));
+                assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(expectedSuccessesCount));
                 // We expect all the original indices to have been deleted
-                for (String oldIndex : indicesNeedingUpgrade) {
-                    assertThat(statusResponseString, indexExists(oldIndex), equalTo(false));
+                if (expectedErrorCount == 0) {
+                    for (String oldIndex : indicesNeedingUpgrade) {
+                        assertThat(statusResponseString, indexExists(oldIndex), equalTo(false));
+                    }
                 }
                 assertThat(
                     statusResponseString,
                     getDataStreamIndices(dataStreamName).size(),
                     equalTo(expectedTotalIndicesInDataStream)
                 );
+                assertThat(statusResponseString, ((List) statusResponseMap.get("errors")).size(), equalTo(expectedErrorCount));
             }
         }, 60, TimeUnit.SECONDS);
         Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel");
@@ -399,6 +676,26 @@ private static void bulkLoadData(String dataStreamName) throws IOException {
         assertOK(response);
     }
 
+    /*
+     * This bulk-loads data where some documents have no @timestamp field and some do.
+     */
+    private static void bulkLoadDataMissingTimestamp(String dataStreamName) throws IOException {
+        final String bulk = """
+            {"create": {}}
+            {"metricset": "pod", "k8s": {"pod": {"name": "cat", "network": {"tx": 2001818691, "rx": 802133794}}}}
+            {"create": {}}
+            {"metricset": "pod", "k8s": {"pod": {"name": "hamster", "network": {"tx": 2005177954, "rx": 801479970}}}}
+            {"create": {}}
+            {"metricset": "pod", "k8s": {"pod": {"name": "cow", "network": {"tx": 2006223737, "rx": 802337279}}}}
+            {"create": {}}
+            {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "rat", "network": {"tx": 2012916202, "rx": 803685721}}}}
+            """;
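+        // Only the last document (rat) carries a @timestamp; the three before it deliberately omit one.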
+        var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk");
+        bulkRequest.setJsonEntity(bulk.replace("$now", formatInstant(Instant.now())));
+        var response = client().performRequest(bulkRequest);
+        assertOK(response);
+    }
+
     static String formatInstant(Instant instant) {
         return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant);
     }
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java
index f9d28670dab65..79f42244b37c2 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java
@@ -65,7 +65,6 @@ protected static void waitForPendingUpgraderTasks() throws Exception {
      * The purpose of this test is to ensure that when a job is open through a rolling upgrade we upgrade the results
      * index mappings when it is assigned to an upgraded node even if no other ML endpoint is called after the upgrade
      */
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98560")
     public void testSnapshotUpgrader() throws Exception {
         Request adjustLoggingLevels = new Request("PUT", "/_cluster/settings");
         adjustLoggingLevels.setJsonEntity("""
@@ -98,6 +97,13 @@ public void testSnapshotUpgrader() throws Exception {
 
     @SuppressWarnings("unchecked")
     private void testSnapshotUpgradeFailsOnMixedCluster() throws Exception {
+        // TODO the mixed cluster assertions sometimes fail because the code that
+        // detects the mixed cluster relies on the transport versions being different.
+        // This assumption does not hold immediately after a version bump, when a new
+        // branch has just been cut, as the new branch will have the same transport
+        // version. See https://github.com/elastic/elasticsearch/issues/98560
+
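+        // An always-false assumption marks this test as skipped rather than failed: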
+        assumeTrue("The mixed cluster is not always detected correctly, see https://github.com/elastic/elasticsearch/issues/98560", false);
         Map<String, Object> jobs = entityAsMap(getJob(JOB_ID));
         String currentSnapshot = ((List<String>) XContentMapValues.extractValue("jobs.model_snapshot_id", jobs)).get(0);
 
@@ -154,7 +160,7 @@ private void testSnapshotUpgrade() throws Exception {
 
         List<Map<String, Object>> upgradedSnapshot = (List<Map<String, Object>>) entityAsMap(getModelSnapshots(JOB_ID, snapshotToUpgradeId))
             .get("model_snapshots");
-        assertThat(upgradedSnapshot, hasSize(1));
+        assertThat(upgradedSnapshot.toString(), upgradedSnapshot, hasSize(1));
         assertThat(upgradedSnapshot.get(0).get("latest_record_time_stamp"), equalTo(snapshotToUpgrade.get("latest_record_time_stamp")));
 
         // Does the snapshot still work?
@@ -273,7 +279,7 @@ private Response buildAndPutJob(String jobId, TimeValue bucketSpan) throws Excep
         return client().performRequest(request);
     }
 
-    private static List<String> generateData(
+    static List<String> generateData(
         long timestamp,
         TimeValue bucketSpan,
         int bucketCount,
diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java
index 95c3fd4fde916..a5330d3daf92f 100644
--- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java
+++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java
@@ -97,18 +97,18 @@ protected Environment createEnv(OptionSet options, ProcessInfo processInfo) thro
     public void testSuccessfullyGenerateAndStoreHash() throws Exception {
         execute();
         assertThat(terminal.getOutput(), hasLength(20));
-        KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configFile());
+        KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configDir());
         assertNotNull(keyStoreWrapper);
         keyStoreWrapper.decrypt(new char[0]);
         assertThat(keyStoreWrapper.getSettingNames(), containsInAnyOrder(AUTOCONFIG_ELASTIC_PASSWORD_HASH.getKey(), "keystore.seed"));
     }
 
     public void testExistingKeystoreWithWrongPassword() throws Exception {
-        KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configFile());
+        KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configDir());
         assertNotNull(keyStoreWrapper);
         keyStoreWrapper.decrypt(new char[0]);
         // set a random password so that we fail to decrypt it in GenerateElasticPasswordHash#execute
-        keyStoreWrapper.save(env.configFile(), randomAlphaOfLength(16).toCharArray());
+        keyStoreWrapper.save(env.configDir(), randomAlphaOfLength(16).toCharArray());
         UserException e = expectThrows(UserException.class, this::execute);
         assertThat(e.getMessage(), equalTo("Failed to generate a password for the elastic user"));
         assertThat(terminal.getOutput(), is(emptyString()));
diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java
index 1a0f098b45bde..93281e3453e5c 100644
--- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java
+++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java
@@ -17,20 +17,15 @@ public final class XPackRestTestConstants {
     public static final String[] TEMPLATE_NAMES_NO_ILM = new String[] { HISTORY_TEMPLATE_NAME_NO_ILM };
 
     // ML constants:
-    public static final String ML_META_INDEX_NAME = ".ml-meta";
-    public static final String CONFIG_INDEX = ".ml-config";
     public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";
     public static final String STATE_INDEX_PREFIX = ".ml-state";
-    public static final String RESULTS_INDEX_DEFAULT = "shared";
 
     public static final List<String> ML_POST_V7120_TEMPLATES = List.of(STATE_INDEX_PREFIX, RESULTS_INDEX_PREFIX);
 
     // Transform constants:
     public static final String TRANSFORM_TASK_NAME = "data_frame/transforms";
     public static final String TRANSFORM_INTERNAL_INDEX_PREFIX = ".transform-internal-";
-    public static final String TRANSFORM_NOTIFICATIONS_INDEX_PREFIX = ".transform-notifications-";
     public static final String TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED = ".data-frame-internal-";
-    public static final String TRANSFORM_NOTIFICATIONS_INDEX_PREFIX_DEPRECATED = ".data-frame-notifications-";
 
     private XPackRestTestConstants() {}
 }