/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.esql.qa.single_node;

import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

import org.elasticsearch.test.TestClustersThreadFilter;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator;
import org.elasticsearch.xpack.esql.qa.rest.generative.GenerativeRestTest;
import org.elasticsearch.xpack.esql.qa.rest.generative.command.CommandGenerator;
import org.junit.ClassRule;

/**
 * Generative (randomized-query) REST integration test specialized for the
 * time-series {@code TS} command surface. It inherits the query-generation
 * loop from {@link GenerativeRestTest} and overrides the hooks so that:
 * every generated pipeline starts with a {@code TS} source command, and only
 * time-series datasets are loaded (see {@link #requiresTimeSeries()}).
 */
@ThreadLeakFilters(filters = TestClustersThreadFilter.class)
public class GenerativeMetricsIT extends GenerativeRestTest {
    @ClassRule
    public static ElasticsearchCluster cluster = Clusters.testCluster();

    @Override
    protected String getTestRestCluster() {
        return cluster.getHttpAddresses();
    }

    @Override
    protected boolean supportsSourceFieldMapping() {
        // _source-mapping datasets are only usable on a single-node cluster here;
        // skip them when the test cluster has more than one node.
        return cluster.getNumNodes() == 1;
    }

    @Override
    protected CommandGenerator sourceCommand() {
        // Begin every generated query with TS instead of the default FROM.
        return EsqlQueryGenerator.timeSeriesSourceCommand();
    }

    @Override
    protected boolean requiresTimeSeries() {
        // Restricts dataset loading to indices with index.mode=time_series.
        return true;
    }
}
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java index b6cf5ff3a8d15..114fcde289cd9 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java @@ -23,12 +23,15 @@ import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.RenameGenerator; import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.SortGenerator; import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.StatsGenerator; +import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.TimeSeriesStatsGenerator; import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.WhereGenerator; import org.elasticsearch.xpack.esql.qa.rest.generative.command.source.FromGenerator; +import org.elasticsearch.xpack.esql.qa.rest.generative.command.source.TimeSeriesGenerator; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomBoolean; @@ -51,6 +54,11 @@ public record QueryExecuted(String query, int depth, List outputSchema, */ static List SOURCE_COMMANDS = List.of(FromGenerator.INSTANCE); + /** + * Commands at the beginning of queries that begin queries on time series indices, eg. TS + */ + static List TIME_SERIES_SOURCE_COMMANDS = List.of(TimeSeriesGenerator.INSTANCE); + /** * These are downstream commands, ie. 
that cannot appear as the first command in a query */ @@ -72,14 +80,27 @@ public record QueryExecuted(String query, int depth, List outputSchema, WhereGenerator.INSTANCE ); + static List TIME_SERIES_PIPE_COMMANDS = Stream.concat( + PIPE_COMMANDS.stream(), + Stream.of(TimeSeriesStatsGenerator.INSTANCE) + ).toList(); + public static CommandGenerator sourceCommand() { return randomFrom(SOURCE_COMMANDS); } + public static CommandGenerator timeSeriesSourceCommand() { + return randomFrom(TIME_SERIES_SOURCE_COMMANDS); + } + public static CommandGenerator randomPipeCommandGenerator() { return randomFrom(PIPE_COMMANDS); } + public static CommandGenerator randomMetricsPipeCommandGenerator() { + return randomFrom(TIME_SERIES_PIPE_COMMANDS); + } + public interface Executor { void run(CommandGenerator generator, CommandGenerator.CommandDescription current); @@ -95,8 +116,10 @@ public static void generatePipeline( final int depth, CommandGenerator commandGenerator, final CommandGenerator.QuerySchema schema, - Executor executor + Executor executor, + boolean isTimeSeries ) { + boolean canGenerateTimeSeries = isTimeSeries; CommandGenerator.CommandDescription desc = commandGenerator.generate(List.of(), List.of(), schema); executor.run(commandGenerator, desc); if (executor.continueExecuting() == false) { @@ -107,7 +130,28 @@ public static void generatePipeline( if (executor.currentSchema().isEmpty()) { break; } - commandGenerator = EsqlQueryGenerator.randomPipeCommandGenerator(); + boolean commandAllowed = false; + while (commandAllowed == false) { + commandGenerator = isTimeSeries && canGenerateTimeSeries + ? 
    /**
     * Builds a random time-series aggregation expression of the shape
     * {@code outer(inner_over_time(field))}, e.g. {@code max(rate(counter_field))},
     * for use after a TS source command.
     *
     * @param previousOutput the columns available at this point in the pipeline
     * @return a textual aggregation expression; never null — falls back to
     *         {@code count_over_time} on a date field when no compatible inner
     *         aggregation could be generated
     */
    public static String metricsAgg(List<Column> previousOutput) {
        // NOTE(review): generic type parameters were lost in the mangled diff;
        // reconstructed as List<Column> — confirm against the callers' signatures.
        String outerCommand = randomFrom("min", "max", "sum", "count", "avg");
        String innerCommand = switch (randomIntBetween(0, 3)) {
            case 0 -> {
                // input can be numerics + aggregate_metric_double
                String numericPlusAggMetricFieldName = randomMetricsNumericField(previousOutput);
                if (numericPlusAggMetricFieldName == null) {
                    yield null;
                }
                yield switch ((randomIntBetween(0, 6))) {
                    case 0 -> "max_over_time(" + numericPlusAggMetricFieldName + ")";
                    case 1 -> "min_over_time(" + numericPlusAggMetricFieldName + ")";
                    case 2 -> "sum_over_time(" + numericPlusAggMetricFieldName + ")";
                    case 3 -> {
                        // present_over_time yields a boolean — summing/averaging it is invalid
                        if (outerCommand.equals("sum") || outerCommand.equals("avg")) {
                            yield null;
                        }
                        yield "present_over_time(" + numericPlusAggMetricFieldName + ")";
                    }
                    case 4 -> {
                        // absent_over_time likewise cannot be combined with sum/avg
                        if (outerCommand.equals("sum") || outerCommand.equals("avg")) {
                            yield null;
                        }
                        yield "absent_over_time(" + numericPlusAggMetricFieldName + ")";
                    }
                    case 5 -> "count_over_time(" + numericPlusAggMetricFieldName + ")";
                    default -> "avg_over_time(" + numericPlusAggMetricFieldName + ")";
                };
            }
            case 1 -> {
                // input can be a counter
                String counterField = randomCounterField(previousOutput);
                if (counterField == null) {
                    yield null;
                }
                yield "rate(" + counterField + ")";
            }
            case 2 -> {
                // numerics except aggregate_metric_double
                // TODO: add to case 0 when support for aggregate_metric_double is added to these functions
                // TODO: add to case 1 when support for counters is added
                String numericFieldName = randomNumericField(previousOutput);
                if (numericFieldName == null) {
                    yield null;
                }
                if (previousOutput.stream()
                    .noneMatch(
                        column -> column.name.equals("@timestamp") && (column.type.equals("date_nanos") || column.type.equals("datetime"))
                    )) {
                    // first_over_time and last_over_time require @timestamp to be available and be either datetime or date_nanos
                    yield null;
                }
                yield (randomBoolean() ? "first_over_time(" : "last_over_time(") + numericFieldName + ")";
            }
            default -> {
                // TODO: add other types that count_over_time supports
                String otherFieldName = randomBoolean() ? randomStringField(previousOutput) : randomNumericOrDateField(previousOutput);
                if (otherFieldName == null) {
                    yield null;
                }
                if (randomBoolean()) {
                    yield "count_over_time(" + otherFieldName + ")";
                } else {
                    yield "count_distinct_over_time(" + otherFieldName + ")";
                    // TODO: replace with the below
                    // yield "count_distinct_over_time(" + otherFieldName + (randomBoolean() ? ", " + randomNonNegativeInt() : "") + ")";
                }
            }
        };
        if (innerCommand == null) {
            // TODO: figure out a default that maybe makes more sense than using a timestamp field
            innerCommand = "count_over_time(" + randomDateField(previousOutput) + ")";
        }
        return outerCommand + "(" + innerCommand + ")";
    }

    /**
     * Picks a random column whose type can feed the *_over_time aggregations:
     * plain numerics, aggregate_metric_double, or an "unsupported" multi-typed
     * field that is castable to aggregate_metric_double.
     *
     * @return a column name, or null when no candidate exists
     */
    public static String randomMetricsNumericField(List<Column> previousOutput) {
        Set<String> allowedTypes = Set.of("double", "long", "unsigned_long", "integer", "aggregate_metric_double");
        List<String> items = previousOutput.stream()
            .filter(
                x -> allowedTypes.contains(x.type())
                    || (x.type().equals("unsupported") && canBeCastedToAggregateMetricDouble(x.originalTypes()))
            )
            .map(Column::name)
            .toList();
        if (items.isEmpty()) {
            return null;
        }
        return items.get(randomIntBetween(0, items.size() - 1));
    }

    /** Picks a random counter-typed column for rate(); null when none exists. */
    public static String randomCounterField(List<Column> previousOutput) {
        return randomName(previousOutput, Set.of("counter_long", "counter_double", "counter_integer"));
    }

    /**
     * True when a multi-typed field mixes aggregate_metric_double only with
     * numeric types, i.e. every original type can be widened to
     * aggregate_metric_double.
     */
    private static boolean canBeCastedToAggregateMetricDouble(List<String> types) {
        return types.contains("aggregate_metric_double")
            && Set.of("double", "long", "unsigned_long", "integer", "aggregate_metric_double").containsAll(types);
    }
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -62,7 +62,15 @@ public abstract class GenerativeRestTest extends ESRestTestCase { "optimized incorrectly due to missing references", // https://github.com/elastic/elasticsearch/issues/131509 // Awaiting fixes for correctness - "Expecting at most \\[.*\\] columns, got \\[.*\\]" // https://github.com/elastic/elasticsearch/issues/129561 + "Expecting at most \\[.*\\] columns, got \\[.*\\]", // https://github.com/elastic/elasticsearch/issues/129561 + + // TS-command tests + "time-series .* the first aggregation .* is not allowed", + "count_star .* can't be used with TS command", + "time_series aggregate.* can only be used with the TS command", + "Invalid call to dataType on an unresolved object \\?LASTOVERTIME", // https://github.com/elastic/elasticsearch/issues/134791 + "class org.elasticsearch.compute.data..*Block cannot be cast to class org.elasticsearch.compute.data..*Block", // https://github.com/elastic/elasticsearch/issues/134793 + "Output has changed from \\[.*\\] to \\[.*\\]" // https://github.com/elastic/elasticsearch/issues/134794 ); public static final Set ALLOWED_ERROR_PATTERNS = ALLOWED_ERRORS.stream() @@ -79,6 +87,10 @@ public void setup() throws IOException { protected abstract boolean supportsSourceFieldMapping(); + protected boolean requiresTimeSeries() { + return false; + } + @AfterClass public static void wipeTestData() throws IOException { try { @@ -142,10 +154,14 @@ public List currentSchema() { final List previousCommands = new ArrayList<>(); EsqlQueryGenerator.QueryExecuted previousResult; }; - EsqlQueryGenerator.generatePipeline(MAX_DEPTH, EsqlQueryGenerator.sourceCommand(), mappingInfo, exec); + EsqlQueryGenerator.generatePipeline(MAX_DEPTH, sourceCommand(), mappingInfo, exec, requiresTimeSeries()); } } + 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe;

import org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator;
import org.elasticsearch.xpack.esql.qa.rest.generative.command.CommandGenerator;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.elasticsearch.test.ESTestCase.randomBoolean;
import static org.elasticsearch.test.ESTestCase.randomIntBetween;
import static org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator.randomDateField;

/**
 * Generates a STATS pipe command suitable for time-series (TS) pipelines:
 * aggregations may be time-series shaped ({@code agg1(agg2_over_time(field))})
 * or regular, and the grouping always includes a time bucket on a date field.
 */
public class TimeSeriesStatsGenerator implements CommandGenerator {

    public static final String STATS = "stats";
    public static final CommandGenerator INSTANCE = new TimeSeriesStatsGenerator();

    @Override
    public CommandDescription generate(
        List<CommandDescription> previousCommands,
        List<Column> previousOutput,
        QuerySchema schema
    ) {
        // NOTE(review): generic type parameters were lost in the mangled diff;
        // reconstructed here — confirm against CommandGenerator's declared signature.
        //
        // generates stats in the form of:
        // `STATS some_aggregation(some_field) by optional_grouping_field, non_optional = bucket(time_field, 5minute)`
        // where `some_aggregation` can be a time series aggregation in the form of agg1(agg2_over_time(some_field)),
        // or a regular aggregation.
        // There is a variable number of aggregations per pipe

        List<Column> nonNull = previousOutput.stream()
            .filter(EsqlQueryGenerator::fieldCanBeUsed)
            .filter(x -> x.type().equals("null") == false)
            .collect(Collectors.toList());
        if (nonNull.isEmpty()) {
            return EMPTY_DESCRIPTION;
        }
        String timestamp = randomDateField(nonNull);
        // if there's no timestamp field left, there's nothing to bucket on
        if (timestamp == null) {
            return EMPTY_DESCRIPTION;
        }

        // TODO: Switch back to using nonNull as possible arguments for aggregations. Using the timestamp field in both the bucket as well
        // as an argument in an aggregation causes all sorts of bizarre errors with confusing messages.
        List<Column> acceptableFields = nonNull.stream()
            .filter(c -> c.type().equals("datetime") == false && c.type().equals("date_nanos") == false)
            .filter(c -> c.name().equals("@timestamp") == false)
            .toList();

        StringBuilder cmd = new StringBuilder(" | stats ");

        // TODO: increase range max to 5
        int nStats = randomIntBetween(1, 2);
        for (int i = 0; i < nStats; i++) {
            // Half the time invent a fresh alias; otherwise reuse an existing
            // column name (falling back to a fresh one when none is available).
            String name;
            if (randomBoolean()) {
                name = EsqlQueryGenerator.randomIdentifier();
            } else {
                name = EsqlQueryGenerator.randomName(acceptableFields);
                if (name == null) {
                    name = EsqlQueryGenerator.randomIdentifier();
                }
            }
            // generate the aggregation
            String expression = randomBoolean()
                ? EsqlQueryGenerator.metricsAgg(acceptableFields)
                : EsqlQueryGenerator.agg(acceptableFields);
            if (i > 0) {
                cmd.append(",");
            }
            cmd.append(" ");
            cmd.append(name);
            cmd.append(" = ");
            cmd.append(expression);
        }

        cmd.append(" by ");
        if (randomBoolean()) {
            // optional extra grouping key before the mandatory time bucket
            var col = EsqlQueryGenerator.randomGroupableName(acceptableFields);
            if (col != null) {
                cmd.append(col + ", ");
            }
        }
        // TODO: add alternative time buckets
        // TODO: replace name of bucket with half chance of being EsqlQueryGenerator.randomName(previousOutput) if
        // is fixed https://github.com/elastic/elasticsearch/issues/134796
        cmd.append(EsqlQueryGenerator.randomIdentifier() + " = bucket(" + timestamp + ",1hour)");
        return new CommandDescription(STATS, this, cmd.toString(), Map.of());
    }

    @Override
    public ValidationResult validateOutput(
        List<CommandDescription> previousCommands,
        CommandDescription commandDescription,
        List<Column> previousColumns,
        List<List<Object>> previousOutput,
        List<Column> columns,
        List<List<Object>> output
    ) {
        // TODO validate columns
        return VALIDATION_OK;
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.esql.qa.rest.generative.command.source;

import org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator;
import org.elasticsearch.xpack.esql.qa.rest.generative.command.CommandGenerator;

import java.util.List;
import java.util.Map;

import static org.elasticsearch.test.ESTestCase.randomIntBetween;
import static org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator.indexPattern;

/**
 * Source-command generator that emits a {@code TS} command over one to three
 * randomly chosen (possibly repeated) time-series index patterns, e.g.
 * {@code ts index1,index2}. Counterpart to FromGenerator for TS pipelines.
 */
public class TimeSeriesGenerator implements CommandGenerator {

    public static final TimeSeriesGenerator INSTANCE = new TimeSeriesGenerator();

    @Override
    public CommandDescription generate(
        List<CommandDescription> previousCommands,
        List<Column> previousOutput,
        QuerySchema schema
    ) {
        // NOTE(review): generic type parameters were lost in the mangled diff;
        // reconstructed here — confirm against CommandGenerator's declared signature.
        // previousCommands/previousOutput are unused: a source command starts the pipeline.
        StringBuilder result = new StringBuilder("ts ");
        int items = randomIntBetween(1, 3);
        List<String> availableIndices = schema.baseIndices();
        for (int i = 0; i < items; i++) {
            String pattern = indexPattern(availableIndices.get(randomIntBetween(0, availableIndices.size() - 1)));
            if (i > 0) {
                result.append(",");
            }
            result.append(pattern);
        }
        String query = result.toString();
        return new CommandDescription("ts", this, query, Map.of());
    }

    @Override
    public ValidationResult validateOutput(
        List<CommandDescription> previousCommands,
        CommandDescription command,
        List<Column> previousColumns,
        List<List<Object>> previousOutput,
        List<Column> columns,
        List<List<Object>> output
    ) {
        // Source command output is not validated here.
        return VALIDATION_OK;
    }
}
+377,34 @@ private static boolean isSourceMappingDataset(TestDataset dataset) throws IOExce return mappingNode.get("_source") != null; } + private static boolean isTimeSeries(TestDataset dataset) throws IOException { + Settings settings = dataset.readSettingsFile(); + String mode = settings.get("index.mode"); + return (mode != null && mode.equalsIgnoreCase("time_series")); + } + public static void loadDataSetIntoEs( RestClient client, boolean supportsIndexModeLookup, boolean supportsSourceFieldMapping, boolean inferenceEnabled + ) throws IOException { + loadDataSetIntoEs(client, supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled, false); + } + + public static void loadDataSetIntoEs( + RestClient client, + boolean supportsIndexModeLookup, + boolean supportsSourceFieldMapping, + boolean inferenceEnabled, + boolean timeSeriesOnly ) throws IOException { loadDataSetIntoEs( client, supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled, + timeSeriesOnly, (restClient, indexName, indexMapping, indexSettings) -> { ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null); } @@ -397,12 +416,13 @@ private static void loadDataSetIntoEs( boolean supportsIndexModeLookup, boolean supportsSourceFieldMapping, boolean inferenceEnabled, + boolean timeSeriesOnly, IndexCreator indexCreator ) throws IOException { Logger logger = LogManager.getLogger(CsvTestsDataLoader.class); Set loadedDatasets = new HashSet<>(); - for (var dataset : availableDatasetsForEs(supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled)) { + for (var dataset : availableDatasetsForEs(supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled, timeSeriesOnly)) { load(client, dataset, logger, indexCreator); loadedDatasets.add(dataset.indexName); }