@@ -25,7 +25,9 @@
import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase;
import org.elasticsearch.xpack.esql.CsvTestsDataLoader;
import org.elasticsearch.xpack.esql.SpecReader;
import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase;
import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase;
import org.junit.AfterClass;
import org.junit.ClassRule;
import org.junit.rules.RuleChain;
@@ -422,4 +424,17 @@ protected boolean supportsTook() throws IOException {
// We don't read took properly in multi-cluster tests.
return false;
}

@Override
protected boolean supportsExponentialHistograms() {
try {
return RestEsqlTestCase.hasCapabilities(client(), List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM.capabilityName()))
&& RestEsqlTestCase.hasCapabilities(
remoteClusterClient(),
List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM.capabilityName())
);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
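Side note on the override above: histogram datasets load only when every participating cluster, local and remote, advertises the `exponential_histogram` capability. A minimal sketch of how a test might consume such a flag, assuming a JUnit `Assume`-based guard; the class and method names here are illustrative, not part of this PR:

```java
import static org.junit.Assume.assumeTrue;

// Illustrative only: gate histogram-specific assertions on the capability flag.
abstract class HistogramGatedIT {
    protected abstract boolean supportsExponentialHistograms();

    protected void requireExponentialHistograms() {
        // Skips (rather than fails) the test when any cluster lacks the capability.
        assumeTrue("exponential_histogram capability missing", supportsExponentialHistograms());
    }
}
```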
@@ -16,13 +16,16 @@
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase;
import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
import org.elasticsearch.xpack.esql.planner.PlannerSettings;
import org.elasticsearch.xpack.esql.plugin.ComputeService;
import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase;
import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase;
import org.junit.Before;
import org.junit.ClassRule;

import java.io.IOException;
import java.util.List;

@ThreadLeakFilters(filters = TestClustersThreadFilter.class)
public class EsqlSpecIT extends EsqlSpecTestCase {
@@ -51,6 +54,11 @@ protected boolean supportsSourceFieldMapping() {
return cluster.getNumNodes() == 1;
}

@Override
protected boolean supportsExponentialHistograms() {
return RestEsqlTestCase.hasCapabilities(client(), List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM.capabilityName()));
}

@Before
public void configureChunks() throws IOException {
assumeTrue("test clusters were broken", testClustersOk);
@@ -16,6 +16,8 @@
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.Types;
import org.elasticsearch.exponentialhistogram.ExponentialHistogramXContent;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.Point;
@@ -26,11 +28,14 @@
import org.elasticsearch.test.MapMatcher;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.TestFeatureService;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase;
import org.elasticsearch.xpack.esql.CsvTestUtils;
import org.elasticsearch.xpack.esql.EsqlTestUtils;
import org.elasticsearch.xpack.esql.SpecReader;
import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
import org.elasticsearch.xpack.esql.plugin.EsqlFeatures;
import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode;
import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.RequestObjectBuilder;
@@ -173,7 +178,14 @@ public void setup() {
if (supportsInferenceTestService()) {
createInferenceEndpoints(adminClient());
}
- loadDataSetIntoEs(client(), supportsIndexModeLookup(), supportsSourceFieldMapping(), supportsInferenceTestService());
+ loadDataSetIntoEs(
+     client(),
+     supportsIndexModeLookup(),
+     supportsSourceFieldMapping(),
+     supportsInferenceTestService(),
+     false,
+     supportsExponentialHistograms()
+ );
return null;
});
}
@@ -274,6 +286,10 @@ protected boolean supportsSourceFieldMapping() throws IOException {
return true;
}

protected boolean supportsExponentialHistograms() {
return RestEsqlTestCase.hasCapabilities(client(), List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM.capabilityName()));
}

protected void doTest() throws Throwable {
doTest(testCase.query);
}
@@ -390,6 +406,18 @@ private Object valueMapper(CsvTestUtils.Type type, Object value) {
value = s.replaceAll("\\\\n", "\n");
}
}
if (type == CsvTestUtils.Type.EXPONENTIAL_HISTOGRAM) {
if (value instanceof Map<?, ?> map) {
return ExponentialHistogramXContent.parseForTesting(Types.<Map<String, Object>>forciblyCast(map));
}
if (value instanceof String json) {
try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) {
return ExponentialHistogramXContent.parseForTesting(parser);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
return value.toString();
}

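A note on the `EXPONENTIAL_HISTOGRAM` branch above: depending on the response format, a histogram cell may arrive either pre-decoded as a map or as raw JSON text, and both shapes get normalized through `ExponentialHistogramXContent.parseForTesting`. A standalone restatement of that logic, assuming both `parseForTesting` overloads return an `ExponentialHistogram` (the helper class itself is illustrative):

```java
import java.io.IOException;
import java.util.Map;

import org.elasticsearch.core.Types;
import org.elasticsearch.exponentialhistogram.ExponentialHistogram;
import org.elasticsearch.exponentialhistogram.ExponentialHistogramXContent;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;

// Illustrative helper: normalize either representation to an ExponentialHistogram.
final class HistogramValues {
    static ExponentialHistogram toHistogram(Object value) throws IOException {
        if (value instanceof Map<?, ?> map) {
            // Already-decoded JSON (e.g. from a parsed REST response).
            return ExponentialHistogramXContent.parseForTesting(Types.<Map<String, Object>>forciblyCast(map));
        }
        if (value instanceof String json) {
            // Raw JSON text (e.g. an expected value from a CSV spec file).
            try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) {
                return ExponentialHistogramXContent.parseForTesting(parser);
            }
        }
        throw new IllegalArgumentException("not a histogram value: " + value);
    }
}
```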
@@ -263,7 +263,7 @@ private static List<String> originalTypes(Map<String, ?> x) {
}

private List<String> availableIndices() throws IOException {
- return availableDatasetsForEs(true, supportsSourceFieldMapping(), false, requiresTimeSeries()).stream()
+ return availableDatasetsForEs(true, supportsSourceFieldMapping(), false, requiresTimeSeries(), false).stream()
.filter(x -> x.requiresInferenceEndpoint() == false)
.map(x -> x.indexName())
.toList();
@@ -12,6 +12,7 @@
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.exponentialhistogram.ExponentialHistogram;
import org.elasticsearch.geometry.utils.Geohash;
import org.elasticsearch.h3.H3;
import org.elasticsearch.logging.Logger;
@@ -46,6 +47,7 @@
import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN;
import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.aggregateMetricDoubleLiteralToString;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.exponentialHistogramToString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
@@ -431,6 +433,11 @@ private static Object convertExpectedValue(Type expectedType, Object expectedValue) {
AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral.class,
x -> aggregateMetricDoubleLiteralToString((AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral) x)
);
case EXPONENTIAL_HISTOGRAM -> rebuildExpected(
expectedValue,
ExponentialHistogram.class,
x -> exponentialHistogramToString((ExponentialHistogram) x)
);
default -> expectedValue;
};
}
@@ -23,15 +23,21 @@
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.core.Booleans;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.core.Strings;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.exponentialhistogram.ExponentialHistogram;
import org.elasticsearch.exponentialhistogram.ExponentialHistogramXContent;
import org.elasticsearch.geometry.utils.Geohash;
import org.elasticsearch.h3.H3;
import org.elasticsearch.logging.Logger;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.esql.action.ResponseValueUtils;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.core.util.StringUtils;
@@ -57,7 +63,6 @@
import java.util.regex.Pattern;
import java.util.stream.Stream;

- import static org.elasticsearch.common.Strings.delimitedListToStringArray;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
import static org.elasticsearch.xpack.esql.EsqlTestUtils.reader;
import static org.elasticsearch.xpack.esql.SpecReader.shouldSkipLine;
@@ -143,13 +148,14 @@ void append(String stringValue) {
return;
}
stringValue = mvStrings[0].replace(ESCAPED_COMMA_SEQUENCE, ",");
- } else if (stringValue.contains(",") && type != Type.AGGREGATE_METRIC_DOUBLE) {// multi-value field
+ } else if (stringValue.matches(".*" + COMMA_ESCAPING_REGEX + ".*") && type != Type.AGGREGATE_METRIC_DOUBLE) {// multi-value
+ // field
builderWrapper().builder().beginPositionEntry();

- String[] arrayOfValues = delimitedListToStringArray(stringValue, ",");
+ String[] arrayOfValues = stringValue.split(COMMA_ESCAPING_REGEX, -1);
List<Object> convertedValues = new ArrayList<>(arrayOfValues.length);
for (String value : arrayOfValues) {
- convertedValues.add(type.convert(value));
+ convertedValues.add(type.convert(value.replace(ESCAPED_COMMA_SEQUENCE, ",")));
}
Stream<Object> convertedValuesStream = convertedValues.stream();
if (type.sortMultiValues()) {
Expand All @@ -161,7 +167,7 @@ void append(String stringValue) {
return;
}

- var converted = stringValue.length() == 0 ? null : type.convert(stringValue);
+ var converted = stringValue.length() == 0 ? null : type.convert(stringValue.replace(ESCAPED_COMMA_SEQUENCE, ","));
builderWrapper().append().accept(converted);
}

@@ -498,6 +504,7 @@ public enum Type {
AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral.class
),
DENSE_VECTOR(Float::parseFloat, Float.class, false),
EXPONENTIAL_HISTOGRAM(CsvTestUtils::parseExponentialHistogram, ExponentialHistogram.class),
UNSUPPORTED(Type::convertUnsupported, Void.class);

private static Void convertUnsupported(String s) {
@@ -593,7 +600,7 @@ public static Type asType(ElementType elementType, Type actualType) {
case DOC -> throw new IllegalArgumentException("can't assert on doc blocks");
case COMPOSITE -> throw new IllegalArgumentException("can't assert on composite blocks");
case AGGREGATE_METRIC_DOUBLE -> AGGREGATE_METRIC_DOUBLE;
- case EXPONENTIAL_HISTOGRAM -> throw new IllegalArgumentException("exponential histogram blocks not supported yet");
+ case EXPONENTIAL_HISTOGRAM -> EXPONENTIAL_HISTOGRAM;
case UNKNOWN -> throw new IllegalArgumentException("Unknown block types cannot be handled");
};
}
@@ -699,4 +706,15 @@ private static double scaledFloat(String value, String factor) {
double scalingFactor = Double.parseDouble(factor);
return new BigDecimal(value).multiply(BigDecimal.valueOf(scalingFactor)).longValue() / scalingFactor;
}

private static ExponentialHistogram parseExponentialHistogram(@Nullable String json) {
if (json == null) {
return null;
}
try (XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json)) {
return ExponentialHistogramXContent.parseForTesting(parser);
} catch (IOException e) {
throw new IllegalArgumentException(e);
}
}
}
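The comma-handling change in `append` above exists because exponential-histogram cells are JSON and contain literal commas, which the old `delimitedListToStringArray(stringValue, ",")` split would shred. The loader now splits on `COMMA_ESCAPING_REGEX` and un-escapes `ESCAPED_COMMA_SEQUENCE` afterwards. Those constants are defined elsewhere in `CsvTestUtils` and not shown in this diff, so the values in the self-contained sketch below are assumptions chosen only to demonstrate the mechanism:

```java
import java.util.ArrayList;
import java.util.List;

final class CommaEscapingDemo {
    // Assumed: values escape literal commas as "\," in the CSV cell.
    private static final String ESCAPED_COMMA_SEQUENCE = "\\,";
    // Assumed: match only commas NOT preceded by a backslash.
    private static final String COMMA_ESCAPING_REGEX = "(?<!\\\\),";

    // Split a multi-value cell on unescaped commas, then restore escaped ones.
    static List<String> splitMultiValue(String cell) {
        List<String> values = new ArrayList<>();
        for (String part : cell.split(COMMA_ESCAPING_REGEX, -1)) {
            values.add(part.replace(ESCAPED_COMMA_SEQUENCE, ","));
        }
        return values;
    }

    public static void main(String[] args) {
        // Two values; the second contains a literal comma inside JSON.
        System.out.println(splitMultiValue("foo,{\"a\"\\, \"b\"}"));
        // -> [foo, {"a", "b"}]
    }
}
```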
@@ -172,6 +172,7 @@ public class CsvTestsDataLoader {
private static final TestDataset DENSE_VECTOR = new TestDataset("dense_vector");
private static final TestDataset COLORS = new TestDataset("colors");
private static final TestDataset COLORS_CMYK_LOOKUP = new TestDataset("colors_cmyk").withSetting("lookup-settings.json");
private static final TestDataset EXP_HISTO_SAMPLE = new TestDataset("exp_histo_sample");

public static final Map<String, TestDataset> CSV_DATASET_MAP = Map.ofEntries(
Map.entry(EMPLOYEES.indexName, EMPLOYEES),
@@ -239,7 +240,8 @@ public class CsvTestsDataLoader {
Map.entry(COLORS.indexName, COLORS),
Map.entry(COLORS_CMYK_LOOKUP.indexName, COLORS_CMYK_LOOKUP),
Map.entry(MULTI_COLUMN_JOINABLE.indexName, MULTI_COLUMN_JOINABLE),
- Map.entry(MULTI_COLUMN_JOINABLE_LOOKUP.indexName, MULTI_COLUMN_JOINABLE_LOOKUP)
+ Map.entry(MULTI_COLUMN_JOINABLE_LOOKUP.indexName, MULTI_COLUMN_JOINABLE_LOOKUP),
+ Map.entry(EXP_HISTO_SAMPLE.indexName, EXP_HISTO_SAMPLE)
);

private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json");
@@ -331,7 +333,7 @@ public static void main(String[] args) throws IOException {
}

try (RestClient client = builder.build()) {
- loadDataSetIntoEs(client, true, true, false, false, (restClient, indexName, indexMapping, indexSettings) -> {
+ loadDataSetIntoEs(client, true, true, false, false, true, (restClient, indexName, indexMapping, indexSettings) -> {
// don't use ESRestTestCase methods here or, if you do, test running the main method before making the change
StringBuilder jsonBody = new StringBuilder("{");
if (indexSettings != null && indexSettings.isEmpty() == false) {
@@ -354,15 +356,17 @@ public static Set<TestDataset> availableDatasetsForEs(
boolean supportsIndexModeLookup,
boolean supportsSourceFieldMapping,
boolean inferenceEnabled,
- boolean requiresTimeSeries
+ boolean requiresTimeSeries,
+ boolean exponentialHistogramFieldSupported
) throws IOException {
Set<TestDataset> testDataSets = new HashSet<>();

for (TestDataset dataset : CSV_DATASET_MAP.values()) {
if ((inferenceEnabled || dataset.requiresInferenceEndpoint == false)
&& (supportsIndexModeLookup || isLookupDataset(dataset) == false)
&& (supportsSourceFieldMapping || isSourceMappingDataset(dataset) == false)
- && (requiresTimeSeries == false || isTimeSeries(dataset))) {
+ && (requiresTimeSeries == false || isTimeSeries(dataset))
+ && (exponentialHistogramFieldSupported || containsExponentialHistogramFields(dataset) == false)) {
testDataSets.add(dataset);
}
}
@@ -386,6 +390,27 @@ private static boolean isSourceMappingDataset(TestDataset dataset) throws IOException {
return mappingNode.get("_source") != null;
}

private static boolean containsExponentialHistogramFields(TestDataset dataset) throws IOException {
if (dataset.mappingFileName() == null) {
return false;
}
String mappingJsonText = readTextFile(getResource("/" + dataset.mappingFileName()));
JsonNode mappingNode = new ObjectMapper().readTree(mappingJsonText);
JsonNode properties = mappingNode.get("properties");
if (properties != null) {
for (var fieldWithValue : properties.properties()) {
JsonNode fieldProperties = fieldWithValue.getValue();
if (fieldProperties != null) {
JsonNode typeNode = fieldProperties.get("type");
if (typeNode != null && typeNode.asText().equals("exponential_histogram")) {
return true;
}
}
}
}
return false;
}

private static boolean isTimeSeries(TestDataset dataset) throws IOException {
Settings settings = dataset.readSettingsFile();
String mode = settings.get("index.mode");
@@ -398,22 +423,24 @@ public static void loadDataSetIntoEs(
boolean supportsSourceFieldMapping,
boolean inferenceEnabled
) throws IOException {
- loadDataSetIntoEs(client, supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled, false);
+ loadDataSetIntoEs(client, supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled, false, false);
}

public static void loadDataSetIntoEs(
RestClient client,
boolean supportsIndexModeLookup,
boolean supportsSourceFieldMapping,
boolean inferenceEnabled,
- boolean timeSeriesOnly
+ boolean timeSeriesOnly,
+ boolean exponentialHistogramFieldSupported
) throws IOException {
loadDataSetIntoEs(
client,
supportsIndexModeLookup,
supportsSourceFieldMapping,
inferenceEnabled,
timeSeriesOnly,
+ exponentialHistogramFieldSupported,
(restClient, indexName, indexMapping, indexSettings) -> {
ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null);
}
@@ -426,13 +453,20 @@ private static void loadDataSetIntoEs(
boolean supportsSourceFieldMapping,
boolean inferenceEnabled,
boolean timeSeriesOnly,
+ boolean exponentialHistogramFieldSupported,
IndexCreator indexCreator
) throws IOException {
Logger logger = LogManager.getLogger(CsvTestsDataLoader.class);

Set<String> loadedDatasets = new HashSet<>();
logger.info("Loading test datasets");
- for (var dataset : availableDatasetsForEs(supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled, timeSeriesOnly)) {
+ for (var dataset : availableDatasetsForEs(
+     supportsIndexModeLookup,
+     supportsSourceFieldMapping,
+     inferenceEnabled,
+     timeSeriesOnly,
+     exponentialHistogramFieldSupported
+ )) {
load(client, dataset, logger, indexCreator);
loadedDatasets.add(dataset.indexName);
}
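One more note on `containsExponentialHistogramFields`: it inspects only the mapping's top-level `properties`, so nested objects or multi-fields with histogram subtypes would not be detected, which seems sufficient for the current test mappings. A standalone sketch of the same scan; the `latency` field and the mapping JSON are hypothetical, and `JsonNode.properties()` needs Jackson 2.15+ (older versions would use `fields()`):

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Illustrative standalone check mirroring containsExponentialHistogramFields.
public final class MappingScan {
    public static void main(String[] args) throws Exception {
        // Hypothetical example mapping with one histogram field.
        String mapping = """
            { "properties": { "latency": { "type": "exponential_histogram" } } }
            """;
        JsonNode properties = new ObjectMapper().readTree(mapping).get("properties");
        boolean found = false;
        if (properties != null) {
            for (var field : properties.properties()) {
                JsonNode type = field.getValue().get("type");
                found |= type != null && "exponential_histogram".equals(type.asText());
            }
        }
        System.out.println(found); // true
    }
}
```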