diff --git a/docs/changelog/133397.yaml b/docs/changelog/133397.yaml
new file mode 100644
index 0000000000000..6d97c25bfb066
--- /dev/null
+++ b/docs/changelog/133397.yaml
@@ -0,0 +1,5 @@
+pr: 133397
+summary: Push down loading of singleton dense double based field types to the ES819 doc values codec
+area: "Codec"
+type: enhancement
+issues: []
diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java
index 7efc449aa8560..f1356ac871393 100644
--- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java
@@ -78,6 +78,11 @@ protected void registerParameters(ParameterChecker checker) throws IOException {
checker.registerUpdateCheck(b -> b.field("coerce", false), m -> assertFalse(((ScaledFloatFieldMapper) m).coerce()));
}
+ @Override
+ protected boolean supportsBulkDoubleBlockReading() {
+ return true;
+ }
+
public void testExistsQueryDocValuesDisabled() throws IOException {
MapperService mapperService = createMapperService(fieldMapping(b -> {
minimalMapping(b);
diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesProducer.java
index 8214e9b2465c0..28db095250e44 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesProducer.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesProducer.java
@@ -45,6 +45,7 @@
import org.apache.lucene.util.packed.PackedInts;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.index.codec.tsdb.TSDBDocValuesEncoder;
+import org.elasticsearch.index.mapper.BlockDocValuesReader;
import org.elasticsearch.index.mapper.BlockLoader;
import java.io.IOException;
@@ -383,7 +384,13 @@ public long cost() {
}
@Override
- public BlockLoader.Block tryRead(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException {
+ public BlockLoader.Block tryRead(
+ BlockLoader.BlockFactory factory,
+ BlockLoader.Docs docs,
+ int offset,
+ BlockDocValuesReader.ToDouble toDouble
+ ) throws IOException {
+ assert toDouble == null;
if (ords instanceof BaseDenseNumericValues denseOrds) {
var block = tryReadAHead(factory, docs, offset);
if (block != null) {
@@ -457,7 +464,12 @@ public TermsEnum termsEnum() throws IOException {
}
@Override
- public BlockLoader.Block tryRead(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException {
+ public BlockLoader.Block tryRead(
+ BlockLoader.BlockFactory factory,
+ BlockLoader.Docs docs,
+ int offset,
+ BlockDocValuesReader.ToDouble toDouble
+ ) throws IOException {
return null;
}
@@ -504,7 +516,12 @@ public final long cost() {
}
@Override
- public BlockLoader.Block tryRead(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException {
+ public BlockLoader.Block tryRead(
+ BlockLoader.BlockFactory factory,
+ BlockLoader.Docs docs,
+ int offset,
+ BlockDocValuesReader.ToDouble toDouble
+ ) throws IOException {
return null;
}
@@ -1365,7 +1382,18 @@ public long longValue() throws IOException {
}
@Override
- public BlockLoader.Block tryRead(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException {
+ public BlockLoader.Block tryRead(
+ BlockLoader.BlockFactory factory,
+ BlockLoader.Docs docs,
+ int offset,
+ BlockDocValuesReader.ToDouble toDouble
+ ) throws IOException {
+ if (toDouble != null) {
+ try (BlockLoader.SingletonDoubleBuilder builder = factory.singletonDoubles(docs.count() - offset)) {
+ SingletonLongToDoubleDelegate delegate = new SingletonLongToDoubleDelegate(builder, toDouble);
+ return tryRead(delegate, docs, offset);
+ }
+ }
try (BlockLoader.SingletonLongBuilder builder = factory.singletonLongs(docs.count() - offset)) {
return tryRead(builder, docs, offset);
}
@@ -1774,4 +1802,55 @@ public BlockLoader.Builder endPositionEntry() {
public void close() {}
}
+ // Block builder that consumes long values and converts them to double using the provided converter function.
+ static final class SingletonLongToDoubleDelegate implements BlockLoader.SingletonLongBuilder {
+ private final BlockLoader.SingletonDoubleBuilder doubleBuilder;
+ private final BlockDocValuesReader.ToDouble toDouble;
+ private final double[] buffer = new double[ES819TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE];
+
+ // The passed builder is used to store the converted double values and produce the final block containing them.
+ SingletonLongToDoubleDelegate(BlockLoader.SingletonDoubleBuilder doubleBuilder, BlockDocValuesReader.ToDouble toDouble) {
+ this.doubleBuilder = doubleBuilder;
+ this.toDouble = toDouble;
+ }
+
+ @Override
+ public BlockLoader.SingletonLongBuilder appendLong(long value) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public BlockLoader.SingletonLongBuilder appendLongs(long[] values, int from, int length) {
+ assert length <= buffer.length : "length " + length + " > " + buffer.length;
+ for (int i = 0; i < length; i++) {
+ buffer[i] = toDouble.convert(values[from + i]);
+ }
+ doubleBuilder.appendDoubles(buffer, 0, length);
+ return this;
+ }
+
+ @Override
+ public BlockLoader.Block build() {
+ return doubleBuilder.build();
+ }
+
+ @Override
+ public BlockLoader.Builder appendNull() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public BlockLoader.Builder beginPositionEntry() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public BlockLoader.Builder endPositionEntry() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void close() {}
+ }
+
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java
index 281087f703236..06513444737f3 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java
@@ -122,7 +122,12 @@ public AllReader reader(LeafReaderContext context) throws IOException {
}
}
- static class SingletonLongs extends BlockDocValuesReader {
+ // Used for testing.
+ interface NumericDocValuesAccessor {
+ NumericDocValues numericDocValues();
+ }
+
+ static class SingletonLongs extends BlockDocValuesReader implements NumericDocValuesAccessor {
final NumericDocValues numericDocValues;
SingletonLongs(NumericDocValues numericDocValues) {
@@ -132,7 +137,7 @@ static class SingletonLongs extends BlockDocValuesReader {
@Override
public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset, boolean nullsFiltered) throws IOException {
if (numericDocValues instanceof BlockLoader.OptionalColumnAtATimeReader direct) {
- BlockLoader.Block result = direct.tryRead(factory, docs, offset);
+ BlockLoader.Block result = direct.tryRead(factory, docs, offset, null);
if (result != null) {
return result;
}
@@ -169,6 +174,11 @@ public int docId() {
public String toString() {
return "BlockDocValuesReader.SingletonLongs";
}
+
+ @Override
+ public NumericDocValues numericDocValues() {
+ return numericDocValues;
+ }
}
static class Longs extends BlockDocValuesReader {
@@ -387,7 +397,7 @@ public AllReader reader(LeafReaderContext context) throws IOException {
}
}
- private static class SingletonDoubles extends BlockDocValuesReader {
+ static class SingletonDoubles extends BlockDocValuesReader implements NumericDocValuesAccessor {
private final NumericDocValues docValues;
private final ToDouble toDouble;
@@ -398,6 +408,12 @@ private static class SingletonDoubles extends BlockDocValuesReader {
@Override
public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset, boolean nullsFiltered) throws IOException {
+ if (docValues instanceof BlockLoader.OptionalColumnAtATimeReader direct) {
+ BlockLoader.Block result = direct.tryRead(factory, docs, offset, toDouble);
+ if (result != null) {
+ return result;
+ }
+ }
try (BlockLoader.DoubleBuilder builder = factory.doublesFromDocValues(docs.count() - offset)) {
for (int i = offset; i < docs.count(); i++) {
int doc = docs.get(i);
@@ -430,9 +446,14 @@ public int docId() {
public String toString() {
return "BlockDocValuesReader.SingletonDoubles";
}
+
+ @Override
+ public NumericDocValues numericDocValues() {
+ return docValues;
+ }
}
- private static class Doubles extends BlockDocValuesReader {
+ static class Doubles extends BlockDocValuesReader {
private final SortedNumericDocValues docValues;
private final ToDouble toDouble;
@@ -715,7 +736,7 @@ public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset, boole
return readSingleDoc(factory, docs.get(offset));
}
if (ordinals instanceof BlockLoader.OptionalColumnAtATimeReader direct) {
- BlockLoader.Block block = direct.tryRead(factory, docs, offset);
+ BlockLoader.Block block = direct.tryRead(factory, docs, offset, null);
if (block != null) {
return block;
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
index f7f2720850ca4..b4fae3fd824de 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
@@ -65,9 +65,11 @@ interface OptionalColumnAtATimeReader {
/**
* Attempts to read the values of all documents in {@code docs}
* Returns {@code null} if unable to load the values.
+ *
+ * @param toDouble a function to convert long values to double, or null if no conversion is needed/supported
*/
@Nullable
- BlockLoader.Block tryRead(BlockFactory factory, Docs docs, int offset) throws IOException;
+ BlockLoader.Block tryRead(BlockFactory factory, Docs docs, int offset, BlockDocValuesReader.ToDouble toDouble) throws IOException;
}
interface RowStrideReader extends Reader {
@@ -435,6 +437,17 @@ interface BlockFactory {
*/
SingletonLongBuilder singletonLongs(int expectedCount);
+ /**
+ * Build a specialized builder for singleton dense double based fields with the following constraints:
+ *
+         * <ul>
+         *   <li>Only one value per document can be collected</li>
+         *   <li>No more than {@code expectedCount} values can be collected</li>
+         * </ul>
+ * @param expectedCount The maximum number of values to be collected.
+ */
+ SingletonDoubleBuilder singletonDoubles(int expectedCount);
+
/**
* Build a builder to load only {@code null}s.
*/
@@ -537,12 +550,20 @@ interface IntBuilder extends Builder {
* Specialized builder for collecting dense arrays of long values.
*/
interface SingletonLongBuilder extends Builder {
-
SingletonLongBuilder appendLong(long value);
SingletonLongBuilder appendLongs(long[] values, int from, int length);
}
+ /**
+ * Specialized builder for collecting dense arrays of double values.
+ */
+ interface SingletonDoubleBuilder extends Builder {
+ SingletonDoubleBuilder appendDouble(double value);
+
+ SingletonDoubleBuilder appendDoubles(double[] values, int from, int length);
+ }
+
interface LongBuilder extends Builder {
/**
* Appends a long to the current entry.
diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java
index 03c99cd995f70..2b23d62d72803 100644
--- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java
+++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java
@@ -773,7 +773,7 @@ public void testOptionalColumnAtATimeReader() throws Exception {
{
// bulk loading timestamp:
- var block = (TestBlock) timestampDV.tryRead(factory, docs, 0);
+ var block = (TestBlock) timestampDV.tryRead(factory, docs, 0, null);
assertNotNull(block);
assertEquals(size, block.size());
for (int j = 0; j < block.size(); j++) {
@@ -785,10 +785,10 @@ public void testOptionalColumnAtATimeReader() throws Exception {
}
{
// bulk loading counter field:
- var block = (TestBlock) counterDV.tryRead(factory, docs, 0);
+ var block = (TestBlock) counterDV.tryRead(factory, docs, 0, null);
assertNotNull(block);
assertEquals(size, block.size());
- var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, 0);
+ var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, 0, null);
assertNotNull(stringBlock);
assertEquals(size, stringBlock.size());
for (int j = 0; j < block.size(); j++) {
@@ -805,7 +805,7 @@ public void testOptionalColumnAtATimeReader() throws Exception {
}
{
// bulk loading gauge field:
- var block = (TestBlock) gaugeDV.tryRead(factory, docs, 0);
+ var block = (TestBlock) gaugeDV.tryRead(factory, docs, 0, null);
assertNotNull(block);
assertEquals(size, block.size());
for (int j = 0; j < block.size(); j++) {
@@ -843,7 +843,7 @@ public void testOptionalColumnAtATimeReader() throws Exception {
{
// bulk loading timestamp:
- var block = (TestBlock) timestampDV.tryRead(blockFactory, docs, randomOffset);
+ var block = (TestBlock) timestampDV.tryRead(blockFactory, docs, randomOffset, null);
assertNotNull(block);
assertEquals(size, block.size());
for (int j = 0; j < block.size(); j++) {
@@ -855,11 +855,11 @@ public void testOptionalColumnAtATimeReader() throws Exception {
}
{
// bulk loading counter field:
- var block = (TestBlock) counterDV.tryRead(factory, docs, randomOffset);
+ var block = (TestBlock) counterDV.tryRead(factory, docs, randomOffset, null);
assertNotNull(block);
assertEquals(size, block.size());
- var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, randomOffset);
+ var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, randomOffset, null);
assertNotNull(stringBlock);
assertEquals(size, stringBlock.size());
@@ -877,7 +877,7 @@ public void testOptionalColumnAtATimeReader() throws Exception {
}
{
// bulk loading gauge field:
- var block = (TestBlock) gaugeDV.tryRead(factory, docs, randomOffset);
+ var block = (TestBlock) gaugeDV.tryRead(factory, docs, randomOffset, null);
assertNotNull(block);
assertEquals(size, block.size());
for (int j = 0; j < block.size(); j++) {
@@ -902,11 +902,11 @@ public void testOptionalColumnAtATimeReader() throws Exception {
stringCounterDV = getBaseSortedDocValues(leafReader, counterFieldAsString);
{
// bulk loading counter field:
- var block = (TestBlock) counterDV.tryRead(factory, docs, 0);
+ var block = (TestBlock) counterDV.tryRead(factory, docs, 0, null);
assertNotNull(block);
assertEquals(size, block.size());
- var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, 0);
+ var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, 0, null);
assertNotNull(stringBlock);
assertEquals(size, stringBlock.size());
@@ -1001,7 +1001,7 @@ public void testOptionalColumnAtATimeReaderWithSparseDocs() throws Exception {
var docs = TestBlock.docs(docIds);
{
timestampDV = getBaseDenseNumericValues(leafReader, timestampField);
- var block = (TestBlock) timestampDV.tryRead(factory, docs, 0);
+ var block = (TestBlock) timestampDV.tryRead(factory, docs, 0, null);
assertNotNull(block);
assertEquals(numDocsPerQValue, block.size());
for (int j = 0; j < block.size(); j++) {
@@ -1012,7 +1012,7 @@ public void testOptionalColumnAtATimeReaderWithSparseDocs() throws Exception {
}
{
counterDV = getBaseDenseNumericValues(leafReader, counterField);
- var block = (TestBlock) counterDV.tryRead(factory, docs, 0);
+ var block = (TestBlock) counterDV.tryRead(factory, docs, 0, null);
assertNotNull(block);
assertEquals(numDocsPerQValue, block.size());
for (int j = 0; j < block.size(); j++) {
@@ -1023,7 +1023,7 @@ public void testOptionalColumnAtATimeReaderWithSparseDocs() throws Exception {
}
{
counterAsStringDV = getBaseSortedDocValues(leafReader, counterAsStringField);
- var block = (TestBlock) counterAsStringDV.tryRead(factory, docs, 0);
+ var block = (TestBlock) counterAsStringDV.tryRead(factory, docs, 0, null);
assertNotNull(block);
assertEquals(numDocsPerQValue, block.size());
for (int j = 0; j < block.size(); j++) {
@@ -1086,7 +1086,7 @@ public int get(int i) {
}
};
var idReader = ESTestCase.asInstanceOf(OptionalColumnAtATimeReader.class, leaf.reader().getNumericDocValues("id"));
- TestBlock idBlock = (TestBlock) idReader.tryRead(factory, docs, 0);
+ TestBlock idBlock = (TestBlock) idReader.tryRead(factory, docs, 0, null);
assertNotNull(idBlock);
{
@@ -1100,7 +1100,7 @@ public int get(int i) {
block = (TestBlock) reader2.tryReadAHead(factory, docs, randomOffset);
} else {
assertNull(reader2.tryReadAHead(factory, docs, randomOffset));
- block = (TestBlock) reader2.tryRead(factory, docs, randomOffset);
+ block = (TestBlock) reader2.tryRead(factory, docs, randomOffset, null);
}
assertNotNull(block);
assertThat(block.size(), equalTo(docs.count() - randomOffset));
@@ -1122,7 +1122,7 @@ public int get(int i) {
block = (TestBlock) reader3.tryReadAHead(factory, docs, randomOffset);
} else {
assertNull(reader3.tryReadAHead(factory, docs, randomOffset));
- block = (TestBlock) reader3.tryRead(factory, docs, randomOffset);
+ block = (TestBlock) reader3.tryRead(factory, docs, randomOffset, null);
}
assertNotNull(reader3);
assertNotNull(block);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
index 0b005bf4ba7b3..db4022c8be539 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
@@ -804,7 +804,7 @@ public void testLegacyDateFormatName() {
}
- protected boolean supportsBulkBlockReading() {
+ protected boolean supportsBulkLongBlockReading() {
return true;
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleFieldMapperTests.java
index 14e2b1b42fdb9..c56ad3a1e56eb 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleFieldMapperTests.java
@@ -74,6 +74,11 @@ protected Number randomNumber() {
return randomBoolean() ? randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true) : randomFloat();
}
+ @Override
+ protected boolean supportsBulkDoubleBlockReading() {
+ return true;
+ }
+
public void testScriptAndPrecludedParameters() {
{
Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java
index ccae6c44ebc02..148c7b1b86e44 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java
@@ -49,6 +49,11 @@ protected Number randomNumber() {
return randomBoolean() ? randomDoubleBetween(-Float.MAX_VALUE, Float.MAX_VALUE, true) : randomFloat();
}
+ @Override
+ protected boolean supportsBulkDoubleBlockReading() {
+ return true;
+ }
+
@Override
protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) {
return new NumberSyntheticSourceSupport(Number::floatValue, ignoreMalformed);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java
index 905f266010012..8264f53661320 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java
@@ -68,4 +68,9 @@ protected SyntheticSourceSupport syntheticSourceSupportForKeepTests(boolean igno
protected IngestScriptSupport ingestScriptSupport() {
throw new AssumptionViolatedException("not supported");
}
+
+ @Override
+ protected boolean supportsBulkDoubleBlockReading() {
+ return true;
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java
index 07a352c1fde96..2c1a8ac181135 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java
@@ -165,7 +165,7 @@ public void execute() {
};
}
- protected boolean supportsBulkBlockReading() {
+ protected boolean supportsBulkLongBlockReading() {
return true;
}
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
index 80727795f8bb0..bcd45b8131392 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
@@ -1500,7 +1500,11 @@ public void testSyntheticSourceKeepArrays() throws IOException {
assertThat(actual, equalTo(expected));
}
- protected boolean supportsBulkBlockReading() {
+ protected boolean supportsBulkLongBlockReading() {
+ return false;
+ }
+
+ protected boolean supportsBulkDoubleBlockReading() {
return false;
}
@@ -1513,7 +1517,18 @@ protected Object[] getThreeEncodedSampleValues() {
}
public void testSingletonLongBulkBlockReading() throws IOException {
- assumeTrue("field type supports bulk singleton long reading", supportsBulkBlockReading());
+ assumeTrue("field type supports bulk singleton long reading", supportsBulkLongBlockReading());
+ testSingletonBulkBlockReading(columnAtATimeReader -> (BlockDocValuesReader.SingletonLongs) columnAtATimeReader);
+ }
+
+ public void testSingletonDoubleBulkBlockReading() throws IOException {
+ assumeTrue("field type supports bulk singleton double reading", supportsBulkDoubleBlockReading());
+ testSingletonBulkBlockReading(columnAtATimeReader -> (BlockDocValuesReader.SingletonDoubles) columnAtATimeReader);
+ }
+
+    private void testSingletonBulkBlockReading(Function<BlockLoader.ColumnAtATimeReader, BlockDocValuesReader> readerCast)
+ throws IOException {
+        // Callers perform the appropriate supportsBulk{Long,Double}BlockReading() assumeTrue check before delegating here.
var settings = indexSettings(IndexVersion.current(), 1, 1).put("index.mode", "logsdb").build();
var mapperService = createMapperService(settings, fieldMapping(this::minimalMapping));
var mapper = mapperService.documentMapper();
@@ -1539,8 +1554,11 @@ public void testSingletonLongBulkBlockReading() throws IOException {
assertThat(reader.numDocs(), equalTo(3));
LeafReaderContext context = reader.leaves().get(0);
var blockLoader = mapperService.fieldType("field").blockLoader(mockBlockContext);
- var columnReader = (BlockDocValuesReader.SingletonLongs) blockLoader.columnAtATimeReader(context);
- assertThat(columnReader.numericDocValues, instanceOf(BlockLoader.OptionalColumnAtATimeReader.class));
+ BlockDocValuesReader columnReader = readerCast.apply(blockLoader.columnAtATimeReader(context));
+ assertThat(
+ ((BlockDocValuesReader.NumericDocValuesAccessor) columnReader).numericDocValues(),
+ instanceOf(BlockLoader.OptionalColumnAtATimeReader.class)
+ );
var docBlock = TestBlock.docs(IntStream.range(0, 3).toArray());
var block = (TestBlock) columnReader.read(TestBlock.factory(), docBlock, 0, false);
for (int i = 0; i < block.size(); i++) {
@@ -1564,8 +1582,11 @@ public void testSingletonLongBulkBlockReading() throws IOException {
assertThat(reader.numDocs(), equalTo(3));
LeafReaderContext context = reader.leaves().get(0);
var blockLoader = mapperService.fieldType("field").blockLoader(mockBlockContext);
- var columnReader = (BlockDocValuesReader.SingletonLongs) blockLoader.columnAtATimeReader(context);
- assertThat(columnReader.numericDocValues, not(instanceOf(BlockLoader.OptionalColumnAtATimeReader.class)));
+ BlockDocValuesReader columnReader = readerCast.apply(blockLoader.columnAtATimeReader(context));
+ assertThat(
+ ((BlockDocValuesReader.NumericDocValuesAccessor) columnReader).numericDocValues(),
+ not(instanceOf(BlockLoader.OptionalColumnAtATimeReader.class))
+ );
var docBlock = TestBlock.docs(IntStream.range(0, 3).toArray());
var block = (TestBlock) columnReader.read(TestBlock.factory(), docBlock, 0, false);
assertThat(block.get(0), equalTo(expectedSampleValues[0]));
@@ -1595,7 +1616,10 @@ public void testSingletonLongBulkBlockReading() throws IOException {
LeafReaderContext context = reader.leaves().get(0);
var blockLoader = mapperService.fieldType("field").blockLoader(mockBlockContext);
var columnReader = blockLoader.columnAtATimeReader(context);
- assertThat(columnReader, instanceOf(BlockDocValuesReader.Longs.class));
+ assertThat(
+ columnReader,
+ anyOf(instanceOf(BlockDocValuesReader.Longs.class), instanceOf(BlockDocValuesReader.Doubles.class))
+ );
var docBlock = TestBlock.docs(IntStream.range(0, 3).toArray());
var block = (TestBlock) columnReader.read(TestBlock.factory(), docBlock, 0, false);
assertThat(block.get(0), equalTo(expectedSampleValues[0]));
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java
index 09d7a4147563b..52396409a8dd6 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java
@@ -202,12 +202,19 @@ public LongsBuilder appendLong(long value) {
@Override
public BlockLoader.SingletonLongBuilder singletonLongs(int expectedCount) {
final long[] values = new long[expectedCount];
+
return new BlockLoader.SingletonLongBuilder() {
private int count;
+            // No converter state: long->double conversion never goes through this builder.
             @Override
             public BlockLoader.Block build() {
+                // When a ToDouble converter is supplied, production code routes through
+                // singletonDoubles() and a converting delegate (SingletonLongToDoubleDelegate)
+                // rather than this long builder, so no conversion branch is needed here.
+                // A previous always-null ToDouble field made such a branch unreachable
+                // dead code; it has been removed.
return new TestBlock(Arrays.stream(values).boxed().collect(Collectors.toUnmodifiableList()));
}
@@ -244,6 +251,51 @@ public void close() {}
};
}
+ @Override
+ public BlockLoader.SingletonDoubleBuilder singletonDoubles(int expectedCount) {
+ final double[] values = new double[expectedCount];
+
+ return new BlockLoader.SingletonDoubleBuilder() {
+ private int count;
+
+ @Override
+ public BlockLoader.Block build() {
+ return new TestBlock(Arrays.stream(values).boxed().collect(Collectors.toUnmodifiableList()));
+ }
+
+ @Override
+ public BlockLoader.SingletonDoubleBuilder appendDoubles(double[] newValues, int from, int length) {
+ System.arraycopy(newValues, from, values, count, length);
+ count += length;
+ return this;
+ }
+
+ @Override
+ public BlockLoader.SingletonDoubleBuilder appendDouble(double value) {
+ values[count++] = value;
+ return this;
+ }
+
+ @Override
+ public BlockLoader.Builder appendNull() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public BlockLoader.Builder beginPositionEntry() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public BlockLoader.Builder endPositionEntry() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void close() {}
+ };
+ }
+
@Override
public BlockLoader.Builder nulls(int expectedCount) {
return longs(expectedCount);
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/DelegatingBlockLoaderFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/DelegatingBlockLoaderFactory.java
index b23114292680d..724d65f207840 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/DelegatingBlockLoaderFactory.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/DelegatingBlockLoaderFactory.java
@@ -108,6 +108,11 @@ public BlockLoader.SingletonLongBuilder singletonLongs(int expectedCount) {
return new SingletonLongBuilder(expectedCount, factory);
}
+ @Override
+ public BlockLoader.SingletonDoubleBuilder singletonDoubles(int expectedCount) {
+ return new SingletonDoubleBuilder(expectedCount, factory);
+ }
+
@Override
public BlockLoader.Builder nulls(int expectedCount) {
return ElementType.NULL.newBlockBuilder(expectedCount, factory);
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/SingletonDoubleBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/SingletonDoubleBuilder.java
new file mode 100644
index 0000000000000..576561a11946f
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/SingletonDoubleBuilder.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.lucene.read;
+
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.core.Releasable;
+import org.elasticsearch.index.mapper.BlockLoader;
+
+/**
+ * Like {@link org.elasticsearch.compute.data.DoubleBlockBuilder} but optimized for collecting dense single valued values.
+ * Additionally, this builder doesn't grow its array.
+ */
+public final class SingletonDoubleBuilder implements BlockLoader.SingletonDoubleBuilder, Releasable, Block.Builder {
+
+ private final double[] values;
+ private final BlockFactory blockFactory;
+
+ private int count;
+
+ public SingletonDoubleBuilder(int expectedCount, BlockFactory blockFactory) {
+ this.blockFactory = blockFactory;
+ blockFactory.adjustBreaker(valuesSize(expectedCount));
+ this.values = new double[expectedCount];
+ }
+
+ @Override
+ public Block.Builder appendNull() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Block.Builder beginPositionEntry() {
+ throw new UnsupportedOperationException();
+
+ }
+
+ @Override
+ public Block.Builder endPositionEntry() {
+ throw new UnsupportedOperationException();
+
+ }
+
+ @Override
+ public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) {
+ throw new UnsupportedOperationException();
+
+ }
+
+ @Override
+ public Block.Builder mvOrdering(Block.MvOrdering mvOrdering) {
+ throw new UnsupportedOperationException();
+
+ }
+
+ @Override
+ public long estimatedBytes() {
+ return (long) values.length * Double.BYTES;
+ }
+
+ @Override
+ public Block build() {
+ return blockFactory.newDoubleArrayVector(values, count).asBlock();
+ }
+
+ @Override
+ public BlockLoader.SingletonDoubleBuilder appendDouble(double value) {
+ values[count++] = value;
+ return this;
+ }
+
+ @Override
+ public BlockLoader.SingletonDoubleBuilder appendDoubles(double[] values, int from, int length) {
+ System.arraycopy(values, from, this.values, count, length);
+ count += length;
+ return this;
+ }
+
+ @Override
+ public void close() {
+ blockFactory.adjustBreaker(-valuesSize(values.length));
+ }
+
+ static long valuesSize(int count) {
+ return (long) count * Double.BYTES;
+ }
+}
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/SingletonLongBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/SingletonLongBuilder.java
index e82ed921d2c45..84df5feb09ff5 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/SingletonLongBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/SingletonLongBuilder.java
@@ -65,7 +65,7 @@ public long estimatedBytes() {
@Override
public Block build() {
- return blockFactory.newLongArrayVector(values, count, 0L).asBlock();
+ return blockFactory.newLongArrayVector(values, count).asBlock();
}
@Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/SingletonDoubleBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/SingletonDoubleBuilderTests.java
new file mode 100644
index 0000000000000..9c6d376da3e8a
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/SingletonDoubleBuilderTests.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.lucene.read;
+
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.analysis.MockAnalyzer;
+import org.apache.lucene.tests.util.TestUtil;
+import org.elasticsearch.common.breaker.CircuitBreakingException;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.DoubleVector;
+import org.elasticsearch.compute.test.ComputeTestCase;
+import org.elasticsearch.index.codec.tsdb.es819.ES819TSDBDocValuesFormat;
+import org.elasticsearch.indices.CrankyCircuitBreakerService;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.test.MapMatcher.assertMap;
+import static org.elasticsearch.test.MapMatcher.matchesMap;
+import static org.hamcrest.Matchers.equalTo;
+
+public class SingletonDoubleBuilderTests extends ComputeTestCase {
+
+ public void testReader() throws IOException {
+ testRead(blockFactory());
+ }
+
+ public void testReadWithCranky() throws IOException {
+ var factory = crankyBlockFactory();
+ try {
+ testRead(factory);
+ // If we made it this far cranky didn't fail us!
+ } catch (CircuitBreakingException e) {
+ logger.info("cranky", e);
+ assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
+ }
+ assertThat(factory.breaker().getUsed(), equalTo(0L));
+ }
+
+ private void testRead(BlockFactory factory) throws IOException {
+ Double[] values = new Double[] { 1.1, 2.2, 3.3, 4.4 };
+
+ int count = 1000;
+ try (Directory directory = newDirectory()) {
+ try (IndexWriter indexWriter = createIndexWriter(directory)) {
+ for (int i = 0; i < count; i++) {
+ double v = values[i % values.length];
+ indexWriter.addDocument(List.of(new NumericDocValuesField("field", Double.doubleToRawLongBits(v))));
+ }
+ }
+ Map<Double, Integer> counts = new HashMap<>();
+ try (IndexReader reader = DirectoryReader.open(directory)) {
+ for (LeafReaderContext ctx : reader.leaves()) {
+ var docValues = ctx.reader().getNumericDocValues("field");
+ try (var builder = new SingletonDoubleBuilder(ctx.reader().maxDoc(), factory)) {
+ for (int i = 0; i < ctx.reader().maxDoc(); i++) {
+ assertThat(docValues.advanceExact(i), equalTo(true));
+ double value = Double.longBitsToDouble(docValues.longValue());
+ if (randomBoolean()) {
+ builder.appendDoubles(new double[] { value }, 0, 1);
+ } else {
+ builder.appendDouble(value);
+ }
+ }
+ try (var build = (DoubleVector) builder.build().asVector()) {
+ for (int i = 0; i < build.getPositionCount(); i++) {
+ double key = build.getDouble(i);
+ counts.merge(key, 1, Integer::sum);
+ }
+ }
+ }
+ }
+ }
+ int expectedCount = count / values.length;
+ assertMap(
+ counts,
+ matchesMap().entry(values[0], expectedCount)
+ .entry(values[1], expectedCount)
+ .entry(values[2], expectedCount)
+ .entry(values[3], expectedCount)
+ );
+ }
+ }
+
+ public void testMoreValues() throws IOException {
+ int count = 1_000;
+ try (Directory directory = newDirectory()) {
+ try (IndexWriter indexWriter = createIndexWriter(directory)) {
+ for (int i = 0; i < count; i++) {
+ indexWriter.addDocument(List.of(new NumericDocValuesField("field", Double.doubleToRawLongBits(i / (double) count))));
+ }
+ indexWriter.forceMerge(1);
+ }
+ try (IndexReader reader = DirectoryReader.open(directory)) {
+ assertThat(reader.leaves().size(), equalTo(1));
+ LeafReader leafReader = reader.leaves().get(0).reader();
+ var docValues = leafReader.getNumericDocValues("field");
+ int offset = 850;
+ try (var builder = new SingletonDoubleBuilder(count - offset, blockFactory())) {
+ for (int i = offset; i < leafReader.maxDoc(); i++) {
+ assertThat(docValues.advanceExact(i), equalTo(true));
+ double value = Double.longBitsToDouble(docValues.longValue());
+ if (randomBoolean()) {
+ builder.appendDoubles(new double[] { value }, 0, 1);
+ } else {
+ builder.appendDouble(value);
+ }
+ }
+ try (var build = (DoubleVector) builder.build().asVector()) {
+ assertThat(build.getPositionCount(), equalTo(count - offset));
+ for (int i = 0; i < build.getPositionCount(); i++) {
+ double key = build.getDouble(i);
+ assertThat(key, equalTo((offset + i) / (double) count));
+ }
+ }
+ }
+ }
+ }
+ }
+
+ static IndexWriter createIndexWriter(Directory directory) throws IOException {
+ IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+ iwc.setCodec(TestUtil.alwaysDocValuesFormat(new ES819TSDBDocValuesFormat()));
+ return new IndexWriter(directory, iwc);
+ }
+
+}
diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java
index a679f14680e4e..d07e0ba3d0194 100644
--- a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java
+++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java
@@ -518,7 +518,7 @@ protected Object[] getThreeEncodedSampleValues() {
}
@Override
- protected boolean supportsBulkBlockReading() {
+ protected boolean supportsBulkLongBlockReading() {
return true;
}
}
diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java
index 5faccab610530..70df2763e30c6 100644
--- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java
+++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java
@@ -550,7 +550,7 @@ protected Object[] getThreeEncodedSampleValues() {
}
@Override
- protected boolean supportsBulkBlockReading() {
+ protected boolean supportsBulkLongBlockReading() {
return true;
}
}