Skip to content

Commit a0e0148

Browse files
authored
ESNextDiskBBQVectorsWriter should use ESNextOSQVectorsScorer.BULK_SIZE, not ES91OSQVectorsScorer (#138760)
1 parent 233e90d commit a0e0148

File tree: 1 file changed (+14, −6 lines)

server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/next/ESNextDiskBBQVectorsWriter.java

Lines changed: 14 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,6 @@
 import org.elasticsearch.index.codec.vectors.diskbbq.QuantizedVectorValues;
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;
-import org.elasticsearch.simdvec.ES91OSQVectorsScorer;
 import org.elasticsearch.simdvec.ES92Int7VectorsScorer;
 import org.elasticsearch.simdvec.ESNextOSQVectorsScorer;

@@ -120,7 +119,11 @@ public CentroidOffsetAndLength buildAndWritePostingsLists(
         // write the posting lists
         final PackedLongValues.Builder offsets = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
         final PackedLongValues.Builder lengths = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
-        DiskBBQBulkWriter bulkWriter = DiskBBQBulkWriter.fromBitSize(quantEncoding.bits(), ES91OSQVectorsScorer.BULK_SIZE, postingsOutput);
+        DiskBBQBulkWriter bulkWriter = DiskBBQBulkWriter.fromBitSize(
+            quantEncoding.bits(),
+            ESNextOSQVectorsScorer.BULK_SIZE,
+            postingsOutput
+        );
         OnHeapQuantizedVectors onHeapQuantizedVectors = new OnHeapQuantizedVectors(
             floatVectorValues,
             quantEncoding,
@@ -156,11 +159,16 @@ public CentroidOffsetAndLength buildAndWritePostingsLists(
             docDeltas[j] = j == 0 ? docIds[clusterOrds[j]] : docIds[clusterOrds[j]] - docIds[clusterOrds[j - 1]];
         }
         onHeapQuantizedVectors.reset(centroid, size, ord -> cluster[clusterOrds[ord]]);
-        byte encoding = idsWriter.calculateBlockEncoding(i -> docDeltas[i], size, ES91OSQVectorsScorer.BULK_SIZE);
+        byte encoding = idsWriter.calculateBlockEncoding(i -> docDeltas[i], size, ESNextOSQVectorsScorer.BULK_SIZE);
         postingsOutput.writeByte(encoding);
         bulkWriter.writeVectors(onHeapQuantizedVectors, i -> {
             // for vector i we write `bulk` size docs or the remaining docs
-            idsWriter.writeDocIds(d -> docDeltas[i + d], Math.min(ES91OSQVectorsScorer.BULK_SIZE, size - i), encoding, postingsOutput);
+            idsWriter.writeDocIds(
+                d -> docDeltas[i + d],
+                Math.min(ESNextOSQVectorsScorer.BULK_SIZE, size - i),
+                encoding,
+                postingsOutput
+            );
         });
         lengths.add(postingsOutput.getFilePointer() - fileOffset - offset);
     }
@@ -275,7 +283,7 @@ public CentroidOffsetAndLength buildAndWritePostingsLists(
         );
         DiskBBQBulkWriter bulkWriter = DiskBBQBulkWriter.fromBitSize(
             quantEncoding.bits(),
-            ES91OSQVectorsScorer.BULK_SIZE,
+            ESNextOSQVectorsScorer.BULK_SIZE,
             postingsOutput
         );
         final ByteBuffer buffer = ByteBuffer.allocate(fieldInfo.getVectorDimension() * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN);
@@ -308,7 +316,7 @@ public CentroidOffsetAndLength buildAndWritePostingsLists(
         for (int j = 0; j < size; j++) {
             docDeltas[j] = j == 0 ? docIds[clusterOrds[j]] : docIds[clusterOrds[j]] - docIds[clusterOrds[j - 1]];
         }
-        byte encoding = idsWriter.calculateBlockEncoding(i -> docDeltas[i], size, ES91OSQVectorsScorer.BULK_SIZE);
+        byte encoding = idsWriter.calculateBlockEncoding(i -> docDeltas[i], size, ESNextOSQVectorsScorer.BULK_SIZE);
         postingsOutput.writeByte(encoding);
         offHeapQuantizedVectors.reset(size, ord -> isOverspill[clusterOrds[ord]], ord -> cluster[clusterOrds[ord]]);
         // write vectors

Comments (0)