4 changes: 2 additions & 2 deletions server/src/main/java/module-info.java
@@ -7,7 +7,6 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/

-import org.elasticsearch.index.codec.vectors.diskbbq.ES920DiskBBQVectorsFormat;
import org.elasticsearch.plugins.internal.RestExtension;
import org.elasticsearch.reservedstate.ReservedStateHandlerProvider;

@@ -463,7 +462,8 @@
org.elasticsearch.index.codec.vectors.es816.ES816HnswBinaryQuantizedVectorsFormat,
org.elasticsearch.index.codec.vectors.es818.ES818BinaryQuantizedVectorsFormat,
org.elasticsearch.index.codec.vectors.es818.ES818HnswBinaryQuantizedVectorsFormat,
-ES920DiskBBQVectorsFormat;
+org.elasticsearch.index.codec.vectors.diskbbq.ES920DiskBBQVectorsFormat,
+org.elasticsearch.index.codec.vectors.diskbbq.next.ESNextDiskBBQVectorsFormat;

provides org.apache.lucene.codecs.Codec
with
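Note on the second hunk: in a modular build, Lucene discovers vector formats through the ServiceLoader, so every class listed in the provides clause must be public with a public no-arg constructor that passes its SPI name up to KnnVectorsFormat. A minimal sketch of the shape this implies (hypothetical body; the real ESNextDiskBBQVectorsFormat in org.elasticsearch.index.codec.vectors.diskbbq.next is not part of this diff):

import java.io.IOException;
import org.apache.lucene.codecs.KnnVectorsFormat;
import org.apache.lucene.codecs.KnnVectorsReader;
import org.apache.lucene.codecs.KnnVectorsWriter;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;

public class ESNextDiskBBQVectorsFormat extends KnnVectorsFormat {

    public ESNextDiskBBQVectorsFormat() {
        super("ESNextDiskBBQVectorsFormat"); // the name KnnVectorsFormat.forName(...) resolves via SPI
    }

    @Override
    public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException {
        throw new UnsupportedOperationException("sketch only");
    }

    @Override
    public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException {
        throw new UnsupportedOperationException("sketch only");
    }
}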
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/CentroidAssignments.java
@@ -9,9 +9,9 @@

package org.elasticsearch.index.codec.vectors.diskbbq;

-record CentroidAssignments(int numCentroids, float[][] centroids, int[] assignments, int[] overspillAssignments) {
+public record CentroidAssignments(int numCentroids, float[][] centroids, int[] assignments, int[] overspillAssignments) {

-CentroidAssignments(float[][] centroids, int[] assignments, int[] overspillAssignments) {
+public CentroidAssignments(float[][] centroids, int[] assignments, int[] overspillAssignments) {
this(centroids.length, centroids, assignments, overspillAssignments);
assert assignments.length == overspillAssignments.length || overspillAssignments.length == 0
: "assignments and overspillAssignments must have the same length";
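A hedged usage example of the now-public record, assuming it runs inside a method with access to the package. The secondary constructor derives numCentroids from the centroid array, and the assertion permits an empty overspill array as an alternative to one matching assignments in length:

float[][] centroids = { { 0.1f, 0.2f }, { 0.9f, 0.8f } };
int[] assignments = { 0, 1, 1 }; // one centroid ordinal per vector
int[] overspill = new int[0]; // empty is allowed; otherwise it must match assignments.length
CentroidAssignments a = new CentroidAssignments(centroids, assignments, overspill);
assert a.numCentroids() == 2; // derived from centroids.length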
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/CentroidSupplier.java
@@ -14,7 +14,7 @@
/**
* An interface that supplies centroids.
*/
-interface CentroidSupplier {
+public interface CentroidSupplier {
CentroidSupplier EMPTY = new CentroidSupplier() {
@Override
public int size() {
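These visibility changes all serve the same goal: the new org.elasticsearch.index.codec.vectors.diskbbq.next package can only reach public members of diskbbq. A minimal illustration (the Example class is hypothetical):

package org.elasticsearch.index.codec.vectors.diskbbq.next;

import org.elasticsearch.index.codec.vectors.diskbbq.CentroidSupplier;

class Example {
    int centroidCount(CentroidSupplier supplier) {
        return supplier.size(); // compiles only because CentroidSupplier is now public
    }
}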
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/DiskBBQBulkWriter.java
@@ -20,7 +20,7 @@
* This class provides the structure for writing vectors in bulk, with specific
* implementations for different bit-size strategies.
*/
-abstract class DiskBBQBulkWriter {
+public abstract class DiskBBQBulkWriter {
protected final int bulkSize;
protected final IndexOutput out;

@@ -29,18 +29,18 @@ protected DiskBBQBulkWriter(int bulkSize, IndexOutput out) {
this.out = out;
}

-abstract void writeVectors(QuantizedVectorValues qvv, CheckedIntConsumer<IOException> docsWriter) throws IOException;
+public abstract void writeVectors(QuantizedVectorValues qvv, CheckedIntConsumer<IOException> docsWriter) throws IOException;

-static class OneBitDiskBBQBulkWriter extends DiskBBQBulkWriter {
+public static class OneBitDiskBBQBulkWriter extends DiskBBQBulkWriter {
private final OptimizedScalarQuantizer.QuantizationResult[] corrections;

-OneBitDiskBBQBulkWriter(int bulkSize, IndexOutput out) {
+public OneBitDiskBBQBulkWriter(int bulkSize, IndexOutput out) {
super(bulkSize, out);
this.corrections = new OptimizedScalarQuantizer.QuantizationResult[bulkSize];
}

@Override
-void writeVectors(QuantizedVectorValues qvv, CheckedIntConsumer<IOException> docsWriter) throws IOException {
+public void writeVectors(QuantizedVectorValues qvv, CheckedIntConsumer<IOException> docsWriter) throws IOException {
int limit = qvv.count() - bulkSize + 1;
int i = 0;
for (; i < limit; i += bulkSize) {
@@ -93,16 +93,16 @@ private void writeCorrection(OptimizedScalarQuantizer.QuantizationResult correct
}
}

-static class SevenBitDiskBBQBulkWriter extends DiskBBQBulkWriter {
+public static class SevenBitDiskBBQBulkWriter extends DiskBBQBulkWriter {
private final OptimizedScalarQuantizer.QuantizationResult[] corrections;

-SevenBitDiskBBQBulkWriter(int bulkSize, IndexOutput out) {
+public SevenBitDiskBBQBulkWriter(int bulkSize, IndexOutput out) {
super(bulkSize, out);
this.corrections = new OptimizedScalarQuantizer.QuantizationResult[bulkSize];
}

@Override
-void writeVectors(QuantizedVectorValues qvv, CheckedIntConsumer<IOException> docsWriter) throws IOException {
+public void writeVectors(QuantizedVectorValues qvv, CheckedIntConsumer<IOException> docsWriter) throws IOException {
int limit = qvv.count() - bulkSize + 1;
int i = 0;
for (; i < limit; i += bulkSize) {
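Both writeVectors implementations share one loop shape worth calling out: write as many full bulkSize blocks as fit, then fall through to a scalar tail. Stripped of the quantization details, the pattern is:

int limit = count - bulkSize + 1;
int i = 0;
for (; i < limit; i += bulkSize) {
    // write one full block of bulkSize vectors, then their grouped corrections
}
for (; i < count; i++) {
    // write the remaining count % bulkSize vectors one at a time
}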
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/DocIdsWriter.java
@@ -30,7 +30,7 @@
*
* <p>It is copied from the BKD implementation.
*/
-final class DocIdsWriter {
+public final class DocIdsWriter {

private static final byte CONTINUOUS_IDS = (byte) -2;
private static final byte DELTA_BPV_16 = (byte) 16;
@@ -40,7 +40,7 @@ final class DocIdsWriter {

private int[] scratch = new int[0];

-DocIdsWriter() {}
+public DocIdsWriter() {}

/**
* Calculate the best encoding that will be used to write blocks of doc ids of blockSize.
@@ -51,7 +51,7 @@
* @param blockSize the block size
* @return the byte encoding to use for the blocks
*/
-byte calculateBlockEncoding(IntToIntFunction docIds, int count, int blockSize) {
+public byte calculateBlockEncoding(IntToIntFunction docIds, int count, int blockSize) {
if (count == 0) {
return CONTINUOUS_IDS;
}
@@ -90,7 +90,7 @@ byte calculateBlockEncoding(IntToIntFunction docIds, int count, int blockSize) {
}
}

-void writeDocIds(IntToIntFunction docIds, int count, byte encoding, DataOutput out) throws IOException {
+public void writeDocIds(IntToIntFunction docIds, int count, byte encoding, DataOutput out) throws IOException {
if (count == 0) {
return;
}
@@ -206,7 +206,7 @@ private static int[] sortedAndMaxAndMin2Max(IntToIntFunction docIds, int count)
return new int[] { (strictlySorted && min2max == count) ? 1 : 0, max, min2max };
}

-void writeDocIds(IntToIntFunction docIds, int count, DataOutput out) throws IOException {
+public void writeDocIds(IntToIntFunction docIds, int count, DataOutput out) throws IOException {
if (count == 0) {
return;
}
@@ -253,7 +253,7 @@ void writeDocIds(IntToIntFunction docIds, int count, DataOutput out) throws IOEx
}
}

-void readInts(IndexInput in, int count, byte encoding, int[] docIDs) throws IOException {
+public void readInts(IndexInput in, int count, byte encoding, int[] docIDs) throws IOException {
if (count == 0) {
return;
}
@@ -271,7 +271,7 @@ void readInts(IndexInput in, int count, byte encoding, int[] docIDs) throws IOEx
}

/** Read {@code count} integers into {@code docIDs}. */
-void readInts(IndexInput in, int count, int[] docIDs) throws IOException {
+public void readInts(IndexInput in, int count, int[] docIDs) throws IOException {
if (count == 0) {
return;
}
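A hedged round-trip sketch of the now-public API, inside a method that throws IOException, using only methods visible in this diff. It assumes (mirroring the BKD original this class is copied from) that the writeDocIds variant without an explicit encoding argument records the encoding byte itself:

import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.ByteBuffersIndexInput;
import org.apache.lucene.store.IndexInput;

int[] docIds = { 10, 11, 12, 40, 41, 57 };
DocIdsWriter writer = new DocIdsWriter();
ByteBuffersDataOutput out = new ByteBuffersDataOutput();
writer.writeDocIds(i -> docIds[i], docIds.length, out); // chooses and records an encoding

int[] decoded = new int[docIds.length];
try (IndexInput in = new ByteBuffersIndexInput(out.toDataInput(), "docIds")) {
    writer.readInts(in, decoded.length, decoded); // reads the encoding byte, then the ids
}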
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/ES920DiskBBQVectorsReader.java
@@ -43,7 +43,8 @@ public ES920DiskBBQVectorsReader(SegmentReadState state, Map<String, FlatVectors
super(state, rawVectorsReader);
}

-CentroidIterator getPostingListPrefetchIterator(CentroidIterator centroidIterator, IndexInput postingListSlice) throws IOException {
+public CentroidIterator getPostingListPrefetchIterator(CentroidIterator centroidIterator, IndexInput postingListSlice)
+throws IOException {
return new CentroidIterator() {
CentroidOffsetAndLength nextOffsetAndLength = centroidIterator.hasNext()
? centroidIterator.nextPostingListOffsetAndLength()
@@ -80,7 +81,7 @@ public CentroidOffsetAndLength nextPostingListOffsetAndLength() throws IOExcepti
}

@Override
-CentroidIterator getCentroidIterator(
+public CentroidIterator getCentroidIterator(
FieldInfo fieldInfo,
int numCentroids,
IndexInput centroids,
@@ -348,7 +349,8 @@ private static void score(
}

@Override
-PostingVisitor getPostingVisitor(FieldInfo fieldInfo, IndexInput indexInput, float[] target, Bits acceptDocs) throws IOException {
+public PostingVisitor getPostingVisitor(FieldInfo fieldInfo, IndexInput indexInput, float[] target, Bits acceptDocs)
+throws IOException {
FieldEntry entry = fields.get(fieldInfo.number);
final int maxPostingListSize = indexInput.readVInt();
return new MemorySegmentPostingsVisitor(target, indexInput, entry, fieldInfo, maxPostingListSize, acceptDocs);
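The hunk truncates the body of getPostingListPrefetchIterator, so the following is a hedged reconstruction of the lookahead pattern its first lines establish, not the verbatim implementation: keep the next offset/length one step ahead of the caller so IndexInput#prefetch can be issued before the posting list is actually read.

static CentroidIterator withPrefetch(CentroidIterator inner, IndexInput slice) throws IOException {
    return new CentroidIterator() {
        CentroidOffsetAndLength next = inner.hasNext() ? advance() : null;

        private CentroidOffsetAndLength advance() throws IOException {
            CentroidOffsetAndLength n = inner.nextPostingListOffsetAndLength();
            slice.prefetch(n.offset(), n.length()); // hint the I/O layer ahead of use
            return n;
        }

        @Override
        public boolean hasNext() {
            return next != null;
        }

        @Override
        public CentroidOffsetAndLength nextPostingListOffsetAndLength() throws IOException {
            CentroidOffsetAndLength current = next;
            next = inner.hasNext() ? advance() : null;
            return current;
        }
    };
}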
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/ES920DiskBBQVectorsWriter.java
@@ -64,7 +64,7 @@ public ES920DiskBBQVectorsWriter(
}

@Override
-CentroidOffsetAndLength buildAndWritePostingsLists(
+public CentroidOffsetAndLength buildAndWritePostingsLists(
FieldInfo fieldInfo,
CentroidSupplier centroidSupplier,
FloatVectorValues floatVectorValues,
@@ -160,7 +160,7 @@ CentroidOffsetAndLength buildAndWritePostingsLists(

@Override
@SuppressForbidden(reason = "require usage of Lucene's IOUtils#deleteFilesIgnoringExceptions(...)")
-CentroidOffsetAndLength buildAndWritePostingsLists(
+public CentroidOffsetAndLength buildAndWritePostingsLists(
FieldInfo fieldInfo,
CentroidSupplier centroidSupplier,
FloatVectorValues floatVectorValues,
@@ -347,12 +347,17 @@ private static void printClusterQualityStatistics(int[][] clusters) {
}

@Override
-CentroidSupplier createCentroidSupplier(IndexInput centroidsInput, int numCentroids, FieldInfo fieldInfo, float[] globalCentroid) {
+public CentroidSupplier createCentroidSupplier(
+IndexInput centroidsInput,
+int numCentroids,
+FieldInfo fieldInfo,
+float[] globalCentroid
+) {
return new OffHeapCentroidSupplier(centroidsInput, numCentroids, fieldInfo);
}

@Override
-void writeCentroids(
+public void writeCentroids(
FieldInfo fieldInfo,
CentroidSupplier centroidSupplier,
float[] globalCentroid,
@@ -502,7 +507,7 @@ public int size() {
* @throws IOException if an I/O error occurs
*/
@Override
-CentroidAssignments calculateCentroids(FieldInfo fieldInfo, FloatVectorValues floatVectorValues, float[] globalCentroid)
+public CentroidAssignments calculateCentroids(FieldInfo fieldInfo, FloatVectorValues floatVectorValues, float[] globalCentroid)
throws IOException {

long nanoTime = System.nanoTime();
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/IVFVectorsReader.java
@@ -106,7 +106,7 @@ protected IVFVectorsReader(SegmentReadState state, Map<String, FlatVectorsReader
}
}

-abstract CentroidIterator getCentroidIterator(
+public abstract CentroidIterator getCentroidIterator(
FieldInfo fieldInfo,
int numCentroids,
IndexInput centroids,
@@ -388,18 +388,18 @@ IndexInput postingListSlice(IndexInput postingListFile) throws IOException {
}
}

-abstract PostingVisitor getPostingVisitor(FieldInfo fieldInfo, IndexInput postingsLists, float[] target, Bits needsScoring)
+public abstract PostingVisitor getPostingVisitor(FieldInfo fieldInfo, IndexInput postingsLists, float[] target, Bits needsScoring)
throws IOException;

-record CentroidOffsetAndLength(long offset, long length) {}
+public record CentroidOffsetAndLength(long offset, long length) {}

-interface CentroidIterator {
+public interface CentroidIterator {
boolean hasNext();

CentroidOffsetAndLength nextPostingListOffsetAndLength() throws IOException;
}

-interface PostingVisitor {
+public interface PostingVisitor {
/** returns the number of documents in the posting list */
int resetPostingsScorer(long offset) throws IOException;

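For reference, consuming the two CentroidIterator members declared above takes this shape (a sketch, given a centroidIterator obtained from getCentroidIterator; the scoring inside the loop is elided):

while (centroidIterator.hasNext()) {
    CentroidOffsetAndLength pl = centroidIterator.nextPostingListOffsetAndLength();
    // seek the postings slice to pl.offset() and visit pl.length() bytes of postings
}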
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/IVFVectorsWriter.java
@@ -127,20 +127,20 @@ public final KnnFieldVectorsWriter<?> addField(FieldInfo fieldInfo) throws IOExc
return rawVectorDelegate;
}

-abstract CentroidAssignments calculateCentroids(FieldInfo fieldInfo, FloatVectorValues floatVectorValues, float[] globalCentroid)
+public abstract CentroidAssignments calculateCentroids(FieldInfo fieldInfo, FloatVectorValues floatVectorValues, float[] globalCentroid)
throws IOException;

-record CentroidOffsetAndLength(LongValues offsets, LongValues lengths) {}
+public record CentroidOffsetAndLength(LongValues offsets, LongValues lengths) {}

-abstract void writeCentroids(
+public abstract void writeCentroids(
FieldInfo fieldInfo,
CentroidSupplier centroidSupplier,
float[] globalCentroid,
CentroidOffsetAndLength centroidOffsetAndLength,
IndexOutput centroidOutput
) throws IOException;

-abstract CentroidOffsetAndLength buildAndWritePostingsLists(
+public abstract CentroidOffsetAndLength buildAndWritePostingsLists(
FieldInfo fieldInfo,
CentroidSupplier centroidSupplier,
FloatVectorValues floatVectorValues,
@@ -150,7 +150,7 @@ abstract CentroidOffsetAndLength buildAndWritePostingsLists(
int[] overspillAssignments
) throws IOException;

-abstract CentroidOffsetAndLength buildAndWritePostingsLists(
+public abstract CentroidOffsetAndLength buildAndWritePostingsLists(
FieldInfo fieldInfo,
CentroidSupplier centroidSupplier,
FloatVectorValues floatVectorValues,
@@ -161,7 +161,7 @@ abstract CentroidOffsetAndLength buildAndWritePostingsLists(
int[] overspillAssignments
) throws IOException;

-abstract CentroidSupplier createCentroidSupplier(
+public abstract CentroidSupplier createCentroidSupplier(
IndexInput centroidsInput,
int numCentroids,
FieldInfo fieldInfo,
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/IntSorter.java
@@ -12,12 +12,12 @@
import org.apache.lucene.util.IntroSorter;
import org.apache.lucene.util.hnsw.IntToIntFunction;

-class IntSorter extends IntroSorter {
+public class IntSorter extends IntroSorter {
int pivot = -1;
private final int[] arr;
private final IntToIntFunction func;

-IntSorter(int[] arr, IntToIntFunction func) {
+public IntSorter(int[] arr, IntToIntFunction func) {
this.arr = arr;
this.func = func;
}
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/IntToBooleanFunction.java
@@ -13,6 +13,6 @@
* Functional interface representing a function that takes an integer input
* and produces a boolean output.
*/
-interface IntToBooleanFunction {
+public interface IntToBooleanFunction {
boolean apply(int value);
}
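Trivial by design; a usage example of the now-public functional interface:

IntToBooleanFunction isEven = value -> (value & 1) == 0;
boolean even = isEven.apply(4); // true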
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/QuantizedVectorValues.java
@@ -18,7 +18,7 @@
* Provides methods to iterate through the vectors and retrieve
* associated quantization correction data.
*/
-interface QuantizedVectorValues {
+public interface QuantizedVectorValues {
int count();

byte[] next() throws IOException;
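A hedged iteration sketch over the two members this hunk shows, count() and next(); the quantization-correction accessors mentioned in the javadoc are elided from the hunk and therefore from the sketch:

for (int i = 0; i < qvv.count(); i++) {
    byte[] quantized = qvv.next(); // the next quantized vector, in iteration order
    // hand off to a DiskBBQBulkWriter together with its correction data
}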