Merged
Changes from 2 commits
qa/vector/src/main/java/module-info.java (1 addition, 0 deletions)
@@ -11,6 +11,7 @@
     requires org.elasticsearch.base;
     requires org.elasticsearch.server;
     requires org.elasticsearch.xcontent;
+    requires org.elasticsearch.cli;
     requires org.apache.lucene.core;
     requires org.apache.lucene.codecs;
     requires org.apache.lucene.queries;
KnnIndexTester.java
@@ -15,8 +15,10 @@
 import org.apache.lucene.codecs.KnnVectorsFormat;
 import org.apache.lucene.codecs.lucene101.Lucene101Codec;
 import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat;
+import org.elasticsearch.cli.ProcessInfo;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.logging.LogConfigurator;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.PathUtils;
 import org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat;
 import org.elasticsearch.index.codec.vectors.ES814HnswScalarQuantizedVectorsFormat;
@@ -35,6 +37,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 
 /**
  * A utility class to create and test KNN indices using Lucene.
@@ -47,7 +50,13 @@ public class KnnIndexTester {
 
     static {
         LogConfigurator.loadLog4jPlugins();
-        LogConfigurator.configureESLogging(); // native access requires logging to be initialized
+
+        // necessary otherwise the es.logger.level system configuration in build.gradle is ignored
+        ProcessInfo pinfo = ProcessInfo.fromSystem();
+        Map<String, String> sysprops = pinfo.sysprops();
+        String loggerLevel = sysprops.getOrDefault("es.logger.level", Level.INFO.name());
+        Settings settings = Settings.builder().put("logger.level", loggerLevel).build();
+        LogConfigurator.configureWithoutConfig(settings);
     }
 
     static final String INDEX_DIR = "target/knn_index";
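
Note: after this change the tester resolves its log level from the es.logger.level system property (falling back to INFO), so debug output can be enabled at launch time. A hypothetical invocation, assuming the tool is started as a plain JVM process (the classpath, main-class name, and arguments below are placeholders, not taken from this PR):

    java -Des.logger.level=DEBUG -cp <classpath> <KnnIndexTester-main-class> <args>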
DefaultIVFVectorsWriter.java
@@ -17,11 +17,12 @@
 import org.apache.lucene.internal.hppc.IntArrayList;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.VectorUtil;
 import org.apache.lucene.util.quantization.OptimizedScalarQuantizer;
 import org.elasticsearch.index.codec.vectors.cluster.HierarchicalKMeans;
 import org.elasticsearch.index.codec.vectors.cluster.KMeansResult;
+import org.elasticsearch.logging.LogManager;
+import org.elasticsearch.logging.Logger;
 import org.elasticsearch.simdvec.ES91OSQVectorsScorer;
 
 import java.io.IOException;
@@ -31,14 +32,14 @@
 import static org.apache.lucene.codecs.lucene102.Lucene102BinaryQuantizedVectorsFormat.INDEX_BITS;
 import static org.apache.lucene.util.quantization.OptimizedScalarQuantizer.discretize;
 import static org.apache.lucene.util.quantization.OptimizedScalarQuantizer.packAsBinary;
-import static org.elasticsearch.index.codec.vectors.IVFVectorsFormat.IVF_VECTOR_COMPONENT;
 
 /**
  * Default implementation of {@link IVFVectorsWriter}. It uses {@link HierarchicalKMeans} algorithm to
  * partition the vector space, and then stores the centroids and posting list in a sequential
  * fashion.
  */
 public class DefaultIVFVectorsWriter extends IVFVectorsWriter {
+    private static final Logger logger = LogManager.getLogger(DefaultIVFVectorsWriter.class);
 
     private final int vectorPerCluster;
 
@@ -53,7 +54,6 @@ long[] buildAndWritePostingsLists(
         CentroidSupplier centroidSupplier,
         FloatVectorValues floatVectorValues,
         IndexOutput postingsOutput,
-        InfoStream infoStream,
         IntArrayList[] assignmentsByCluster
     ) throws IOException {
         // write the posting lists
@@ -79,14 +79,14 @@
             writePostingList(cluster, postingsOutput, binarizedByteVectorValues);
         }
 
-        if (infoStream.isEnabled(IVF_VECTOR_COMPONENT)) {
-            printClusterQualityStatistics(assignmentsByCluster, infoStream);
+        if (logger.isDebugEnabled()) {
+            printClusterQualityStatistics(assignmentsByCluster);
         }
 
         return offsets;
     }
 
-    private static void printClusterQualityStatistics(IntArrayList[] clusters, InfoStream infoStream) {
+    private static void printClusterQualityStatistics(IntArrayList[] clusters) {
         float min = Float.MAX_VALUE;
         float max = Float.MIN_VALUE;
         float mean = 0;
@@ -105,20 +105,9 @@ private static void printClusterQualityStatistics(IntArrayList[] clusters, InfoStream infoStream) {
             max = Math.max(max, cluster.size());
         }
         float variance = m2 / (clusters.length - 1);
-        infoStream.message(
-            IVF_VECTOR_COMPONENT,
-            "Centroid count: "
-                + clusters.length
-                + " min: "
-                + min
-                + " max: "
-                + max
-                + " mean: "
-                + mean
-                + " stdDev: "
-                + Math.sqrt(variance)
-                + " variance: "
-                + variance
+        logger.debug(
+            "Centroid count: {} min: {} max: {} mean: {} stdDev: {} variance: {}",
+            clusters.length, min, max, mean, Math.sqrt(variance), variance
         );
     }

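Aside on the statistics in this hunk: the loop (partially elided above) accumulates a running mean and a squared-deviation sum m2 in the style of Welford's online algorithm, and the final division by (clusters.length - 1) yields the sample variance. A minimal self-contained sketch of that recurrence, for illustration only (this is not the PR's code):

    // Welford's online mean/variance over cluster sizes (illustrative sketch).
    static double[] meanAndVariance(int[] sizes) {
        double mean = 0;
        double m2 = 0;
        for (int i = 0; i < sizes.length; i++) {
            double delta = sizes[i] - mean;
            mean += delta / (i + 1);          // running mean over the first i+1 sizes
            m2 += delta * (sizes[i] - mean);  // uses the mean *after* the update
        }
        double variance = m2 / (sizes.length - 1);  // sample variance, as in the hunk above
        return new double[] { mean, variance };
    }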
@@ -208,17 +197,16 @@ CentroidAssignments calculateAndWriteCentroids(
         float[] globalCentroid
     ) throws IOException {
         // TODO: take advantage of prior generated clusters from mergeState in the future
-        return calculateAndWriteCentroids(fieldInfo, floatVectorValues, centroidOutput, mergeState.infoStream, globalCentroid, false);
+        return calculateAndWriteCentroids(fieldInfo, floatVectorValues, centroidOutput, globalCentroid, false);
     }
 
     CentroidAssignments calculateAndWriteCentroids(
         FieldInfo fieldInfo,
         FloatVectorValues floatVectorValues,
         IndexOutput centroidOutput,
-        InfoStream infoStream,
         float[] globalCentroid
     ) throws IOException {
-        return calculateAndWriteCentroids(fieldInfo, floatVectorValues, centroidOutput, infoStream, globalCentroid, true);
+        return calculateAndWriteCentroids(fieldInfo, floatVectorValues, centroidOutput, globalCentroid, true);
     }
 
     /**
@@ -228,7 +216,6 @@ CentroidAssignments calculateAndWriteCentroids(
      * @param fieldInfo merging field info
      * @param floatVectorValues the float vector values to merge
      * @param centroidOutput the centroid output
-     * @param infoStream the merge state
      * @param globalCentroid the global centroid, calculated by this method and used to quantize the centroids
      * @param cacheCentroids whether the centroids are kept or discarded once computed
      * @return the vector assignments, soar assignments, and if asked the centroids themselves that were computed
@@ -238,7 +225,6 @@ CentroidAssignments calculateAndWriteCentroids(
         FieldInfo fieldInfo,
         FloatVectorValues floatVectorValues,
         IndexOutput centroidOutput,
-        InfoStream infoStream,
         float[] globalCentroid,
         boolean cacheCentroids
     ) throws IOException {
@@ -266,12 +252,9 @@ CentroidAssignments calculateAndWriteCentroids(
         // write centroids
         writeCentroids(centroids, fieldInfo, globalCentroid, centroidOutput);
 
-        if (infoStream.isEnabled(IVF_VECTOR_COMPONENT)) {
-            infoStream.message(
-                IVF_VECTOR_COMPONENT,
-                "calculate centroids and assign vectors time ms: " + ((System.nanoTime() - nanoTime) / 1000000.0)
-            );
-            infoStream.message(IVF_VECTOR_COMPONENT, "final centroid count: " + centroids.length);
+        if (logger.isDebugEnabled()) {
+            logger.debug("calculate centroids and assign vectors time ms: {}", (System.nanoTime() - nanoTime) / 1000000.0);
+            logger.debug("final centroid count: {}", centroids.length);
         }
 
         IntArrayList[] assignmentsByCluster = new IntArrayList[centroids.length];
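
Since the cluster-quality and timing messages are now gated on logger.isDebugEnabled() instead of a merge-scoped InfoStream, they can be switched on through the usual per-class Elasticsearch logger level. Assuming the class lives in the package suggested by the imports in this diff, a setting along these lines would enable the output:

    logger.org.elasticsearch.index.codec.vectors.DefaultIVFVectorsWriter: DEBUG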
IVFVectorsFormat.java
@@ -45,7 +45,6 @@
  */
 public class IVFVectorsFormat extends KnnVectorsFormat {
 
-    public static final String IVF_VECTOR_COMPONENT = "IVF";
     public static final String NAME = "IVFVectorsFormat";
     // centroid ordinals -> centroid values, offsets
     public static final String CENTROID_EXTENSION = "cenivf";
IVFVectorsWriter.java
@@ -28,7 +28,6 @@
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.VectorUtil;
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.core.SuppressForbidden;
@@ -134,7 +133,6 @@ abstract CentroidAssignments calculateAndWriteCentroids(
         FieldInfo fieldInfo,
         FloatVectorValues floatVectorValues,
         IndexOutput centroidOutput,
-        InfoStream infoStream,
         float[] globalCentroid
     ) throws IOException;
 
@@ -143,7 +141,6 @@ abstract long[] buildAndWritePostingsLists(
         CentroidSupplier centroidSupplier,
         FloatVectorValues floatVectorValues,
         IndexOutput postingsOutput,
-        InfoStream infoStream,
         IntArrayList[] assignmentsByCluster
     ) throws IOException;
 
@@ -168,7 +165,6 @@ public final void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException {
             fieldWriter.fieldInfo,
             floatVectorValues,
             ivfCentroids,
-            segmentWriteState.infoStream,
             globalCentroid
         );
 
@@ -180,7 +176,6 @@ public final void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException {
             centroidSupplier,
             floatVectorValues,
             ivfClusters,
-            segmentWriteState.infoStream,
             centroidAssignments.assignmentsByCluster()
         );
         // write posting lists
@@ -313,7 +308,6 @@ public final void mergeOneField(FieldInfo fieldInfo, MergeState mergeState) throws IOException {
             centroidSupplier,
             floatVectorValues,
             ivfClusters,
-            mergeState.infoStream,
             centroidAssignments.assignmentsByCluster()
         );
         assert offsets.length == centroidSupplier.size();