diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ES92Int7VectorsScorer.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ES92Int7VectorsScorer.java
new file mode 100644
index 0000000000000..c405e0ad33677
--- /dev/null
+++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ES92Int7VectorsScorer.java
@@ -0,0 +1,178 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+package org.elasticsearch.simdvec;
+
+import org.apache.lucene.index.VectorSimilarityFunction;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.VectorUtil;
+
+import java.io.IOException;
+
+import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN;
+import static org.apache.lucene.index.VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT;
+
+/**
+ * Scorer for 7-bit quantized vectors stored in an {@link IndexInput}.
+ * Queries are expected to be quantized using 7 bits as well.
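+ *
+ * <p>A minimal usage sketch (illustrative only; the variable names are hypothetical and the
+ * input is assumed to be positioned at the first quantized vector):
+ * <pre>{@code
+ * ES92Int7VectorsScorer scorer = ESVectorUtil.getES92Int7VectorsScorer(in, dims);
+ * float[] scores = new float[ES92Int7VectorsScorer.BULK_SIZE];
+ * scorer.scoreBulk(quantizedQuery, lowerInterval, upperInterval, componentSum,
+ *     additionalCorrection, similarityFunction, centroidDp, scores);
+ * }</pre>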
+ */
+public class ES92Int7VectorsScorer {
+
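+ /** Number of vectors scored per {@link #scoreBulk} call. */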
+ public static final int BULK_SIZE = 16;
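+ /** One quantization step for 7-bit values: 1 / ((1 << 7) - 1) = 1 / 127. */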
+ protected static final float SEVEN_BIT_SCALE = 1f / ((1 << 7) - 1);
+
+ /** The wrapped {@link IndexInput}. */
+ protected final IndexInput in;
+ protected final int dimensions;
+
+ private final float[] lowerIntervals = new float[BULK_SIZE];
+ private final float[] upperIntervals = new float[BULK_SIZE];
+ private final int[] targetComponentSums = new int[BULK_SIZE];
+ private final float[] additionalCorrections = new float[BULK_SIZE];
+
+ /** Sole constructor, called by sub-classes. */
+ public ES92Int7VectorsScorer(IndexInput in, int dimensions) {
+ this.in = in;
+ this.dimensions = dimensions;
+ }
+
+ /**
+ * Computes the quantized distance between the provided quantized query and the next quantized
+ * vector read from the wrapped {@link IndexInput}.
+ */
+ public long int7DotProduct(byte[] b) throws IOException {
+ int total = 0;
+ for (int i = 0; i < dimensions; i++) {
+ total += in.readByte() * b[i];
+ }
+ return total;
+ }
+
+ /**
+ * Computes the quantized distances between the provided quantized query and the quantized
+ * vectors read from the wrapped {@link IndexInput}. The number of quantized vectors to read is
+ * determined by {@code count} and the results are stored in the provided {@code scores} array.
+ */
+ public void int7DotProductBulk(byte[] b, int count, float[] scores) throws IOException {
+ for (int i = 0; i < count; i++) {
+ scores[i] = int7DotProduct(b);
+ }
+ }
+
+ /**
+ * Computes the corrected score between the provided quantized query and the next quantized
+ * vector read from the wrapped {@link IndexInput}.
+ */
+ public float score(
+ byte[] q,
+ float queryLowerInterval,
+ float queryUpperInterval,
+ int queryComponentSum,
+ float queryAdditionalCorrection,
+ VectorSimilarityFunction similarityFunction,
+ float centroidDp
+ ) throws IOException {
+ float score = int7DotProduct(q);
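+ // Reuse the lowerIntervals scratch array to read this vector's three float corrections:
+ // lower interval, upper interval and additional correction.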
+ in.readFloats(lowerIntervals, 0, 3);
+ int addition = in.readInt();
+ return applyCorrections(
+ queryLowerInterval,
+ queryUpperInterval,
+ queryComponentSum,
+ queryAdditionalCorrection,
+ similarityFunction,
+ centroidDp,
+ lowerIntervals[0],
+ lowerIntervals[1],
+ addition,
+ lowerIntervals[2],
+ score
+ );
+ }
+
+ /**
+ * Computes the corrected scores between the provided quantized query and the quantized vectors
+ * that are read from the wrapped {@link IndexInput}.
+ *
+ * <p>The number of vectors to score is defined by {@link #BULK_SIZE} and the results are stored
+ * in the provided scores array. The expected format of the input is as follows: first the
+ * quantized vectors, then all the lower intervals as floats, then all the upper intervals as
+ * floats, then all the target component sums as ints, and finally all the additional corrections
+ * as floats.
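+ *
+ * <p>Schematically, one block of {@link #BULK_SIZE} vectors is laid out as:
+ * <pre>
+ * BULK_SIZE * dimensions bytes : quantized vectors
+ * BULK_SIZE floats             : lower intervals
+ * BULK_SIZE floats             : upper intervals
+ * BULK_SIZE ints               : target component sums
+ * BULK_SIZE floats             : additional corrections
+ * </pre>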
+ */
+ public void scoreBulk(
+ byte[] q,
+ float queryLowerInterval,
+ float queryUpperInterval,
+ int queryComponentSum,
+ float queryAdditionalCorrection,
+ VectorSimilarityFunction similarityFunction,
+ float centroidDp,
+ float[] scores
+ ) throws IOException {
+ int7DotProductBulk(q, BULK_SIZE, scores);
+ in.readFloats(lowerIntervals, 0, BULK_SIZE);
+ in.readFloats(upperIntervals, 0, BULK_SIZE);
+ in.readInts(targetComponentSums, 0, BULK_SIZE);
+ in.readFloats(additionalCorrections, 0, BULK_SIZE);
+ for (int i = 0; i < BULK_SIZE; i++) {
+ scores[i] = applyCorrections(
+ queryLowerInterval,
+ queryUpperInterval,
+ queryComponentSum,
+ queryAdditionalCorrection,
+ similarityFunction,
+ centroidDp,
+ lowerIntervals[i],
+ upperIntervals[i],
+ targetComponentSums[i],
+ additionalCorrections[i],
+ scores[i]
+ );
+ }
+ }
+
+ /**
+ * Computes the score by applying the necessary corrections to the provided quantized distance.
+ */
+ public float applyCorrections(
+ float queryLowerInterval,
+ float queryUpperInterval,
+ int queryComponentSum,
+ float queryAdditionalCorrection,
+ VectorSimilarityFunction similarityFunction,
+ float centroidDp,
+ float lowerInterval,
+ float upperInterval,
+ int targetComponentSum,
+ float additionalCorrection,
+ float qcDist
+ ) {
+ float ax = lowerInterval;
+ // Scale the interval width down to the step size of the 7-bit quantization grid
+ float lx = (upperInterval - ax) * SEVEN_BIT_SCALE;
+ float ay = queryLowerInterval;
+ float ly = (queryUpperInterval - ay) * SEVEN_BIT_SCALE;
+ float y1 = queryComponentSum;
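+ // Expanding the dequantized dot product sum_i (ax + lx * x_i) * (ay + ly * y_i) yields the
+ // four terms below: ax*ay*dims + ay*lx*sum(x) + ax*ly*sum(y) + lx*ly*dot(x, y).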
+ float score = ax * ay * dimensions + ay * lx * (float) targetComponentSum + ax * ly * y1 + lx * ly * qcDist;
+ // For euclidean, we need to invert the score and apply the additional correction, which is
+ // assumed to be the squared l2norm of the centroid centered vectors.
+ if (similarityFunction == EUCLIDEAN) {
+ score = queryAdditionalCorrection + additionalCorrection - 2 * score;
+ return Math.max(1 / (1f + score), 0);
+ } else {
+ // For cosine and max inner product, we need to apply the additional correction, which is
+ // assumed to be the non-centered dot-product between the vector and the centroid
+ score += queryAdditionalCorrection + additionalCorrection - centroidDp;
+ if (similarityFunction == MAXIMUM_INNER_PRODUCT) {
+ return VectorUtil.scaleMaxInnerProductScore(score);
+ }
+ return Math.max((1f + score) / 2f, 0);
+ }
+ }
+}
diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java
index c3091dcb96882..5b14b39d37fb0 100644
--- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java
+++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java
@@ -51,6 +51,10 @@ public static ES91Int4VectorsScorer getES91Int4VectorsScorer(IndexInput input, i
return ESVectorizationProvider.getInstance().newES91Int4VectorsScorer(input, dimension);
}
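+ /** Returns an {@link ES92Int7VectorsScorer} for the given {@link IndexInput} and vector dimension. */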
+ public static ES92Int7VectorsScorer getES92Int7VectorsScorer(IndexInput input, int dimension) throws IOException {
+ return ESVectorizationProvider.getInstance().newES92Int7VectorsScorer(input, dimension);
+ }
+
public static long ipByteBinByte(byte[] q, byte[] d) {
if (q.length != d.length * B_QUERY) {
throw new IllegalArgumentException("vector dimensions incompatible: " + q.length + "!= " + B_QUERY + " x " + d.length);
diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java
index 5bdd7a724ceda..4c4cd98bdd781 100644
--- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java
+++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java
@@ -12,8 +12,7 @@
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.simdvec.ES91Int4VectorsScorer;
import org.elasticsearch.simdvec.ES91OSQVectorsScorer;
-
-import java.io.IOException;
+import org.elasticsearch.simdvec.ES92Int7VectorsScorer;
final class DefaultESVectorizationProvider extends ESVectorizationProvider {
private final ESVectorUtilSupport vectorUtilSupport;
@@ -28,12 +27,17 @@ public ESVectorUtilSupport getVectorUtilSupport() {
}
@Override
- public ES91OSQVectorsScorer newES91OSQVectorsScorer(IndexInput input, int dimension) throws IOException {
+ public ES91OSQVectorsScorer newES91OSQVectorsScorer(IndexInput input, int dimension) {
return new ES91OSQVectorsScorer(input, dimension);
}
@Override
- public ES91Int4VectorsScorer newES91Int4VectorsScorer(IndexInput input, int dimension) throws IOException {
+ public ES91Int4VectorsScorer newES91Int4VectorsScorer(IndexInput input, int dimension) {
return new ES91Int4VectorsScorer(input, dimension);
}
+
+ @Override
+ public ES92Int7VectorsScorer newES92Int7VectorsScorer(IndexInput input, int dimension) {
+ return new ES92Int7VectorsScorer(input, dimension);
+ }
}
diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java
index 719284f48471c..d174c31401f02 100644
--- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java
+++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java
@@ -12,6 +12,7 @@
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.simdvec.ES91Int4VectorsScorer;
import org.elasticsearch.simdvec.ES91OSQVectorsScorer;
+import org.elasticsearch.simdvec.ES92Int7VectorsScorer;
import java.io.IOException;
import java.util.Objects;
@@ -35,6 +36,9 @@ public static ESVectorizationProvider getInstance() {
/** Create a new {@link ES91Int4VectorsScorer} for the given {@link IndexInput}. */
public abstract ES91Int4VectorsScorer newES91Int4VectorsScorer(IndexInput input, int dimension) throws IOException;
+ /** Create a new {@link ES92Int7VectorsScorer} for the given {@link IndexInput}. */
+ public abstract ES92Int7VectorsScorer newES92Int7VectorsScorer(IndexInput input, int dimension) throws IOException;
+
// visible for tests
static ESVectorizationProvider lookup(boolean testMode) {
return new DefaultESVectorizationProvider();
diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/MemorySegmentES92Int7VectorsScorer.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/MemorySegmentES92Int7VectorsScorer.java
new file mode 100644
index 0000000000000..6edf60fff1c83
--- /dev/null
+++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/MemorySegmentES92Int7VectorsScorer.java
@@ -0,0 +1,352 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+package org.elasticsearch.simdvec.internal;
+
+import jdk.incubator.vector.ByteVector;
+import jdk.incubator.vector.FloatVector;
+import jdk.incubator.vector.IntVector;
+import jdk.incubator.vector.ShortVector;
+import jdk.incubator.vector.Vector;
+import jdk.incubator.vector.VectorOperators;
+import jdk.incubator.vector.VectorShape;
+import jdk.incubator.vector.VectorSpecies;
+
+import org.apache.lucene.index.VectorSimilarityFunction;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.VectorUtil;
+import org.elasticsearch.simdvec.ES92Int7VectorsScorer;
+
+import java.io.IOException;
+import java.lang.foreign.MemorySegment;
+import java.nio.ByteOrder;
+
+import static java.nio.ByteOrder.LITTLE_ENDIAN;
+import static jdk.incubator.vector.VectorOperators.ADD;
+import static jdk.incubator.vector.VectorOperators.B2I;
+import static jdk.incubator.vector.VectorOperators.B2S;
+import static jdk.incubator.vector.VectorOperators.S2I;
+import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN;
+import static org.apache.lucene.index.VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT;
+
+/** Panamized scorer for 7-bit quantized vectors stored in an {@link IndexInput}. **/
+public final class MemorySegmentES92Int7VectorsScorer extends ES92Int7VectorsScorer {
+
+ private static final VectorSpecies BYTE_SPECIES_64 = ByteVector.SPECIES_64;
+ private static final VectorSpecies BYTE_SPECIES_128 = ByteVector.SPECIES_128;
+
+ private static final VectorSpecies SHORT_SPECIES_128 = ShortVector.SPECIES_128;
+ private static final VectorSpecies SHORT_SPECIES_256 = ShortVector.SPECIES_256;
+
+ private static final VectorSpecies INT_SPECIES_128 = IntVector.SPECIES_128;
+ private static final VectorSpecies INT_SPECIES_256 = IntVector.SPECIES_256;
+ private static final VectorSpecies INT_SPECIES_512 = IntVector.SPECIES_512;
+
+ private static final int VECTOR_BITSIZE;
+ private static final VectorSpecies FLOAT_SPECIES;
+ private static final VectorSpecies INT_SPECIES;
+
+ static {
+ // default to platform supported bitsize
+ VECTOR_BITSIZE = VectorShape.preferredShape().vectorBitSize();
+ FLOAT_SPECIES = VectorSpecies.of(float.class, VectorShape.forBitSize(VECTOR_BITSIZE));
+ INT_SPECIES = VectorSpecies.of(int.class, VectorShape.forBitSize(VECTOR_BITSIZE));
+ }
+
+ private final MemorySegment memorySegment;
+
+ public MemorySegmentES92Int7VectorsScorer(IndexInput in, int dimensions, MemorySegment memorySegment) {
+ super(in, dimensions);
+ this.memorySegment = memorySegment;
+ }
+
+ @Override
+ public long int7DotProduct(byte[] q) throws IOException {
+ assert dimensions == q.length;
+ int i = 0;
+ int res = 0;
+ // only vectorize if we'll at least enter the loop a single time
+ if (dimensions >= 16) {
+ // compute vectorized dot product consistent with VPDPBUSD instruction
+ if (VECTOR_BITSIZE >= 512) {
+ i += BYTE_SPECIES_128.loopBound(dimensions);
+ res += dotProductBody512(q, i);
+ } else if (VECTOR_BITSIZE == 256) {
+ i += BYTE_SPECIES_64.loopBound(dimensions);
+ res += dotProductBody256(q, i);
+ } else {
+ // tricky: we don't have SPECIES_32, so we workaround with "overlapping read"
+ i += BYTE_SPECIES_64.loopBound(dimensions - BYTE_SPECIES_64.length());
+ res += dotProductBody128(q, i);
+ }
+ // scalar tail
+ while (i < dimensions) {
+ res += in.readByte() * q[i++];
+ }
+ return res;
+ } else {
+ return super.int7DotProduct(q);
+ }
+ }
+
+ private int dotProductBody512(byte[] q, int limit) throws IOException {
+ IntVector acc = IntVector.zero(INT_SPECIES_512);
+ long offset = in.getFilePointer();
+ for (int i = 0; i < limit; i += BYTE_SPECIES_128.length()) {
+ ByteVector va8 = ByteVector.fromArray(BYTE_SPECIES_128, q, i);
+ ByteVector vb8 = ByteVector.fromMemorySegment(BYTE_SPECIES_128, memorySegment, offset + i, LITTLE_ENDIAN);
+
+ // 16-bit multiply: avoid AVX-512 heavy multiply on zmm
+ Vector va16 = va8.convertShape(B2S, SHORT_SPECIES_256, 0);
+ Vector vb16 = vb8.convertShape(B2S, SHORT_SPECIES_256, 0);
+ Vector prod16 = va16.mul(vb16);
+
+ // 32-bit add
+ Vector prod32 = prod16.convertShape(S2I, INT_SPECIES_512, 0);
+ acc = acc.add(prod32);
+ }
+
+ in.seek(offset + limit); // advance the input stream
+ // reduce
+ return acc.reduceLanes(ADD);
+ }
+
+ private int dotProductBody256(byte[] q, int limit) throws IOException {
+ IntVector acc = IntVector.zero(INT_SPECIES_256);
+ long offset = in.getFilePointer();
+ for (int i = 0; i < limit; i += BYTE_SPECIES_64.length()) {
+ ByteVector va8 = ByteVector.fromArray(BYTE_SPECIES_64, q, i);
+ ByteVector vb8 = ByteVector.fromMemorySegment(BYTE_SPECIES_64, memorySegment, offset + i, LITTLE_ENDIAN);
+
+ // 32-bit multiply and add into accumulator
+ Vector va32 = va8.convertShape(B2I, INT_SPECIES_256, 0);
+ Vector vb32 = vb8.convertShape(B2I, INT_SPECIES_256, 0);
+ acc = acc.add(va32.mul(vb32));
+ }
+ in.seek(offset + limit);
+ // reduce
+ return acc.reduceLanes(ADD);
+ }
+
+ private int dotProductBody128(byte[] q, int limit) throws IOException {
+ IntVector acc = IntVector.zero(IntVector.SPECIES_128);
+ long offset = in.getFilePointer();
+ // 4 bytes at a time (re-loading half the vector each time!)
+ for (int i = 0; i < limit; i += ByteVector.SPECIES_64.length() >> 1) {
+ // load 8 bytes
+ ByteVector va8 = ByteVector.fromArray(BYTE_SPECIES_64, q, i);
+ ByteVector vb8 = ByteVector.fromMemorySegment(BYTE_SPECIES_64, memorySegment, offset + i, LITTLE_ENDIAN);
+
+ // process first "half" only: 16-bit multiply
+ Vector va16 = va8.convert(B2S, 0);
+ Vector vb16 = vb8.convert(B2S, 0);
+ Vector prod16 = va16.mul(vb16);
+
+ // 32-bit add
+ acc = acc.add(prod16.convertShape(S2I, IntVector.SPECIES_128, 0));
+ }
+ in.seek(offset + limit);
+ // reduce
+ return acc.reduceLanes(ADD);
+ }
+
+ @Override
+ public void int7DotProductBulk(byte[] q, int count, float[] scores) throws IOException {
+ assert dimensions == q.length;
+ // only vectorize if we'll at least enter the loop a single time
+ if (dimensions >= 16) {
+ // compute vectorized dot product consistent with VPDPBUSD instruction
+ if (VECTOR_BITSIZE >= 512) {
+ dotProductBody512Bulk(q, count, scores);
+ } else if (VECTOR_BITSIZE == 256) {
+ dotProductBody256Bulk(q, count, scores);
+ } else {
+ // tricky: we don't have SPECIES_32, so we workaround with "overlapping read"
+ dotProductBody128Bulk(q, count, scores);
+ }
+ } else {
+ super.int7DotProductBulk(q, count, scores);
+ }
+ }
+
+ private void dotProductBody512Bulk(byte[] q, int count, float[] scores) throws IOException {
+ int limit = BYTE_SPECIES_128.loopBound(dimensions);
+ for (int iter = 0; iter < count; iter++) {
+ IntVector acc = IntVector.zero(INT_SPECIES_512);
+ long offset = in.getFilePointer();
+ int i = 0;
+ for (; i < limit; i += BYTE_SPECIES_128.length()) {
+ ByteVector va8 = ByteVector.fromArray(BYTE_SPECIES_128, q, i);
+ ByteVector vb8 = ByteVector.fromMemorySegment(BYTE_SPECIES_128, memorySegment, offset + i, LITTLE_ENDIAN);
+
+ // 16-bit multiply: avoid AVX-512 heavy multiply on zmm
+ Vector va16 = va8.convertShape(B2S, SHORT_SPECIES_256, 0);
+ Vector vb16 = vb8.convertShape(B2S, SHORT_SPECIES_256, 0);
+ Vector prod16 = va16.mul(vb16);
+
+ // 32-bit add
+ Vector prod32 = prod16.convertShape(S2I, INT_SPECIES_512, 0);
+ acc = acc.add(prod32);
+ }
+
+ in.seek(offset + limit); // advance the input stream
+ // reduce
+ long res = acc.reduceLanes(ADD);
+ for (; i < dimensions; i++) {
+ res += in.readByte() * q[i];
+ }
+ scores[iter] = res;
+ }
+ }
+
+ private void dotProductBody256Bulk(byte[] q, int count, float[] scores) throws IOException {
+ int limit = BYTE_SPECIES_64.loopBound(dimensions);
+ for (int iter = 0; iter < count; iter++) {
+ IntVector acc = IntVector.zero(INT_SPECIES_256);
+ long offset = in.getFilePointer();
+ int i = 0;
+ for (; i < limit; i += BYTE_SPECIES_64.length()) {
+ ByteVector va8 = ByteVector.fromArray(BYTE_SPECIES_64, q, i);
+ ByteVector vb8 = ByteVector.fromMemorySegment(BYTE_SPECIES_64, memorySegment, offset + i, LITTLE_ENDIAN);
+
+ // 32-bit multiply and add into accumulator
+ Vector va32 = va8.convertShape(B2I, INT_SPECIES_256, 0);
+ Vector vb32 = vb8.convertShape(B2I, INT_SPECIES_256, 0);
+ acc = acc.add(va32.mul(vb32));
+ }
+ in.seek(offset + limit);
+ // reduce
+ long res = acc.reduceLanes(ADD);
+ for (; i < dimensions; i++) {
+ res += in.readByte() * q[i];
+ }
+ scores[iter] = res;
+ }
+ }
+
+ private void dotProductBody128Bulk(byte[] q, int count, float[] scores) throws IOException {
+ int limit = BYTE_SPECIES_64.loopBound(dimensions - BYTE_SPECIES_64.length());
+ for (int iter = 0; iter < count; iter++) {
+ IntVector acc = IntVector.zero(IntVector.SPECIES_128);
+ long offset = in.getFilePointer();
+ // 4 bytes at a time (re-loading half the vector each time!)
+ int i = 0;
+ for (; i < limit; i += ByteVector.SPECIES_64.length() >> 1) {
+ // load 8 bytes
+ ByteVector va8 = ByteVector.fromArray(BYTE_SPECIES_64, q, i);
+ ByteVector vb8 = ByteVector.fromMemorySegment(BYTE_SPECIES_64, memorySegment, offset + i, LITTLE_ENDIAN);
+
+ // process first "half" only: 16-bit multiply
+ Vector va16 = va8.convert(B2S, 0);
+ Vector vb16 = vb8.convert(B2S, 0);
+ Vector prod16 = va16.mul(vb16);
+
+ // 32-bit add
+ acc = acc.add(prod16.convertShape(S2I, IntVector.SPECIES_128, 0));
+ }
+ in.seek(offset + limit);
+ // reduce
+ long res = acc.reduceLanes(ADD);
+ for (; i < dimensions; i++) {
+ res += in.readByte() * q[i];
+ }
+ scores[iter] = res;
+ }
+ }
+
+ @Override
+ public void scoreBulk(
+ byte[] q,
+ float queryLowerInterval,
+ float queryUpperInterval,
+ int queryComponentSum,
+ float queryAdditionalCorrection,
+ VectorSimilarityFunction similarityFunction,
+ float centroidDp,
+ float[] scores
+ ) throws IOException {
+ int7DotProductBulk(q, BULK_SIZE, scores);
+ applyCorrectionsBulk(
+ queryLowerInterval,
+ queryUpperInterval,
+ queryComponentSum,
+ queryAdditionalCorrection,
+ similarityFunction,
+ centroidDp,
+ scores
+ );
+ }
+
+ private void applyCorrectionsBulk(
+ float queryLowerInterval,
+ float queryUpperInterval,
+ int queryComponentSum,
+ float queryAdditionalCorrection,
+ VectorSimilarityFunction similarityFunction,
+ float centroidDp,
+ float[] scores
+ ) throws IOException {
+ int limit = FLOAT_SPECIES.loopBound(BULK_SIZE);
+ int i = 0;
+ long offset = in.getFilePointer();
+ float ay = queryLowerInterval;
+ float ly = (queryUpperInterval - ay) * SEVEN_BIT_SCALE;
+ float y1 = queryComponentSum;
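+ // Corrections are stored as four consecutive blocks of BULK_SIZE values each (lower intervals,
+ // upper intervals, component sums, additional corrections), hence the multiples of
+ // 4 * BULK_SIZE bytes in the offsets below.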
+ for (; i < limit; i += FLOAT_SPECIES.length()) {
+ var ax = FloatVector.fromMemorySegment(FLOAT_SPECIES, memorySegment, offset + i * Float.BYTES, ByteOrder.LITTLE_ENDIAN);
+ var lx = FloatVector.fromMemorySegment(
+ FLOAT_SPECIES,
+ memorySegment,
+ offset + 4 * BULK_SIZE + i * Float.BYTES,
+ ByteOrder.LITTLE_ENDIAN
+ ).sub(ax).mul(SEVEN_BIT_SCALE);
+ var targetComponentSums = IntVector.fromMemorySegment(
+ INT_SPECIES,
+ memorySegment,
+ offset + 8 * BULK_SIZE + i * Integer.BYTES,
+ ByteOrder.LITTLE_ENDIAN
+ ).convert(VectorOperators.I2F, 0);
+ var additionalCorrections = FloatVector.fromMemorySegment(
+ FLOAT_SPECIES,
+ memorySegment,
+ offset + 12 * BULK_SIZE + i * Float.BYTES,
+ ByteOrder.LITTLE_ENDIAN
+ );
+ var qcDist = FloatVector.fromArray(FLOAT_SPECIES, scores, i);
+ // ax * ay * dimensions + ay * lx * (float) targetComponentSum + ax * ly * y1 + lx * ly *
+ // qcDist;
+ var res1 = ax.mul(ay).mul(dimensions);
+ var res2 = lx.mul(ay).mul(targetComponentSums);
+ var res3 = ax.mul(ly).mul(y1);
+ var res4 = lx.mul(ly).mul(qcDist);
+ var res = res1.add(res2).add(res3).add(res4);
+ // For euclidean, we need to invert the score and apply the additional correction, which is
+ // assumed to be the squared l2norm of the centroid centered vectors.
+ if (similarityFunction == EUCLIDEAN) {
+ res = res.mul(-2).add(additionalCorrections).add(queryAdditionalCorrection).add(1f);
+ res = FloatVector.broadcast(FLOAT_SPECIES, 1).div(res).max(0);
+ res.intoArray(scores, i);
+ } else {
+ // For cosine and max inner product, we need to apply the additional correction, which is
+ // assumed to be the non-centered dot-product between the vector and the centroid
+ res = res.add(queryAdditionalCorrection).add(additionalCorrections).sub(centroidDp);
+ if (similarityFunction == MAXIMUM_INNER_PRODUCT) {
+ res.intoArray(scores, i);
+ // apply the scalar max-inner-product scaling lane by lane
+ for (int j = 0; j < FLOAT_SPECIES.length(); j++) {
+ scores[i + j] = VectorUtil.scaleMaxInnerProductScore(scores[i + j]);
+ }
+ } else {
+ res = res.add(1f).mul(0.5f).max(0);
+ res.intoArray(scores, i);
+ }
+ }
+ }
+ in.seek(offset + 16L * BULK_SIZE);
+ }
+}
diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java
index 4708a052b05db..856a0cf94410f 100644
--- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java
+++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java
@@ -15,6 +15,7 @@
import org.elasticsearch.logging.Logger;
import org.elasticsearch.simdvec.ES91Int4VectorsScorer;
import org.elasticsearch.simdvec.ES91OSQVectorsScorer;
+import org.elasticsearch.simdvec.ES92Int7VectorsScorer;
import java.io.IOException;
import java.util.Locale;
@@ -42,6 +43,9 @@ public static ESVectorizationProvider getInstance() {
/** Create a new {@link ES91Int4VectorsScorer} for the given {@link IndexInput}. */
public abstract ES91Int4VectorsScorer newES91Int4VectorsScorer(IndexInput input, int dimension) throws IOException;
+ /** Create a new {@link ES92Int7VectorsScorer} for the given {@link IndexInput}. */
+ public abstract ES92Int7VectorsScorer newES92Int7VectorsScorer(IndexInput input, int dimension) throws IOException;
+
// visible for tests
static ESVectorizationProvider lookup(boolean testMode) {
final int runtimeVersion = Runtime.version().feature();
diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java
index abb75352da2f7..9b798870a4284 100644
--- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java
+++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java
@@ -13,6 +13,8 @@
import org.apache.lucene.store.MemorySegmentAccessInput;
import org.elasticsearch.simdvec.ES91Int4VectorsScorer;
import org.elasticsearch.simdvec.ES91OSQVectorsScorer;
+import org.elasticsearch.simdvec.ES92Int7VectorsScorer;
+import org.elasticsearch.simdvec.internal.MemorySegmentES92Int7VectorsScorer;
import java.io.IOException;
import java.lang.foreign.MemorySegment;
@@ -51,4 +53,16 @@ public ES91Int4VectorsScorer newES91Int4VectorsScorer(IndexInput input, int dime
}
return new ES91Int4VectorsScorer(input, dimension);
}
+
+ @Override
+ public ES92Int7VectorsScorer newES92Int7VectorsScorer(IndexInput input, int dimension) throws IOException {
+ if (input instanceof MemorySegmentAccessInput msai) {
+ MemorySegment ms = msai.segmentSliceOrNull(0, input.length());
+ if (ms != null) {
+ return new MemorySegmentES92Int7VectorsScorer(input, dimension, ms);
+ }
+ }
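+ // fall back to the scalar scorer when the input is not backed by a memory segment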
+ return new ES92Int7VectorsScorer(input, dimension);
+ }
}
diff --git a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/MemorySegmentES92Int7VectorsScorer.java b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/MemorySegmentES92Int7VectorsScorer.java
new file mode 100644
index 0000000000000..1b60471b33b59
--- /dev/null
+++ b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/MemorySegmentES92Int7VectorsScorer.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+package org.elasticsearch.simdvec.internal;
+
+import jdk.incubator.vector.FloatVector;
+import jdk.incubator.vector.IntVector;
+import jdk.incubator.vector.VectorOperators;
+import jdk.incubator.vector.VectorShape;
+import jdk.incubator.vector.VectorSpecies;
+
+import org.apache.lucene.index.VectorSimilarityFunction;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.VectorUtil;
+import org.elasticsearch.simdvec.ES92Int7VectorsScorer;
+
+import java.io.IOException;
+import java.lang.foreign.MemorySegment;
+import java.nio.ByteOrder;
+
+import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN;
+import static org.apache.lucene.index.VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT;
+
+/** Native / panamized scorer for 7-bit quantized vectors stored in an {@link IndexInput}. **/
+public final class MemorySegmentES92Int7VectorsScorer extends ES92Int7VectorsScorer {
+
+ private static final VectorSpecies FLOAT_SPECIES;
+ private static final VectorSpecies INT_SPECIES;
+
+ static {
+ // default to platform supported bitsize
+ final int vectorBitSize = VectorShape.preferredShape().vectorBitSize();
+ FLOAT_SPECIES = VectorSpecies.of(float.class, VectorShape.forBitSize(vectorBitSize));
+ INT_SPECIES = VectorSpecies.of(int.class, VectorShape.forBitSize(vectorBitSize));
+ }
+
+ private final MemorySegment memorySegment;
+
+ public MemorySegmentES92Int7VectorsScorer(IndexInput in, int dimensions, MemorySegment memorySegment) {
+ super(in, dimensions);
+ this.memorySegment = memorySegment;
+ }
+
+ @Override
+ public long int7DotProduct(byte[] q) throws IOException {
+ final MemorySegment segment = memorySegment.asSlice(in.getFilePointer(), dimensions);
+ final MemorySegment querySegment = MemorySegment.ofArray(q);
+ final long res = Similarities.dotProduct7u(segment, querySegment, dimensions);
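+ // advance the wrapped input past the bytes consumed by the native kernel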
+ in.skipBytes(dimensions);
+ return res;
+ }
+
+ @Override
+ public void int7DotProductBulk(byte[] q, int count, float[] scores) throws IOException {
+ // TODO: can we speed up bulks in native code?
+ for (int i = 0; i < count; i++) {
+ scores[i] = int7DotProduct(q);
+ }
+ }
+
+ @Override
+ public void scoreBulk(
+ byte[] q,
+ float queryLowerInterval,
+ float queryUpperInterval,
+ int queryComponentSum,
+ float queryAdditionalCorrection,
+ VectorSimilarityFunction similarityFunction,
+ float centroidDp,
+ float[] scores
+ ) throws IOException {
+ int7DotProductBulk(q, BULK_SIZE, scores);
+ applyCorrectionsBulk(
+ queryLowerInterval,
+ queryUpperInterval,
+ queryComponentSum,
+ queryAdditionalCorrection,
+ similarityFunction,
+ centroidDp,
+ scores
+ );
+ }
+
+ private void applyCorrectionsBulk(
+ float queryLowerInterval,
+ float queryUpperInterval,
+ int queryComponentSum,
+ float queryAdditionalCorrection,
+ VectorSimilarityFunction similarityFunction,
+ float centroidDp,
+ float[] scores
+ ) throws IOException {
+ int limit = FLOAT_SPECIES.loopBound(BULK_SIZE);
+ int i = 0;
+ long offset = in.getFilePointer();
+ float ay = queryLowerInterval;
+ float ly = (queryUpperInterval - ay) * SEVEN_BIT_SCALE;
+ float y1 = queryComponentSum;
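+ // The corrections follow the quantized vectors as four blocks of BULK_SIZE values each,
+ // read below at offsets 0, 4 * BULK_SIZE, 8 * BULK_SIZE and 12 * BULK_SIZE bytes.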
+ for (; i < limit; i += FLOAT_SPECIES.length()) {
+ var ax = FloatVector.fromMemorySegment(FLOAT_SPECIES, memorySegment, offset + i * Float.BYTES, ByteOrder.LITTLE_ENDIAN);
+ var lx = FloatVector.fromMemorySegment(
+ FLOAT_SPECIES,
+ memorySegment,
+ offset + 4 * BULK_SIZE + i * Float.BYTES,
+ ByteOrder.LITTLE_ENDIAN
+ ).sub(ax).mul(SEVEN_BIT_SCALE);
+ var targetComponentSums = IntVector.fromMemorySegment(
+ INT_SPECIES,
+ memorySegment,
+ offset + 8 * BULK_SIZE + i * Integer.BYTES,
+ ByteOrder.LITTLE_ENDIAN
+ ).convert(VectorOperators.I2F, 0);
+ var additionalCorrections = FloatVector.fromMemorySegment(
+ FLOAT_SPECIES,
+ memorySegment,
+ offset + 12 * BULK_SIZE + i * Float.BYTES,
+ ByteOrder.LITTLE_ENDIAN
+ );
+ var qcDist = FloatVector.fromArray(FLOAT_SPECIES, scores, i);
+ // ax * ay * dimensions + ay * lx * (float) targetComponentSum + ax * ly * y1 + lx * ly *
+ // qcDist;
+ var res1 = ax.mul(ay).mul(dimensions);
+ var res2 = lx.mul(ay).mul(targetComponentSums);
+ var res3 = ax.mul(ly).mul(y1);
+ var res4 = lx.mul(ly).mul(qcDist);
+ var res = res1.add(res2).add(res3).add(res4);
+ // For euclidean, we need to invert the score and apply the additional correction, which is
+ // assumed to be the squared l2norm of the centroid centered vectors.
+ if (similarityFunction == EUCLIDEAN) {
+ res = res.mul(-2).add(additionalCorrections).add(queryAdditionalCorrection).add(1f);
+ res = FloatVector.broadcast(FLOAT_SPECIES, 1).div(res).max(0);
+ res.intoArray(scores, i);
+ } else {
+ // For cosine and max inner product, we need to apply the additional correction, which is
+ // assumed to be the non-centered dot-product between the vector and the centroid
+ res = res.add(queryAdditionalCorrection).add(additionalCorrections).sub(centroidDp);
+ if (similarityFunction == MAXIMUM_INNER_PRODUCT) {
+ res.intoArray(scores, i);
+ // apply the scalar max-inner-product scaling lane by lane
+ for (int j = 0; j < FLOAT_SPECIES.length(); j++) {
+ scores[i + j] = VectorUtil.scaleMaxInnerProductScore(scores[i + j]);
+ }
+ } else {
+ res = res.add(1f).mul(0.5f).max(0);
+ res.intoArray(scores, i);
+ }
+ }
+ }
+ in.seek(offset + 16L * BULK_SIZE);
+ }
+}
diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/ES92Int7VectorScorerTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/ES92Int7VectorScorerTests.java
new file mode 100644
index 0000000000000..31ef6092539e7
--- /dev/null
+++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/ES92Int7VectorScorerTests.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.simdvec.internal.vectorization;
+
+import org.apache.lucene.index.VectorSimilarityFunction;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.MMapDirectory;
+import org.apache.lucene.util.VectorUtil;
+import org.elasticsearch.index.codec.vectors.OptimizedScalarQuantizer;
+import org.elasticsearch.simdvec.ES92Int7VectorsScorer;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.greaterThan;
+
+public class ES92Int7VectorScorerTests extends BaseVectorizationTests {
+
+ public void testInt7DotProduct() throws Exception {
+ // only even dimensions are supported
+ final int dimensions = random().nextInt(1, 1000) * 2;
+ final int numVectors = random().nextInt(1, 100);
+ final byte[] vector = new byte[dimensions];
+ try (Directory dir = new MMapDirectory(createTempDir())) {
+ try (IndexOutput out = dir.createOutput("tests.bin", IOContext.DEFAULT)) {
+ for (int i = 0; i < numVectors; i++) {
+ for (int j = 0; j < dimensions; j++) {
+ vector[j] = (byte) random().nextInt(128); // 7-bit quantization
+ }
+ out.writeBytes(vector, 0, dimensions);
+ }
+ }
+ final byte[] query = new byte[dimensions];
+ for (int j = 0; j < dimensions; j++) {
+ query[j] = (byte) random().nextInt(128); // 7-bit quantization
+ }
+ try (IndexInput in = dir.openInput("tests.bin", IOContext.DEFAULT)) {
+ // Work on a slice that has just the right number of bytes to make the test fail with an
+ // index-out-of-bounds in case the implementation reads more than the allowed number of
+ // padding bytes.
+ final IndexInput slice = in.slice("test", 0, (long) dimensions * numVectors);
+ final IndexInput slice2 = in.slice("test2", 0, (long) dimensions * numVectors);
+ final ES92Int7VectorsScorer defaultScorer = defaultProvider().newES92Int7VectorsScorer(slice, dimensions);
+ final ES92Int7VectorsScorer panamaScorer = maybePanamaProvider().newES92Int7VectorsScorer(slice2, dimensions);
+ for (int i = 0; i < numVectors; i++) {
+ in.readBytes(vector, 0, dimensions);
+ long val = VectorUtil.dotProduct(vector, query);
+ assertEquals(val, defaultScorer.int7DotProduct(query));
+ assertEquals(val, panamaScorer.int7DotProduct(query));
+ assertEquals(in.getFilePointer(), slice.getFilePointer());
+ assertEquals(in.getFilePointer(), slice2.getFilePointer());
+ }
+ assertEquals((long) dimensions * numVectors, in.getFilePointer());
+ }
+ }
+ }
+
+ public void testInt7Score() throws Exception {
+ // only even dimensions are supported
+ final int dimensions = random().nextInt(1, 1000) * 2;
+ final int numVectors = random().nextInt(1, 100);
+
+ float[][] vectors = new float[numVectors][dimensions];
+ final int[] scratch = new int[dimensions];
+ final byte[] qVector = new byte[dimensions];
+ final float[] centroid = new float[dimensions];
+ VectorSimilarityFunction similarityFunction = randomFrom(VectorSimilarityFunction.values());
+ randomVector(centroid, similarityFunction);
+ OptimizedScalarQuantizer quantizer = new OptimizedScalarQuantizer(similarityFunction);
+ try (Directory dir = new MMapDirectory(createTempDir())) {
+ try (IndexOutput out = dir.createOutput("tests.bin", IOContext.DEFAULT)) {
+ for (float[] vector : vectors) {
+ randomVector(vector, similarityFunction);
+ OptimizedScalarQuantizer.QuantizationResult result = quantizer.scalarQuantize(
+ vector.clone(),
+ scratch,
+ (byte) 7,
+ centroid
+ );
+ for (int j = 0; j < dimensions; j++) {
+ qVector[j] = (byte) scratch[j];
+ }
+ out.writeBytes(qVector, 0, dimensions);
+ out.writeInt(Float.floatToIntBits(result.lowerInterval()));
+ out.writeInt(Float.floatToIntBits(result.upperInterval()));
+ out.writeInt(Float.floatToIntBits(result.additionalCorrection()));
+ out.writeInt(result.quantizedComponentSum());
+ }
+ }
+ final float[] query = new float[dimensions];
+ randomVector(query, similarityFunction);
+ OptimizedScalarQuantizer.QuantizationResult queryCorrections = quantizer.scalarQuantize(
+ query.clone(),
+ scratch,
+ (byte) 7,
+ centroid
+ );
+ byte[] qQuery = new byte[dimensions];
+ for (int i = 0; i < dimensions; i++) {
+ qQuery[i] = (byte) scratch[i];
+ }
+
+ float centroidDp = VectorUtil.dotProduct(centroid, centroid);
+
+ try (IndexInput in = dir.openInput("tests.bin", IOContext.DEFAULT)) {
+ // Work on a slice that has just the right number of bytes to make the test fail with an
+ // index-out-of-bounds in case the implementation reads more than the allowed number of
+ // padding bytes.
+ final IndexInput slice = in.slice("test", 0, (long) (dimensions + 16) * numVectors);
+ final ES92Int7VectorsScorer defaultScorer = defaultProvider().newES92Int7VectorsScorer(in, dimensions);
+ final ES92Int7VectorsScorer panamaScorer = maybePanamaProvider().newES92Int7VectorsScorer(slice, dimensions);
+ for (int i = 0; i < numVectors; i++) {
+ float scoreDefault = defaultScorer.score(
+ qQuery,
+ queryCorrections.lowerInterval(),
+ queryCorrections.upperInterval(),
+ queryCorrections.quantizedComponentSum(),
+ queryCorrections.additionalCorrection(),
+ similarityFunction,
+ centroidDp
+ );
+ float scorePanama = panamaScorer.score(
+ qQuery,
+ queryCorrections.lowerInterval(),
+ queryCorrections.upperInterval(),
+ queryCorrections.quantizedComponentSum(),
+ queryCorrections.additionalCorrection(),
+ similarityFunction,
+ centroidDp
+ );
+ assertEquals(scoreDefault, scorePanama, 0.001f);
+ float realSimilarity = similarityFunction.compare(vectors[i], query);
+ float accuracy = realSimilarity > scoreDefault ? scoreDefault / realSimilarity : realSimilarity / scoreDefault;
+ assertThat(accuracy, greaterThan(0.98f));
+ assertEquals(in.getFilePointer(), slice.getFilePointer());
+ }
+ assertEquals((long) (dimensions + 16) * numVectors, in.getFilePointer());
+ }
+ }
+ }
+
+ public void testInt7ScoreBulk() throws Exception {
+ // only even dimensions are supported
+ final int dimensions = random().nextInt(1, 1000) * 2;
+ final int numVectors = random().nextInt(1, 10) * ES92Int7VectorsScorer.BULK_SIZE;
+ final float[][] vectors = new float[numVectors][dimensions];
+ final int[] quantizedScratch = new int[dimensions];
+ final byte[] quantizeVector = new byte[dimensions];
+ final float[] centroid = new float[dimensions];
+ VectorSimilarityFunction similarityFunction = randomFrom(VectorSimilarityFunction.values());
+ randomVector(centroid, similarityFunction);
+
+ OptimizedScalarQuantizer quantizer = new OptimizedScalarQuantizer(similarityFunction);
+ try (Directory dir = new MMapDirectory(createTempDir())) {
+ try (IndexOutput out = dir.createOutput("tests.bin", IOContext.DEFAULT)) {
+ OptimizedScalarQuantizer.QuantizationResult[] results =
+ new OptimizedScalarQuantizer.QuantizationResult[ES92Int7VectorsScorer.BULK_SIZE];
+ for (int i = 0; i < numVectors; i += ES92Int7VectorsScorer.BULK_SIZE) {
+ for (int j = 0; j < ES92Int7VectorsScorer.BULK_SIZE; j++) {
+ randomVector(vectors[i + j], similarityFunction);
+ results[j] = quantizer.scalarQuantize(vectors[i + j].clone(), quantizedScratch, (byte) 7, centroid);
+ for (int k = 0; k < dimensions; k++) {
+ quantizeVector[k] = (byte) quantizedScratch[k];
+ }
+ out.writeBytes(quantizeVector, 0, dimensions);
+ }
+ writeCorrections(results, out);
+ }
+ }
+ final float[] query = new float[dimensions];
+ final byte[] quantizeQuery = new byte[dimensions];
+ randomVector(query, similarityFunction);
+ OptimizedScalarQuantizer.QuantizationResult queryCorrections = quantizer.scalarQuantize(
+ query.clone(),
+ quantizedScratch,
+ (byte) 7,
+ centroid
+ );
+ for (int j = 0; j < dimensions; j++) {
+ quantizeQuery[j] = (byte) quantizedScratch[j];
+ }
+ float centroidDp = VectorUtil.dotProduct(centroid, centroid);
+
+ try (IndexInput in = dir.openInput("tests.bin", IOContext.DEFAULT)) {
+ // Work on a slice that has just the right number of bytes to make the test fail with an
+ // index-out-of-bounds in case the implementation reads more than the allowed number of
+ // padding bytes.
+ final IndexInput slice = in.slice("test", 0, (long) (dimensions + 16) * numVectors);
+ final ES92Int7VectorsScorer defaultScorer = defaultProvider().newES92Int7VectorsScorer(in, dimensions);
+ final ES92Int7VectorsScorer panamaScorer = maybePanamaProvider().newES92Int7VectorsScorer(slice, dimensions);
+ float[] scoresDefault = new float[ES92Int7VectorsScorer.BULK_SIZE];
+ float[] scoresPanama = new float[ES92Int7VectorsScorer.BULK_SIZE];
+ for (int i = 0; i < numVectors; i += ES92Int7VectorsScorer.BULK_SIZE) {
+ defaultScorer.scoreBulk(
+ quantizeQuery,
+ queryCorrections.lowerInterval(),
+ queryCorrections.upperInterval(),
+ queryCorrections.quantizedComponentSum(),
+ queryCorrections.additionalCorrection(),
+ similarityFunction,
+ centroidDp,
+ scoresDefault
+ );
+ panamaScorer.scoreBulk(
+ quantizeQuery,
+ queryCorrections.lowerInterval(),
+ queryCorrections.upperInterval(),
+ queryCorrections.quantizedComponentSum(),
+ queryCorrections.additionalCorrection(),
+ similarityFunction,
+ centroidDp,
+ scoresPanama
+ );
+ for (int j = 0; j < ES92Int7VectorsScorer.BULK_SIZE; j++) {
+ assertEquals(scoresDefault[j], scoresPanama[j], 1e-2f);
+ float realSimilarity = similarityFunction.compare(vectors[i + j], query);
+ float accuracy = realSimilarity > scoresDefault[j]
+ ? scoresDefault[j] / realSimilarity
+ : realSimilarity / scoresDefault[j];
+ assertThat(accuracy, greaterThan(0.98f));
+ }
+ assertEquals(in.getFilePointer(), slice.getFilePointer());
+ }
+ assertEquals((long) (dimensions + 16) * numVectors, in.getFilePointer());
+ }
+ }
+ }
+
+ private static void writeCorrections(OptimizedScalarQuantizer.QuantizationResult[] corrections, IndexOutput out) throws IOException {
+ for (OptimizedScalarQuantizer.QuantizationResult correction : corrections) {
+ out.writeInt(Float.floatToIntBits(correction.lowerInterval()));
+ }
+ for (OptimizedScalarQuantizer.QuantizationResult correction : corrections) {
+ out.writeInt(Float.floatToIntBits(correction.upperInterval()));
+ }
+ for (OptimizedScalarQuantizer.QuantizationResult correction : corrections) {
+ int targetComponentSum = correction.quantizedComponentSum();
+ out.writeInt(targetComponentSum);
+ }
+ for (OptimizedScalarQuantizer.QuantizationResult correction : corrections) {
+ out.writeInt(Float.floatToIntBits(correction.additionalCorrection()));
+ }
+ }
+
+ private void randomVector(float[] vector, VectorSimilarityFunction vectorSimilarityFunction) {
+ for (int i = 0; i < vector.length; i++) {
+ vector[i] = random().nextFloat();
+ }
+ if (vectorSimilarityFunction != VectorSimilarityFunction.EUCLIDEAN) {
+ VectorUtil.l2normalize(vector);
+ }
+ }
+}
diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java
index 9a3d3bd2fc6e4..d553fa4303540 100644
--- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java
+++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java
@@ -16,6 +16,7 @@
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@@ -740,6 +741,73 @@ public void testIgnoreDynamicBeyondLimit() throws Exception {
assertThat(ignored.stream().filter(i -> i.startsWith("field") == false).toList(), empty());
}
+ @SuppressWarnings("unchecked")
+ public void testFailureStoreWithInvalidFieldType() throws Exception {
+ String dataStreamName = "logs-app-with-failure-store";
+ createDataStream(client, dataStreamName);
+
+ indexDoc(client, dataStreamName, """
+ {
+ "@timestamp": "2023-11-30T12:00:00Z",
+ "message": "This is a valid message"
+ }
+ """);
+
+ // invalid document (message as an object instead of string)
+ indexDoc(client, dataStreamName, """
+ {
+ "@timestamp": "2023-11-30T12:01:00Z",
+ "message": {
+ "nested": "This should fail because message should be a string"
+ }
+ }
+ """);
+
+ refreshAllIndices();
+
+ Request dsInfoRequest = new Request("GET", "/_data_stream/" + dataStreamName);
+ Map<String, Object> dsInfoResponse = entityAsMap(client.performRequest(dsInfoRequest));
+ List<Map<String, Object>> dataStreams = (List<Map<String, Object>>) dsInfoResponse.get("data_streams");
+ Map<String, Object> dataStream = dataStreams.getFirst();
+ Map<String, Object> failureStoreInfo = (Map<String, Object>) dataStream.get("failure_store");
+ assertNotNull(failureStoreInfo);
+ assertThat(failureStoreInfo.get("enabled"), is(true));
+ List<Map<String, Object>> failureIndices = (List<Map<String, Object>>) failureStoreInfo.get("indices");
+
+ assertThat(failureIndices, not(empty()));
+ String failureIndex = (String) failureIndices.getFirst().get("index_name");
+ assertThat(failureIndex, matchesRegex("\\.fs-" + dataStreamName + "-.*"));
+
+ // query the failure store index
+ Request failureStoreQuery = new Request("GET", "/" + failureIndex + "/_search");
+ failureStoreQuery.setJsonEntity("""
+ {
+ "query": {
+ "match_all": {}
+ }
+ }
+ """);
+ Map<String, Object> failureStoreResponse = entityAsMap(client.performRequest(failureStoreQuery));
+ Map<String, Object> hits = (Map<String, Object>) failureStoreResponse.get("hits");
+ List<Map<String, Object>> hitsList = (List<Map<String, Object>>) hits.get("hits");
+
+ // Verify the failed document is in the failure store
+ assertThat(hitsList.size(), is(1));
+ Map<String, Object> failedDoc = (Map<String, Object>) hitsList.getFirst().get("_source");
+ Map<String, Object> document = (Map<String, Object>) failedDoc.get("document");
+ assertNotNull(document);
+ Map<String, Object> source = (Map<String, Object>) document.get("source");
+ assertNotNull(source);
+ Map<String, Object> message = (Map<String, Object>) source.get("message");
+ assertNotNull(message);
+ assertThat(message.get("nested"), equalTo("This should fail because message should be a string"));
+ Map<String, Object> error = (Map<String, Object>) failedDoc.get("error");
+ assertNotNull(error);
+ assertEquals("document_parsing_exception", error.get("type"));
+ String errorMessage = (String) error.get("message");
+ assertThat(errorMessage, containsString("failed to parse field [message] of type [match_only_text] in document with id"));
+ }
+
@Override
protected String indexTemplateName() {
return "logs";
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java
index c7033b0d560ad..38f03557ca9c7 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java
@@ -28,6 +28,8 @@ public class DataStreamFeatures implements FeatureSpecification {
public static final NodeFeature LOGS_STREAM_FEATURE = new NodeFeature("logs_stream");
+ public static final NodeFeature FAILURE_STORE_IN_LOG_DATA_STREAMS = new NodeFeature("logs_data_streams.failure_store.enabled");
+
@Override
public Set<NodeFeature> getFeatures() {
return Set.of(DataStream.DATA_STREAM_FAILURE_STORE_FEATURE);
@@ -35,6 +37,11 @@ public Set<NodeFeature> getFeatures() {
@Override
public Set<NodeFeature> getTestFeatures() {
- return Set.of(DATA_STREAM_FAILURE_STORE_TSDB_FIX, DOWNSAMPLE_AGGREGATE_DEFAULT_METRIC_FIX, LOGS_STREAM_FEATURE);
+ return Set.of(
+ DATA_STREAM_FAILURE_STORE_TSDB_FIX,
+ DOWNSAMPLE_AGGREGATE_DEFAULT_METRIC_FIX,
+ LOGS_STREAM_FEATURE,
+ FAILURE_STORE_IN_LOG_DATA_STREAMS
+ );
}
}
diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/260_logs_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/260_logs_failure_store.yml
new file mode 100644
index 0000000000000..21dd18e8cb28d
--- /dev/null
+++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/260_logs_failure_store.yml
@@ -0,0 +1,79 @@
+---
+setup:
+ - requires:
+ cluster_features: [ "logs_data_streams.failure_store.enabled" ]
+ reason: "failure store became enabled by default for log data streams in 9.2.0"
+
+ - do:
+ indices.create_data_stream:
+ name: logs-app-default
+---
+teardown:
+ - do:
+ indices.delete_data_stream:
+ name: logs-app-default
+ ignore: 404
+
+---
+"Test logs-*-* data streams have failure store enabled by default":
+ # index a valid document (string message)
+ - do:
+ index:
+ index: logs-app-default
+ refresh: true
+ body:
+ '@timestamp': '2023-01-01T12:00:00Z'
+ host:
+ name: 'server-01'
+ severity: 'INFO'
+ message: "Application started successfully"
+ - match: { result: created }
+
+ - do:
+ indices.get_data_stream:
+ name: logs-app-default
+ - match: { data_streams.0.name: logs-app-default }
+ - length: { data_streams.0.indices: 1 }
+ - match: { data_streams.0.failure_store.enabled: true }
+ - length: { data_streams.0.failure_store.indices: 0 }
+
+ # index an invalid document (object message, causing a mapping conflict)
+ - do:
+ index:
+ index: logs-app-default
+ refresh: true
+ body:
+ '@timestamp': '2023-01-01T12:01:00Z'
+ host:
+ name: 'server-02'
+ severity: 'ERROR'
+ message:
+ struct:
+ value: 42
+ - match: { result: 'created' }
+ - match: { failure_store: used }
+
+ - do:
+ indices.get_data_stream:
+ name: logs-app-default
+ - length: { data_streams.0.failure_store.indices: 1 }
+
+ - do:
+ search:
+ index: logs-app-default::data
+ body:
+ query:
+ match_all: {}
+ - length: { hits.hits: 1 }
+ - match: { hits.hits.0._source.severity: "INFO" }
+ - match: { hits.hits.0._source.message: "Application started successfully" }
+
+ - do:
+ search:
+ index: logs-app-default::failures
+ body:
+ query:
+ match_all: {}
+ - length: { hits.hits: 1 }
+ - match: { hits.hits.0._source.document.source.message.struct.value: 42 }
+ - match: { hits.hits.0._source.error.type: "document_parsing_exception" }
diff --git a/muted-tests.yml b/muted-tests.yml
index 528398aeb462a..9cafb9efa19b8 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -407,9 +407,6 @@ tests:
- class: org.elasticsearch.xpack.esql.analysis.VerifierTests
method: testMatchInsideEval
issue: https://github.com/elastic/elasticsearch/issues/131336
-- class: org.elasticsearch.packaging.test.DockerTests
- method: test022InstallPluginsFromLocalArchive
- issue: https://github.com/elastic/elasticsearch/issues/116866
- class: org.elasticsearch.packaging.test.DockerTests
method: test071BindMountCustomPathWithDifferentUID
issue: https://github.com/elastic/elasticsearch/issues/120917
@@ -485,21 +482,9 @@ tests:
- class: org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorTests
method: testLoadLongManySegments
issue: https://github.com/elastic/elasticsearch/issues/132258
-- class: org.elasticsearch.search.SearchWithIndexBlocksIT
- method: testSearchShardsOnIndicesWithIndexRefreshBlocks
- issue: https://github.com/elastic/elasticsearch/issues/131662
-- class: org.elasticsearch.search.SearchWithIndexBlocksIT
- method: testSearchIndicesWithIndexRefreshBlocks
- issue: https://github.com/elastic/elasticsearch/issues/131693
-- class: org.elasticsearch.search.SearchWithIndexBlocksIT
- method: testOpenPITOnIndicesWithIndexRefreshBlocks
- issue: https://github.com/elastic/elasticsearch/issues/131695
- class: org.elasticsearch.xpack.esql.ccq.MultiClustersIT
method: testLookupJoinAliasesSkipOld
issue: https://github.com/elastic/elasticsearch/issues/131697
-- class: org.elasticsearch.search.SearchWithIndexBlocksIT
- method: testMultiSearchIndicesWithIndexRefreshBlocks
- issue: https://github.com/elastic/elasticsearch/issues/131698
- class: org.elasticsearch.indices.cluster.RemoteSearchForceConnectTimeoutIT
method: testTimeoutSetting
issue: https://github.com/elastic/elasticsearch/issues/131656
@@ -521,9 +506,6 @@ tests:
- class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT
method: test {p0=search/600_flattened_ignore_above/flattened ignore_above multi-value field}
issue: https://github.com/elastic/elasticsearch/issues/131967
-- class: org.elasticsearch.search.routing.SearchReplicaSelectionIT
- method: testNodeSelection
- issue: https://github.com/elastic/elasticsearch/issues/132017
- class: org.elasticsearch.xpack.remotecluster.CrossClusterEsqlRCS1EnrichUnavailableRemotesIT
method: testEsqlEnrichWithSkipUnavailable
issue: https://github.com/elastic/elasticsearch/issues/132078
@@ -536,78 +518,24 @@ tests:
- class: org.elasticsearch.xpack.test.rest.XPackRestIT
method: test {p0=transform/transforms_crud/Test transform where source query is invalid}
issue: https://github.com/elastic/elasticsearch/issues/132111
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=bbq_flat updateType=bbq_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132112
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=hnsw updateType=int4_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132113
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=hnsw updateType=bbq_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132115
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int8_flat updateType=int8_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132116
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int4_flat updateType=int8_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132117
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=flat updateType=hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132119
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=flat updateType=bbq_flat}
- issue: https://github.com/elastic/elasticsearch/issues/132120
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int8_flat updateType=bbq_disk}
- issue: https://github.com/elastic/elasticsearch/issues/132122
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int8_flat updateType=int4_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132123
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int8_flat updateType=hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132124
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int8_flat updateType=int4_flat}
- issue: https://github.com/elastic/elasticsearch/issues/132125
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=flat updateType=int8_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132126
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=bbq_disk updateType=bbq_disk}
- issue: https://github.com/elastic/elasticsearch/issues/132127
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=flat updateType=int8_flat}
- issue: https://github.com/elastic/elasticsearch/issues/132129
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: "testDenseVectorMappingUpdate {initialType=flat updateType=bbq_disk #2}"
- issue: https://github.com/elastic/elasticsearch/issues/132130
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int4_flat updateType=bbq_flat}
- issue: https://github.com/elastic/elasticsearch/issues/132132
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int8_hnsw updateType=int4_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132133
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=bbq_flat updateType=bbq_disk}
- issue: https://github.com/elastic/elasticsearch/issues/132134
-- class: org.elasticsearch.upgrades.RunningSnapshotIT
- method: testRunningSnapshotCompleteAfterUpgrade {upgradedNodes=1}
- issue: https://github.com/elastic/elasticsearch/issues/132135
-- class: org.elasticsearch.upgrades.RunningSnapshotIT
- method: testRunningSnapshotCompleteAfterUpgrade {upgradedNodes=2}
- issue: https://github.com/elastic/elasticsearch/issues/132136
-- class: org.elasticsearch.upgrades.RunningSnapshotIT
- method: testRunningSnapshotCompleteAfterUpgrade {upgradedNodes=3}
- issue: https://github.com/elastic/elasticsearch/issues/132137
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int4_flat updateType=int4_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132140
-- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
- method: testDenseVectorMappingUpdate {initialType=int8_flat updateType=bbq_hnsw}
- issue: https://github.com/elastic/elasticsearch/issues/132141
- class: org.elasticsearch.index.engine.MergeWithLowDiskSpaceIT
method: testRelocationWhileForceMerging
issue: https://github.com/elastic/elasticsearch/issues/131789
+- class: org.elasticsearch.indices.cluster.FieldCapsForceConnectTimeoutIT
+ method: testTimeoutSetting
+ issue: https://github.com/elastic/elasticsearch/issues/132179
+- class: org.elasticsearch.test.rest.yaml.MDPYamlTestSuiteIT
+ method: test {yaml=mdp/10_basic/Index using shared data path}
+ issue: https://github.com/elastic/elasticsearch/issues/132223
+- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusStandardReindexedIntoLogsDbChallengeRestIT
+ method: testTermsQuery
+ issue: https://github.com/elastic/elasticsearch/issues/132225
+- class: org.elasticsearch.xpack.logsdb.qa.StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT
+ method: testTermsQuery
+ issue: https://github.com/elastic/elasticsearch/issues/132226
+- class: org.elasticsearch.xpack.sql.qa.mixed_node.SqlCompatIT
+ method: testNullsOrderWithMissingOrderSupportQueryingNewNode
+ issue: https://github.com/elastic/elasticsearch/issues/132249
- class: org.elasticsearch.index.mapper.vectors.DenseVectorFieldIndexTypeUpdateIT
method: testDenseVectorMappingUpdate {initialType=flat updateType=int4_hnsw}
issue: https://github.com/elastic/elasticsearch/issues/132149
@@ -650,8 +578,7 @@ tests:
issue: https://github.com/elastic/elasticsearch/issues/131749
- class: org.elasticsearch.upgrades.SemanticTextUpgradeIT
method: testSemanticTextOperations*
- issue: https://github.com/elastic/elasticsearch/issues/131749
-
+ issue: https://github.com/elastic/elasticsearch/issues/131749
# Examples:
#
diff --git a/plugins/examples/build.gradle b/plugins/examples/build.gradle
index c5f75e71e4842..d1d802e288419 100644
--- a/plugins/examples/build.gradle
+++ b/plugins/examples/build.gradle
@@ -20,6 +20,11 @@ subprojects {
targetCompatibility = 21
}
+ test {
+ // Testing with entitlements doesn't work for example plugins (ES-12453).
+ systemProperty 'es.entitlement.enableForTests', 'false'
+ }
+
repositories {
// Only necessary when building plugins against SNAPSHOT versions of Elasticsearch
if (gradle.includedBuilds.isEmpty()) {
diff --git a/qa/multi-data-path/build.gradle b/qa/multi-data-path/build.gradle
new file mode 100644
index 0000000000000..adba82aeb12c0
--- /dev/null
+++ b/qa/multi-data-path/build.gradle
@@ -0,0 +1,10 @@
+apply plugin: 'elasticsearch.internal-yaml-rest-test'
+
+// This subproject verifies that multi-data-path (MDP) setups continue to work with entitlements.
+
+restResources {
+ restApi {
+ include '_common', 'capabilities', 'index', 'indices', 'indices.create'
+ }
+}
+
diff --git a/qa/multi-data-path/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/MDPYamlTestSuiteIT.java b/qa/multi-data-path/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/MDPYamlTestSuiteIT.java
new file mode 100644
index 0000000000000..9c3bb935a7424
--- /dev/null
+++ b/qa/multi-data-path/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/MDPYamlTestSuiteIT.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.test.rest.yaml;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.junit.ClassRule;
+
+import java.io.IOException;
+import java.nio.file.Files;
+
+public class MDPYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
+
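+ // Start a local test cluster whose path.shared_data points at a fresh temp directory so the YAML tests can use index.data_path.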
+ @ClassRule
+ public static ElasticsearchCluster cluster = ElasticsearchCluster.local().setting("path.shared_data", tempSharedDataPath()).build();
+
+ public MDPYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
+ super(testCandidate);
+ }
+
+ @ParametersFactory
+ public static Iterable<Object[]> parameters() throws Exception {
+ return createParameters();
+ }
+
+ @Override
+ protected String getTestRestCluster() {
+ return cluster.getHttpAddresses();
+ }
+
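+ // The shared data directory must exist before the cluster starts; the @ClassRule field initializer cannot throw checked exceptions, hence the AssertionError wrapper.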
+ private static String tempSharedDataPath() {
+ try {
+ return Files.createTempDirectory("shared_data").toString();
+ } catch (IOException e) {
+ throw new AssertionError(e);
+ }
+ }
+}
diff --git a/qa/multi-data-path/src/yamlRestTest/resources/rest-api-spec/test/mdp/10_basic.yml b/qa/multi-data-path/src/yamlRestTest/resources/rest-api-spec/test/mdp/10_basic.yml
new file mode 100644
index 0000000000000..27d0af64a8017
--- /dev/null
+++ b/qa/multi-data-path/src/yamlRestTest/resources/rest-api-spec/test/mdp/10_basic.yml
@@ -0,0 +1,31 @@
+---
+"Index using shared data path":
+
+ - requires:
+ test_runner_features: ["warnings"]
+
+ - do:
+ warnings:
+ - "[index.data_path] setting was deprecated in Elasticsearch and will be removed in a future release. See the deprecation documentation for the next major version."
+ indices.create:
+ index: test_index
+ body:
+ settings:
+ data_path: "test_index_data_path"
+
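+ # Index the same document twice: the first write reports "created" and the second "updated", showing that writes against the shared data path work end to end.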
+ - do:
+ index:
+ index: test_index
+ id: "1"
+ body: { foo: bar }
+
+ - match: { result: created }
+
+ - do:
+ index:
+ index: test_index
+ id: "1"
+ body: { foo: bar }
+ op_type: index
+
+ - match: { result: updated }
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
index 3ad4c247a8b9b..810f23e609f7c 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
@@ -205,7 +205,7 @@ public void test022InstallPluginsFromLocalArchive() {
listPluginArchive().forEach(System.out::println);
assertThat("Expected " + plugin + " to not be installed", listPlugins(), not(hasItems(plugin)));
- assertThat("Expected " + plugin + " available in archive", listPluginArchive(), hasSize(16));
+ assertThat("Expected " + plugin + " available in archive", listPluginArchive(), hasItems(containsString(plugin)));
// Stuff the proxy settings with garbage, so any attempt to go out to the internet would fail
sh.getEnv()
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/RunningSnapshotIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/RunningSnapshotIT.java
index 261e92c5d7b65..3e2dfa24e7237 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/RunningSnapshotIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/RunningSnapshotIT.java
@@ -12,12 +12,10 @@
import com.carrotsearch.randomizedtesting.annotations.Name;
import org.elasticsearch.client.Request;
-import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.rest.ObjectPath;
import java.io.IOException;
-import java.util.Collection;
import java.util.Map;
import java.util.stream.Collectors;
@@ -26,7 +24,6 @@
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
public class RunningSnapshotIT extends AbstractRollingUpgradeTestCase {
@@ -45,6 +42,13 @@ public void testRunningSnapshotCompleteAfterUpgrade() throws Exception {
.collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, entry -> entry.getValue().get("name").toString()));
assertThat(nodeIdToNodeNames.values(), containsInAnyOrder("test-cluster-0", "test-cluster-1", "test-cluster-2"));
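+ // Resolve the id of the node that is upgraded last; the shutdown metadata below is scoped to just this node.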
+ final var lastUpgradeNodeId = nodeIdToNodeNames.entrySet()
+ .stream()
+ .filter(entry -> "test-cluster-2".equals(entry.getValue()))
+ .map(Map.Entry::getKey)
+ .findFirst()
+ .orElseThrow(() -> new AssertionError("node id not found in " + nodeIdToNodeNames));
+
if (isOldCluster()) {
registerRepository(repositoryName, "fs", randomBoolean(), Settings.builder().put("location", "backup").build());
// create an index to have one shard per node
@@ -54,54 +58,41 @@ public void testRunningSnapshotCompleteAfterUpgrade() throws Exception {
indexDocs(indexName, between(10, 50));
}
flush(indexName, true);
- // Signal shutdown to prevent snapshot from being completed
- putShutdownMetadata(nodeIdToNodeNames.keySet());
+ // Signal shutdown for the last node to upgrade to prevent snapshot from being completed during the upgrade process
+ putShutdownMetadata(lastUpgradeNodeId);
createSnapshot(repositoryName, snapshotName, false);
assertRunningSnapshot(repositoryName, snapshotName);
} else {
if (isUpgradedCluster()) {
- deleteShutdownMetadata(nodeIdToNodeNames.keySet());
- assertNoShutdownMetadata(nodeIdToNodeNames.keySet());
+ deleteShutdownMetadata(lastUpgradeNodeId);
+ assertNoShutdownMetadata(lastUpgradeNodeId);
ensureGreen(indexName);
assertBusy(() -> assertCompletedSnapshot(repositoryName, snapshotName));
} else {
- if (isFirstMixedCluster()) {
- final var upgradedNodeIds = nodeIdToNodeNames.entrySet()
- .stream()
- .filter(entry -> "test-cluster-0".equals(entry.getValue()))
- .map(Map.Entry::getKey)
- .collect(Collectors.toUnmodifiableSet());
- assertThat(upgradedNodeIds, hasSize(1));
- deleteShutdownMetadata(upgradedNodeIds);
- }
assertRunningSnapshot(repositoryName, snapshotName);
}
}
}
- private void putShutdownMetadata(Collection<String> nodeIds) throws IOException {
- for (String nodeId : nodeIds) {
- final Request putShutdownRequest = new Request("PUT", "/_nodes/" + nodeId + "/shutdown");
- putShutdownRequest.setJsonEntity("""
- {
- "type": "remove",
- "reason": "test"
- }""");
- client().performRequest(putShutdownRequest);
- }
+ private void putShutdownMetadata(String nodeId) throws IOException {
+ final Request putShutdownRequest = new Request("PUT", "/_nodes/" + nodeId + "/shutdown");
+ putShutdownRequest.setJsonEntity("""
+ {
+ "type": "remove",
+ "reason": "test"
+ }""");
+ client().performRequest(putShutdownRequest);
}
- private void deleteShutdownMetadata(Collection<String> nodeIds) throws IOException {
- for (String nodeId : nodeIds) {
- final Request request = new Request("DELETE", "/_nodes/" + nodeId + "/shutdown");
- request.addParameter(IGNORE_RESPONSE_CODES_PARAM, "404");
- client().performRequest(request);
- }
+ private void deleteShutdownMetadata(String nodeId) throws IOException {
+ final Request request = new Request("DELETE", "/_nodes/" + nodeId + "/shutdown");
+ request.addParameter(IGNORE_RESPONSE_CODES_PARAM, "404");
+ client().performRequest(request);
}
- private void assertNoShutdownMetadata(Collection<String> nodeIds) throws IOException {
+ private void assertNoShutdownMetadata(String nodeId) throws IOException {
final ObjectPath responsePath = assertOKAndCreateObjectPath(
- client().performRequest(new Request("GET", "/_nodes/" + Strings.collectionToCommaDelimitedString(nodeIds) + "/shutdown"))
+ client().performRequest(new Request("GET", "/_nodes/" + nodeId + "/shutdown"))
);
assertThat(responsePath.evaluate("nodes"), empty());
}
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml
index ba4e73f0c5e09..2cccc8988ff0e 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml
@@ -1740,3 +1740,194 @@ setup:
- match: { docs.0.doc._source.abc: "sfdsfsfdsfsfdsfsfdsfsfdsfsfdsf" }
- match: { docs.0.doc.ignored_fields: [ {"field": "abc"} ] }
- not_exists: docs.0.doc.error
+
+---
+"Test mapping addition correctly respects mapping of indices without templates":
+ # In this test, we make sure that when an index has a mapping but was not built from a template, the
+ # mapping_addition respects the existing mapping for validation.
+
+ - skip:
+ features:
+ - headers
+ - allowed_warnings
+
+ # A global match-everything legacy template is sometimes (rarely) added to the cluster. We have to get rid of this template if it exists
+ # because this test is making sure we get correct behavior when an index matches *no* template:
+ - do:
+ indices.delete_template:
+ name: '*'
+ ignore: 404
+
+ # We create the index no-template-index with an implicit mapping that has a foo field with type long:
+ - do:
+ bulk:
+ refresh: true
+ body:
+ - '{"index": {"_index": "no-template-index"}}'
+ - '{"foo": 3}'
+
+ # Now we make sure that the existing mapping is taken into account when we simulate with a mapping_addition. Since
+ # the pre-existing mapping has foo mapped as a long, this ought to fail with a document_parsing_exception because
+ # we are attempting to write a boolean foo.
+ - do:
+ headers:
+ Content-Type: application/json
+ simulate.ingest:
+ index: no-template-index
+ body: >
+ {
+ "docs": [
+ {
+ "_id": "test-id",
+ "_index": "no-template-index",
+ "_source": {
+ "@timestamp": "2025-07-25T09:06:06.929Z",
+ "is_valid": true,
+ "foo": true
+ }
+ }
+ ],
+ "mapping_addition": {
+ "properties": {
+ "is_valid": {
+ "type": "boolean"
+ }
+ }
+ }
+ }
+ - length: { docs: 1 }
+ - match: { docs.0.doc._index: "no-template-index" }
+ - match: { docs.0.doc._source.foo: true }
+ - match: { docs.0.doc._source.is_valid: true }
+ - match: { docs.0.doc.error.type: "document_parsing_exception" }
+
+ # Now we add a template for this index.
+ - do:
+ indices.put_template:
+ name: my-template-1
+ body:
+ index_patterns: no-template-index
+ mappings:
+ properties:
+ foo:
+ type: boolean
+
+ # And we still expect the index's mapping to be used rather than the template:
+ - do:
+ headers:
+ Content-Type: application/json
+ simulate.ingest:
+ index: no-template-index
+ body: >
+ {
+ "docs": [
+ {
+ "_id": "test-id",
+ "_index": "no-template-index",
+ "_source": {
+ "@timestamp": "2025-07-25T09:06:06.929Z",
+ "is_valid": true,
+ "foo": true
+ }
+ }
+ ],
+ "mapping_addition": {
+ "properties": {
+ "is_valid": {
+ "type": "boolean"
+ }
+ }
+ }
+ }
+ - length: { docs: 1 }
+ - match: { docs.0.doc._index: "no-template-index" }
+ - match: { docs.0.doc._source.foo: true }
+ - match: { docs.0.doc._source.is_valid: true }
+ - match: { docs.0.doc.error.type: "document_parsing_exception" }
+
+---
+"Test ingest simulate with mapping addition for data streams when write index has different mapping":
+ # In this test, we make sure that when a data stream's write index has a mapping that is different from the mapping
+ # in its template, and a mapping_override is given, then the mapping_override is applied to the mapping of the write
+ # index rather than the mapping of the template.
+
+ - skip:
+ features:
+ - headers
+ - allowed_warnings
+
+ - do:
+ cluster.put_component_template:
+ name: mappings_template
+ body:
+ template:
+ mappings:
+ dynamic: strict
+ properties:
+ foo:
+ type: boolean
+ bar:
+ type: boolean
+
+ - do:
+ allowed_warnings:
+ - "index template [my-template-1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template-1] will take precedence during new index creation"
+ indices.put_index_template:
+ name: my-template-1
+ body:
+ index_patterns: [simple-data-stream1]
+ composed_of:
+ - mappings_template
+ data_stream: {}
+
+ - do:
+ indices.create_data_stream:
+ name: simple-data-stream1
+ - is_true: acknowledged
+
+ - do:
+ cluster.health:
+ wait_for_status: yellow
+
+ # Now that the data stream exists, we change the template to remove the mapping for bar. The write index still has the
+ # old mapping.
+ - do:
+ cluster.put_component_template:
+ name: mappings_template
+ body:
+ template:
+ mappings:
+ properties:
+ foo:
+ type: boolean
+
+ # We expect the mapping_addition to be added to the mapping of the write index, which has a boolean bar field. So this
+ # simulate ingest ought to fail.
+ - do:
+ headers:
+ Content-Type: application/json
+ simulate.ingest:
+ index: simple-data-stream1
+ body: >
+ {
+ "docs": [
+ {
+ "_id": "asdf",
+ "_source": {
+ "@timestamp": 1234,
+ "bar": "baz"
+ }
+ }
+ ],
+ "mapping_addition": {
+ "properties": {
+ "baz": {
+ "type": "keyword"
+ }
+ }
+ }
+ }
+ - length: { docs: 1 }
+ - match: { docs.0.doc._index: "simple-data-stream1" }
+ - match: { docs.0.doc._source.bar: "baz" }
+ - match: { docs.0.doc.error.type: "document_parsing_exception" }
diff --git a/qa/vector/src/main/java/org/elasticsearch/test/knn/KnnIndexTester.java b/qa/vector/src/main/java/org/elasticsearch/test/knn/KnnIndexTester.java
index 223a42bb3b86b..48c7d855ac93d 100644
--- a/qa/vector/src/main/java/org/elasticsearch/test/knn/KnnIndexTester.java
+++ b/qa/vector/src/main/java/org/elasticsearch/test/knn/KnnIndexTester.java
@@ -101,7 +101,7 @@ private static String formatIndexPath(CmdLineArgs args) {
static Codec createCodec(CmdLineArgs args) {
final KnnVectorsFormat format;
if (args.indexType() == IndexType.IVF) {
- format = new IVFVectorsFormat(args.ivfClusterSize());
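+ // The IVF format constructor now also takes the number of centroids per parent cluster; pass the format's default.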
+ format = new IVFVectorsFormat(args.ivfClusterSize(), IVFVectorsFormat.DEFAULT_CENTROIDS_PER_PARENT_CLUSTER);
} else {
if (args.quantizeBits() == 1) {
if (args.indexType() == IndexType.FLAT) {
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml
index d627be2fb15c3..df66831ba94ff 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml
@@ -16,6 +16,8 @@ setup:
nested:
type: nested
properties:
+ language:
+ type: keyword
paragraph_id:
type: keyword
vector:
@@ -27,6 +29,13 @@ setup:
type: hnsw
m: 16
ef_construction: 200
+ nested2:
+ type: nested
+ properties:
+ key:
+ type: keyword
+ value:
+ type: keyword
- do:
index:
@@ -37,8 +46,16 @@ setup:
nested:
- paragraph_id: 0
vector: [230.0, 300.33, -34.8988, 15.555, -200.0]
+ language: EN
- paragraph_id: 1
vector: [240.0, 300, -3, 1, -20]
+ language: FR
+ nested2:
+ - key: "category"
+ value: "domestic"
+ - key: "level"
+ value: "beginner"
+
- do:
index:
@@ -49,10 +66,18 @@ setup:
nested:
- paragraph_id: 0
vector: [-0.5, 100.0, -13, 14.8, -156.0]
+ language: EN
- paragraph_id: 2
vector: [0, 100.0, 0, 14.8, -156.0]
+ language: EN
- paragraph_id: 3
vector: [0, 1.0, 0, 1.8, -15.0]
+ language: FR
+ nested2:
+ - key: "category"
+ value: "wild"
+ - key: "level"
+ value: "beginner"
- do:
index:
@@ -63,6 +88,12 @@ setup:
nested:
- paragraph_id: 0
vector: [0.5, 111.3, -13.0, 14.8, -156.0]
+ language: FR
+ nested2:
+ - key: "category"
+ value: "domestic"
+ - key: "level"
+ value: "advanced"
- do:
indices.refresh: {}
@@ -461,3 +492,125 @@ setup:
- match: {hits.hits.0._id: "2"}
- length: {hits.hits.0.inner_hits.nested.hits.hits: 1}
- match: {hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0"}
+
+
+---
+"Filter on nested fields":
+ - requires:
+ capabilities:
+ - method: POST
+ path: /_search
+ capabilities: [ knn_filter_on_nested_fields ]
+ test_runner_features: ["capabilities", "close_to"]
+ reason: "Capability for filtering on nested fields required"
+
+ - do:
+ search:
+ index: test
+ body:
+ _source: false
+ knn:
+ boost: 2
+ field: nested.vector
+ query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ]
+ k: 3
+ filter: { match: { nested.language: "EN" } }
+ inner_hits: { size: 3, "fields": [ "nested.paragraph_id", "nested.language"], _source: false }
+
+ - match: { hits.total.value: 2 }
+ - match: { hits.hits.0._id: "2" }
+ - match: { hits.hits.0.inner_hits.nested.hits.total.value: 2 }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "EN" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.language.0: "EN" }
+ - close_to: { hits.hits.0._score: { value: 0.0182, error: 0.0001 } }
+ - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: { value: 0.0182, error: 0.0001 } }
+ - match: { hits.hits.1._id: "1" }
+ - match: { hits.hits.1.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" }
+ - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "EN" }
+
+
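+ # Repeat the query filtering on FR paragraphs; every document has at least one FR paragraph, so all three documents match.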
+ - do:
+ search:
+ index: test
+ body:
+ _source: false
+ knn:
+ boost: 2
+ field: nested.vector
+ query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ]
+ k: 3
+ filter: { match: { nested.language: "FR" } }
+ inner_hits: { size: 3, "fields": [ "nested.paragraph_id", "nested.language"], _source: false }
+
+ - match: { hits.total.value: 3 }
+ - match: { hits.hits.0._id: "3" }
+ - match: { hits.hits.0.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "FR" }
+ - close_to: { hits.hits.0._score: { value: 0.0043, error: 0.0001 } }
+ - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: { value: 0.0043, error: 0.0001 } }
+ - match: { hits.hits.1._id: "2" }
+ - match: { hits.hits.1.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "3" }
+ - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "FR" }
+ - match: { hits.hits.2._id: "1" }
+ - match: { hits.hits.2.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "1" }
+ - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "FR" }
+
+ # filter on both nested and parent metadata with 2 different filters
+ - do:
+ search:
+ index: test
+ body:
+ _source: false
+ knn:
+ boost: 2
+ field: nested.vector
+ query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ]
+ k: 3
+ num_candidates: 10
+ filter: [{ match: { nested.language: "FR" }}, {term: {name: "rabbit.jpg"}} ]
+ inner_hits: { size: 3, "fields": [ "nested.paragraph_id", "nested.language"], _source: false }
+
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "3" }
+ - match: { hits.hits.0.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "FR" }
+ - close_to: { hits.hits.0._score: { value: 0.0043, error: 0.0001 } }
+ - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: { value: 0.0043, error: 0.0001 } }
+
+
+---
+"Test filter on sibling nested fields works":
+ - requires:
+ capabilities:
+ - method: POST
+ path: /_search
+ capabilities: [ knn_filter_on_nested_fields ]
+ test_runner_features: ["capabilities", "close_to"]
+ reason: "Capability for filtering on nested fields required"
+
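+ # In a top-level knn search the filter is applied to parent documents, so filtering on the sibling nested2 path is supported; documents 1 and 3 carry category=domestic.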
+ - do:
+ search:
+ index: test
+ body:
+ _source: false
+ knn:
+ field: nested.vector
+ query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ]
+ filter:
+ nested:
+ path: nested2
+ query:
+ bool:
+ filter:
+ - match:
+ nested2.key: "category"
+ - match:
+ nested2.value: "domestic"
+ - match: { hits.total.value: 2 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml
index bf07144975650..2416689c285fd 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml
@@ -16,6 +16,8 @@ setup:
nested:
type: nested
properties:
+ language:
+ type: keyword
paragraph_id:
type: keyword
vector:
@@ -23,6 +25,17 @@ setup:
dims: 5
index: true
similarity: l2_norm
+ index_options:
+ type: hnsw
+ m: 16
+ ef_construction: 200
+ nested2:
+ type: nested
+ properties:
+ key:
+ type: keyword
+ value:
+ type: keyword
aliases:
my_alias:
filter:
@@ -38,8 +51,15 @@ setup:
nested:
- paragraph_id: 0
vector: [230.0, 300.33, -34.8988, 15.555, -200.0]
+ language: EN
- paragraph_id: 1
vector: [240.0, 300, -3, 1, -20]
+ language: FR
+ nested2:
+ - key: "category"
+ value: "domestic"
+ - key: "level"
+ value: "beginner"
- do:
index:
@@ -50,10 +70,19 @@ setup:
nested:
- paragraph_id: 0
vector: [-0.5, 100.0, -13, 14.8, -156.0]
+ language: EN
- paragraph_id: 2
vector: [0, 100.0, 0, 14.8, -156.0]
+ language: EN
- paragraph_id: 3
vector: [0, 1.0, 0, 1.8, -15.0]
+ language: FR
+ nested2:
+ - key: "category"
+ value: "wild"
+ - key: "level"
+ value: "beginner"
+
- do:
index:
@@ -64,6 +93,12 @@ setup:
nested:
- paragraph_id: 0
vector: [0.5, 111.3, -13.0, 14.8, -156.0]
+ language: FR
+ nested2:
+ - key: "category"
+ value: "domestic"
+ - key: "level"
+ value: "advanced"
- do:
indices.refresh: {}
@@ -408,3 +443,147 @@ setup:
- match: {hits.total.value: 1}
- match: {hits.hits.0._id: "2"}
+
+
+---
+"Filter on nested fields":
+ - requires:
+ capabilities:
+ - method: POST
+ path: /_search
+ capabilities: [ knn_filter_on_nested_fields ]
+ test_runner_features: ["capabilities", "close_to"]
+ reason: "Capability for filtering on nested fields required"
+
+ - do:
+ search:
+ index: test
+ body:
+ _source: false
+ query:
+ nested:
+ path: nested
+ query:
+ knn:
+ boost: 2
+ field: nested.vector
+ query_vector: [-0.5, 90.0, -10, 14.8, -156.0]
+ k: 3
+ filter:
+ match:
+ nested.language: "EN"
+ inner_hits: { size: 3, "fields": [ "nested.paragraph_id", "nested.language"], _source: false }
+
+ - match: {hits.total.value: 2}
+ - match: {hits.hits.0._id: "2"}
+ - match: { hits.hits.0.inner_hits.nested.hits.total.value: 2 }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "EN" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.language.0: "EN" }
+ - close_to: { hits.hits.0._score: { value: 0.0182, error: 0.0001 } }
+ - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: { value: 0.0182, error: 0.0001 } }
+ - match: {hits.hits.1._id: "1"}
+ - match: { hits.hits.1.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" }
+ - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "EN" }
+
+ - do:
+ search:
+ index: test
+ body:
+ _source: false
+ query:
+ nested:
+ path: nested
+ query:
+ knn:
+ boost: 2
+ field: nested.vector
+ query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ]
+ k: 3
+ filter:
+ match:
+ nested.language: "FR"
+ inner_hits: { size: 3, "fields": [ "nested.paragraph_id", "nested.language" ], _source: false }
+
+ - match: { hits.total.value: 3 }
+ - match: { hits.hits.0._id: "3" }
+ - match: { hits.hits.0.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "FR" }
+ - close_to: { hits.hits.0._score: { value: 0.0043, error: 0.0001 } }
+ - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: { value: 0.0043, error: 0.0001 } }
+ - match: { hits.hits.1._id: "2" }
+ - match: { hits.hits.1.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "3" }
+ - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "FR" }
+ - match: { hits.hits.2._id: "1" }
+ - match: { hits.hits.2.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "1" }
+ - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "FR" }
+
+ # filter on both nested and parent metadata
+ - do:
+ search:
+ index: test
+ body:
+ _source: false
+ query:
+ nested:
+ path: nested
+ query:
+ knn:
+ boost: 2
+ field: nested.vector
+ query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ]
+ k: 10
+ filter: [{ match: { nested.language: "FR" }}, {term: {name: "rabbit.jpg"}} ]
+ inner_hits: { size: 3, "fields": [ "nested.paragraph_id", "nested.language" ], _source: false }
+
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "3" }
+ - match: { hits.hits.0.inner_hits.nested.hits.total.value: 1 }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" }
+ - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.language.0: "FR" }
+ - close_to: { hits.hits.0._score: { value: 0.0043, error: 0.0001 } }
+ - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: { value: 0.0043, error: 0.0001 } }
+
+
+---
+"Test filter on sibling nested fields doesn't work":
+ - requires:
+ capabilities:
+ - method: POST
+ path: /_search
+ capabilities: [ knn_filter_on_nested_fields ]
+ test_runner_features: ["capabilities", "close_to"]
+ reason: "Capability for filtering on nested fields required"
+
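+ # Inside a nested query the knn filter applies to the nested documents on the query path, so a filter on the sibling nested2 path matches nothing and no hits are returned.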
+ - do:
+ search:
+ index: test
+ body:
+ _source: false
+ query:
+ nested:
+ path: nested
+ query:
+ knn:
+ field: nested.vector
+ query_vector: [-0.5, 90.0, -10, 14.8, -156.0]
+ k: 10
+ filter:
+ nested:
+ path: nested2
+ query:
+ bool:
+ filter:
+ - match:
+ nested2.key: "category"
+ - match:
+ nested2.value: "domestic"
+ inner_hits: { size: 3, "fields": [ "nested.paragraph_id", "nested.language"], _source: false }
+
+ - match: { hits.total.value: 0 }
+
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldIndexTypeUpdateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldIndexTypeUpdateIT.java
index 522ea24d73196..bb06d6f569106 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldIndexTypeUpdateIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldIndexTypeUpdateIT.java
@@ -12,10 +12,13 @@
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.vectors.KnnVectorQueryBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
@@ -25,6 +28,7 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.ExecutionException;
import static org.elasticsearch.index.IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING;
import static org.elasticsearch.index.mapper.SourceFieldMapper.Mode.SYNTHETIC;
@@ -75,7 +79,7 @@ public static Collection<Object[]> params() {
@SuppressWarnings("unchecked")
public void testDenseVectorMappingUpdate() throws Exception {
dimensions = randomIntBetween(1, 10) * 64;
- var client = client().admin().indices();
+ var indicesClient = client().admin().indices();
Settings.Builder settingsBuilder = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
@@ -85,7 +89,7 @@ public void testDenseVectorMappingUpdate() throws Exception {
}
// Create index with initial mapping
- var createRequest = client.prepareCreate(INDEX_NAME)
+ var createRequest = indicesClient.prepareCreate(INDEX_NAME)
.setSettings(Settings.builder().put("index.number_of_shards", randomIntBetween(1, 5)))
.setMapping(updateMapping(dimensions, initialType))
.setSettings(settingsBuilder.build());
@@ -97,16 +101,16 @@ public void testDenseVectorMappingUpdate() throws Exception {
indexDoc(i);
}
- client.prepareFlush(INDEX_NAME).get();
- client.prepareRefresh(INDEX_NAME).get();
+ indicesClient.prepareFlush(INDEX_NAME).get();
+ indicesClient.prepareRefresh(INDEX_NAME).get();
// Update mapping to new type
- var putMappingRequest = client.preparePutMapping(INDEX_NAME).setSource(updateMapping(dimensions, updateType)).request();
- assertAcked(client.putMapping(putMappingRequest));
+ var putMappingRequest = indicesClient.preparePutMapping(INDEX_NAME).setSource(updateMapping(dimensions, updateType)).request();
+ assertAcked(indicesClient.putMapping(putMappingRequest));
// Validate mapping
- GetFieldMappingsResponse fieldMapping = client.getFieldMappings(
- client.prepareGetFieldMappings(INDEX_NAME).setFields(VECTOR_FIELD).request()
+ GetFieldMappingsResponse fieldMapping = indicesClient.getFieldMappings(
+ indicesClient.prepareGetFieldMappings(INDEX_NAME).setFields(VECTOR_FIELD).request()
).get();
var fieldMappingMetadata = fieldMapping.fieldMappings(INDEX_NAME, VECTOR_FIELD);
var fieldMap = (Map<String, Object>) fieldMappingMetadata.sourceAsMap().get(VECTOR_FIELD);
@@ -119,28 +123,32 @@ public void testDenseVectorMappingUpdate() throws Exception {
indexDoc(i);
}
- client.prepareFlush(INDEX_NAME).get();
- client.prepareRefresh(INDEX_NAME).get();
+ indicesClient.prepareFlush(INDEX_NAME).get();
+ indicesClient.prepareRefresh(INDEX_NAME).get();
// Search to ensure all documents are present
int expectedDocs = docsBefore + docsAfter;
- assertNoFailuresAndResponse(client().prepareSearch(INDEX_NAME).setSize(expectedDocs + 10), response -> {
- assertHitCount(response, expectedDocs);
- });
- }
- private XContentBuilder initialMapping(int dimensions, String type) throws IOException {
- XContentBuilder builder = XContentFactory.jsonBuilder();
- builder.startObject();
- {
- builder.startObject("mappings");
- {
- createFieldMapping(dimensions, type, builder);
+ // Count query
+ assertNoFailuresAndResponse(
+ client().prepareSearch(INDEX_NAME).setSize(0).setTrackTotalHits(true),
+ response -> {
+ assertHitCount(response, expectedDocs);
}
- builder.endObject();
+ );
+
+ // KNN query
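+ // Build a random query vector and check that a kNN search still reaches documents indexed both before and after the mapping update.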
+ float[] queryVector = new float[dimensions];
+ for (int i = 0; i < queryVector.length; i++) {
+ queryVector[i] = randomFloatBetween(-1, 1, true);
}
- builder.endObject();
- return builder;
+ KnnVectorQueryBuilder queryBuilder = new KnnVectorQueryBuilder(VECTOR_FIELD, queryVector, null, null, null, null);
+ assertNoFailuresAndResponse(
+ client().prepareSearch(INDEX_NAME).setQuery(queryBuilder).setTrackTotalHits(true).setSize(expectedDocs),
+ response -> {
+ assertHitCount(response, expectedDocs);
+ }
+ );
}
private XContentBuilder updateMapping(int dimensions, String type) throws IOException {
@@ -168,9 +176,10 @@ private static void createFieldMapping(int dimensions, String type, XContentBuil
builder.endObject();
}
- private void indexDoc(int id) throws IOException {
+ private void indexDoc(int id) throws ExecutionException, InterruptedException {
Float[] vector = randomArray(dimensions, dimensions, Float[]::new, () -> randomFloatBetween(-1, 1, true));
- IndexRequest req = prepareIndex(INDEX_NAME).setSource(VECTOR_FIELD, vector).setId(Integer.toString(id)).request();
- client().index(req);
+ IndexRequest indexRequest = prepareIndex(INDEX_NAME).setSource(VECTOR_FIELD, vector).setId(Integer.toString(id)).request();
+ DocWriteResponse indexResponse = client().index(indexRequest).get();
+ assertEquals(RestStatus.CREATED, indexResponse.status());
}
}
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/FieldCapsForceConnectTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/FieldCapsForceConnectTimeoutIT.java
new file mode 100644
index 0000000000000..96d2b4190f75d
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/FieldCapsForceConnectTimeoutIT.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.indices.cluster;
+
+import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
+import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;
+import org.elasticsearch.plugins.ClusterPlugin;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.test.AbstractMultiClustersTestCase;
+import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.transport.TransportService;
+import org.hamcrest.Matchers;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+
+public class FieldCapsForceConnectTimeoutIT extends AbstractMultiClustersTestCase {
+ private static final String LINKED_CLUSTER_1 = "cluster-a";
+ private static final String LINKED_CLUSTER_2 = "cluster-b";
+
+ public static class ForceConnectTimeoutPlugin extends Plugin implements ClusterPlugin {
+ @Override
+ public List<Setting<?>> getSettings() {
+ return List.of(ForceConnectTimeoutSetting);
+ }
+ }
+
+ private static final Setting<String> ForceConnectTimeoutSetting = Setting.simpleString(
+ "search.ccs.force_connect_timeout",
+ Setting.Property.NodeScope
+ );
+
+ @Override
+ protected List<String> remoteClusterAlias() {
+ return List.of(LINKED_CLUSTER_1, LINKED_CLUSTER_2);
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
+ return CollectionUtils.appendToCopy(super.nodePlugins(clusterAlias), ForceConnectTimeoutPlugin.class);
+ }
+
+ @Override
+ protected Settings nodeSettings() {
+ /*
+ * This is the setting that controls how long TransportFieldCapabilitiesAction will wait to establish a connection
+ * with a remote cluster. We set it to a low 1s to avoid stalling the test for too long -- this is consistent
+ * with what we've done in other tests.
+ */
+ return Settings.builder().put(super.nodeSettings()).put("search.ccs.force_connect_timeout", "1s").build();
+ }
+
+ @Override
+ protected Map<String, Boolean> skipUnavailableForRemoteClusters() {
+ return Map.of(LINKED_CLUSTER_1, true, LINKED_CLUSTER_2, true);
+ }
+
+ public void testTimeoutSetting() {
+ var latch = new CountDownLatch(1);
+ for (String nodeName : cluster(LOCAL_CLUSTER).getNodeNames()) {
+ MockTransportService mts = (MockTransportService) cluster(LOCAL_CLUSTER).getInstance(TransportService.class, nodeName);
+
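+ // Stall every new connection attempt to cluster-a until the latch is released, simulating a remote that cannot be reached.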
+ mts.addConnectBehavior(
+ cluster(LINKED_CLUSTER_1).getInstance(TransportService.class, (String) null),
+ ((transport, discoveryNode, profile, listener) -> {
+ try {
+ latch.await();
+ } catch (InterruptedException e) {
+ throw new AssertionError(e);
+ }
+
+ transport.openConnection(discoveryNode, profile, listener);
+ })
+ );
+ }
+
+ // Add some dummy data to prove we are communicating fine with the remote.
+ assertAcked(client(LINKED_CLUSTER_1).admin().indices().prepareCreate("test-index"));
+ client(LINKED_CLUSTER_1).prepareIndex("test-index").setSource("sample-field", "sample-value").get();
+ client(LINKED_CLUSTER_1).admin().indices().prepareRefresh("test-index").get();
+
+ /*
+ * Do a full restart so that our custom connect behaviour takes effect since it does not apply to
+ * pre-existing connections -- they're already established by the time this test runs.
+ */
+ try {
+ cluster(LINKED_CLUSTER_1).fullRestart();
+ } catch (Exception e) {
+ throw new AssertionError(e);
+ } finally {
+ var fieldCapsRequest = new FieldCapabilitiesRequest();
+ /*
+ * We have an origin and 2 linked clusters but will target only the one that we stalled.
+ * This is because when the timeout kicks in, and we move on from the stalled cluster, we do not want
+ * the error to be a top-level error. Rather, it must be present in the response object under "failures".
+ * All other errors are free to be top-level errors though.
+ */
+ fieldCapsRequest.indices(LINKED_CLUSTER_1 + ":*");
+ fieldCapsRequest.fields("foo", "bar", "baz");
+ var result = safeGet(client().execute(TransportFieldCapabilitiesAction.TYPE, fieldCapsRequest));
+
+ var failures = result.getFailures();
+ assertThat(failures.size(), Matchers.is(1));
+
+ var failure = failures.getFirst();
+ assertThat(failure.getIndices().length, Matchers.is(1));
+ assertThat(failure.getIndices()[0], Matchers.equalTo("cluster-a:*"));
+ // Outer wrapper that gets unwrapped in ExceptionsHelper.isRemoteUnavailableException().
+ assertThat(
+ failure.getException().toString(),
+ Matchers.containsString("java.lang.IllegalStateException: Unable to open any connections")
+ );
+
+ // The actual error thrown by the subscribable listener when a linked cluster cannot be reached.
+ assertThat(failure.getException().getCause(), Matchers.instanceOf(ElasticsearchTimeoutException.class));
+ assertThat(ExceptionsHelper.isRemoteUnavailableException(failure.getException()), Matchers.is(true));
+
+ latch.countDown();
+ result.decRef();
+ }
+ }
+}
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java
index febe1e9c536c5..df50d7fb0603e 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java
@@ -11,6 +11,7 @@
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.tests.util.English;
+import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
@@ -36,12 +37,15 @@
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.seqno.ReplicationTracker;
import org.elasticsearch.index.seqno.RetentionLease;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.indices.IndexingMemoryController;
+import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.PeerRecoveryTargetService;
import org.elasticsearch.indices.recovery.RecoveryFileChunkRequest;
import org.elasticsearch.plugins.Plugin;
@@ -161,10 +165,88 @@ public void testSimpleRelocationNoIndexing() {
assertHitCount(prepareSearch("test").setSize(0), 20);
}
+ // Tests that relocation can suspend index throttling in order to acquire the
+ // indexing permits it needs to succeed.
+ public void testSimpleRelocationWithIndexingPaused() throws Exception {
+ logger.info("--> starting [node1] ...");
+ // Start node with PAUSE_INDEXING_ON_THROTTLE setting set to true. This means that if we activate
+ // index throttling for a shard on this node, it will pause indexing for that shard until throttling
+ // is deactivated.
+ final String node_1 = internalCluster().startNode(
+ Settings.builder().put(IndexingMemoryController.PAUSE_INDEXING_ON_THROTTLE.getKey(), true)
+ );
+
+ logger.info("--> creating test index ...");
+ prepareCreate("test", indexSettings(1, 0)).get();
+
+ logger.info("--> index docs");
+ int numDocs = between(1, 10);
+ for (int i = 0; i < numDocs; i++) {
+ prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get();
+ }
+ logger.info("--> flush so we have an actual index");
+ indicesAdmin().prepareFlush().get();
+
+ logger.info("--> verifying count");
+ indicesAdmin().prepareRefresh().get();
+ assertHitCount(prepareSearch("test").setSize(0), numDocs);
+
+ logger.info("--> start another node");
+ final String node_2 = internalCluster().startNode();
+ ClusterHealthResponse clusterHealthResponse = clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT)
+ .setWaitForEvents(Priority.LANGUID)
+ .setWaitForNodes("2")
+ .get();
+ assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
+
+ // Activate index throttling on "test" index primary shard
+ IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node_1);
+ IndexShard shard = indicesService.indexServiceSafe(resolveIndex("test")).getShard(0);
+ shard.activateThrottling();
+ // Verify that indexing is paused for the throttled shard
+ Engine engine = shard.getEngineOrNull();
+ assertThat(engine != null && engine.isThrottled(), equalTo(true));
+
+ // Try to index a document into the "test" index which is currently throttled
+ logger.info("--> Try to index a doc while indexing is paused");
+ IndexRequestBuilder indexRequestBuilder = prepareIndex("test").setId(Integer.toString(20)).setSource("field", "value" + 20);
+ var future = indexRequestBuilder.execute();
+ expectThrows(ElasticsearchException.class, () -> future.actionGet(500, TimeUnit.MILLISECONDS));
+ // Verify that the new document has not been indexed indicating that the indexing thread is paused.
+ logger.info("--> verifying count is unchanged...");
+ indicesAdmin().prepareRefresh().get();
+ assertHitCount(prepareSearch("test").setSize(0), numDocs);
+
+ logger.info("--> relocate the shard from node1 to node2");
+ updateIndexSettings(Settings.builder().put("index.routing.allocation.include._name", node_2), "test");
+ ensureGreen(ACCEPTABLE_RELOCATION_TIME, "test");
+
+ // Relocation suspends throttling for the paused shard, allowing the indexing thread to proceed and release
+ // the indexing permit it holds, which in turn lets relocation acquire the permits and proceed.
+ clusterHealthResponse = clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT)
+ .setWaitForEvents(Priority.LANGUID)
+ .setWaitForNoRelocatingShards(true)
+ .setTimeout(ACCEPTABLE_RELOCATION_TIME)
+ .get();
+ assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
+
+ logger.info("--> verifying shard primary has relocated ...");
+ indicesService = internalCluster().getInstance(IndicesService.class, node_2);
+ shard = indicesService.indexServiceSafe(resolveIndex("test")).getShard(0);
+ assertThat(shard.routingEntry().primary(), equalTo(true));
+ engine = shard.getEngineOrNull();
+ assertThat(engine != null && engine.isThrottled(), equalTo(false));
+ logger.info("--> verifying count after relocation ...");
+ future.actionGet();
+ indicesAdmin().prepareRefresh().get();
+ assertHitCount(prepareSearch("test").setSize(0), numDocs + 1);
+ }
+
public void testRelocationWhileIndexingRandom() throws Exception {
int numberOfRelocations = scaledRandomIntBetween(1, rarely() ? 10 : 4);
int numberOfReplicas = randomBoolean() ? 0 : 1;
int numberOfNodes = numberOfReplicas == 0 ? 2 : 3;
+ boolean throttleIndexing = randomBoolean();
logger.info(
"testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})",
@@ -173,16 +255,22 @@ public void testRelocationWhileIndexingRandom() throws Exception {
numberOfNodes
);
+ // Randomly use pause throttling vs lock throttling, to verify that relocations proceed regardless
String[] nodes = new String[numberOfNodes];
logger.info("--> starting [node1] ...");
- nodes[0] = internalCluster().startNode();
+ nodes[0] = internalCluster().startNode(
+ Settings.builder().put(IndexingMemoryController.PAUSE_INDEXING_ON_THROTTLE.getKey(), randomBoolean())
+ );
logger.info("--> creating test index ...");
prepareCreate("test", indexSettings(1, numberOfReplicas)).get();
+ // Randomly use pause throttling vs lock throttling, to verify that relocations proceed regardless
for (int i = 2; i <= numberOfNodes; i++) {
logger.info("--> starting [node{}] ...", i);
- nodes[i - 1] = internalCluster().startNode();
+ nodes[i - 1] = internalCluster().startNode(
+ Settings.builder().put(IndexingMemoryController.PAUSE_INDEXING_ON_THROTTLE.getKey(), randomBoolean())
+ );
if (i != numberOfNodes) {
ClusterHealthResponse healthResponse = clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT)
.setWaitForEvents(Priority.LANGUID)
@@ -200,17 +288,37 @@ public void testRelocationWhileIndexingRandom() throws Exception {
logger.info("--> {} docs indexed", numDocs);
logger.info("--> starting relocations...");
- int nodeShiftBased = numberOfReplicas; // if we have replicas shift those
+
+ // When we have a replica, the primary is on node 0 and the replica is on node 1. The primary cannot move
+ // to the node holding the replica, so primary relocation happens between nodes 0 and 2.
+ // When there is no replica, we only have 2 nodes and the primary relocates back and forth between nodes 0 and 1.
for (int i = 0; i < numberOfRelocations; i++) {
int fromNode = (i % 2);
int toNode = fromNode == 0 ? 1 : 0;
- fromNode += nodeShiftBased;
- toNode += nodeShiftBased;
+ if (numberOfReplicas == 1) {
+ fromNode = fromNode == 1 ? 2 : 0;
+ toNode = toNode == 1 ? 2 : 0;
+ }
+
numDocs = scaledRandomIntBetween(200, 1000);
+
+ // Throttle indexing on primary shard
+ if (throttleIndexing) {
+ IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodes[fromNode]);
+ IndexShard shard = indicesService.indexServiceSafe(resolveIndex("test")).getShard(0);
+ // Activate index throttling on "test" index primary shard
+ logger.info("--> activate throttling for shard on node {}...", nodes[fromNode]);
+ shard.activateThrottling();
+ // Verify that indexing is throttled for this shard
+ Engine engine = shard.getEngineOrNull();
+ assertThat(engine != null && engine.isThrottled(), equalTo(true));
+ }
logger.debug("--> Allow indexer to index [{}] documents", numDocs);
indexer.continueIndexing(numDocs);
logger.info("--> START relocate the shard from {} to {}", nodes[fromNode], nodes[toNode]);
+
ClusterRerouteUtils.reroute(client(), new MoveAllocationCommand("test", 0, nodes[fromNode], nodes[toNode]));
+
if (rarely()) {
logger.debug("--> flushing");
indicesAdmin().prepareFlush().get();
@@ -219,11 +327,13 @@ public void testRelocationWhileIndexingRandom() throws Exception {
.setWaitForEvents(Priority.LANGUID)
.setWaitForNoRelocatingShards(true)
.setTimeout(ACCEPTABLE_RELOCATION_TIME)
+ .setWaitForGreenStatus()
.get();
assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
indexer.pauseIndexing();
logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode);
}
+
logger.info("--> done relocations");
logger.info("--> waiting for indexing threads to stop ...");
indexer.stopAndAwaitStopped();
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithIndexBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithIndexBlocksIT.java
deleted file mode 100644
index 91cd699f89682..0000000000000
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithIndexBlocksIT.java
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-package org.elasticsearch.search;
-
-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.ClosePointInTimeRequest;
-import org.elasticsearch.action.search.OpenPointInTimeRequest;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchShardsGroup;
-import org.elasticsearch.action.search.SearchShardsRequest;
-import org.elasticsearch.action.search.TransportClosePointInTimeAction;
-import org.elasticsearch.action.search.TransportOpenPointInTimeAction;
-import org.elasticsearch.action.search.TransportSearchShardsAction;
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.block.ClusterBlocks;
-import org.elasticsearch.cluster.metadata.IndexMetadata;
-import org.elasticsearch.cluster.metadata.ProjectId;
-import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.index.query.MatchAllQueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.builder.PointInTimeBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.test.ESIntegTestCase;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-import static org.elasticsearch.cluster.block.ClusterBlocks.EMPTY_CLUSTER_BLOCK;
-import static org.elasticsearch.test.ClusterServiceUtils.setState;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
-
-public class SearchWithIndexBlocksIT extends ESIntegTestCase {
-
- public void testSearchIndicesWithIndexRefreshBlocks() {
- List<String> indices = createIndices();
- Map<String, Integer> numDocsPerIndex = indexDocuments(indices);
- List<String> unblockedIndices = addIndexRefreshBlockToSomeIndices(indices);
-
- int expectedHits = 0;
- for (String index : unblockedIndices) {
- expectedHits += numDocsPerIndex.get(index);
- }
-
- assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), expectedHits);
- }
-
- public void testOpenPITOnIndicesWithIndexRefreshBlocks() {
- List<String> indices = createIndices();
- Map<String, Integer> numDocsPerIndex = indexDocuments(indices);
- List<String> unblockedIndices = addIndexRefreshBlockToSomeIndices(indices);
-
- int expectedHits = 0;
- for (String index : unblockedIndices) {
- expectedHits += numDocsPerIndex.get(index);
- }
-
- BytesReference pitId = null;
- try {
- OpenPointInTimeRequest openPITRequest = new OpenPointInTimeRequest(indices.toArray(new String[0])).keepAlive(
- TimeValue.timeValueSeconds(10)
- ).allowPartialSearchResults(true);
- pitId = client().execute(TransportOpenPointInTimeAction.TYPE, openPITRequest).actionGet().getPointInTimeId();
- SearchRequest searchRequest = new SearchRequest().source(
- new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(pitId).setKeepAlive(TimeValue.timeValueSeconds(10)))
- );
- assertHitCount(client().search(searchRequest), expectedHits);
- } finally {
- if (pitId != null) {
- client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitId)).actionGet();
- }
- }
- }
-
- public void testMultiSearchIndicesWithIndexRefreshBlocks() {
- List<String> indices = createIndices();
- Map<String, Integer> numDocsPerIndex = indexDocuments(indices);
- List<String> unblockedIndices = addIndexRefreshBlockToSomeIndices(indices);
-
- int expectedHits = 0;
- for (String index : unblockedIndices) {
- expectedHits += numDocsPerIndex.get(index);
- }
-
- final long expectedHitsL = expectedHits;
- assertResponse(
- client().prepareMultiSearch()
- .add(prepareSearch().setQuery(QueryBuilders.matchAllQuery()))
- .add(prepareSearch().setQuery(QueryBuilders.termQuery("field", "blah"))),
- response -> {
- assertHitCount(Objects.requireNonNull(response.getResponses()[0].getResponse()), expectedHitsL);
- assertHitCount(Objects.requireNonNull(response.getResponses()[1].getResponse()), 0);
- }
- );
- }
-
- public void testSearchShardsOnIndicesWithIndexRefreshBlocks() {
- List<String> indices = createIndices();
- indexDocuments(indices);
- List<String> unblockedIndices = addIndexRefreshBlockToSomeIndices(indices);
-
- var resp = client().execute(
- TransportSearchShardsAction.TYPE,
- new SearchShardsRequest(
- indices.toArray(new String[0]),
- IndicesOptions.DEFAULT,
- new MatchAllQueryBuilder(),
- null,
- null,
- true,
- null
- )
- ).actionGet();
- for (SearchShardsGroup group : resp.getGroups()) {
- assertTrue(unblockedIndices.contains(group.shardId().getIndex().getName()));
- }
- }
-
- private List<String> createIndices() {
- int numIndices = randomIntBetween(1, 3);
- List<String> indices = new ArrayList<>();
- for (int i = 0; i < numIndices; i++) {
- indices.add("test" + i);
- createIndex("test" + i);
- }
- return indices;
- }
-
- private Map<String, Integer> indexDocuments(List<String> indices) {
- Map<String, Integer> numDocsPerIndex = new HashMap<>();
- List<IndexRequestBuilder> indexRequests = new ArrayList<>();
- for (String index : indices) {
- int numDocs = randomIntBetween(0, 10);
- numDocsPerIndex.put(index, numDocs);
- for (int i = 0; i < numDocs; i++) {
- indexRequests.add(prepareIndex(index).setId(String.valueOf(i)).setSource("field", "value"));
- }
- }
- indexRandom(true, indexRequests);
-
- return numDocsPerIndex;
- }
-
- private List<String> addIndexRefreshBlockToSomeIndices(List<String> indices) {
- List<String> unblockedIndices = new ArrayList<>();
- var blocksBuilder = ClusterBlocks.builder().blocks(EMPTY_CLUSTER_BLOCK);
- for (String index : indices) {
- boolean blockIndex = randomBoolean();
- if (blockIndex) {
- blocksBuilder.addIndexBlock(ProjectId.DEFAULT, index, IndexMetadata.INDEX_REFRESH_BLOCK);
- } else {
- unblockedIndices.add(index);
- }
- }
-
- var dataNodes = clusterService().state().getNodes().getAllNodes();
- for (DiscoveryNode dataNode : dataNodes) {
- ClusterService clusterService = internalCluster().getInstance(ClusterService.class, dataNode.getName());
- ClusterState currentState = clusterService.state();
- ClusterState newState = ClusterState.builder(currentState).blocks(blocksBuilder).build();
- setState(clusterService, newState);
- }
-
- return unblockedIndices;
- }
-}
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 569556a7aa2e6..e57cb485361b6 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -354,6 +354,7 @@ static TransportVersion def(int id) {
public static final TransportVersion RERANK_SNIPPETS = def(9_130_0_00);
public static final TransportVersion PIPELINE_TRACKING_INFO = def(9_131_0_00);
public static final TransportVersion COMPONENT_TEMPLATE_TRACKING_INFO = def(9_132_0_00);
+ public static final TransportVersion TO_CHILD_BLOCK_JOIN_QUERY = def(9_133_0_00);
/*
* STOP! READ THIS FIRST! No, really,
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
index ac807bf1d752c..85e9f2c5084de 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
@@ -69,6 +69,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Executor;
@@ -198,32 +199,15 @@ private Tuple<Collection<String>, Exception> validateMappings(
Collection<String> ignoredFields = List.of();
IndexAbstraction indexAbstraction = project.getIndicesLookup().get(request.index());
try {
- if (indexAbstraction != null
- && componentTemplateSubstitutions.isEmpty()
- && indexTemplateSubstitutions.isEmpty()
- && mappingAddition.isEmpty()) {
+ if (indexAbstraction != null && componentTemplateSubstitutions.isEmpty() && indexTemplateSubstitutions.isEmpty()) {
/*
- * In this case the index exists and we don't have any component template overrides. So we can just use withTempIndexService
- * to do the mapping validation, using all the existing logic for validation.
+ * In this case the index exists and we don't have any template overrides. So we can just merge the mappingAddition (which
+ * might not exist) into the existing index mapping.
*/
IndexMetadata imd = project.getIndexSafe(indexAbstraction.getWriteIndex(request, project));
- indicesService.withTempIndexService(imd, indexService -> {
- indexService.mapperService().updateMapping(null, imd);
- return IndexShard.prepareIndex(
- indexService.mapperService(),
- sourceToParse,
- SequenceNumbers.UNASSIGNED_SEQ_NO,
- -1,
- -1,
- VersionType.INTERNAL,
- Engine.Operation.Origin.PRIMARY,
- Long.MIN_VALUE,
- false,
- request.ifSeqNo(),
- request.ifPrimaryTerm(),
- 0
- );
- });
+ CompressedXContent mappings = Optional.ofNullable(imd.mapping()).map(MappingMetadata::source).orElse(null);
+ CompressedXContent mergedMappings = mappingAddition == null ? null : mergeMappings(mappings, mappingAddition);
+ ignoredFields = validateUpdatedMappingsFromIndexMetadata(imd, mergedMappings, request, sourceToParse);
} else {
/*
* The index did not exist, or we have component template substitutions, so we put together the mappings from existing
@@ -296,15 +280,6 @@ private Tuple<Collection<String>, Exception> validateMappings(
);
final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition);
ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse);
- } else if (indexAbstraction != null && mappingAddition.isEmpty() == false) {
- /*
- * The index matched no templates of any kind, including the substitutions. But it might have a mapping. So we
- * merge in the mapping addition if it exists, and validate.
- */
- MappingMetadata mappingFromIndex = project.index(indexAbstraction.getName()).mapping();
- CompressedXContent currentIndexCompressedXContent = mappingFromIndex == null ? null : mappingFromIndex.source();
- CompressedXContent combinedMappings = mergeMappings(currentIndexCompressedXContent, mappingAddition);
- ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse);
} else {
/*
* The index matched no templates and had no mapping of its own. If there were component template substitutions
@@ -332,9 +307,6 @@ private Collection<String> validateUpdatedMappings(
IndexRequest request,
SourceToParse sourceToParse
) throws IOException {
- if (updatedMappings == null) {
- return List.of(); // no validation to do
- }
Settings dummySettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
@@ -346,8 +318,20 @@ private Collection<String> validateUpdatedMappings(
originalIndexMetadataBuilder.putMapping(new MappingMetadata(originalMappings));
}
final IndexMetadata originalIndexMetadata = originalIndexMetadataBuilder.build();
+ return validateUpdatedMappingsFromIndexMetadata(originalIndexMetadata, updatedMappings, request, sourceToParse);
+ }
+
+ private Collection<String> validateUpdatedMappingsFromIndexMetadata(
+ IndexMetadata originalIndexMetadata,
+ @Nullable CompressedXContent updatedMappings,
+ IndexRequest request,
+ SourceToParse sourceToParse
+ ) throws IOException {
+ if (updatedMappings == null) {
+ return List.of(); // no validation to do
+ }
final IndexMetadata updatedIndexMetadata = IndexMetadata.builder(request.index())
- .settings(dummySettings)
+ .settings(originalIndexMetadata.getSettings())
.putMapping(new MappingMetadata(updatedMappings))
.build();
Engine.Index result = indicesService.withTempIndexService(originalIndexMetadata, indexService -> {
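
The net effect of the change above: when the index exists and there are no template substitutions, validation now reads the index's current mapping (which may be absent), merges the optional mapping addition into it, and validates against metadata that reuses the original index settings instead of dummy settings. A minimal standalone sketch of that merge-then-validate decision, using plain maps in place of CompressedXContent (all names here are illustrative, not from the patch):

    import java.util.HashMap;
    import java.util.Map;

    class MappingMergeSketch {
        /** Returns null to mean "no validation to do", mirroring the new early return. */
        static Map<String, Object> mappingsToValidate(Map<String, Object> current, Map<String, Object> addition) {
            if (addition == null || addition.isEmpty()) {
                return null; // nothing was overridden; the stored mapping is already known to be valid
            }
            Map<String, Object> merged = new HashMap<>(current == null ? Map.of() : current);
            merged.putAll(addition); // shallow merge standing in for the real mergeMappings
            return merged;
        }
    }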
diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java
index eb45276ce2da0..e9960c832953c 100644
--- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java
@@ -11,8 +11,10 @@
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
+import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.OriginalIndices;
@@ -22,7 +24,7 @@
import org.elasticsearch.action.support.ChannelActionListener;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.support.RefCountingRunnable;
-import org.elasticsearch.client.internal.RemoteClusterClient;
+import org.elasticsearch.action.support.SubscribableListener;
import org.elasticsearch.cluster.ProjectState;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
@@ -37,6 +39,7 @@
import org.elasticsearch.common.util.concurrent.ThrottledTaskRunner;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Releasable;
+import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
@@ -48,9 +51,10 @@
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.RemoteClusterAware;
-import org.elasticsearch.transport.RemoteClusterService;
+import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportRequestHandler;
+import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
import java.util.ArrayList;
@@ -91,6 +95,8 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction<FieldCapabilitiesRequest, FieldCapabilitiesResponse> {
- (remoteClient, remoteRequest, remoteListener) -> remoteClient.execute(REMOTE_TYPE, remoteRequest, remoteListener),
+ (transportService, conn, fieldCapabilitiesRequest, responseHandler) -> transportService.sendRequest(
+ conn,
+ REMOTE_TYPE.name(),
+ fieldCapabilitiesRequest,
+ TransportRequestOptions.EMPTY,
+ responseHandler
+ ),
listener
);
}
@@ -132,17 +146,17 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti
public void executeRequest(
Task task,
FieldCapabilitiesRequest request,
- RemoteRequestExecutor remoteRequestExecutor,
+ LinkedRequestExecutor linkedRequestExecutor,
ActionListener<FieldCapabilitiesResponse> listener
) {
// workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can
- searchCoordinationExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, remoteRequestExecutor, l)));
+ searchCoordinationExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, linkedRequestExecutor, l)));
}
private void doExecuteForked(
Task task,
FieldCapabilitiesRequest request,
- RemoteRequestExecutor remoteRequestExecutor,
+ LinkedRequestExecutor linkedRequestExecutor,
ActionListener<FieldCapabilitiesResponse> listener
) {
if (ccsCheckCompatibility) {
@@ -268,12 +282,6 @@ private void doExecuteForked(
for (Map.Entry<String, OriginalIndices> remoteIndices : remoteClusterIndices.entrySet()) {
String clusterAlias = remoteIndices.getKey();
OriginalIndices originalIndices = remoteIndices.getValue();
- var remoteClusterClient = transportService.getRemoteClusterService()
- .getRemoteClusterClient(
- clusterAlias,
- singleThreadedExecutor,
- RemoteClusterService.DisconnectedStrategy.RECONNECT_UNLESS_SKIP_UNAVAILABLE
- );
FieldCapabilitiesRequest remoteRequest = prepareRemoteRequest(clusterAlias, request, originalIndices, nowInMillis);
ActionListener<FieldCapabilitiesResponse> remoteListener = ActionListener.wrap(response -> {
for (FieldCapabilitiesIndexResponse resp : response.getIndexResponses()) {
@@ -299,9 +307,13 @@ private void doExecuteForked(
handleIndexFailure.accept(RemoteClusterAware.buildRemoteIndexName(clusterAlias, index), ex);
}
});
- remoteRequestExecutor.executeRemoteRequest(
- remoteClusterClient,
- remoteRequest,
+
+ SubscribableListener<Transport.Connection> connectionListener = new SubscribableListener<>();
+ if (forceConnectTimeoutSecs != null) {
+ connectionListener.addTimeout(forceConnectTimeoutSecs, threadPool, singleThreadedExecutor);
+ }
+
+ connectionListener.addListener(
// The underlying transport service may call onFailure with a thread pool other than search_coordinator.
// This fork is a workaround to ensure that the merging of field-caps always occurs on the search_coordinator.
// TODO: remove this workaround after we fixed https://github.com/elastic/elasticsearch/issues/107439
@@ -309,8 +321,20 @@ private void doExecuteForked(
singleThreadedExecutor,
true,
ActionListener.releaseAfter(remoteListener, refs.acquire())
+ ).delegateFailure(
+ (responseListener, conn) -> linkedRequestExecutor.executeRemoteRequest(
+ transportService,
+ conn,
+ remoteRequest,
+ new ActionListenerResponseHandler<>(responseListener, FieldCapabilitiesResponse::new, singleThreadedExecutor)
+ )
)
);
+
+ boolean ensureConnected = forceConnectTimeoutSecs != null
+ || transportService.getRemoteClusterService().isSkipUnavailable(clusterAlias) == false;
+ transportService.getRemoteClusterService()
+ .maybeEnsureConnectedAndGetConnection(clusterAlias, ensureConnected, connectionListener);
}
}
}
@@ -338,11 +362,12 @@ public void onFailure(Exception e) {
});
}
- public interface RemoteRequestExecutor {
+ public interface LinkedRequestExecutor {
void executeRemoteRequest(
- RemoteClusterClient remoteClient,
+ TransportService transportService,
+ Transport.Connection conn,
FieldCapabilitiesRequest remoteRequest,
- ActionListener<FieldCapabilitiesResponse> remoteListener
+ ActionListenerResponseHandler<FieldCapabilitiesResponse> responseHandler
);
}
@@ -376,8 +401,20 @@ private static void mergeIndexResponses(
} else {
// we have no responses at all, maybe because of errors
if (indexFailures.isEmpty() == false) {
- // throw back the first exception
- listener.onFailure(failures.get(0).getException());
+ /*
+ * Under no circumstances are we to pass timeout errors originating from SubscribableListener as top-level errors.
+ * Instead, they should always be passed through the response object, as part of "failures".
+ */
+ if (failures.stream()
+ .anyMatch(
+ failure -> failure.getException() instanceof IllegalStateException ise
+ && ise.getCause() instanceof ElasticsearchTimeoutException
+ )) {
+ listener.onResponse(new FieldCapabilitiesResponse(Collections.emptyList(), failures));
+ } else {
+ // throw back the first exception
+ listener.onFailure(failures.get(0).getException());
+ }
} else {
listener.onResponse(new FieldCapabilitiesResponse(Collections.emptyList(), Collections.emptyList()));
}
@@ -585,15 +622,24 @@ List<FieldCapabilitiesFailure> build(Set<String> successfulIndices) {
for (Map.Entry<String, Exception> failure : failuresByIndex.entrySet()) {
String index = failure.getKey();
Exception e = failure.getValue();
+ /*
+ * The listener we use to briefly try to connect to a linked cluster can fail with an ElasticsearchTimeoutException
+ * if the cluster cannot be reached. To make sure we correctly recognise this scenario via
+ * ExceptionsHelper.isRemoteUnavailableException(), we wrap this error appropriately.
+ */
+ if (e instanceof ElasticsearchTimeoutException ete) {
+ e = new IllegalStateException("Unable to open any connections", ete);
+ }
if (successfulIndices.contains(index) == false) {
// we deduplicate exceptions on the underlying causes message and classname
// we unwrap the cause to e.g. group RemoteTransportExceptions coming from different nodes if the cause is the same
Throwable cause = ExceptionsHelper.unwrapCause(e);
Tuple<String, String> groupingKey = new Tuple<>(cause.getMessage(), cause.getClass().getName());
+ Exception ex = e;
indexFailures.compute(
groupingKey,
- (k, v) -> v == null ? new FieldCapabilitiesFailure(new String[] { index }, e) : v.addIndex(index)
+ (k, v) -> v == null ? new FieldCapabilitiesFailure(new String[] { index }, ex) : v.addIndex(index)
);
}
}
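
The connection handling above follows a subscribe-then-resolve pattern: the downstream work is registered on a listener first, a timeout is optionally armed, and only then is the (possibly slow) connection attempt started, so the timeout fires even if the connect never completes; the resulting timeout is recorded as a per-index failure rather than thrown. A hedged sketch of the same shape using only standard-library types, with CompletableFuture standing in for SubscribableListener:

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.TimeUnit;

    class ConnectThenSendSketch {

        // Stand-in for RemoteClusterService#maybeEnsureConnectedAndGetConnection.
        static CompletableFuture<String> connect(String clusterAlias) {
            return CompletableFuture.supplyAsync(() -> "connection-to-" + clusterAlias);
        }

        static void fieldCapsOverConnection(String clusterAlias, Long forceConnectTimeoutSecs) {
            CompletableFuture<String> connectionListener = new CompletableFuture<>();
            if (forceConnectTimeoutSecs != null) {
                // analogous to SubscribableListener#addTimeout: the chain fails with a
                // TimeoutException if no connection materialises in time
                connectionListener.orTimeout(forceConnectTimeoutSecs, TimeUnit.SECONDS);
            }
            // subscribe the request-sending step before starting the connection attempt
            connectionListener.whenComplete((conn, e) -> {
                if (e != null) {
                    System.out.println("recording per-index failure: " + e); // kept in "failures", not thrown
                } else {
                    System.out.println("sending field_caps over " + conn);
                }
            });
            // only now resolve the listener with a real connection
            connect(clusterAlias).whenComplete((conn, e) -> {
                if (e != null) connectionListener.completeExceptionally(e);
                else connectionListener.complete(conn);
            });
        }

        public static void main(String[] args) throws Exception {
            fieldCapsOverConnection("my_remote", 5L);
            Thread.sleep(200); // give the async callbacks time to run in this toy example
        }
    }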
diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
index d67e656773495..71eb94459548c 100644
--- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
+++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
@@ -165,7 +165,8 @@ ShardSearchRequest rewriteShardSearchRequest(List<DfsKnnResults> knnResults, Sha
scoreDocs.toArray(Lucene.EMPTY_SCORE_DOCS),
source.knnSearch().get(i).getField(),
source.knnSearch().get(i).getQueryVector(),
- source.knnSearch().get(i).getSimilarity()
+ source.knnSearch().get(i).getSimilarity(),
+ source.knnSearch().get(i).getFilterQueries()
).boost(source.knnSearch().get(i).boost()).queryName(source.knnSearch().get(i).queryName());
if (nestedPath != null) {
query = new NestedQueryBuilder(nestedPath, query, ScoreMode.Max).innerHit(source.knnSearch().get(i).innerHit());
diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java
index 340123456435f..c20d9832daf0a 100644
--- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java
+++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java
@@ -308,6 +308,9 @@ default void copyBlob(
*
* This operation, together with {@link #compareAndSetRegister}, must have linearizable semantics: a collection of such operations must
* act as if they operate serially, with each operation taking place at some instant in between its invocation and its completion.
+ *
+ * If the listener completes exceptionally then the write operation should be considered as continuing to run and may therefore appear
+ * to occur at some later point in time.
*
* @param purpose The purpose of the operation
* @param key key of the value to update
@@ -330,6 +333,9 @@ void compareAndExchangeRegister(
*
* This operation, together with {@link #compareAndExchangeRegister}, must have linearizable semantics: a collection of such operations
* must act as if they operate serially, with each operation taking place at some instant in between its invocation and its completion.
+ *
+ * If the listener completes exceptionally then the write operation should be considered as continuing to run and may therefore appear
+ * to occur at some later point in time.
*
* @param purpose The purpose of the operation
* @param key key of the value to update
@@ -361,7 +367,10 @@ default void compareAndSetRegister(
* This operation has read-after-write consistency with respect to writes performed using {@link #compareAndExchangeRegister} and
* {@link #compareAndSetRegister}, but does not guarantee full linearizability. In particular, a {@code getRegister} performed during
* one of these write operations may return either the old or the new value, and a caller may therefore observe the old value
- * after observing the new value, as long as both such read operations take place before the write operation completes.
+ * after observing the new value, as long as both such read operations take place before the success of the write operation.
+ *
+ * Write operations which complete exceptionally may behave as if they continue to run, thus yielding old or new values for an extended
+ * period of time. If multiple writes fail then {@code getRegister} may return any of the written values.
*
* @param purpose The purpose of the operation
* @param key key of the value to get
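
The clarified contract matters to callers: an exceptional completion is indeterminate rather than a guaranteed no-op, so a caller must not assume a failed write was dropped; the only safe probe is to re-read and check which value actually landed. A small sketch of a caller honouring that, with an AtomicLong and a synchronous API standing in for the listener-based blob-store register (the simulated failure is illustrative):

    import java.util.concurrent.ThreadLocalRandom;
    import java.util.concurrent.atomic.AtomicLong;

    class RegisterCasSketch {
        private final AtomicLong register = new AtomicLong(); // stand-in for the blob-store register

        // stand-in for compareAndExchangeRegister: returns the witnessed value, or throws
        long compareAndExchange(long expected, long updated) {
            if (ThreadLocalRandom.current().nextInt(10) == 0) {
                throw new RuntimeException("simulated blob-store failure: outcome unknown");
            }
            return register.compareAndExchange(expected, updated);
        }

        /**
         * Try to claim the register with a caller-unique token. Because a failed write is
         * indeterminate (it may still land later), we disambiguate by re-reading and checking
         * whether our token is what ended up stored; strictly, the re-read is only conclusive
         * once the failed operation is known to have finished.
         */
        boolean tryClaim(long expected, long uniqueToken) {
            try {
                return compareAndExchange(expected, uniqueToken) == expected;
            } catch (RuntimeException e) {
                return register.get() == uniqueToken;
            }
        }
    }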
diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/DefaultIVFVectorsReader.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/DefaultIVFVectorsReader.java
index 2407e79977ac5..0521785dbdeb4 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/vectors/DefaultIVFVectorsReader.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/DefaultIVFVectorsReader.java
@@ -28,8 +28,6 @@
import static org.apache.lucene.codecs.lucene102.Lucene102BinaryQuantizedVectorsFormat.QUERY_BITS;
import static org.apache.lucene.index.VectorSimilarityFunction.COSINE;
-import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN;
-import static org.apache.lucene.index.VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT;
import static org.elasticsearch.index.codec.vectors.BQSpaceUtils.transposeHalfByte;
import static org.elasticsearch.index.codec.vectors.BQVectorUtils.discretize;
import static org.elasticsearch.index.codec.vectors.OptimizedScalarQuantizer.DEFAULT_LAMBDA;
@@ -40,7 +38,9 @@
* brute force and then scores the top ones using the posting list.
*/
public class DefaultIVFVectorsReader extends IVFVectorsReader {
- private static final float FOUR_BIT_SCALE = 1f / ((1 << 4) - 1);
+
+ // The fraction of centroids that is scored at query time, chosen to preserve recall
+ public static final double CENTROID_SAMPLING_PERCENTAGE = 0.2;
public DefaultIVFVectorsReader(SegmentReadState state, FlatVectorsReader rawVectorsReader) throws IOException {
super(state, rawVectorsReader);
@@ -53,8 +53,12 @@ CentroidIterator getCentroidIterator(FieldInfo fieldInfo, int numCentroids, Inde
final float globalCentroidDp = fieldEntry.globalCentroidDp();
final OptimizedScalarQuantizer scalarQuantizer = new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
final int[] scratch = new int[targetQuery.length];
+ float[] targetQueryCopy = ArrayUtil.copyArray(targetQuery);
+ if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
+ VectorUtil.l2normalize(targetQueryCopy);
+ }
final OptimizedScalarQuantizer.QuantizationResult queryParams = scalarQuantizer.scalarQuantize(
- ArrayUtil.copyArray(targetQuery),
+ targetQueryCopy,
scratch,
(byte) 4,
fieldEntry.globalCentroid()
@@ -64,67 +68,227 @@ CentroidIterator getCentroidIterator(FieldInfo fieldInfo, int numCentroids, Inde
quantized[i] = (byte) scratch[i];
}
final ES91Int4VectorsScorer scorer = ESVectorUtil.getES91Int4VectorsScorer(centroids, fieldInfo.getVectorDimension());
- NeighborQueue queue = new NeighborQueue(fieldEntry.numCentroids(), true);
centroids.seek(0L);
- final float[] centroidCorrectiveValues = new float[3];
- for (int i = 0; i < numCentroids; i++) {
- final float qcDist = scorer.int4DotProduct(quantized);
- centroids.readFloats(centroidCorrectiveValues, 0, 3);
- final int quantizedCentroidComponentSum = Short.toUnsignedInt(centroids.readShort());
- float score = int4QuantizedScore(
- qcDist,
+ int numParents = centroids.readVInt();
+ if (numParents > 0) {
+ return getCentroidIteratorWithParents(
+ fieldInfo,
+ centroids,
+ numParents,
+ numCentroids,
+ scorer,
+ quantized,
queryParams,
- fieldInfo.getVectorDimension(),
- centroidCorrectiveValues,
- quantizedCentroidComponentSum,
- globalCentroidDp,
- fieldInfo.getVectorSimilarityFunction()
+ globalCentroidDp
);
- queue.add(i, score);
}
- final long offset = centroids.getFilePointer();
+ return getCentroidIteratorNoParent(fieldInfo, centroids, numCentroids, scorer, quantized, queryParams, globalCentroidDp);
+ }
+
+ private static CentroidIterator getCentroidIteratorNoParent(
+ FieldInfo fieldInfo,
+ IndexInput centroids,
+ int numCentroids,
+ ES91Int4VectorsScorer scorer,
+ byte[] quantizeQuery,
+ OptimizedScalarQuantizer.QuantizationResult queryParams,
+ float globalCentroidDp
+ ) throws IOException {
+ final NeighborQueue neighborQueue = new NeighborQueue(numCentroids, true);
+ score(
+ neighborQueue,
+ numCentroids,
+ 0,
+ scorer,
+ quantizeQuery,
+ queryParams,
+ globalCentroidDp,
+ fieldInfo.getVectorSimilarityFunction(),
+ new float[ES91Int4VectorsScorer.BULK_SIZE]
+ );
+ long offset = centroids.getFilePointer();
return new CentroidIterator() {
@Override
public boolean hasNext() {
- return queue.size() > 0;
+ return neighborQueue.size() > 0;
}
@Override
public long nextPostingListOffset() throws IOException {
- int centroidOrdinal = queue.pop();
+ int centroidOrdinal = neighborQueue.pop();
centroids.seek(offset + (long) Long.BYTES * centroidOrdinal);
return centroids.readLong();
}
};
}
- // TODO can we do this in off-heap blocks?
- private float int4QuantizedScore(
- float qcDist,
+ private static CentroidIterator getCentroidIteratorWithParents(
+ FieldInfo fieldInfo,
+ IndexInput centroids,
+ int numParents,
+ int numCentroids,
+ ES91Int4VectorsScorer scorer,
+ byte[] quantizeQuery,
+ OptimizedScalarQuantizer.QuantizationResult queryParams,
+ float globalCentroidDp
+ ) throws IOException {
+ // build the three queues we are going to use
+ final NeighborQueue parentsQueue = new NeighborQueue(numParents, true);
+ final int maxChildrenSize = centroids.readVInt();
+ final NeighborQueue currentParentQueue = new NeighborQueue(maxChildrenSize, true);
+ final int bufferSize = (int) Math.max(numCentroids * CENTROID_SAMPLING_PERCENTAGE, 1);
+ final NeighborQueue neighborQueue = new NeighborQueue(bufferSize, true);
+ // score the parents
+ final float[] scores = new float[ES91Int4VectorsScorer.BULK_SIZE];
+ score(
+ parentsQueue,
+ numParents,
+ 0,
+ scorer,
+ quantizeQuery,
+ queryParams,
+ globalCentroidDp,
+ fieldInfo.getVectorSimilarityFunction(),
+ scores
+ );
+ final long centroidQuantizeSize = fieldInfo.getVectorDimension() + 3 * Float.BYTES + Short.BYTES;
+ final long offset = centroids.getFilePointer();
+ final long childrenOffset = offset + (long) Long.BYTES * numParents;
+ // populate the children's queue by reading parents one by one
+ while (parentsQueue.size() > 0 && neighborQueue.size() < bufferSize) {
+ final int pop = parentsQueue.pop();
+ populateOneChildrenGroup(
+ currentParentQueue,
+ centroids,
+ offset + 2L * Integer.BYTES * pop,
+ childrenOffset,
+ centroidQuantizeSize,
+ fieldInfo,
+ scorer,
+ quantizeQuery,
+ queryParams,
+ globalCentroidDp,
+ scores
+ );
+ while (currentParentQueue.size() > 0 && neighborQueue.size() < bufferSize) {
+ final float score = currentParentQueue.topScore();
+ final int children = currentParentQueue.pop();
+ neighborQueue.add(children, score);
+ }
+ }
+ final long childrenFileOffsets = childrenOffset + centroidQuantizeSize * numCentroids;
+ return new CentroidIterator() {
+ @Override
+ public boolean hasNext() {
+ return neighborQueue.size() > 0;
+ }
+
+ @Override
+ public long nextPostingListOffset() throws IOException {
+ int centroidOrdinal = neighborQueue.pop();
+ updateQueue(); // add one child if available so the queue remains fully populated
+ centroids.seek(childrenFileOffsets + (long) Long.BYTES * centroidOrdinal);
+ return centroids.readLong();
+ }
+
+ private void updateQueue() throws IOException {
+ if (currentParentQueue.size() > 0) {
+ // add a child from the current parent queue
+ float score = currentParentQueue.topScore();
+ int children = currentParentQueue.pop();
+ neighborQueue.add(children, score);
+ } else if (parentsQueue.size() > 0) {
+ // refill the current parent queue from the next best parent
+ int pop = parentsQueue.pop();
+ populateOneChildrenGroup(
+ currentParentQueue,
+ centroids,
+ offset + 2L * Integer.BYTES * pop,
+ childrenOffset,
+ centroidQuantizeSize,
+ fieldInfo,
+ scorer,
+ quantizeQuery,
+ queryParams,
+ globalCentroidDp,
+ scores
+ );
+ updateQueue();
+ }
+ }
+ };
+ }
+
+ private static void populateOneChildrenGroup(
+ NeighborQueue neighborQueue,
+ IndexInput centroids,
+ long parentOffset,
+ long childrenOffset,
+ long centroidQuantizeSize,
+ FieldInfo fieldInfo,
+ ES91Int4VectorsScorer scorer,
+ byte[] quantizeQuery,
+ OptimizedScalarQuantizer.QuantizationResult queryParams,
+ float globalCentroidDp,
+ float[] scores
+ ) throws IOException {
+ centroids.seek(parentOffset);
+ int childrenOrdinal = centroids.readInt();
+ int numChildren = centroids.readInt();
+ centroids.seek(childrenOffset + centroidQuantizeSize * childrenOrdinal);
+ score(
+ neighborQueue,
+ numChildren,
+ childrenOrdinal,
+ scorer,
+ quantizeQuery,
+ queryParams,
+ globalCentroidDp,
+ fieldInfo.getVectorSimilarityFunction(),
+ scores
+ );
+ }
+
+ private static void score(
+ NeighborQueue neighborQueue,
+ int size,
+ int scoresOffset,
+ ES91Int4VectorsScorer scorer,
+ byte[] quantizeQuery,
OptimizedScalarQuantizer.QuantizationResult queryCorrections,
- int dims,
- float[] targetCorrections,
- int targetComponentSum,
float centroidDp,
- VectorSimilarityFunction similarityFunction
- ) {
- float ax = targetCorrections[0];
- float lx = (targetCorrections[1] - ax) * FOUR_BIT_SCALE;
- float ay = queryCorrections.lowerInterval();
- float ly = (queryCorrections.upperInterval() - ay) * FOUR_BIT_SCALE;
- float y1 = queryCorrections.quantizedComponentSum();
- float score = ax * ay * dims + ay * lx * (float) targetComponentSum + ax * ly * y1 + lx * ly * qcDist;
- if (similarityFunction == EUCLIDEAN) {
- score = queryCorrections.additionalCorrection() + targetCorrections[2] - 2 * score;
- return Math.max(1 / (1f + score), 0);
- } else {
- // For cosine and max inner product, we need to apply the additional correction, which is
- // assumed to be the non-centered dot-product between the vector and the centroid
- score += queryCorrections.additionalCorrection() + targetCorrections[2] - centroidDp;
- if (similarityFunction == MAXIMUM_INNER_PRODUCT) {
- return VectorUtil.scaleMaxInnerProductScore(score);
+ VectorSimilarityFunction similarityFunction,
+ float[] scores
+ ) throws IOException {
+ int limit = size - ES91Int4VectorsScorer.BULK_SIZE + 1;
+ int i = 0;
+ for (; i < limit; i += ES91Int4VectorsScorer.BULK_SIZE) {
+ scorer.scoreBulk(
+ quantizeQuery,
+ queryCorrections.lowerInterval(),
+ queryCorrections.upperInterval(),
+ queryCorrections.quantizedComponentSum(),
+ queryCorrections.additionalCorrection(),
+ similarityFunction,
+ centroidDp,
+ scores
+ );
+ for (int j = 0; j < ES91Int4VectorsScorer.BULK_SIZE; j++) {
+ neighborQueue.add(scoresOffset + i + j, scores[j]);
}
- return Math.max((1f + score) / 2f, 0);
+ }
+
+ for (; i < size; i++) {
+ float score = scorer.score(
+ quantizeQuery,
+ queryCorrections.lowerInterval(),
+ queryCorrections.upperInterval(),
+ queryCorrections.quantizedComponentSum(),
+ queryCorrections.additionalCorrection(),
+ similarityFunction,
+ centroidDp
+ );
+ neighborQueue.add(scoresOffset + i, score);
}
}
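
Taken together, the reader now performs a lazy two-level best-first search: parents are scored once, the children of the best parents seed a candidate queue bounded by CENTROID_SAMPLING_PERCENTAGE, and every pop pulls exactly one replacement child in, so the queue stays full without ever scoring all centroids. A standalone sketch of that refill discipline over precomputed score arrays (the real code scores lazily from the IndexInput instead, and uses global centroid ordinals):

    import java.util.Comparator;
    import java.util.PriorityQueue;

    class TwoLevelBestFirstSketch {
        private record Scored(int ord, float score) {}

        private static final Comparator<Scored> BEST_FIRST = Comparator.comparingDouble((Scored s) -> s.score()).reversed();

        private final float[][] childScores; // childScores[parent][child], assumed precomputed
        private final PriorityQueue<Scored> parents = new PriorityQueue<>(BEST_FIRST);
        private final PriorityQueue<Scored> currentParent = new PriorityQueue<>(BEST_FIRST);
        private final PriorityQueue<Scored> candidates = new PriorityQueue<>(BEST_FIRST);

        TwoLevelBestFirstSketch(float[] parentScores, float[][] childScores, int bufferSize) {
            this.childScores = childScores;
            for (int p = 0; p < parentScores.length; p++) {
                parents.add(new Scored(p, parentScores[p]));
            }
            while (candidates.size() < bufferSize && topUp()) {
                // fill the bounded buffer up front, as the reader does before returning its iterator
            }
        }

        boolean hasNext() {
            return candidates.isEmpty() == false;
        }

        /** Pop the best candidate, then pull one replacement in so the buffer stays full. */
        int next() {
            int ord = candidates.poll().ord();
            topUp();
            return ord;
        }

        private boolean topUp() {
            while (currentParent.isEmpty() && parents.isEmpty() == false) {
                int parent = parents.poll().ord();
                for (int c = 0; c < childScores[parent].length; c++) {
                    currentParent.add(new Scored(c, childScores[parent][c])); // local child ordinal
                }
            }
            if (currentParent.isEmpty()) {
                return false;
            }
            candidates.add(currentParent.poll());
            return true;
        }
    }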
diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/DefaultIVFVectorsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/DefaultIVFVectorsWriter.java
index f47ecc549831a..58f09cf70d4bd 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/vectors/DefaultIVFVectorsWriter.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/DefaultIVFVectorsWriter.java
@@ -26,11 +26,14 @@
import org.elasticsearch.index.codec.vectors.cluster.KMeansResult;
import org.elasticsearch.logging.LogManager;
import org.elasticsearch.logging.Logger;
+import org.elasticsearch.simdvec.ES91Int4VectorsScorer;
import org.elasticsearch.simdvec.ES91OSQVectorsScorer;
import java.io.IOException;
+import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
+import java.util.AbstractList;
import java.util.Arrays;
/**
@@ -42,10 +45,17 @@ public class DefaultIVFVectorsWriter extends IVFVectorsWriter {
private static final Logger logger = LogManager.getLogger(DefaultIVFVectorsWriter.class);
private final int vectorPerCluster;
+ private final int centroidsPerParentCluster;
- public DefaultIVFVectorsWriter(SegmentWriteState state, FlatVectorsWriter rawVectorDelegate, int vectorPerCluster) throws IOException {
+ public DefaultIVFVectorsWriter(
+ SegmentWriteState state,
+ FlatVectorsWriter rawVectorDelegate,
+ int vectorPerCluster,
+ int centroidsPerParentCluster
+ ) throws IOException {
super(state, rawVectorDelegate);
this.vectorPerCluster = vectorPerCluster;
+ this.centroidsPerParentCluster = centroidsPerParentCluster;
}
@Override
@@ -288,34 +298,136 @@ void writeCentroids(
LongValues offsets,
IndexOutput centroidOutput
) throws IOException {
-
- final OptimizedScalarQuantizer osq = new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
- int[] quantizedScratch = new int[fieldInfo.getVectorDimension()];
- float[] centroidScratch = new float[fieldInfo.getVectorDimension()];
- final byte[] quantized = new byte[fieldInfo.getVectorDimension()];
// TODO do we want to store these distances as well for future use?
// TODO: sort centroids by global centroid (was doing so previously here)
// TODO: sorting tanks recall possibly because centroids ordinals no longer are aligned
- for (int i = 0; i < centroidSupplier.size(); i++) {
- float[] centroid = centroidSupplier.centroid(i);
- System.arraycopy(centroid, 0, centroidScratch, 0, centroid.length);
- OptimizedScalarQuantizer.QuantizationResult result = osq.scalarQuantize(
- centroidScratch,
- quantizedScratch,
- (byte) 4,
- globalCentroid
- );
- for (int j = 0; j < quantizedScratch.length; j++) {
- quantized[j] = (byte) quantizedScratch[j];
+ if (centroidSupplier.size() > centroidsPerParentCluster * centroidsPerParentCluster) {
+ writeCentroidsWithParents(fieldInfo, centroidSupplier, globalCentroid, offsets, centroidOutput);
+ } else {
+ writeCentroidsWithoutParents(fieldInfo, centroidSupplier, globalCentroid, offsets, centroidOutput);
+ }
+ }
+
+ private void writeCentroidsWithParents(
+ FieldInfo fieldInfo,
+ CentroidSupplier centroidSupplier,
+ float[] globalCentroid,
+ LongValues offsets,
+ IndexOutput centroidOutput
+ ) throws IOException {
+ DiskBBQBulkWriter.FourBitDiskBBQBulkWriter bulkWriter = new DiskBBQBulkWriter.FourBitDiskBBQBulkWriter(
+ ES91Int4VectorsScorer.BULK_SIZE,
+ centroidOutput
+ );
+ final OptimizedScalarQuantizer osq = new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
+ final CentroidGroups centroidGroups = buildCentroidGroups(fieldInfo, centroidSupplier);
+ centroidOutput.writeVInt(centroidGroups.centroids.length);
+ centroidOutput.writeVInt(centroidGroups.maxVectorsPerCentroidLength);
+ QuantizedCentroids parentQuantizeCentroid = new QuantizedCentroids(
+ new OnHeapCentroidSupplier(centroidGroups.centroids),
+ fieldInfo.getVectorDimension(),
+ osq,
+ globalCentroid
+ );
+ bulkWriter.writeVectors(parentQuantizeCentroid);
+ int offset = 0;
+ for (int i = 0; i < centroidGroups.centroids().length; i++) {
+ centroidOutput.writeInt(offset);
+ centroidOutput.writeInt(centroidGroups.vectors()[i].length);
+ offset += centroidGroups.vectors()[i].length;
+ }
+
+ QuantizedCentroids childrenQuantizeCentroid = new QuantizedCentroids(
+ centroidSupplier,
+ fieldInfo.getVectorDimension(),
+ osq,
+ globalCentroid
+ );
+ for (int i = 0; i < centroidGroups.centroids().length; i++) {
+ final int[] centroidAssignments = centroidGroups.vectors()[i];
+ childrenQuantizeCentroid.reset(idx -> centroidAssignments[idx], centroidAssignments.length);
+ bulkWriter.writeVectors(childrenQuantizeCentroid);
+ }
+ // write the centroid offsets at the end of the file
+ for (int i = 0; i < centroidGroups.centroids().length; i++) {
+ final int[] centroidAssignments = centroidGroups.vectors()[i];
+ for (int assignment : centroidAssignments) {
+ centroidOutput.writeLong(offsets.get(assignment));
}
- writeQuantizedValue(centroidOutput, quantized, result);
}
+ }
+
+ private void writeCentroidsWithoutParents(
+ FieldInfo fieldInfo,
+ CentroidSupplier centroidSupplier,
+ float[] globalCentroid,
+ LongValues offsets,
+ IndexOutput centroidOutput
+ ) throws IOException {
+ centroidOutput.writeVInt(0);
+ DiskBBQBulkWriter.FourBitDiskBBQBulkWriter bulkWriter = new DiskBBQBulkWriter.FourBitDiskBBQBulkWriter(
+ ES91Int4VectorsScorer.BULK_SIZE,
+ centroidOutput
+ );
+ final OptimizedScalarQuantizer osq = new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
+ QuantizedCentroids quantizedCentroids = new QuantizedCentroids(
+ centroidSupplier,
+ fieldInfo.getVectorDimension(),
+ osq,
+ globalCentroid
+ );
+ bulkWriter.writeVectors(quantizedCentroids);
// write the centroid offsets at the end of the file
for (int i = 0; i < centroidSupplier.size(); i++) {
centroidOutput.writeLong(offsets.get(i));
}
}
+ private record CentroidGroups(float[][] centroids, int[][] vectors, int maxVectorsPerCentroidLength) {}
+
+ private CentroidGroups buildCentroidGroups(FieldInfo fieldInfo, CentroidSupplier centroidSupplier) throws IOException {
+ final FloatVectorValues floatVectorValues = FloatVectorValues.fromFloats(new AbstractList<>() {
+ @Override
+ public float[] get(int index) {
+ try {
+ return centroidSupplier.centroid(index);
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ @Override
+ public int size() {
+ return centroidSupplier.size();
+ }
+ }, fieldInfo.getVectorDimension());
+ // we use HierarchicalKMeans to partition the space of all vectors across merging segments;
+ // these are small numbers, so we run it with all the centroids.
+ final KMeansResult kMeansResult = new HierarchicalKMeans(
+ fieldInfo.getVectorDimension(),
+ 6,
+ floatVectorValues.size(),
+ floatVectorValues.size(),
+ -1 // disable SOAR assignments
+ ).cluster(floatVectorValues, centroidsPerParentCluster);
+ final int[] centroidVectorCount = new int[kMeansResult.centroids().length];
+ for (int i = 0; i < kMeansResult.assignments().length; i++) {
+ centroidVectorCount[kMeansResult.assignments()[i]]++;
+ }
+ final int[][] vectorsPerCentroid = new int[kMeansResult.centroids().length][];
+ int maxVectorsPerCentroidLength = 0;
+ for (int i = 0; i < kMeansResult.centroids().length; i++) {
+ vectorsPerCentroid[i] = new int[centroidVectorCount[i]];
+ maxVectorsPerCentroidLength = Math.max(maxVectorsPerCentroidLength, centroidVectorCount[i]);
+ }
+ Arrays.fill(centroidVectorCount, 0);
+ for (int i = 0; i < kMeansResult.assignments().length; i++) {
+ final int c = kMeansResult.assignments()[i];
+ vectorsPerCentroid[c][centroidVectorCount[c]++] = i;
+ }
+ return new CentroidGroups(kMeansResult.centroids(), vectorsPerCentroid, maxVectorsPerCentroidLength);
+ }
+
/**
* Calculate the centroids for the given field.
* We use the {@link HierarchicalKMeans} algorithm to partition the space of all vectors across merging segments
@@ -415,6 +527,63 @@ interface IntToBooleanFunction {
boolean apply(int ord);
}
+ static class QuantizedCentroids implements QuantizedVectorValues {
+ private final CentroidSupplier supplier;
+ private final OptimizedScalarQuantizer quantizer;
+ private final byte[] quantizedVector;
+ private final int[] quantizedVectorScratch;
+ private final float[] floatVectorScratch;
+ private OptimizedScalarQuantizer.QuantizationResult corrections;
+ private final float[] centroid;
+ private int currOrd = -1;
+ private IntToIntFunction ordTransformer = i -> i;
+ int size;
+
+ QuantizedCentroids(CentroidSupplier supplier, int dimension, OptimizedScalarQuantizer quantizer, float[] centroid) {
+ this.supplier = supplier;
+ this.quantizer = quantizer;
+ this.quantizedVector = new byte[dimension];
+ this.floatVectorScratch = new float[dimension];
+ this.quantizedVectorScratch = new int[dimension];
+ this.centroid = centroid;
+ size = supplier.size();
+ }
+
+ @Override
+ public int count() {
+ return size;
+ }
+
+ void reset(IntToIntFunction ordTransformer, int size) {
+ this.ordTransformer = ordTransformer;
+ this.currOrd = -1;
+ this.size = size;
+ this.corrections = null;
+ }
+
+ @Override
+ public byte[] next() throws IOException {
+ if (currOrd >= count() - 1) {
+ throw new IllegalStateException("No more vectors to read, current ord: " + currOrd + ", count: " + count());
+ }
+ currOrd++;
+ float[] vector = supplier.centroid(ordTransformer.apply(currOrd));
+ // It's possible that the vectors are on-heap and we cannot mutate them, as we may quantize twice
+ // due to overspill, so we copy the vector to a scratch array
+ System.arraycopy(vector, 0, floatVectorScratch, 0, vector.length);
+ corrections = quantizer.scalarQuantize(floatVectorScratch, quantizedVectorScratch, (byte) 4, centroid);
+ for (int i = 0; i < quantizedVectorScratch.length; i++) {
+ quantizedVector[i] = (byte) quantizedVectorScratch[i];
+ }
+ return quantizedVector;
+ }
+
+ @Override
+ public OptimizedScalarQuantizer.QuantizationResult getCorrections() throws IOException {
+ return corrections;
+ }
+ }
+
static class OnHeapQuantizedVectors implements QuantizedVectorValues {
private final FloatVectorValues vectorValues;
private final OptimizedScalarQuantizer quantizer;
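
buildCentroidGroups above inverts the flat assignments array with a classic two-pass counting pattern: count the members of each group, size the buckets exactly, then reuse the counts array as per-bucket write cursors. The same idiom in isolation:

    import java.util.Arrays;

    class GroupAssignmentsSketch {
        /** Invert assignments[i] = group into groups[g] = the ordinals assigned to g. */
        static int[][] group(int[] assignments, int numGroups) {
            int[] counts = new int[numGroups];
            for (int a : assignments) {
                counts[a]++; // pass 1: how big is each bucket?
            }
            int[][] groups = new int[numGroups][];
            for (int g = 0; g < numGroups; g++) {
                groups[g] = new int[counts[g]]; // size the buckets exactly
            }
            Arrays.fill(counts, 0); // reuse counts as per-bucket write cursors
            for (int i = 0; i < assignments.length; i++) {
                int g = assignments[i];
                groups[g][counts[g]++] = i; // pass 2: place each ordinal
            }
            return groups;
        }

        public static void main(String[] args) {
            System.out.println(Arrays.deepToString(group(new int[] { 1, 0, 1, 2, 0 }, 3))); // [[1, 4], [0, 2], [3]]
        }
    }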
diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/DiskBBQBulkWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/DiskBBQBulkWriter.java
index 662878270ea09..9da77fb77661a 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/vectors/DiskBBQBulkWriter.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/DiskBBQBulkWriter.java
@@ -84,4 +84,34 @@ void writeVectors(DefaultIVFVectorsWriter.QuantizedVectorValues qvv) throws IOEx
}
}
}
+
+ static class FourBitDiskBBQBulkWriter extends DiskBBQBulkWriter {
+ private final OptimizedScalarQuantizer.QuantizationResult[] corrections;
+
+ FourBitDiskBBQBulkWriter(int bulkSize, IndexOutput out) {
+ super(bulkSize, out);
+ this.corrections = new OptimizedScalarQuantizer.QuantizationResult[bulkSize];
+ }
+
+ @Override
+ void writeVectors(DefaultIVFVectorsWriter.QuantizedVectorValues qvv) throws IOException {
+ int limit = qvv.count() - bulkSize + 1;
+ int i = 0;
+ for (; i < limit; i += bulkSize) {
+ for (int j = 0; j < bulkSize; j++) {
+ byte[] qv = qvv.next();
+ corrections[j] = qvv.getCorrections();
+ out.writeBytes(qv, qv.length);
+ }
+ writeCorrections(corrections, out);
+ }
+ // write tail
+ for (; i < qvv.count(); ++i) {
+ byte[] qv = qvv.next();
+ OptimizedScalarQuantizer.QuantizationResult correction = qvv.getCorrections();
+ out.writeBytes(qv, qv.length);
+ writeCorrection(correction, out);
+ }
+ }
+ }
}
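
The new four-bit bulk writer produces the on-disk interleaving that bulk scorers expect: groups of bulkSize quantized vectors back to back, each group followed by its bulkSize correction records, then a per-vector tail where each vector is immediately followed by its own correction. A hedged sketch of that loop over plain byte arrays (the real corrections are structured QuantizationResult records, not raw bytes):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    class BulkLayoutSketch {
        static byte[] write(byte[][] vectors, byte[][] corrections, int bulkSize) throws IOException {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            int limit = vectors.length - bulkSize + 1;
            int i = 0;
            for (; i < limit; i += bulkSize) {
                for (int j = 0; j < bulkSize; j++) {
                    out.write(vectors[i + j]); // bulkSize vectors back to back
                }
                for (int j = 0; j < bulkSize; j++) {
                    out.write(corrections[i + j]); // then their corrections, in the same order
                }
            }
            for (; i < vectors.length; i++) {
                out.write(vectors[i]); // tail: each vector immediately
                out.write(corrections[i]); // followed by its own correction
            }
            return out.toByteArray();
        }
    }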
diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/IVFVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/IVFVectorsFormat.java
index 7a18558703423..aa8921cee24c4 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/vectors/IVFVectorsFormat.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/IVFVectorsFormat.java
@@ -65,10 +65,14 @@ public class IVFVectorsFormat extends KnnVectorsFormat {
public static final int DEFAULT_VECTORS_PER_CLUSTER = 384;
public static final int MIN_VECTORS_PER_CLUSTER = 64;
public static final int MAX_VECTORS_PER_CLUSTER = 1 << 16; // 65536
+ public static final int DEFAULT_CENTROIDS_PER_PARENT_CLUSTER = 16;
+ public static final int MIN_CENTROIDS_PER_PARENT_CLUSTER = 2;
+ public static final int MAX_CENTROIDS_PER_PARENT_CLUSTER = 1 << 8; // 256
private final int vectorPerCluster;
+ private final int centroidsPerParentCluster;
- public IVFVectorsFormat(int vectorPerCluster) {
+ public IVFVectorsFormat(int vectorPerCluster, int centroidsPerParentCluster) {
super(NAME);
if (vectorPerCluster < MIN_VECTORS_PER_CLUSTER || vectorPerCluster > MAX_VECTORS_PER_CLUSTER) {
throw new IllegalArgumentException(
@@ -80,17 +84,28 @@ public IVFVectorsFormat(int vectorPerCluster) {
+ vectorPerCluster
);
}
+ if (centroidsPerParentCluster < MIN_CENTROIDS_PER_PARENT_CLUSTER || centroidsPerParentCluster > MAX_CENTROIDS_PER_PARENT_CLUSTER) {
+ throw new IllegalArgumentException(
+ "centroidsPerParentCluster must be between "
+ + MIN_CENTROIDS_PER_PARENT_CLUSTER
+ + " and "
+ + MAX_CENTROIDS_PER_PARENT_CLUSTER
+ + ", got: "
+ + centroidsPerParentCluster
+ );
+ }
this.vectorPerCluster = vectorPerCluster;
+ this.centroidsPerParentCluster = centroidsPerParentCluster;
}
/** Constructs a format using the given graph construction parameters and scalar quantization. */
public IVFVectorsFormat() {
- this(DEFAULT_VECTORS_PER_CLUSTER);
+ this(DEFAULT_VECTORS_PER_CLUSTER, DEFAULT_CENTROIDS_PER_PARENT_CLUSTER);
}
@Override
public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException {
- return new DefaultIVFVectorsWriter(state, rawVectorFormat.fieldsWriter(state), vectorPerCluster);
+ return new DefaultIVFVectorsWriter(state, rawVectorFormat.fieldsWriter(state), vectorPerCluster, centroidsPerParentCluster);
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/HierarchicalKMeans.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/HierarchicalKMeans.java
index fc13a4b9faa1a..22a78cfbae835 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/HierarchicalKMeans.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/HierarchicalKMeans.java
@@ -34,7 +34,7 @@ public HierarchicalKMeans(int dimension) {
this(dimension, MAX_ITERATIONS_DEFAULT, SAMPLES_PER_CLUSTER_DEFAULT, MAXK, DEFAULT_SOAR_LAMBDA);
}
- HierarchicalKMeans(int dimension, int maxIterations, int samplesPerCluster, int clustersPerNeighborhood, float soarLambda) {
+ public HierarchicalKMeans(int dimension, int maxIterations, int samplesPerCluster, int clustersPerNeighborhood, float soarLambda) {
this.dimension = dimension;
this.maxIterations = maxIterations;
this.samplesPerCluster = samplesPerCluster;
@@ -79,7 +79,7 @@ public KMeansResult cluster(FloatVectorValues vectors, int targetSize) throws IO
if (kMeansIntermediate.centroids().length > 1 && kMeansIntermediate.centroids().length < vectors.size()) {
int localSampleSize = Math.min(kMeansIntermediate.centroids().length * samplesPerCluster / 2, vectors.size());
KMeansLocal kMeansLocal = new KMeansLocal(localSampleSize, maxIterations);
- kMeansLocal.cluster(vectors, kMeansIntermediate, clustersPerNeighborhood, DEFAULT_SOAR_LAMBDA);
+ kMeansLocal.cluster(vectors, kMeansIntermediate, clustersPerNeighborhood, soarLambda);
}
return kMeansIntermediate;
diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/KMeansLocal.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/KMeansLocal.java
index a3be558128577..a1e480fb73266 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/KMeansLocal.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/KMeansLocal.java
@@ -307,7 +307,7 @@ private void doCluster(FloatVectorValues vectors, KMeansIntermediate kMeansInter
neighborhoods = computeNeighborhoods(centroids, clustersPerNeighborhood);
}
cluster(vectors, kMeansIntermediate, neighborhoods);
- if (neighborAware) {
+ if (neighborAware && soarLambda >= 0) {
assert kMeansIntermediate.soarAssignments().length == 0;
kMeansIntermediate.setSoarAssignments(new int[vectors.size()]);
assignSpilled(vectors, kMeansIntermediate, neighborhoods, soarLambda);
diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
index 767ba8cb89500..2cfadb11ec4f7 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -146,6 +146,9 @@ public abstract class Engine implements Closeable {
protected final ReentrantLock failEngineLock = new ReentrantLock();
protected final SetOnce failedEngine = new SetOnce<>();
protected final boolean enableRecoverySource;
+ // This should only be enabled in serverless. In stateful clusters, where we have
+ // indexing replicas, if pause throttling gets enabled on replicas, it will indirectly
+ // pause the primary as well, which might prevent us from relocating the primary shard.
protected final boolean pauseIndexingOnThrottle;
private final AtomicBoolean isClosing = new AtomicBoolean();
@@ -482,7 +485,10 @@ protected static final class IndexThrottle {
private final Condition pauseCondition = pauseIndexingLock.newCondition();
private final ReleasableLock pauseLockReference = new ReleasableLock(pauseIndexingLock);
private volatile AtomicBoolean suspendThrottling = new AtomicBoolean();
- private final boolean pauseWhenThrottled; // Should throttling pause indexing ?
+
+ // Should throttling pause indexing? This is decided by the
+ // IndexingMemoryController#PAUSE_INDEXING_ON_THROTTLE setting for this node.
+ private final boolean pauseWhenThrottled;
private volatile ReleasableLock lock = NOOP_LOCK;
public IndexThrottle(boolean pause) {
@@ -513,7 +519,6 @@ public Releasable acquireThrottle() {
/** Activate throttling, which switches the lock to be a real lock */
public void activate() {
assert lock == NOOP_LOCK : "throttling activated while already active";
-
startOfThrottleNS = System.nanoTime();
if (pauseWhenThrottled) {
lock = pauseLockReference;
@@ -561,10 +566,14 @@ boolean isThrottled() {
return lock != NOOP_LOCK;
}
+ boolean isIndexingPaused() {
+ return (lock == pauseLockReference);
+ }
+
/** Suspend throttling to allow another task such as relocation to acquire all indexing permits */
public void suspendThrottle() {
if (pauseWhenThrottled) {
- try (Releasable releasableLock = pauseLockReference.acquire()) {
+ try (Releasable ignored = pauseLockReference.acquire()) {
suspendThrottling.setRelease(true);
pauseCondition.signalAll();
}
@@ -574,7 +583,7 @@ public void suspendThrottle() {
/** Reverse what was done in {@link #suspendThrottle()} */
public void resumeThrottle() {
if (pauseWhenThrottled) {
- try (Releasable releasableLock = pauseLockReference.acquire()) {
+ try (Releasable ignored = pauseLockReference.acquire()) {
suspendThrottling.setRelease(false);
pauseCondition.signalAll();
}
@@ -2296,6 +2305,18 @@ public interface Warmer {
*/
public abstract void deactivateThrottling();
+ /**
+ * If indexing is throttled to the point where it is paused completely,
+ * another task trying to get indexing permits might want to suspend throttling,
+ * letting one thread pass at a time, so that it does not get starved.
+ */
+ public abstract void suspendThrottling();
+
+ /**
+ * Reverses a previous {@link #suspendThrottling} call.
+ */
+ public abstract void resumeThrottling();
+
/**
* This method replays translog to restore the Lucene index which might be reverted previously.
* This ensures that all acknowledged writes are restored correctly when this engine is promoted.
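
The intended call discipline for the new pair is a strict bracket: suspend, do the permit-hungry work, and resume in a finally block so that throttling is always restored even on failure. A minimal usage sketch, assuming an Engine reference at hand:

    // Hedged sketch (not part of the patch): let a relocation-style task make progress
    // even while indexing is pause-throttled.
    void runWithThrottlingSuspended(Engine engine, Runnable task) {
        engine.suspendThrottling();
        try {
            task.run(); // e.g. blocking until all indexing permits have been acquired
        } finally {
            engine.resumeThrottling(); // always restore the paused state
        }
    }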
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 7753decfc4854..1bed195862959 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -2883,6 +2883,16 @@ public void deactivateThrottling() {
}
}
+ @Override
+ public void suspendThrottling() {
+ throttle.suspendThrottle();
+ }
+
+ @Override
+ public void resumeThrottling() {
+ throttle.resumeThrottle();
+ }
+
@Override
public boolean isThrottled() {
return throttle.isThrottled();
diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java
index 2388154494ad4..800854dcedb0a 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java
@@ -500,6 +500,12 @@ public void activateThrottling() {}
@Override
public void deactivateThrottling() {}
+ @Override
+ public void suspendThrottling() {}
+
+ @Override
+ public void resumeThrottling() {}
+
@Override
public void trimUnreferencedTranslogFiles() {}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
index 9019edc435eaf..c9c14d027ebfd 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
@@ -2312,7 +2312,7 @@ static class BBQIVFIndexOptions extends QuantizedIndexOptions {
@Override
KnnVectorsFormat getVectorsFormat(ElementType elementType) {
assert elementType == ElementType.FLOAT;
- return new IVFVectorsFormat(clusterSize);
+ return new IVFVectorsFormat(clusterSize, IVFVectorsFormat.DEFAULT_CENTROIDS_PER_PARENT_CLUSTER);
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/index/query/ToChildBlockJoinQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ToChildBlockJoinQueryBuilder.java
new file mode 100644
index 0000000000000..1e6e6feee3f42
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/index/query/ToChildBlockJoinQueryBuilder.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.query;
+
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.join.BitSetProducer;
+import org.apache.lucene.search.join.ToChildBlockJoinQuery;
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.index.mapper.NestedObjectMapper;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * A query that returns child documents whose parent matches the provided query.
+ * This query is used only for internal purposes and is not exposed to users.
+ */
+public class ToChildBlockJoinQueryBuilder extends AbstractQueryBuilder<ToChildBlockJoinQueryBuilder> {
+ public static final String NAME = "to_child_block_join";
+ private final QueryBuilder parentQueryBuilder;
+
+ public ToChildBlockJoinQueryBuilder(QueryBuilder parentQueryBuilder) {
+ this.parentQueryBuilder = parentQueryBuilder;
+ }
+
+ public ToChildBlockJoinQueryBuilder(StreamInput in) throws IOException {
+ super(in);
+ parentQueryBuilder = in.readNamedWriteable(QueryBuilder.class);
+ }
+
+ @Override
+ protected void doWriteTo(StreamOutput out) throws IOException {
+ out.writeNamedWriteable(parentQueryBuilder);
+ }
+
+ @Override
+ protected void doXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(NAME);
+ builder.field("query");
+ parentQueryBuilder.toXContent(builder, params);
+ boostAndQueryNameToXContent(builder);
+ builder.endObject();
+ }
+
+ @Override
+ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
+ QueryBuilder rewritten = parentQueryBuilder.rewrite(queryRewriteContext);
+ if (rewritten instanceof MatchNoneQueryBuilder) {
+ return rewritten;
+ }
+ if (rewritten != parentQueryBuilder) {
+ return new ToChildBlockJoinQueryBuilder(rewritten);
+ }
+ return this;
+ }
+
+ @Override
+ protected Query doToQuery(SearchExecutionContext context) throws IOException {
+ final Query parentFilter;
+ NestedObjectMapper originalObjectMapper = context.nestedScope().getObjectMapper();
+ if (originalObjectMapper != null) {
+ try {
+ // we are in a nested context; to get the parent filter we need to go up one level
+ context.nestedScope().previousLevel();
+ NestedObjectMapper objectMapper = context.nestedScope().getObjectMapper();
+ parentFilter = objectMapper == null
+ ? Queries.newNonNestedFilter(context.indexVersionCreated())
+ : objectMapper.nestedTypeFilter();
+ } finally {
+ context.nestedScope().nextLevel(originalObjectMapper);
+ }
+ } else {
+ // we are NOT in a nested context, coming from the top level knn search
+ parentFilter = Queries.newNonNestedFilter(context.indexVersionCreated());
+ }
+ final BitSetProducer parentBitSet = context.bitsetFilter(parentFilter);
+ Query parentQuery = parentQueryBuilder.toQuery(context);
+ // ensure that parentQuery only applies to parent docs by adding parentFilter
+ return new ToChildBlockJoinQuery(Queries.filtered(parentQuery, parentFilter), parentBitSet);
+ }
+
+ @Override
+ protected boolean doEquals(ToChildBlockJoinQueryBuilder other) {
+ return Objects.equals(parentQueryBuilder, other.parentQueryBuilder);
+ }
+
+ @Override
+ protected int doHashCode() {
+ return Objects.hash(parentQueryBuilder);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersions.TO_CHILD_BLOCK_JOIN_QUERY;
+ }
+}
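
The new builder is a thin serialization wrapper around Lucene's ToChildBlockJoinQuery. For reference, a minimal, self-contained Lucene sketch of the join it delegates to (not part of the patch; class name and field values are illustrative): parents matching a query are mapped to the child documents of their block.

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.join.BitSetProducer;
    import org.apache.lucene.search.join.QueryBitSetProducer;
    import org.apache.lucene.search.join.ToChildBlockJoinQuery;
    import org.apache.lucene.store.ByteBuffersDirectory;

    import java.util.List;

    public class ToChildBlockJoinDemo {
        public static void main(String[] args) throws Exception {
            try (var dir = new ByteBuffersDirectory(); var writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
                // A "block": child documents first, the parent last, indexed atomically.
                Document child1 = new Document();
                child1.add(new StringField("type", "child", Field.Store.NO));
                Document child2 = new Document();
                child2.add(new StringField("type", "child", Field.Store.NO));
                Document parent = new Document();
                parent.add(new StringField("type", "parent", Field.Store.NO));
                parent.add(new StringField("color", "red", Field.Store.NO));
                writer.addDocuments(List.of(child1, child2, parent));
                writer.commit();

                try (var reader = DirectoryReader.open(dir)) {
                    IndexSearcher searcher = new IndexSearcher(reader);
                    // Bitset marking which documents are parents.
                    BitSetProducer parents = new QueryBitSetProducer(new TermQuery(new Term("type", "parent")));
                    // Match parents by color, then join down to their children.
                    var query = new ToChildBlockJoinQuery(new TermQuery(new Term("color", "red")), parents);
                    System.out.println("child hits: " + searcher.count(query)); // prints 2
                }
            }
        }
    }
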
diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index 59129f9911da2..19af2616dbb7b 100644
--- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -810,7 +810,7 @@ public void relocated(
) throws IllegalIndexShardStateException, IllegalStateException {
assert shardRouting.primary() : "only primaries can be marked as relocated: " + shardRouting;
try (Releasable forceRefreshes = refreshListeners.forceRefreshes()) {
- indexShardOperationPermits.blockOperations(new ActionListener<>() {
+ blockOperations(new ActionListener<>() {
@Override
public void onResponse(Releasable releasable) {
boolean success = false;
@@ -888,8 +888,13 @@ public void onFailure(Exception e) {
listener.onFailure(e);
}
}
- }, 30L, TimeUnit.MINUTES, EsExecutors.DIRECT_EXECUTOR_SERVICE); // Wait on current thread because this execution is wrapped by
- // CancellableThreads and we want to be able to interrupt it
+ },
+ 30L,
+ TimeUnit.MINUTES,
+ // Wait on current thread because this execution is wrapped by CancellableThreads and we want to be able to interrupt it
+ EsExecutors.DIRECT_EXECUTOR_SERVICE
+ );
+
}
}
@@ -2765,6 +2770,22 @@ public void deactivateThrottling() {
}
}
+ private void suspendThrottling() {
+ try {
+ getEngine().suspendThrottling();
+ } catch (AlreadyClosedException ex) {
+ // ignore
+ }
+ }
+
+ private void resumeThrottling() {
+ try {
+ getEngine().resumeThrottling();
+ } catch (AlreadyClosedException ex) {
+ // ignore
+ }
+ }
+
private void handleRefreshException(Exception e) {
if (e instanceof AlreadyClosedException) {
// ignore
@@ -3823,6 +3844,39 @@ private ActionListener<Releasable> wrapPrimaryOperationPermitListener(final Acti
});
}
+ /**
+ * Immediately delays operations and uses the {@code executor} to wait for in-flight operations to finish and then acquires all
+ * permits. When all permits are acquired, the provided {@link ActionListener} is called under the guarantee that no new operations are
+ * started. Delayed operations are run once the {@link Releasable} is released or if a failure occurs while acquiring all permits; in
+ * this case the {@code onFailure} handler will be invoked after delayed operations are released.
+ *
+ * @param onAcquired {@link ActionListener} that is invoked once acquisition is successful or failed. This listener should not throw.
+ * @param timeout the maximum time to wait for the in-flight operations block
+ * @param timeUnit the time unit of the {@code timeout} argument
+ * @param executor executor on which to wait for in-flight operations to finish and acquire all permits
+ */
+ public void blockOperations(
+ final ActionListener<Releasable> onAcquired,
+ final long timeout,
+ final TimeUnit timeUnit,
+ final Executor executor
+ ) {
+ // In case indexing is paused on the shard, suspend throttling so that any currently paused task can
+ // go ahead and release the indexing permit it holds.
+ suspendThrottling();
+ try {
+ indexShardOperationPermits.blockOperations(
+ ActionListener.runAfter(onAcquired, this::resumeThrottling),
+ timeout,
+ timeUnit,
+ executor
+ );
+ } catch (IndexShardClosedException e) {
+ resumeThrottling();
+ throw e;
+ }
+ }
+
private void asyncBlockOperations(ActionListener<Releasable> onPermitAcquired, long timeout, TimeUnit timeUnit) {
final Releasable forceRefreshes = refreshListeners.forceRefreshes();
final ActionListener<Releasable> wrappedListener = ActionListener.wrap(r -> {
@@ -3833,7 +3887,7 @@ private void asyncBlockOperations(ActionListener<Releasable> onPermitAcquired, l
onPermitAcquired.onFailure(e);
});
try {
- indexShardOperationPermits.blockOperations(wrappedListener, timeout, timeUnit, threadPool.generic());
+ blockOperations(wrappedListener, timeout, timeUnit, threadPool.generic());
} catch (Exception e) {
forceRefreshes.close();
throw e;
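
The wrapper above guarantees that resumeThrottling() runs on every path: ActionListener.runAfter fires it after the acquired listener completes, and the catch clause covers a synchronous IndexShardClosedException. A plain-Java sketch of that combinator pattern (illustrative names, not the Elasticsearch ActionListener API):

    import java.util.function.Consumer;

    final class Listeners {
        interface Listener<T> {
            void onResponse(T value);
            void onFailure(Exception e);
        }

        // Run `cleanup` after either outcome, mirroring ActionListener.runAfter.
        static <T> Listener<T> runAfter(Listener<T> delegate, Runnable cleanup) {
            return new Listener<>() {
                @Override
                public void onResponse(T value) {
                    try { delegate.onResponse(value); } finally { cleanup.run(); }
                }

                @Override
                public void onFailure(Exception e) {
                    try { delegate.onFailure(e); } finally { cleanup.run(); }
                }
            };
        }

        // Mirrors blockOperations above: suspend first, resume via the wrapped
        // listener, and also resume if the block call itself throws synchronously.
        static <T> void blockWithSuspendedThrottling(Runnable suspend, Runnable resume, Consumer<Listener<T>> block, Listener<T> onAcquired) {
            suspend.run();
            try {
                block.accept(runAfter(onAcquired, resume));
            } catch (RuntimeException e) {
                resume.run();
                throw e;
            }
        }
    }
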
diff --git a/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java b/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java
index 0f9b724d965bf..d5213f30dc63c 100644
--- a/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java
+++ b/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java
@@ -90,7 +90,10 @@ public class IndexingMemoryController implements IndexingOperationListener, Clos
/* Currently, indexing is throttled due to memory pressure in stateful/stateless or disk pressure in stateless.
* This limits the number of indexing threads to 1 per shard. However, this might not be enough when the number of
* shards that need indexing is larger than the number of threads. So we might opt to pause indexing completely.
- * The default value for this setting is false, but it will be set to true in stateless.
+ * The default value for this setting is false, but it can be set to true in stateless.
+ * Note that this should only be enabled in stateless. In stateful clusters, where we have
+ * indexing replicas, if pause throttling gets enabled on replicas, it will indirectly
+ * pause the primary as well, which might prevent us from relocating the primary shard.
*/
public static final Setting<Boolean> PAUSE_INDEXING_ON_THROTTLE = Setting.boolSetting(
"indices.pause.on.throttle",
diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
index 66648b7126514..43c2332b8e4a5 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
@@ -57,6 +57,7 @@ private SearchCapabilities() {}
private static final String FIELD_EXISTS_QUERY_FOR_TEXT_FIELDS_NO_INDEX_OR_DV = "field_exists_query_for_text_fields_no_index_or_dv";
private static final String SYNTHETIC_VECTORS_SETTING = "synthetic_vectors_setting";
private static final String UPDATE_FIELD_TO_BBQ_DISK = "update_field_to_bbq_disk";
+ private static final String KNN_FILTER_ON_NESTED_FIELDS_CAPABILITY = "knn_filter_on_nested_fields";
public static final Set<String> CAPABILITIES;
static {
@@ -82,6 +83,7 @@ private SearchCapabilities() {}
capabilities.add(DENSE_VECTOR_UPDATABLE_BBQ);
capabilities.add(FIELD_EXISTS_QUERY_FOR_TEXT_FIELDS_NO_INDEX_OR_DV);
capabilities.add(UPDATE_FIELD_TO_BBQ_DISK);
+ capabilities.add(KNN_FILTER_ON_NESTED_FIELDS_CAPABILITY);
if (SYNTHETIC_VECTORS) {
capabilities.add(SYNTHETIC_VECTORS_SETTING);
}
diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java
index 56b203700b362..7a5ae6f25a632 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -66,6 +66,7 @@
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.query.TermsSetQueryBuilder;
+import org.elasticsearch.index.query.ToChildBlockJoinQueryBuilder;
import org.elasticsearch.index.query.WildcardQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.index.query.functionscore.ExponentialDecayFunctionBuilder;
@@ -1187,6 +1188,9 @@ private void registerQueryParsers(List<SearchPlugin> plugins) {
registerQuery(new QuerySpec<>(ExactKnnQueryBuilder.NAME, ExactKnnQueryBuilder::new, parser -> {
throw new IllegalArgumentException("[exact_knn] queries cannot be provided directly");
}));
+ registerQuery(new QuerySpec<>(ToChildBlockJoinQueryBuilder.NAME, ToChildBlockJoinQueryBuilder::new, parser -> {
+ throw new IllegalArgumentException("[to_child_block_join] queries cannot be provided directly");
+ }));
registerQuery(
new QuerySpec<>(RandomSamplingQueryBuilder.NAME, RandomSamplingQueryBuilder::new, RandomSamplingQueryBuilder::fromXContent)
);
diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java
index 1a81f4b984e93..17403bdbb05c9 100644
--- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java
@@ -17,13 +17,16 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.query.AbstractQueryBuilder;
+import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.SearchExecutionContext;
+import org.elasticsearch.index.query.ToChildBlockJoinQueryBuilder;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
+import java.util.List;
import java.util.Objects;
/**
@@ -37,6 +40,7 @@ public class KnnScoreDocQueryBuilder extends AbstractQueryBuilder<KnnScoreDocQueryBuilder> {
+ private final List<QueryBuilder> filterQueries;
/**
* Creates a query builder.
@@ -44,11 +48,18 @@ public class KnnScoreDocQueryBuilder extends AbstractQueryBuilder<KnnScoreDocQueryBuilder> {
- public KnnScoreDocQueryBuilder(ScoreDoc[] scoreDocs, String fieldName, VectorData queryVector, Float vectorSimilarity) {
+ public KnnScoreDocQueryBuilder(
+ ScoreDoc[] scoreDocs,
+ String fieldName,
+ VectorData queryVector,
+ Float vectorSimilarity,
+ List<QueryBuilder> filterQueries
+ ) {
this.scoreDocs = scoreDocs;
this.fieldName = fieldName;
this.queryVector = queryVector;
this.vectorSimilarity = vectorSimilarity;
+ this.filterQueries = filterQueries;
}
public KnnScoreDocQueryBuilder(StreamInput in) throws IOException {
@@ -74,6 +85,11 @@ public KnnScoreDocQueryBuilder(StreamInput in) throws IOException {
} else {
this.vectorSimilarity = null;
}
+ if (in.getTransportVersion().onOrAfter(TransportVersions.TO_CHILD_BLOCK_JOIN_QUERY)) {
+ this.filterQueries = readQueries(in);
+ } else {
+ this.filterQueries = List.of();
+ }
}
@Override
@@ -116,6 +132,9 @@ protected void doWriteTo(StreamOutput out) throws IOException {
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) {
out.writeOptionalFloat(vectorSimilarity);
}
+ if (out.getTransportVersion().onOrAfter(TransportVersions.TO_CHILD_BLOCK_JOIN_QUERY)) {
+ writeQueries(out, filterQueries);
+ }
}
@Override
@@ -135,6 +154,13 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
if (vectorSimilarity != null) {
builder.field("similarity", vectorSimilarity);
}
+ if (filterQueries.isEmpty() == false) {
+ builder.startArray("filter");
+ for (QueryBuilder filterQuery : filterQueries) {
+ filterQuery.toXContent(builder, params);
+ }
+ builder.endArray();
+ }
boostAndQueryNameToXContent(builder);
builder.endObject();
}
@@ -150,7 +176,20 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws
return new MatchNoneQueryBuilder("The \"" + getName() + "\" query was rewritten to a \"match_none\" query.");
}
if (queryRewriteContext.convertToInnerHitsRewriteContext() != null && queryVector != null && fieldName != null) {
- return new ExactKnnQueryBuilder(queryVector, fieldName, vectorSimilarity);
+ QueryBuilder exactKnnQuery = new ExactKnnQueryBuilder(queryVector, fieldName, vectorSimilarity);
+ if (filterQueries.isEmpty()) {
+ return exactKnnQuery;
+ } else {
+ BoolQueryBuilder boolQuery = new BoolQueryBuilder();
+ boolQuery.must(exactKnnQuery);
+ for (QueryBuilder filter : this.filterQueries) {
+ // a filter may match either parent docs or nested docs, so add both variants as should clauses of a filter
+ BoolQueryBuilder adjustedFilter = new BoolQueryBuilder().should(filter)
+ .should(new ToChildBlockJoinQueryBuilder(filter));
+ boolQuery.filter(adjustedFilter);
+ }
+ return boolQuery;
+ }
}
return super.doRewrite(queryRewriteContext);
}
@@ -173,7 +212,8 @@ protected boolean doEquals(KnnScoreDocQueryBuilder other) {
}
return Objects.equals(fieldName, other.fieldName)
&& Objects.equals(queryVector, other.queryVector)
- && Objects.equals(vectorSimilarity, other.vectorSimilarity);
+ && Objects.equals(vectorSimilarity, other.vectorSimilarity)
+ && Objects.equals(filterQueries, other.filterQueries);
}
@Override
@@ -183,7 +223,7 @@ protected int doHashCode() {
int hashCode = Objects.hash(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex);
result = 31 * result + hashCode;
}
- return Objects.hash(result, fieldName, vectorSimilarity, Objects.hashCode(queryVector));
+ return Objects.hash(result, fieldName, vectorSimilarity, Objects.hashCode(queryVector), filterQueries);
}
@Override
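
For inner hits, the rewrite wraps the exact-knn query in a bool query whose filter clauses are two-way disjunctions: each filter may match a document at the parent level or, joined down, the children of the parents it matches. A sketch of the resulting builder shape (builder names come from the patch; the helper itself is illustrative):

    import org.elasticsearch.index.query.BoolQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.ToChildBlockJoinQueryBuilder;
    import org.elasticsearch.search.vectors.ExactKnnQueryBuilder;
    import org.elasticsearch.search.vectors.VectorData;

    import java.util.List;

    final class InnerHitsRewriteSketch {
        static QueryBuilder rewriteForInnerHits(VectorData queryVector, String fieldName, Float vectorSimilarity, List<QueryBuilder> filterQueries) {
            QueryBuilder exactKnn = new ExactKnnQueryBuilder(queryVector, fieldName, vectorSimilarity);
            if (filterQueries.isEmpty()) {
                return exactKnn;
            }
            BoolQueryBuilder bool = new BoolQueryBuilder().must(exactKnn);
            for (QueryBuilder filter : filterQueries) {
                // Two should clauses form an OR: the filter may match at the parent
                // level, or (joined down) the children of the parents it matches.
                bool.filter(new BoolQueryBuilder().should(filter).should(new ToChildBlockJoinQueryBuilder(filter)));
            }
            return bool;
        }
    }
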
diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java
index ea0c15642eb74..b76f56ceb2aa9 100644
--- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java
@@ -27,10 +27,12 @@
import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.DenseVectorFieldType;
import org.elasticsearch.index.query.AbstractQueryBuilder;
+import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.SearchExecutionContext;
+import org.elasticsearch.index.query.ToChildBlockJoinQueryBuilder;
import org.elasticsearch.index.search.NestedHelper;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ObjectParser;
@@ -454,9 +456,6 @@ protected QueryBuilder doRewrite(QueryRewriteContext ctx) throws IOException {
vectorSimilarity
).boost(boost).queryName(queryName).addFilterQueries(filterQueries);
}
- if (ctx.convertToInnerHitsRewriteContext() != null) {
- return new ExactKnnQueryBuilder(queryVector, fieldName, vectorSimilarity).boost(boost).queryName(queryName);
- }
boolean changed = false;
List<QueryBuilder> rewrittenQueries = new ArrayList<>(filterQueries.size());
for (QueryBuilder query : filterQueries) {
@@ -481,6 +480,22 @@ protected QueryBuilder doRewrite(QueryRewriteContext ctx) throws IOException {
vectorSimilarity
).boost(boost).queryName(queryName).addFilterQueries(rewrittenQueries);
}
+ if (ctx.convertToInnerHitsRewriteContext() != null) {
+ QueryBuilder exactKnnQuery = new ExactKnnQueryBuilder(queryVector, fieldName, vectorSimilarity);
+ if (filterQueries.isEmpty()) {
+ return exactKnnQuery;
+ } else {
+ BoolQueryBuilder boolQuery = new BoolQueryBuilder();
+ boolQuery.must(exactKnnQuery);
+ for (QueryBuilder filter : this.filterQueries) {
+ // a filter may match either parent docs or nested docs, so add both variants as should clauses of a filter
+ BoolQueryBuilder adjustedFilter = new BoolQueryBuilder().should(filter)
+ .should(new ToChildBlockJoinQueryBuilder(filter));
+ boolQuery.filter(adjustedFilter);
+ }
+ return boolQuery;
+ }
+ }
return this;
}
@@ -500,29 +515,27 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException {
if (fieldType == null) {
return new MatchNoDocsQuery();
}
-
if (fieldType instanceof DenseVectorFieldType == false) {
throw new IllegalArgumentException(
"[" + NAME + "] queries are only supported on [" + DenseVectorFieldMapper.CONTENT_TYPE + "] fields"
);
}
+ DenseVectorFieldType vectorFieldType = (DenseVectorFieldType) fieldType;
- BooleanQuery.Builder builder = new BooleanQuery.Builder();
+ List<Query> filtersInitial = new ArrayList<>(filterQueries.size());
for (QueryBuilder query : this.filterQueries) {
- builder.add(query.toQuery(context), BooleanClause.Occur.FILTER);
+ filtersInitial.add(query.toQuery(context));
}
if (context.getAliasFilter() != null) {
- builder.add(context.getAliasFilter().toQuery(context), BooleanClause.Occur.FILTER);
+ filtersInitial.add(context.getAliasFilter().toQuery(context));
}
- BooleanQuery booleanQuery = builder.build();
- Query filterQuery = booleanQuery.clauses().isEmpty() ? null : booleanQuery;
- DenseVectorFieldType vectorFieldType = (DenseVectorFieldType) fieldType;
String parentPath = context.nestedLookup().getNestedParent(fieldName);
- Float oversample = rescoreVectorBuilder() == null ? null : rescoreVectorBuilder.oversample();
-
BitSetProducer parentBitSet = null;
- if (parentPath != null) {
+ Query filterQuery;
+ if (parentPath == null) {
+ filterQuery = buildFilterQuery(filtersInitial);
+ } else {
final Query parentFilter;
NestedObjectMapper originalObjectMapper = context.nestedScope().getObjectMapper();
if (originalObjectMapper != null) {
@@ -541,19 +554,23 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException {
parentFilter = Queries.newNonNestedFilter(context.indexVersionCreated());
}
parentBitSet = context.bitsetFilter(parentFilter);
- if (filterQuery != null) {
- // We treat the provided filter as a filter over PARENT documents, so if it might match nested documents
- // we need to adjust it.
- if (NestedHelper.mightMatchNestedDocs(filterQuery, context)) {
- // Ensure that the query only returns parent documents matching `filterQuery`
- filterQuery = Queries.filtered(filterQuery, parentFilter);
+ List<Query> filterAdjusted = new ArrayList<>(filtersInitial.size());
+ for (Query f : filtersInitial) {
+ // If the filter can match non-nested docs, we assume it is a filter over parent docs,
+ // so we adjust it accordingly: match the parent docs and join down to their child docs
+ if (NestedHelper.mightMatchNonNestedDocs(f, parentPath, context)) {
+ // Ensure that the query only returns parent documents matching the filter
+ f = Queries.filtered(f, parentFilter);
+ f = new ToChildBlockJoinQuery(f, parentBitSet);
}
- // Now join the filterQuery & parentFilter to provide the matching blocks of children
- filterQuery = new ToChildBlockJoinQuery(filterQuery, parentBitSet);
+ filterAdjusted.add(f);
}
+ filterQuery = buildFilterQuery(filterAdjusted);
}
+
DenseVectorFieldMapper.FilterHeuristic heuristic = context.getIndexSettings().getHnswFilterHeuristic();
boolean hnswEarlyTermination = context.getIndexSettings().getHnswEarlyTermination();
+ Float oversample = rescoreVectorBuilder() == null ? null : rescoreVectorBuilder.oversample();
return vectorFieldType.createKnnQuery(
queryVector,
k,
@@ -567,6 +584,16 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException {
);
}
+ private static Query buildFilterQuery(List<Query> filters) {
+ BooleanQuery.Builder builder = new BooleanQuery.Builder();
+ for (Query f : filters) {
+ builder.add(f, BooleanClause.Occur.FILTER);
+ }
+ BooleanQuery booleanQuery = builder.build();
+ Query filterQuery = booleanQuery.clauses().isEmpty() ? null : booleanQuery;
+ return filterQuery;
+ }
+
@Override
protected int doHashCode() {
return Objects.hash(
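
The per-filter adjustment above restricts a parent-level filter to parent documents and then maps each matching parent to the children of its block. A plain-Lucene sketch of that adjustment (Queries.filtered(f, parentFilter) builds roughly the MUST + FILTER combination shown; the helper class is illustrative):

    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.join.BitSetProducer;
    import org.apache.lucene.search.join.ToChildBlockJoinQuery;

    final class ParentFilterAdjustment {
        static Query adjustToChildren(Query filter, Query parentFilter, BitSetProducer parentBitSet) {
            Query parentsOnly = new BooleanQuery.Builder()
                .add(filter, BooleanClause.Occur.MUST)         // keep the filter's matches
                .add(parentFilter, BooleanClause.Occur.FILTER) // but only on parent documents
                .build();
            // Map each matching parent to the child documents of its block.
            return new ToChildBlockJoinQuery(parentsOnly, parentBitSet);
        }
    }
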
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
index ac5233f1d54b4..656d19373d7e7 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
@@ -17,6 +17,7 @@
import org.elasticsearch.action.support.CountDownActionListener;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.action.support.RefCountingListener;
import org.elasticsearch.action.support.RefCountingRunnable;
import org.elasticsearch.client.internal.RemoteClusterClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
@@ -29,7 +30,6 @@
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
-import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.TimeValue;
@@ -172,17 +172,6 @@ public DiscoveryNode getLocalNode() {
return transportService.getLocalNode();
}
- /**
- * Returns <code>true</code> if at least one remote cluster is configured
- */
- public boolean isCrossClusterSearchEnabled() {
- return remoteClusters.isEmpty() == false;
- }
-
- boolean isRemoteNodeConnected(final String remoteCluster, final DiscoveryNode node) {
- return remoteClusters.get(remoteCluster).isNodeConnected(node);
- }
-
/**
* Group indices by cluster alias mapped to OriginalIndices for that cluster.
* @param remoteClusterNames Set of configured remote cluster names.
@@ -258,13 +247,6 @@ public Set getConfiguredClusters() {
return getRegisteredRemoteClusterNames();
}
- /**
- * Returns <code>true</code> iff the given cluster is configured as a remote cluster. Otherwise <code>false</code>
- */
- boolean isRemoteClusterRegistered(String clusterName) {
- return remoteClusters.containsKey(clusterName);
- }
-
/**
* Returns the registered remote cluster names.
*/
@@ -567,36 +549,26 @@ public void collectNodes(Set<String> clusters, ActionListener<BiFunction<String, String, DiscoveryNode>> listener) {
+ final var connectionsMap = new HashMap<String, RemoteClusterConnection>();
for (String cluster : clusters) {
- if (this.remoteClusters.containsKey(cluster) == false) {
+ final var connection = this.remoteClusters.get(cluster);
+ if (connection == null) {
listener.onFailure(new NoSuchRemoteClusterException(cluster));
return;
}
+ connectionsMap.put(cluster, connection);
}
final Map<String, Function<String, DiscoveryNode>> clusterMap = new HashMap<>();
- CountDown countDown = new CountDown(clusters.size());
- Function<String, DiscoveryNode> nullFunction = s -> null;
- for (final String cluster : clusters) {
- RemoteClusterConnection connection = this.remoteClusters.get(cluster);
- connection.collectNodes(new ActionListener<Function<String, DiscoveryNode>>() {
- @Override
- public void onResponse(Function<String, DiscoveryNode> nodeLookup) {
- synchronized (clusterMap) {
- clusterMap.put(cluster, nodeLookup);
- }
- if (countDown.countDown()) {
- listener.onResponse((clusterAlias, nodeId) -> clusterMap.getOrDefault(clusterAlias, nullFunction).apply(nodeId));
- }
- }
-
- @Override
- public void onFailure(Exception e) {
- if (countDown.fastForward()) { // we need to check if it's true since we could have multiple failures
- listener.onFailure(e);
- }
+ final var finalListener = listener.safeMap(
+ ignored -> (clusterAlias, nodeId) -> clusterMap.getOrDefault(clusterAlias, s -> null).apply(nodeId)
+ );
+ try (var refs = new RefCountingListener(finalListener)) {
+ connectionsMap.forEach((cluster, connection) -> connection.collectNodes(refs.acquire(nodeLookup -> {
+ synchronized (clusterMap) {
+ clusterMap.put(cluster, nodeLookup);
}
- });
+ })));
}
}
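
RefCountingListener replaces the hand-rolled CountDown: the final listener fires exactly once, after the try block and every acquired child listener have completed, with the first failure (if any) propagated. A plain-Java sketch of those fan-in semantics (not the Elasticsearch class; names are illustrative):

    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.function.Consumer;

    final class FanIn {
        private final AtomicInteger refs = new AtomicInteger(1); // the "try" block's own ref
        private final AtomicReference<Exception> failure = new AtomicReference<>();
        private final Consumer<Exception> onDone; // called once; null argument means success

        FanIn(Consumer<Exception> onDone) {
            this.onDone = onDone;
        }

        // Each child gets a completion callback; pass null on success, the error on failure.
        Consumer<Exception> acquire() {
            refs.incrementAndGet();
            return e -> {
                if (e != null) {
                    failure.compareAndSet(null, e); // keep only the first failure
                }
                release();
            };
        }

        // Closing releases the initial ref, so onDone fires only after close()
        // AND every acquired child callback has run.
        void close() {
            release();
        }

        private void release() {
            if (refs.decrementAndGet() == 0) {
                onDone.accept(failure.get());
            }
        }
    }
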
diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java
index 43292c4f65245..a4f698d04b782 100644
--- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java
@@ -367,13 +367,15 @@ public void testRewriteShardSearchRequestWithRank() {
new ScoreDoc[] { new ScoreDoc(1, 3.0f, 1), new ScoreDoc(4, 1.5f, 1) },
"vector",
VectorData.fromFloats(new float[] { 0.0f }),
- null
+ null,
+ List.of()
);
KnnScoreDocQueryBuilder ksdqb1 = new KnnScoreDocQueryBuilder(
new ScoreDoc[] { new ScoreDoc(1, 2.0f, 1) },
"vector2",
VectorData.fromFloats(new float[] { 0.0f }),
- null
+ null,
+ List.of()
);
assertEquals(
List.of(bm25, ksdqb0, ksdqb1),
diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/IVFVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/IVFVectorsFormatTests.java
index 60d346ef21585..b3f61fa357ba9 100644
--- a/server/src/test/java/org/elasticsearch/index/codec/vectors/IVFVectorsFormatTests.java
+++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/IVFVectorsFormatTests.java
@@ -36,7 +36,9 @@
import java.util.concurrent.atomic.AtomicBoolean;
import static java.lang.String.format;
+import static org.elasticsearch.index.codec.vectors.IVFVectorsFormat.MAX_CENTROIDS_PER_PARENT_CLUSTER;
import static org.elasticsearch.index.codec.vectors.IVFVectorsFormat.MAX_VECTORS_PER_CLUSTER;
+import static org.elasticsearch.index.codec.vectors.IVFVectorsFormat.MIN_CENTROIDS_PER_PARENT_CLUSTER;
import static org.elasticsearch.index.codec.vectors.IVFVectorsFormat.MIN_VECTORS_PER_CLUSTER;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.equalTo;
@@ -54,7 +56,18 @@ public class IVFVectorsFormatTests extends BaseKnnVectorsFormatTestCase {
@Before
@Override
public void setUp() throws Exception {
- format = new IVFVectorsFormat(random().nextInt(MIN_VECTORS_PER_CLUSTER, IVFVectorsFormat.MAX_VECTORS_PER_CLUSTER));
+ if (rarely()) {
+ format = new IVFVectorsFormat(
+ random().nextInt(2 * MIN_VECTORS_PER_CLUSTER, IVFVectorsFormat.MAX_VECTORS_PER_CLUSTER),
+ random().nextInt(8, IVFVectorsFormat.MAX_CENTROIDS_PER_PARENT_CLUSTER)
+ );
+ } else {
+ // run with low numbers to force many clusters with parents
+ format = new IVFVectorsFormat(
+ random().nextInt(MIN_VECTORS_PER_CLUSTER, 2 * MIN_VECTORS_PER_CLUSTER),
+ random().nextInt(MIN_CENTROIDS_PER_PARENT_CLUSTER, 8)
+ );
+ }
super.setUp();
}
@@ -113,7 +126,7 @@ public void testToString() {
FilterCodec customCodec = new FilterCodec("foo", Codec.getDefault()) {
@Override
public KnnVectorsFormat knnVectorsFormat() {
- return new IVFVectorsFormat(128);
+ return new IVFVectorsFormat(128, 4);
}
};
String expectedPattern = "IVFVectorsFormat(vectorPerCluster=128)";
@@ -124,8 +137,10 @@ public KnnVectorsFormat knnVectorsFormat() {
}
public void testLimits() {
- expectThrows(IllegalArgumentException.class, () -> new IVFVectorsFormat(MIN_VECTORS_PER_CLUSTER - 1));
- expectThrows(IllegalArgumentException.class, () -> new IVFVectorsFormat(MAX_VECTORS_PER_CLUSTER + 1));
+ expectThrows(IllegalArgumentException.class, () -> new IVFVectorsFormat(MIN_VECTORS_PER_CLUSTER - 1, 16));
+ expectThrows(IllegalArgumentException.class, () -> new IVFVectorsFormat(MAX_VECTORS_PER_CLUSTER + 1, 16));
+ expectThrows(IllegalArgumentException.class, () -> new IVFVectorsFormat(128, MIN_CENTROIDS_PER_PARENT_CLUSTER - 1));
+ expectThrows(IllegalArgumentException.class, () -> new IVFVectorsFormat(128, MAX_CENTROIDS_PER_PARENT_CLUSTER + 1));
}
public void testSimpleOffHeapSize() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/index/query/ToChildBlockJoinQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ToChildBlockJoinQueryBuilderTests.java
new file mode 100644
index 0000000000000..89d0474461ace
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/index/query/ToChildBlockJoinQueryBuilderTests.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.query;
+
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.join.ToChildBlockJoinQuery;
+import org.elasticsearch.test.AbstractQueryTestCase;
+
+import java.io.IOException;
+
+import static org.hamcrest.CoreMatchers.instanceOf;
+
+public class ToChildBlockJoinQueryBuilderTests extends AbstractQueryTestCase {
+ @Override
+ protected ToChildBlockJoinQueryBuilder doCreateTestQueryBuilder() {
+ String filterFieldName = randomBoolean() ? KEYWORD_FIELD_NAME : TEXT_FIELD_NAME;
+ return new ToChildBlockJoinQueryBuilder(QueryBuilders.termQuery(filterFieldName, randomAlphaOfLength(10)));
+ }
+
+ @Override
+ protected void doAssertLuceneQuery(ToChildBlockJoinQueryBuilder queryBuilder, Query query, SearchExecutionContext context)
+ throws IOException {
+ assertThat(query, instanceOf(ToChildBlockJoinQuery.class));
+ }
+
+ @Override
+ public void testUnknownField() throws IOException {
+ // Test isn't relevant, since query is never parsed from xContent
+ }
+
+ @Override
+ public void testUnknownObjectException() {
+ // Test isn't relevant, since query is never parsed from xContent
+ }
+
+ @Override
+ public void testFromXContent() throws IOException {
+ // Test isn't relevant, since query is never parsed from xContent
+ }
+
+ @Override
+ public void testValidOutput() {
+ // Test isn't relevant, since query is never parsed from xContent
+ }
+
+}
diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
index 1e638f8e7b30e..ef02a0405c88f 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
@@ -463,7 +463,8 @@ public CheckedBiConsumer getReque
"terms_set",
"wildcard",
"wrapper",
- "distance_feature" };
+ "distance_feature",
+ "to_child_block_join" };
// add here deprecated queries to make sure we log deprecation warnings when they are used
private static final String[] DEPRECATED_QUERIES = new String[] { "field_masking_span", "geo_polygon" };
diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDiversifyingChildrenIVFKnnVectorQueryTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDiversifyingChildrenIVFKnnVectorQueryTestCase.java
index f73d1e5a31999..ce08d631399d6 100644
--- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDiversifyingChildrenIVFKnnVectorQueryTestCase.java
+++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDiversifyingChildrenIVFKnnVectorQueryTestCase.java
@@ -93,7 +93,10 @@ static Document makeParent(int[] children) {
@Before
public void setUp() throws Exception {
super.setUp();
- format = new IVFVectorsFormat(random().nextInt(IVFVectorsFormat.MIN_VECTORS_PER_CLUSTER, IVFVectorsFormat.MAX_VECTORS_PER_CLUSTER));
+ format = new IVFVectorsFormat(
+ random().nextInt(IVFVectorsFormat.MIN_VECTORS_PER_CLUSTER, IVFVectorsFormat.MAX_VECTORS_PER_CLUSTER),
+ random().nextInt(IVFVectorsFormat.MIN_CENTROIDS_PER_PARENT_CLUSTER, IVFVectorsFormat.MAX_CENTROIDS_PER_PARENT_CLUSTER)
+ );
}
abstract Query getDiversifyingChildrenKnnQuery(
diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractIVFKnnVectorQueryTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractIVFKnnVectorQueryTestCase.java
index 70fd11c97a8c4..e602f9098b602 100644
--- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractIVFKnnVectorQueryTestCase.java
+++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractIVFKnnVectorQueryTestCase.java
@@ -98,7 +98,7 @@ abstract class AbstractIVFKnnVectorQueryTestCase extends LuceneTestCase {
@Before
public void setUp() throws Exception {
super.setUp();
- format = new IVFVectorsFormat(128);
+ format = new IVFVectorsFormat(128, 4);
}
abstract AbstractIVFKnnVectorQuery getKnnVectorQuery(String field, float[] query, int k, Query queryFilter, int nProbe);
diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java
index a15372bc1e8ef..a8d9b1259cb41 100644
--- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java
+++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java
@@ -25,6 +25,7 @@
import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
+import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.InnerHitsRewriteContext;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
@@ -482,12 +483,19 @@ public void testRewriteForInnerHits() throws IOException {
queryBuilder.boost(randomFloat());
queryBuilder.queryName(randomAlphaOfLength(10));
QueryBuilder rewritten = queryBuilder.rewrite(innerHitsRewriteContext);
+ float queryBoost = rewritten.boost();
+ String queryName = rewritten.queryName();
+ if (queryBuilder.filterQueries().isEmpty() == false) {
+ assertTrue(rewritten instanceof BoolQueryBuilder);
+ BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) rewritten;
+ rewritten = boolQueryBuilder.must().get(0);
+ }
assertTrue(rewritten instanceof ExactKnnQueryBuilder);
ExactKnnQueryBuilder exactKnnQueryBuilder = (ExactKnnQueryBuilder) rewritten;
assertEquals(queryBuilder.queryVector(), exactKnnQueryBuilder.getQuery());
assertEquals(queryBuilder.getFieldName(), exactKnnQueryBuilder.getField());
- assertEquals(queryBuilder.boost(), exactKnnQueryBuilder.boost(), 0.0001f);
- assertEquals(queryBuilder.queryName(), exactKnnQueryBuilder.queryName());
+ assertEquals(queryBuilder.boost(), queryBoost, 0.0001f);
+ assertEquals(queryBuilder.queryName(), queryName);
assertEquals(queryBuilder.getVectorSimilarity(), exactKnnQueryBuilder.vectorSimilarity());
}
diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java
index bef0bbfd27ff6..cf94fd41c1171 100644
--- a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java
@@ -24,9 +24,11 @@
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.InnerHitsRewriteContext;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.test.AbstractQueryTestCase;
@@ -54,11 +56,20 @@ protected KnnScoreDocQueryBuilder doCreateTestQueryBuilder() {
for (int doc = 0; doc < numDocs; doc++) {
scoreDocs.add(new ScoreDoc(doc, randomFloat()));
}
+ List<QueryBuilder> filters = new ArrayList<>();
+ if (randomBoolean()) {
+ int numFilters = randomIntBetween(1, 5);
+ for (int i = 0; i < numFilters; i++) {
+ String filterFieldName = randomBoolean() ? KEYWORD_FIELD_NAME : TEXT_FIELD_NAME;
+ filters.add(QueryBuilders.termQuery(filterFieldName, randomAlphaOfLength(10)));
+ }
+ }
return new KnnScoreDocQueryBuilder(
scoreDocs.toArray(new ScoreDoc[0]),
randomBoolean() ? "field" : null,
randomBoolean() ? VectorData.fromFloats(randomVector(10)) : null,
- randomBoolean() ? randomFloat() : null
+ randomBoolean() ? randomFloat() : null,
+ filters
);
}
@@ -68,7 +79,8 @@ public void testValidOutput() {
new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) },
"field",
VectorData.fromFloats(new float[] { 1.0f, 2.0f }),
- null
+ null,
+ List.of()
);
String expected = """
{
@@ -159,7 +171,8 @@ public void testRewriteToMatchNone() throws IOException {
new ScoreDoc[0],
randomBoolean() ? "field" : null,
randomBoolean() ? VectorData.fromFloats(randomVector(10)) : null,
- randomBoolean() ? randomFloat() : null
+ randomBoolean() ? randomFloat() : null,
+ List.of()
);
QueryRewriteContext context = randomBoolean()
? new InnerHitsRewriteContext(createSearchExecutionContext().getParserConfig(), System::currentTimeMillis)
@@ -170,21 +183,41 @@ public void testRewriteToMatchNone() throws IOException {
public void testRewriteForInnerHits() throws IOException {
SearchExecutionContext context = createSearchExecutionContext();
InnerHitsRewriteContext innerHitsRewriteContext = new InnerHitsRewriteContext(context.getParserConfig(), System::currentTimeMillis);
+ List<QueryBuilder> filters = new ArrayList<>();
+ boolean hasFilters = randomBoolean();
+ if (hasFilters) {
+ int numFilters = randomIntBetween(1, 5);
+ for (int i = 0; i < numFilters; i++) {
+ String filterFieldName = randomBoolean() ? KEYWORD_FIELD_NAME : TEXT_FIELD_NAME;
+ filters.add(QueryBuilders.termQuery(filterFieldName, randomAlphaOfLength(10)));
+ }
+ }
+
KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(
new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) },
randomAlphaOfLength(10),
VectorData.fromFloats(randomVector(10)),
- randomBoolean() ? randomFloat() : null
+ randomBoolean() ? randomFloat() : null,
+ filters
);
queryBuilder.boost(randomFloat());
queryBuilder.queryName(randomAlphaOfLength(10));
QueryBuilder rewritten = queryBuilder.rewrite(innerHitsRewriteContext);
+ float queryBoost = rewritten.boost();
+ String queryName = rewritten.queryName();
+
+ if (hasFilters) {
+ assertTrue(rewritten instanceof BoolQueryBuilder);
+ BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) rewritten;
+ rewritten = boolQueryBuilder.must().get(0);
+ }
+
assertTrue(rewritten instanceof ExactKnnQueryBuilder);
ExactKnnQueryBuilder exactKnnQueryBuilder = (ExactKnnQueryBuilder) rewritten;
assertEquals(queryBuilder.queryVector(), exactKnnQueryBuilder.getQuery());
assertEquals(queryBuilder.fieldName(), exactKnnQueryBuilder.getField());
- assertEquals(queryBuilder.boost(), exactKnnQueryBuilder.boost(), 0.0001f);
- assertEquals(queryBuilder.queryName(), exactKnnQueryBuilder.queryName());
+ assertEquals(queryBuilder.boost(), queryBoost, 0.0001f);
+ assertEquals(queryBuilder.queryName(), queryName);
assertEquals(queryBuilder.vectorSimilarity(), exactKnnQueryBuilder.vectorSimilarity());
}
@@ -228,7 +261,8 @@ public void testScoreDocQueryWeightCount() throws IOException {
scoreDocs,
"field",
VectorData.fromFloats(randomVector(10)),
- null
+ null,
+ List.of()
);
Query query = queryBuilder.doToQuery(context);
final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f);
@@ -276,7 +310,8 @@ public void testScoreDocQuery() throws IOException {
scoreDocs,
"field",
VectorData.fromFloats(randomVector(10)),
- null
+ null,
+ List.of()
);
final Query query = queryBuilder.doToQuery(context);
final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f);
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java
index 2d5e7982aa899..112c30cd02b8b 100644
--- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java
@@ -40,6 +40,7 @@
import static org.elasticsearch.test.NodeRoles.removeRoles;
import static org.elasticsearch.transport.AbstractSimpleTransportTestCase.IGNORE_DESERIALIZATION_ERRORS_SETTING;
import static org.elasticsearch.transport.RemoteClusterConnectionTests.startTransport;
+import static org.elasticsearch.transport.RemoteClusterServiceTests.isRemoteNodeConnected;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@@ -95,7 +96,7 @@ public void testConnectAndExecuteRequest() throws Exception {
service.acceptIncomingRequests();
logger.info("now accepting incoming requests on local transport");
RemoteClusterService remoteClusterService = service.getRemoteClusterService();
- assertTrue(remoteClusterService.isRemoteNodeConnected("test", remoteNode));
+ assertTrue(isRemoteNodeConnected(remoteClusterService, "test", remoteNode));
var client = remoteClusterService.getRemoteClusterClient(
"test",
threadPool.executor(TEST_THREAD_POOL_NAME),
@@ -172,7 +173,7 @@ public void testEnsureWeReconnect() throws Exception {
// the right calls in place in the RemoteAwareClient
service.acceptIncomingRequests();
RemoteClusterService remoteClusterService = service.getRemoteClusterService();
- assertBusy(() -> assertTrue(remoteClusterService.isRemoteNodeConnected("test", remoteNode)));
+ assertBusy(() -> assertTrue(isRemoteNodeConnected(remoteClusterService, "test", remoteNode)));
for (int i = 0; i < 10; i++) {
RemoteClusterConnection remoteClusterConnection = remoteClusterService.getRemoteClusterConnection("test");
assertBusy(remoteClusterConnection::assertNoRunningConnections);
@@ -286,7 +287,7 @@ public void testQuicklySkipUnavailableClusters() throws Exception {
);
try {
- assertFalse(remoteClusterService.isRemoteNodeConnected("test", remoteNode));
+ assertFalse(isRemoteNodeConnected(remoteClusterService, "test", remoteNode));
// check that we quickly fail
if (randomBoolean()) {
@@ -325,7 +326,7 @@ public void testQuicklySkipUnavailableClusters() throws Exception {
() -> safeAwait(listener -> client.getConnection(null, listener.map(v -> v)))
)
);
- assertTrue(remoteClusterService.isRemoteNodeConnected("test", remoteNode));
+ assertTrue(isRemoteNodeConnected(remoteClusterService, "test", remoteNode));
}
}
}
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
index 99c4dde4d396f..30699b9346300 100644
--- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
@@ -9,8 +9,10 @@
package org.elasticsearch.transport;
import org.apache.logging.log4j.Level;
+import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.support.ActionTestUtils;
import org.elasticsearch.action.support.IndicesOptions;
@@ -162,12 +164,12 @@ public void testGroupClusterIndices() throws IOException {
builder.putList("cluster.remote.cluster_1.seeds", cluster1Seed.getAddress().toString());
builder.putList("cluster.remote.cluster_2.seeds", cluster2Seed.getAddress().toString());
try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
service.initializeRemoteClusters();
- assertTrue(service.isCrossClusterSearchEnabled());
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
- assertTrue(service.isRemoteClusterRegistered("cluster_2"));
- assertFalse(service.isRemoteClusterRegistered("foo"));
+ assertTrue(hasRegisteredClusters(service));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_2"));
+ assertFalse(isRemoteClusterRegistered(service, "foo"));
{
Map<String, List<String>> perClusterIndices = service.groupClusterIndices(
service.getRegisteredRemoteClusterNames(),
@@ -376,12 +378,12 @@ public void testGroupIndices() throws IOException {
builder.putList("cluster.remote.cluster_1.seeds", cluster1Seed.getAddress().toString());
builder.putList("cluster.remote.cluster_2.seeds", cluster2Seed.getAddress().toString());
try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
service.initializeRemoteClusters();
- assertTrue(service.isCrossClusterSearchEnabled());
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
- assertTrue(service.isRemoteClusterRegistered("cluster_2"));
- assertFalse(service.isRemoteClusterRegistered("foo"));
+ assertTrue(hasRegisteredClusters(service));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_2"));
+ assertFalse(isRemoteClusterRegistered(service, "foo"));
{
Map<String, OriginalIndices> perClusterIndices = service.groupIndices(
IndicesOptions.LENIENT_EXPAND_OPEN,
@@ -440,7 +442,7 @@ public void testGroupIndicesWithoutRemoteClusterClientRole() throws Exception {
);
try (RemoteClusterService service = new RemoteClusterService(settings, null)) {
assertFalse(service.isEnabled());
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
final IllegalArgumentException error = expectThrows(
IllegalArgumentException.class,
() -> service.groupIndices(IndicesOptions.LENIENT_EXPAND_OPEN, new String[] { "cluster_1:bar", "cluster_2:foo*" })
@@ -483,9 +485,9 @@ public void testIncrementallyAddClusters() throws IOException {
transportService.start();
transportService.acceptIncomingRequests();
try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) {
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
service.initializeRemoteClusters();
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
Settings cluster1Settings = createSettings(
"cluster_1",
Collections.singletonList(cluster1Seed.getAddress().toString())
@@ -502,19 +504,19 @@ public void testIncrementallyAddClusters() throws IOException {
}
}).start();
clusterAdded.actionGet();
- assertTrue(service.isCrossClusterSearchEnabled());
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
+ assertTrue(hasRegisteredClusters(service));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
Settings cluster2Settings = createSettings(
"cluster_2",
Collections.singletonList(cluster2Seed.getAddress().toString())
);
service.validateAndUpdateRemoteCluster("cluster_2", cluster2Settings);
- assertTrue(service.isCrossClusterSearchEnabled());
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
- assertTrue(service.isRemoteClusterRegistered("cluster_2"));
+ assertTrue(hasRegisteredClusters(service));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_2"));
Settings cluster2SettingsDisabled = createSettings("cluster_2", Collections.emptyList());
service.validateAndUpdateRemoteCluster("cluster_2", cluster2SettingsDisabled);
- assertFalse(service.isRemoteClusterRegistered("cluster_2"));
+ assertFalse(isRemoteClusterRegistered(service, "cluster_2"));
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> service.validateAndUpdateRemoteCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Settings.EMPTY)
@@ -559,15 +561,15 @@ public void testDefaultPingSchedule() throws IOException {
transportService.start();
transportService.acceptIncomingRequests();
try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) {
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
service.initializeRemoteClusters();
- assertTrue(service.isCrossClusterSearchEnabled());
+ assertTrue(hasRegisteredClusters(service));
service.validateAndUpdateRemoteCluster(
"cluster_1",
createSettings("cluster_1", Collections.singletonList(seedNode.getAddress().toString()))
);
- assertTrue(service.isCrossClusterSearchEnabled());
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
+ assertTrue(hasRegisteredClusters(service));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
RemoteClusterConnection remoteClusterConnection = service.getRemoteClusterConnection("cluster_1");
assertEquals(pingSchedule, remoteClusterConnection.getConnectionManager().getConnectionProfile().getPingInterval());
}
@@ -624,7 +626,7 @@ public void testCustomPingSchedule() throws IOException {
builder.put("cluster.remote.cluster_2.transport.ping_schedule", pingSchedule2);
try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
service.initializeRemoteClusters();
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
RemoteClusterConnection remoteClusterConnection1 = service.getRemoteClusterConnection("cluster_1");
assertEquals(pingSchedule1, remoteClusterConnection1.getConnectionManager().getConnectionProfile().getPingInterval());
RemoteClusterConnection remoteClusterConnection2 = service.getRemoteClusterConnection("cluster_2");
@@ -747,9 +749,9 @@ public void testRemoteNodeAttribute() throws IOException, InterruptedException {
transportService.start();
transportService.acceptIncomingRequests();
try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) {
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
service.initializeRemoteClusters();
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
final CountDownLatch firstLatch = new CountDownLatch(1);
service.updateRemoteCluster(
@@ -767,13 +769,13 @@ public void testRemoteNodeAttribute() throws IOException, InterruptedException {
);
secondLatch.await();
- assertTrue(service.isCrossClusterSearchEnabled());
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
- assertFalse(service.isRemoteNodeConnected("cluster_1", c1N1Node));
- assertTrue(service.isRemoteNodeConnected("cluster_1", c1N2Node));
- assertTrue(service.isRemoteClusterRegistered("cluster_2"));
- assertFalse(service.isRemoteNodeConnected("cluster_2", c2N1Node));
- assertTrue(service.isRemoteNodeConnected("cluster_2", c2N2Node));
+ assertTrue(hasRegisteredClusters(service));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
+ assertFalse(isRemoteNodeConnected(service, "cluster_1", c1N1Node));
+ assertTrue(isRemoteNodeConnected(service, "cluster_1", c1N2Node));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_2"));
+ assertFalse(isRemoteNodeConnected(service, "cluster_2", c2N1Node));
+ assertTrue(isRemoteNodeConnected(service, "cluster_2", c2N2Node));
assertEquals(0, transportService.getConnectionManager().size());
}
}
@@ -837,9 +839,9 @@ public void testRemoteNodeRoles() throws IOException, InterruptedException {
transportService.start();
transportService.acceptIncomingRequests();
try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) {
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
service.initializeRemoteClusters();
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
final CountDownLatch firstLatch = new CountDownLatch(1);
service.updateRemoteCluster(
@@ -857,13 +859,13 @@ public void testRemoteNodeRoles() throws IOException, InterruptedException {
);
secondLatch.await();
- assertTrue(service.isCrossClusterSearchEnabled());
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
- assertFalse(service.isRemoteNodeConnected("cluster_1", c1N1Node));
- assertTrue(service.isRemoteNodeConnected("cluster_1", c1N2Node));
- assertTrue(service.isRemoteClusterRegistered("cluster_2"));
- assertFalse(service.isRemoteNodeConnected("cluster_2", c2N1Node));
- assertTrue(service.isRemoteNodeConnected("cluster_2", c2N2Node));
+ assertTrue(hasRegisteredClusters(service));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
+ assertFalse(isRemoteNodeConnected(service, "cluster_1", c1N1Node));
+ assertTrue(isRemoteNodeConnected(service, "cluster_1", c1N2Node));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_2"));
+ assertFalse(isRemoteNodeConnected(service, "cluster_2", c2N1Node));
+ assertTrue(isRemoteNodeConnected(service, "cluster_2", c2N2Node));
assertEquals(0, transportService.getConnectionManager().size());
}
}
@@ -932,9 +934,9 @@ public void testCollectNodes() throws InterruptedException, IOException {
transportService.start();
transportService.acceptIncomingRequests();
try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) {
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
service.initializeRemoteClusters();
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
final CountDownLatch firstLatch = new CountDownLatch(1);
@@ -1060,6 +1062,85 @@ public void onFailure(Exception e) {
}
}
+ public void testCollectNodesConcurrentWithSettingsChanges() throws IOException {
+ final List<DiscoveryNode> knownNodes_c1 = new CopyOnWriteArrayList<>();
+
+ try (
+ var c1N1 = startTransport(
+ "cluster_1_node_1",
+ knownNodes_c1,
+ VersionInformation.CURRENT,
+ TransportVersion.current(),
+ Settings.EMPTY
+ );
+ var transportService = MockTransportService.createNewService(
+ Settings.EMPTY,
+ VersionInformation.CURRENT,
+ TransportVersion.current(),
+ threadPool,
+ null
+ )
+ ) {
+ final var c1N1Node = c1N1.getLocalNode();
+ knownNodes_c1.add(c1N1Node);
+ final var seedList = List.of(c1N1Node.getAddress().toString());
+ transportService.start();
+ transportService.acceptIncomingRequests();
+
+ try (RemoteClusterService service = new RemoteClusterService(createSettings("cluster_1", seedList), transportService)) {
+ service.initializeRemoteClusters();
+ assertTrue(hasRegisteredClusters(service));
+ final var numTasks = between(3, 5);
+ final var taskLatch = new CountDownLatch(numTasks);
+
+ ESTestCase.startInParallel(numTasks, threadNumber -> {
+ if (threadNumber == 0) {
+ taskLatch.countDown();
+ boolean isLinked = true;
+ while (taskLatch.getCount() != 0) {
+ final var future = new PlainActionFuture();
+ final var settings = createSettings("cluster_1", isLinked ? Collections.emptyList() : seedList);
+ service.updateRemoteCluster("cluster_1", settings, future);
+ safeGet(future);
+ isLinked = isLinked == false;
+ }
+ return;
+ }
+
+ // Verify collectNodes() always invokes the listener, even if the cluster is concurrently being unlinked.
+ try {
+ for (int i = 0; i < 10; ++i) {
+ final var latch = new CountDownLatch(1);
+ final var exRef = new AtomicReference<Exception>();
+ service.collectNodes(Set.of("cluster_1"), new LatchedActionListener<>(new ActionListener<>() {
+ @Override
+ public void onResponse(BiFunction<String, String, DiscoveryNode> func) {
+ assertEquals(c1N1Node, func.apply("cluster_1", c1N1Node.getId()));
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ exRef.set(e);
+ }
+ }, latch));
+ safeAwait(latch);
+ if (exRef.get() != null) {
+ assertThat(
+ exRef.get(),
+ either(instanceOf(TransportException.class)).or(instanceOf(NoSuchRemoteClusterException.class))
+ .or(instanceOf(AlreadyClosedException.class))
+ .or(instanceOf(NoSeedNodeLeftException.class))
+ );
+ }
+ }
+ } finally {
+ taskLatch.countDown();
+ }
+ });
+ }
+ }
+ }
+
public void testRemoteClusterSkipIfDisconnectedSetting() {
{
Settings settings = Settings.builder()
@@ -1191,9 +1272,9 @@ public void testReconnectWhenStrategySettingsUpdated() throws Exception {
final Settings.Builder builder = Settings.builder();
builder.putList("cluster.remote.cluster_test.seeds", Collections.singletonList(node0.getAddress().toString()));
try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
- assertFalse(service.isCrossClusterSearchEnabled());
+ assertFalse(hasRegisteredClusters(service));
service.initializeRemoteClusters();
- assertTrue(service.isCrossClusterSearchEnabled());
+ assertTrue(hasRegisteredClusters(service));
final RemoteClusterConnection firstRemoteClusterConnection = service.getRemoteClusterConnection("cluster_test");
assertTrue(firstRemoteClusterConnection.isNodeConnected(node0));
@@ -1209,7 +1290,7 @@ public void testReconnectWhenStrategySettingsUpdated() throws Exception {
);
firstLatch.await();
- assertTrue(service.isCrossClusterSearchEnabled());
+ assertTrue(hasRegisteredClusters(service));
assertTrue(firstRemoteClusterConnection.isNodeConnected(node0));
assertTrue(firstRemoteClusterConnection.isNodeConnected(node1));
assertEquals(2, firstRemoteClusterConnection.getNumNodesConnected());
@@ -1227,7 +1308,7 @@ public void testReconnectWhenStrategySettingsUpdated() throws Exception {
service.updateRemoteCluster("cluster_test", createSettings("cluster_test", newSeeds), connectionListener(secondLatch));
secondLatch.await();
- assertTrue(service.isCrossClusterSearchEnabled());
+ assertTrue(hasRegisteredClusters(service));
assertBusy(() -> {
assertFalse(firstRemoteClusterConnection.isNodeConnected(node0));
assertFalse(firstRemoteClusterConnection.isNodeConnected(node1));
@@ -1412,18 +1493,18 @@ public void testUseDifferentTransportProfileForCredentialsProtectedRemoteCluster
service.updateRemoteCluster("cluster_2", secondRemoteClusterSettingsBuilder.build(), connectionListener(secondLatch));
secondLatch.await();
- assertTrue(service.isCrossClusterSearchEnabled());
- assertTrue(service.isRemoteClusterRegistered("cluster_1"));
+ assertTrue(hasRegisteredClusters(service));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_1"));
if (firstRemoteClusterProxyMode) {
- assertFalse(service.isRemoteNodeConnected("cluster_1", c1Node));
+ assertFalse(isRemoteNodeConnected(service, "cluster_1", c1Node));
} else {
- assertTrue(service.isRemoteNodeConnected("cluster_1", c1Node));
+ assertTrue(isRemoteNodeConnected(service, "cluster_1", c1Node));
}
- assertTrue(service.isRemoteClusterRegistered("cluster_2"));
+ assertTrue(isRemoteClusterRegistered(service, "cluster_2"));
if (secondRemoteClusterProxyMode) {
- assertFalse(service.isRemoteNodeConnected("cluster_2", c2Node));
+ assertFalse(isRemoteNodeConnected(service, "cluster_2", c2Node));
} else {
- assertTrue(service.isRemoteNodeConnected("cluster_2", c2Node));
+ assertTrue(isRemoteNodeConnected(service, "cluster_2", c2Node));
}
// No local node connection
assertEquals(0, transportService.getConnectionManager().size());
@@ -1685,4 +1766,15 @@ private static Settings createSettings(String clusterAlias, List<String> seeds)
return builder.build();
}
+ private static boolean hasRegisteredClusters(RemoteClusterService service) {
+ return service.getRegisteredRemoteClusterNames().isEmpty() == false;
+ }
+
+ private static boolean isRemoteClusterRegistered(RemoteClusterService service, String clusterName) {
+ return service.getRegisteredRemoteClusterNames().contains(clusterName);
+ }
+
+ public static boolean isRemoteNodeConnected(RemoteClusterService service, String clusterName, DiscoveryNode node) {
+ return service.getRemoteClusterConnection(clusterName).isNodeConnected(node);
+ }
}
diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
index 4ed79024a19f9..82419d03d4259 100644
--- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
+++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
@@ -894,7 +894,6 @@ private void initGiantTextField(int docs) throws IOException {
}
}
}
-
logger.info("loading many documents with one big text field - docs per bulk {}", docsPerBulk);
int fieldSize = Math.toIntExact(ByteSizeValue.ofMb(5).getBytes());
@@ -1064,6 +1063,15 @@ private void bulk(String name, String bulk) throws IOException {
);
Response response = client().performRequest(request);
assertThat(entityAsMap(response), matchesMap().entry("errors", false).extraOk());
+
+ /*
+ * Flush after each bulk to clear the test-time seenSequenceNumbers Map in
+ * TranslogWriter. Without this the server will occasionally OOM because it keeps
+ * that state around to run assertions on.
+ */
+ request = new Request("POST", "/" + name + "/_flush");
+ response = client().performRequest(request);
+ assertThat(entityAsMap(response), matchesMap().entry("_shards", matchesMap().extraOk().entry("failed", 0)).extraOk());
}
private void initIndex(String name, String bulk) throws IOException {
diff --git a/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java b/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java
index 1fb11acb550f7..5f71fccb73888 100644
--- a/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java
+++ b/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java
@@ -64,7 +64,7 @@ public void testDeleteNonExisting() throws Exception {
);
var nonExistingTask = createTask(randomUniqueProjectId(), listener);
var tasks = Stream.concat(Stream.of(nonExistingTask), deletedProjects.stream().map(this::createTask)).toList();
- var result = ClusterStateTaskExecutorUtils.executeIgnoringFailures(state, executor, tasks);
+ var result = ClusterStateTaskExecutorUtils.executeHandlingResults(state, executor, tasks, t -> {}, DeleteProjectTask::onFailure);
for (ProjectId deletedProject : deletedProjects) {
assertNull(result.metadata().projects().get(deletedProject));
assertNull(result.globalRoutingTable().routingTables().get(deletedProject));
diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java
index 64518cde6dd16..29cb2607844cc 100644
--- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java
+++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java
@@ -15,9 +15,10 @@
public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer {
- // NB releases earlier than 2025-05-24 are buggy, see https://github.com/minio/minio/issues/21189, and #127166 for a workaround
- // However the 2025-05-24 release is also buggy, see https://github.com/minio/minio/issues/21377, and this has no workaround
- public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2025-06-13T11-33-47Z";
+ // NB releases earlier than 2025-05-24 are buggy, see https://github.com/minio/minio/issues/21189, and #127166 for a workaround.
+ // However the 2025-05-24 release is also buggy, see https://github.com/minio/minio/issues/21377, and this has no workaround.
+ // Also https://github.com/minio/minio/issues/21456 seems to affect releases newer than 2025-05-24, see #131815 for workaround.
+ public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2025-07-23T15-54-02Z";
private static final int servicePort = 9000;
private final boolean enabled;
diff --git a/x-pack/plugin/core/src/javaRestTest/java/org/elasticsearch/xpack/core/DataStreamRestIT.java b/x-pack/plugin/core/src/javaRestTest/java/org/elasticsearch/xpack/core/DataStreamRestIT.java
index c4c8d0c460afc..3c380fa546c8e 100644
--- a/x-pack/plugin/core/src/javaRestTest/java/org/elasticsearch/xpack/core/DataStreamRestIT.java
+++ b/x-pack/plugin/core/src/javaRestTest/java/org/elasticsearch/xpack/core/DataStreamRestIT.java
@@ -70,20 +70,20 @@ public void testDSXpackUsage() throws Exception {
assertThat(failureStoreStats.get("effectively_enabled_count"), equalTo(0));
assertThat(failureStoreStats.get("failure_indices_count"), equalTo(0));
assertBusy(() -> {
- Map<?, ?> logsTemplate = (Map<?, ?>) ((List<?>) getLocation("/_index_template/logs").get("index_templates")).get(0);
- assertThat(logsTemplate, notNullValue());
- assertThat(logsTemplate.get("name"), equalTo("logs"));
- assertThat(((Map<?, ?>) logsTemplate.get("index_template")).get("data_stream"), notNullValue());
+ Map<?, ?> syntheticsTemplate = (Map<?, ?>) ((List<?>) getLocation("/_index_template/synthetics").get("index_templates")).get(0);
+ assertThat(syntheticsTemplate, notNullValue());
+ assertThat(syntheticsTemplate.get("name"), equalTo("synthetics"));
+ assertThat(((Map<?, ?>) syntheticsTemplate.get("index_template")).get("data_stream"), notNullValue());
});
putFailureStoreTemplate();
// Create a data stream
- Request indexRequest = new Request("POST", "/logs-mysql-default/_doc");
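+ // The logs template now enables the failure store by default (see the logs@settings.json change in this patch), so use a synthetics data stream here instead.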
+ Request indexRequest = new Request("POST", "/synthetics-myapp-default/_doc");
indexRequest.setJsonEntity("{\"@timestamp\": \"2020-01-01\"}");
client().performRequest(indexRequest);
// Roll over the data stream
- Request rollover = new Request("POST", "/logs-mysql-default/_rollover");
+ Request rollover = new Request("POST", "/synthetics-myapp-default/_rollover");
client().performRequest(rollover);
// Create failure store data stream
@@ -105,10 +105,10 @@ public void testDSXpackUsage() throws Exception {
assertThat(failureStoreStats.get("effectively_enabled_count"), equalTo(1));
assertThat(failureStoreStats.get("failure_indices_count"), equalTo(1));
- // Enable the failure store for logs-mysql-default using the cluster setting...
+ // Enable the failure store for synthetics-myapp-default using the cluster setting...
updateClusterSettings(
Settings.builder()
- .put(DataStreamFailureStoreSettings.DATA_STREAM_FAILURE_STORED_ENABLED_SETTING.getKey(), "logs-mysql-default")
+ .put(DataStreamFailureStoreSettings.DATA_STREAM_FAILURE_STORED_ENABLED_SETTING.getKey(), "synthetics-myapp-default")
.build()
);
// ...and assert that it counts towards effectively_enabled_count but not explicitly_enabled_count:
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
index f5e3c239dadcd..96287843e6943 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
@@ -547,7 +547,7 @@ static RoleDescriptor kibanaSystem(String name) {
.indices(".asset-criticality.asset-criticality-*")
.privileges("create_index", "manage", "read", "write")
.build(),
- RoleDescriptor.IndicesPrivileges.builder().indices(".entities.v1.latest.security*").privileges("read").build(),
+ RoleDescriptor.IndicesPrivileges.builder().indices(".entities.v1.latest.security*").privileges("read", "write").build(),
// For cloud_defend usageCollection
RoleDescriptor.IndicesPrivileges.builder()
.indices("logs-cloud_defend.*", "metrics-cloud_defend.*")
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java
index 83d9ecacb1f38..e7a8b4e14707e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java
@@ -75,6 +75,10 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>> {
public static final String LISTS_ITEMS_INDEX = ".items-*";
public static final String LISTS_ITEMS_INDEX_REINDEXED_V8 = ".reindexed-v8-items-*";
+ /** "Security Solutions" Entity Store and Asset Criticality indices for Asset Inventory and Entity Analytics */
+ public static final String ENTITY_STORE_V1_LATEST_INDEX = ".entities.v1.latest.security_*";
+ public static final String ASSET_CRITICALITY_INDEX = ".asset-criticality.asset-criticality-*";
+
/** Index pattern for Universal Profiling */
public static final String UNIVERSAL_PROFILING_ALIASES = "profiling-*";
public static final String UNIVERSAL_PROFILING_BACKING_INDICES = ".profiling-*";
@@ -784,7 +788,9 @@ private static RoleDescriptor buildViewerRoleDescriptor() {
ReservedRolesStore.LISTS_ITEMS_INDEX,
ReservedRolesStore.ALERTS_LEGACY_INDEX_REINDEXED_V8,
ReservedRolesStore.LISTS_INDEX_REINDEXED_V8,
- ReservedRolesStore.LISTS_ITEMS_INDEX_REINDEXED_V8
+ ReservedRolesStore.LISTS_ITEMS_INDEX_REINDEXED_V8,
+ ReservedRolesStore.ENTITY_STORE_V1_LATEST_INDEX,
+ ReservedRolesStore.ASSET_CRITICALITY_INDEX
)
.privileges("read", "view_index_metadata")
.build(),
@@ -846,10 +852,16 @@ private static RoleDescriptor buildEditorRoleDescriptor() {
ReservedRolesStore.LISTS_ITEMS_INDEX,
ReservedRolesStore.ALERTS_LEGACY_INDEX_REINDEXED_V8,
ReservedRolesStore.LISTS_INDEX_REINDEXED_V8,
- ReservedRolesStore.LISTS_ITEMS_INDEX_REINDEXED_V8
+ ReservedRolesStore.LISTS_ITEMS_INDEX_REINDEXED_V8,
+ ReservedRolesStore.ASSET_CRITICALITY_INDEX
)
.privileges("read", "view_index_metadata", "write", "maintenance")
.build(),
+ // Security - Entity Store is view only
+ RoleDescriptor.IndicesPrivileges.builder()
+ .indices(ReservedRolesStore.ENTITY_STORE_V1_LATEST_INDEX)
+ .privileges("read", "view_index_metadata")
+ .build(),
// Alerts-as-data
RoleDescriptor.IndicesPrivileges.builder()
.indices(
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
index e90cbfaec027a..21e3d3ad0cd6c 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
@@ -1851,6 +1851,13 @@ public void testKibanaSystemRole() {
assertViewIndexMetadata(kibanaRole, indexName);
});
+ Arrays.asList(".entities.v1.latest.security_" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach(indexName -> {
+ final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName);
+ assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
+ assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.TYPE.name()).test(indexAbstraction), is(true));
+ assertViewIndexMetadata(kibanaRole, indexName);
+ });
+
Arrays.asList("metrics-logstash." + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((indexName) -> {
final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName);
assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false));
@@ -3676,6 +3683,9 @@ public void testPredefinedViewerRole() {
assertOnlyReadAllowed(role, ".profiling-" + randomIntBetween(0, 5));
assertOnlyReadAllowed(role, randomAlphaOfLength(5));
+ assertOnlyReadAllowed(role, ".entities.v1.latest.security_" + randomIntBetween(0, 5));
+ assertOnlyReadAllowed(role, ".asset-criticality.asset-criticality-" + randomIntBetween(0, 5));
+
assertOnlyReadAllowed(role, ".slo-observability." + randomIntBetween(0, 5));
assertViewIndexMetadata(role, ".slo-observability." + randomIntBetween(0, 5));
@@ -3746,6 +3756,7 @@ public void testPredefinedEditorRole() {
assertOnlyReadAllowed(role, "endgame-" + randomIntBetween(0, 5));
assertOnlyReadAllowed(role, "profiling-" + randomIntBetween(0, 5));
assertOnlyReadAllowed(role, ".profiling-" + randomIntBetween(0, 5));
+ assertOnlyReadAllowed(role, ".entities.v1.latest.security_" + randomIntBetween(0, 5));
assertOnlyReadAllowed(role, randomAlphaOfLength(5));
assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(role, ".siem-signals-" + randomIntBetween(0, 5));
@@ -3756,6 +3767,7 @@ public void testPredefinedEditorRole() {
assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(role, ".internal.alerts-" + randomIntBetween(0, 5));
assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(role, ".preview.alerts-" + randomIntBetween(0, 5));
assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(role, ".internal.preview.alerts-" + randomIntBetween(0, 5));
+ assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(role, ".asset-criticality.asset-criticality-" + randomIntBetween(0, 5));
assertViewIndexMetadata(role, ".slo-observability." + randomIntBetween(0, 5));
assertReadWriteAndManage(role, ".slo-observability." + randomIntBetween(0, 5));
diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json
index ca2659b8d8dea..898e1b88cc632 100644
--- a/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json
+++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json
@@ -14,6 +14,11 @@
},
"default_pipeline": "logs@default-pipeline"
}
+ },
+ "data_stream_options": {
+ "failure_store": {
+ "enabled": true
+ }
}
},
"_meta": {
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MapExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MapExpression.java
index 24736ac3a2514..0a653b992a11d 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MapExpression.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MapExpression.java
@@ -120,10 +120,18 @@ public Expression get(Object key) {
return map.get(key);
} else {
// the key(literal) could be converted to BytesRef by ConvertStringToByteRef
- return keyFoldedMap.containsKey(key) ? keyFoldedMap.get(key) : keyFoldedMap.get(new BytesRef(key.toString()));
+ return keyFoldedMap.containsKey(key) ? keyFoldedMap.get(key) : keyFoldedMap.get(foldKey(key));
}
}
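+ // Like get(), fall back to the BytesRef-folded form of the key, since string literals may have been converted by ConvertStringToByteRef.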
+ public boolean containsKey(Object key) {
+ return keyFoldedMap.containsKey(key) || keyFoldedMap.containsKey(foldKey(key));
+ }
+
+ private BytesRef foldKey(Object key) {
+ return new BytesRef(key.toString());
+ }
+
@Override
public boolean equals(Object obj) {
if (this == obj) {
diff --git a/x-pack/plugin/esql/arrow/build.gradle b/x-pack/plugin/esql/arrow/build.gradle
index fac0bd0a77452..d6fa48982d029 100644
--- a/x-pack/plugin/esql/arrow/build.gradle
+++ b/x-pack/plugin/esql/arrow/build.gradle
@@ -12,9 +12,9 @@ dependencies {
compileOnly project(':x-pack:plugin:esql:compute')
compileOnly project(':x-pack:plugin:esql-core')
compileOnly project(':x-pack:plugin:mapper-version')
- implementation('org.apache.arrow:arrow-vector:16.1.0')
- implementation('org.apache.arrow:arrow-format:16.1.0')
- implementation('org.apache.arrow:arrow-memory-core:16.1.0')
+ implementation('org.apache.arrow:arrow-vector:18.3.0')
+ implementation('org.apache.arrow:arrow-format:18.3.0')
+ implementation('org.apache.arrow:arrow-memory-core:18.3.0')
implementation('org.checkerframework:checker-qual:3.42.0')
implementation('com.google.flatbuffers:flatbuffers-java:23.5.26')
// Needed for the json arrow serialization, and loaded even if we don't use it.
@@ -25,7 +25,7 @@ dependencies {
runtimeOnly "org.slf4j:slf4j-nop:${versions.slf4j}"
testImplementation project(':test:framework')
- testImplementation('org.apache.arrow:arrow-memory-unsafe:16.1.0')
+ testImplementation('org.apache.arrow:arrow-memory-unsafe:18.3.0')
testImplementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${versions.jackson}")
}
@@ -38,18 +38,8 @@ tasks.named("dependencyLicenses").configure {
tasks.named("thirdPartyAudit").configure {
ignoreViolations(
// uses sun.misc.Unsafe. Only used in tests.
- 'org.apache.arrow.memory.util.hash.SimpleHasher',
- 'org.apache.arrow.memory.util.hash.MurmurHasher',
'org.apache.arrow.memory.util.MemoryUtil',
'org.apache.arrow.memory.util.MemoryUtil$1',
- 'org.apache.arrow.vector.DecimalVector',
- 'org.apache.arrow.vector.BaseFixedWidthVector',
- 'org.apache.arrow.vector.util.DecimalUtility',
- 'org.apache.arrow.vector.Decimal256Vector',
- 'org.apache.arrow.vector.util.VectorAppender',
- 'org.apache.arrow.memory.ArrowBuf',
- 'org.apache.arrow.vector.BitVectorHelper',
- 'org.apache.arrow.memory.util.ByteFunctionHelpers',
)
ignoreMissingClasses(
'org.apache.commons.codec.binary.Hex'
diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java
index 6494a29e83075..73cd7b77968fe 100644
--- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java
+++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java
@@ -27,7 +27,6 @@
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
-import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase;
import org.junit.Before;
import org.junit.ClassRule;
@@ -40,6 +39,7 @@
import static org.elasticsearch.test.ListMatcher.matchesList;
import static org.elasticsearch.test.MapMatcher.assertMap;
import static org.elasticsearch.test.MapMatcher.matchesMap;
+import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.hasCapabilities;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@@ -594,7 +594,7 @@ record Listen(long timestamp, String songId, double duration) {
public void testLookupJoinIndexAllowed() throws Exception {
assumeTrue(
"Requires LOOKUP JOIN capability",
- EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
+ hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
);
Response resp = runESQLCommand(
@@ -685,7 +685,7 @@ public void testLookupJoinIndexAllowed() throws Exception {
public void testLookupJoinDocLevelSecurity() throws Exception {
assumeTrue(
"Requires LOOKUP JOIN capability",
- EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
+ hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
);
Response resp = runESQLCommand("dls_user", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value | KEEP x, org");
@@ -734,7 +734,7 @@ public void testLookupJoinDocLevelSecurity() throws Exception {
public void testLookupJoinFieldLevelSecurity() throws Exception {
assumeTrue(
"Requires LOOKUP JOIN capability",
- EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
+ hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
);
Response resp = runESQLCommand("fls_user2", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value");
@@ -792,7 +792,7 @@ public void testLookupJoinFieldLevelSecurity() throws Exception {
public void testLookupJoinFieldLevelSecurityOnAlias() throws Exception {
assumeTrue(
"Requires LOOKUP JOIN capability",
- EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
+ hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
);
Response resp = runESQLCommand("fls_user2_alias", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-second-alias ON value");
@@ -850,7 +850,7 @@ public void testLookupJoinFieldLevelSecurityOnAlias() throws Exception {
public void testLookupJoinIndexForbidden() throws Exception {
assumeTrue(
"Requires LOOKUP JOIN capability",
- EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
+ hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()))
);
var resp = expectThrows(
diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java
index 5578eff5cc67a..9695411b2c33b 100644
--- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java
+++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java
@@ -12,7 +12,6 @@
import org.elasticsearch.test.rest.TestFeatureService;
import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase;
import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase;
-import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.ClassRule;
@@ -22,6 +21,7 @@
import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V12;
+import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.hasCapabilities;
public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase {
@ClassRule
@@ -43,11 +43,6 @@ public void extractOldClusterFeatures() {
}
}
- protected static boolean oldClusterHasFeature(String featureId) {
- assert oldClusterTestFeatureService != null;
- return oldClusterTestFeatureService.clusterHasFeature(featureId);
- }
-
@AfterClass
public static void cleanUp() {
oldClusterTestFeatureService = null;
@@ -59,10 +54,9 @@ public MixedClusterEsqlSpecIT(
String testName,
Integer lineNumber,
CsvTestCase testCase,
- String instructions,
- Mode mode
+ String instructions
) {
- super(fileName, groupName, testName, lineNumber, testCase, instructions, mode);
+ super(fileName, groupName, testName, lineNumber, testCase, instructions);
}
@Override
@@ -87,12 +81,12 @@ protected boolean supportsInferenceTestService() {
}
@Override
- protected boolean supportsIndexModeLookup() throws IOException {
- return hasCapabilities(List.of(JOIN_LOOKUP_V12.capabilityName()));
+ protected boolean supportsIndexModeLookup() {
+ return hasCapabilities(adminClient(), List.of(JOIN_LOOKUP_V12.capabilityName()));
}
@Override
- protected boolean supportsSourceFieldMapping() throws IOException {
+ protected boolean supportsSourceFieldMapping() {
return false;
}
diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
index dfb653b5e0941..38885d4a3263f 100644
--- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
+++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
@@ -27,7 +27,6 @@
import org.elasticsearch.xpack.esql.CsvTestsDataLoader;
import org.elasticsearch.xpack.esql.SpecReader;
import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase;
-import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode;
import org.junit.AfterClass;
import org.junit.ClassRule;
import org.junit.rules.RuleChain;
@@ -36,7 +35,6 @@
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.URL;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
@@ -58,7 +56,7 @@
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.UNMAPPED_FIELDS;
-import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode.SYNC;
+import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.hasCapabilities;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
@@ -86,19 +84,7 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase {
public static List<Object[]> readScriptSpec() throws Exception {
List<URL> urls = classpathResources("/*.csv-spec");
assertTrue("Not enough specs found " + urls, urls.size() > 0);
- List<Object[]> specs = SpecReader.readScriptSpec(urls, specParser());
-
- int len = specs.get(0).length;
- List<Object[]> testcases = new ArrayList<>();
- for (var spec : specs) {
- for (Mode mode : List.of(SYNC)) { // No async, for now
- Object[] obj = new Object[len + 1];
- System.arraycopy(spec, 0, obj, 0, len);
- obj[len] = mode;
- testcases.add(obj);
- }
- }
- return testcases;
+ return SpecReader.readScriptSpec(urls, specParser());
}
public MultiClusterSpecIT(
@@ -107,10 +93,9 @@ public MultiClusterSpecIT(
String testName,
Integer lineNumber,
CsvTestCase testCase,
- String instructions,
- Mode mode
+ String instructions
) {
- super(fileName, groupName, testName, lineNumber, convertToRemoteIndices(testCase), instructions, mode);
+ super(fileName, groupName, testName, lineNumber, convertToRemoteIndices(testCase), instructions);
}
// TODO: think how to handle this better
@@ -152,7 +137,10 @@ protected void shouldSkipTest(String testName) throws IOException {
assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName()));
assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V9.capabilityName()));
if (testCase.requiredCapabilities.contains(JOIN_LOOKUP_V12.capabilityName())) {
- assumeTrue("LOOKUP JOIN not yet supported in CCS", hasCapabilities(List.of(ENABLE_LOOKUP_JOIN_ON_REMOTE.capabilityName())));
+ assumeTrue(
+ "LOOKUP JOIN not yet supported in CCS",
+ hasCapabilities(adminClient(), List.of(ENABLE_LOOKUP_JOIN_ON_REMOTE.capabilityName()))
+ );
}
// Unmapped fields require a correct capability response from every cluster, which isn't currently implemented.
assumeFalse("UNMAPPED FIELDS not yet supported in CCS", testCase.requiredCapabilities.contains(UNMAPPED_FIELDS.capabilityName()));
@@ -401,7 +389,7 @@ protected boolean supportsInferenceTestService() {
@Override
protected boolean supportsIndexModeLookup() throws IOException {
- return hasCapabilities(List.of(JOIN_LOOKUP_V12.capabilityName()));
+ return hasCapabilities(adminClient(), List.of(JOIN_LOOKUP_V12.capabilityName()));
}
@Override
diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java
index ffdf0eb336a1a..e36bd451c8298 100644
--- a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java
+++ b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java
@@ -10,7 +10,6 @@
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase;
import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase;
-import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode;
import org.junit.ClassRule;
import java.io.IOException;
@@ -24,16 +23,8 @@ protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
- public EsqlSpecIT(
- String fileName,
- String groupName,
- String testName,
- Integer lineNumber,
- CsvTestCase testCase,
- String instructions,
- Mode mode
- ) {
- super(fileName, groupName, testName, lineNumber, testCase, instructions, mode);
+ public EsqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase, String instructions) {
+ super(fileName, groupName, testName, lineNumber, testCase, instructions);
}
@Override
diff --git a/x-pack/plugin/esql/qa/server/single-node/build.gradle b/x-pack/plugin/esql/qa/server/single-node/build.gradle
index 977955ed69e52..ce962ef4c7e74 100644
--- a/x-pack/plugin/esql/qa/server/single-node/build.gradle
+++ b/x-pack/plugin/esql/qa/server/single-node/build.gradle
@@ -16,9 +16,9 @@ dependencies {
javaRestTestImplementation project(xpackModule('esql'))
yamlRestTestImplementation project(xpackModule('esql:qa:server'))
- javaRestTestImplementation('org.apache.arrow:arrow-vector:16.1.0')
- javaRestTestImplementation('org.apache.arrow:arrow-format:16.1.0')
- javaRestTestImplementation('org.apache.arrow:arrow-memory-core:16.1.0')
+ javaRestTestImplementation('org.apache.arrow:arrow-vector:18.3.0')
+ javaRestTestImplementation('org.apache.arrow:arrow-format:18.3.0')
+ javaRestTestImplementation('org.apache.arrow:arrow-memory-core:18.3.0')
javaRestTestImplementation('org.checkerframework:checker-qual:3.42.0')
javaRestTestImplementation('com.google.flatbuffers:flatbuffers-java:23.5.26')
javaRestTestImplementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
@@ -26,7 +26,7 @@ dependencies {
javaRestTestImplementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}")
javaRestTestImplementation("org.slf4j:slf4j-api:${versions.slf4j}")
javaRestTestImplementation("org.slf4j:slf4j-nop:${versions.slf4j}")
- javaRestTestImplementation('org.apache.arrow:arrow-memory-unsafe:16.1.0')
+ javaRestTestImplementation('org.apache.arrow:arrow-memory-unsafe:18.3.0')
clusterPlugins project(':plugins:mapper-size')
clusterPlugins project(':plugins:mapper-murmur3')
diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java
index 4c4ce020eb773..f4ec3f099e068 100644
--- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java
+++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java
@@ -19,7 +19,6 @@
import org.elasticsearch.xpack.esql.planner.PhysicalSettings;
import org.elasticsearch.xpack.esql.plugin.ComputeService;
import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase;
-import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode;
import org.junit.Before;
import org.junit.ClassRule;
@@ -37,16 +36,8 @@ protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
- public EsqlSpecIT(
- String fileName,
- String groupName,
- String testName,
- Integer lineNumber,
- CsvTestCase testCase,
- String instructions,
- Mode mode
- ) {
- super(fileName, groupName, testName, lineNumber, testCase, instructions, mode);
+ public EsqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase, String instructions) {
+ super(fileName, groupName, testName, lineNumber, testCase, instructions);
}
@Override
diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeForkIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeForkIT.java
index cb1f4383600d8..a2ddb87cf0f44 100644
--- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeForkIT.java
+++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeForkIT.java
@@ -12,7 +12,6 @@
import org.elasticsearch.test.TestClustersThreadFilter;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.xpack.esql.CsvSpecReader;
-import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode;
import org.elasticsearch.xpack.esql.qa.rest.generative.GenerativeForkRestTest;
import org.junit.ClassRule;
@@ -32,10 +31,9 @@ public GenerativeForkIT(
String testName,
Integer lineNumber,
CsvSpecReader.CsvTestCase testCase,
- String instructions,
- Mode mode
+ String instructions
) {
- super(fileName, groupName, testName, lineNumber, testCase, instructions, mode);
+ super(fileName, groupName, testName, lineNumber, testCase, instructions);
}
@Override
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
index fa4f5a4b3909f..5ca7e4e8b03aa 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
@@ -68,7 +68,6 @@
import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults;
import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled;
import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues;
-import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.availableDatasetsForEs;
import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.createInferenceEndpoints;
import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.deleteInferenceEndpoints;
import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.loadDataSetIntoEs;
@@ -79,6 +78,7 @@
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.SEMANTIC_TEXT_FIELD_CAPS;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.SOURCE_FIELD_MAPPING;
import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.assertNotPartial;
+import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.hasCapabilities;
// This test can run very long in serverless configurations
@TimeoutSuite(millis = 30 * TimeUnits.MINUTE)
@@ -101,19 +101,7 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase {
public static List<Object[]> readScriptSpec() throws Exception {
List<URL> urls = classpathResources("/*.csv-spec");
assertTrue("Not enough specs found " + urls, urls.size() > 0);
- List<Object[]> specs = SpecReader.readScriptSpec(urls, specParser());
-
- int len = specs.get(0).length;
- List<Object[]> testcases = new ArrayList<>();
- for (var spec : specs) {
- for (Mode mode : Mode.values()) {
- Object[] obj = new Object[len + 1];
- System.arraycopy(spec, 0, obj, 0, len);
- obj[len] = mode;
- testcases.add(obj);
- }
- }
- return testcases;
+ return SpecReader.readScriptSpec(urls, specParser());
}
protected EsqlSpecTestCase(
@@ -122,8 +110,7 @@ protected EsqlSpecTestCase(
String testName,
Integer lineNumber,
CsvTestCase testCase,
- String instructions,
- Mode mode
+ String instructions
) {
this.fileName = fileName;
this.groupName = groupName;
@@ -131,25 +118,30 @@ protected EsqlSpecTestCase(
this.lineNumber = lineNumber;
this.testCase = testCase;
this.instructions = instructions;
- this.mode = mode;
+ this.mode = randomFrom(Mode.values());
}
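+ // Tracks whether the CSV datasets have been loaded; they are now loaded once per suite rather than re-checked per test.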
+ private static boolean dataLoaded = false;
+
@Before
public void setup() throws IOException {
- if (supportsInferenceTestService()) {
- createInferenceEndpoints(adminClient());
- }
-
boolean supportsLookup = supportsIndexModeLookup();
boolean supportsSourceMapping = supportsSourceFieldMapping();
- if (indexExists(availableDatasetsForEs(client(), supportsLookup, supportsSourceMapping).iterator().next().indexName()) == false) {
- loadDataSetIntoEs(client(), supportsLookup, supportsSourceMapping);
+ boolean supportsInferenceTestService = supportsInferenceTestService();
+ if (dataLoaded == false) {
+ if (supportsInferenceTestService) {
+ createInferenceEndpoints(adminClient());
+ }
+
+ loadDataSetIntoEs(client(), supportsLookup, supportsSourceMapping, supportsInferenceTestService);
+ dataLoaded = true;
}
}
@AfterClass
public static void wipeTestData() throws IOException {
try {
+ dataLoaded = false;
adminClient().performRequest(new Request("DELETE", "/*"));
} catch (ResponseException e) {
// 404 here just means we had no indexes
@@ -159,7 +151,6 @@ public static void wipeTestData() throws IOException {
}
deleteInferenceEndpoints(adminClient());
-
}
public boolean logResults() {
@@ -196,8 +187,12 @@ protected boolean supportTimeSeriesCommand() {
return true;
}
- protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase)
- throws IOException {
+ protected static void checkCapabilities(
+ RestClient client,
+ TestFeatureService testFeatureService,
+ String testName,
+ CsvTestCase testCase
+ ) {
if (hasCapabilities(client, testCase.requiredCapabilities)) {
return;
}
@@ -211,38 +206,6 @@ protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase)
}
}
- protected static boolean hasCapabilities(List<String> requiredCapabilities) throws IOException {
- return hasCapabilities(adminClient(), requiredCapabilities);
- }
-
- public static boolean hasCapabilities(RestClient client, List<String> requiredCapabilities) throws IOException {
- if (requiredCapabilities.isEmpty()) {
- return true;
- }
- try {
- if (clusterHasCapability(client, "POST", "/_query", List.of(), requiredCapabilities).orElse(false)) {
- return true;
- }
- LOGGER.info("capabilities API returned false, we might be in a mixed version cluster so falling back to cluster features");
- } catch (ResponseException e) {
- if (e.getResponse().getStatusLine().getStatusCode() / 100 == 4) {
- /*
- * The node we're testing against is too old for the capabilities
- * API which means it has to be pretty old. Very old capabilities
- * are ALSO present in the features API, so we can check them instead.
- *
- * It's kind of weird that we check for *any* 400, but that's required
- * because old versions of Elasticsearch return 400, not the expected
- * 404.
- */
- LOGGER.info("capabilities API failed, falling back to cluster features");
- } else {
- throw e;
- }
- }
- return false;
- }
-
protected boolean supportsInferenceTestService() {
return true;
}
@@ -271,7 +234,9 @@ protected final void doTest(String query) throws Throwable {
builder.tables(tables());
}
- Map<?, ?> prevTooks = supportsTook() ? tooks() : null;
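+ // Fetching the took histograms requires an extra stats call, so only verify them on rare runs.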
+ boolean checkTook = supportsTook() && rarely();
+
+ Map<?, ?> prevTooks = checkTook ? tooks() : null;
Map<String, Object> answer = RestEsqlTestCase.runEsql(
builder.query(query),
testCase.assertWarnings(deduplicateExactWarnings()),
@@ -296,7 +261,7 @@ protected final void doTest(String query) throws Throwable {
assertResults(expectedColumnsWithValues, actualColumns, actualValues, testCase.ignoreOrder, logger);
- if (supportsTook()) {
+ if (checkTook) {
LOGGER.info("checking took incremented from {}", prevTooks);
long took = ((Number) answer.get("took")).longValue();
int prevTookHisto = ((Number) prevTooks.remove(tookKey(took))).intValue();
@@ -413,7 +378,6 @@ protected boolean preserveClusterUponCompletion() {
return true;
}
- @Before
@After
public void assertRequestBreakerEmptyAfterTests() throws Exception {
assertRequestBreakerEmpty();
@@ -421,7 +385,7 @@ public void assertRequestBreakerEmptyAfterTests() throws Exception {
public static void assertRequestBreakerEmpty() throws Exception {
assertBusy(() -> {
- HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity();
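+ // Fetch only the breaker metric to keep the node stats response small.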
+ HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats?metric=breaker")).getEntity();
Map<?, ?> stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false);
Map<?, ?> nodes = (Map<?, ?>) stats.get("nodes");
@@ -495,7 +459,7 @@ private Map<String, Map<String, Column>> tables() {
protected boolean supportsTook() throws IOException {
if (supportsTook == null) {
- supportsTook = hasCapabilities(client(), List.of("usage_contains_took"));
+ supportsTook = hasCapabilities(adminClient(), List.of("usage_contains_took"));
}
return supportsTook;
}
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
index d191cfcafa80f..e5e6785fb0a52 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
@@ -16,6 +16,7 @@
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
+import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.WarningsHandler;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.Streams;
@@ -42,6 +43,7 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
+import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.time.ZoneId;
import java.util.ArrayList;
@@ -52,6 +54,8 @@
import java.util.Locale;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import java.util.function.IntFunction;
import static java.util.Collections.emptySet;
@@ -1105,7 +1109,6 @@ public void testComplexFieldNames() throws IOException {
* query. It's part of the "configuration" of the query.
*
*/
- @AwaitsFix(bugUrl = "Disabled temporarily until JOIN implementation is completed")
public void testInlineStatsNow() throws IOException {
assumeTrue("INLINESTATS only available on snapshots", Build.current().isSnapshot());
indexTimestampData(1);
@@ -1121,8 +1124,8 @@ public void testInlineStatsNow() throws IOException {
.item("value" + i)
.item("value" + i)
.item(i)
- .item(any(String.class))
.item(499.5)
+ .item(any(String.class))
);
}
assertResultMap(
@@ -1131,8 +1134,8 @@ public void testInlineStatsNow() throws IOException {
.item(matchesMap().entry("name", "test").entry("type", "text"))
.item(matchesMap().entry("name", "test.keyword").entry("type", "keyword"))
.item(matchesMap().entry("name", "value").entry("type", "long"))
- .item(matchesMap().entry("name", "now").entry("type", "date"))
- .item(matchesMap().entry("name", "AVG(value)").entry("type", "double")),
+ .item(matchesMap().entry("name", "AVG(value)").entry("type", "double"))
+ .item(matchesMap().entry("name", "now").entry("type", "date")),
values
);
}
@@ -1334,8 +1337,8 @@ public static Map runEsqlAsync(
checkKeepOnCompletion(requestObject, json, keepOnCompletion);
String id = (String) json.get("id");
- var supportsAsyncHeaders = clusterHasCapability("POST", "/_query", List.of(), List.of("async_query_status_headers")).orElse(false);
- var supportsSuggestedCast = clusterHasCapability("POST", "/_query", List.of(), List.of("suggested_cast")).orElse(false);
+ var supportsAsyncHeaders = hasCapabilities(adminClient(), List.of("async_query_status_headers"));
+ var supportsSuggestedCast = hasCapabilities(adminClient(), List.of("suggested_cast"));
if (id == null) {
// no id returned from an async call, must have completed immediately and without keep_on_completion
@@ -1409,13 +1412,33 @@ public static Map runEsqlAsync(
private static void prepareProfileLogger(RequestObjectBuilder requestObject, @Nullable ProfileLogger profileLogger) throws IOException {
if (profileLogger != null) {
profileLogger.clearProfile();
- var isProfileSafe = clusterHasCapability("POST", "/_query", List.of(), List.of("fixed_profile_serialization")).orElse(false);
+ var isProfileSafe = hasCapabilities(adminClient(), List.of("fixed_profile_serialization"));
if (isProfileSafe) {
requestObject.profile(true);
}
}
}
+ record CapabilitesCacheKey(RestClient client, List<String> capabilities) {}
+
+ /**
+ * Cache of capability-check results, keyed by client and required capabilities.
+ */
+ private static final ConcurrentMap<CapabilitesCacheKey, Boolean> capabilities = new ConcurrentHashMap<>();
+
+ public static boolean hasCapabilities(RestClient client, List<String> requiredCapabilities) {
+ if (requiredCapabilities.isEmpty()) {
+ return true;
+ }
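+ // Cache per (client, capabilities) pair so repeated checks don't re-issue the capabilities request.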
+ return capabilities.computeIfAbsent(new CapabilitesCacheKey(client, requiredCapabilities), r -> {
+ try {
+ return clusterHasCapability(client, "POST", "/_query", List.of(), requiredCapabilities).orElse(false);
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ });
+ }
+
private static Object removeOriginalTypesAndSuggestedCast(Object response) {
if (response instanceof ArrayList<?> columns) {
var newColumns = new ArrayList<>();
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestRerankTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestRerankTestCase.java
index 17d88f1e21a13..7add36cbc2442 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestRerankTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestRerankTestCase.java
@@ -33,7 +33,7 @@ public class RestRerankTestCase extends ESRestTestCase {
public void skipWhenRerankDisabled() throws IOException {
assumeTrue(
"Requires RERANK capability",
- EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.RERANK.capabilityName()))
+ RestEsqlTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.RERANK.capabilityName()))
);
}
@@ -93,7 +93,7 @@ public void testRerankWithSingleField() throws IOException {
String query = """
FROM rerank-test-index
| WHERE match(title, "exploration")
- | RERANK "exploration" ON title WITH test_reranker
+ | RERANK "exploration" ON title WITH { "inference_id" : "test_reranker" }
| EVAL _score = ROUND(_score, 5)
""";
@@ -112,7 +112,7 @@ public void testRerankWithMultipleFields() throws IOException {
String query = """
FROM rerank-test-index
| WHERE match(title, "exploration")
- | RERANK "exploration" ON title, author WITH test_reranker
+ | RERANK "exploration" ON title, author WITH { "inference_id" : "test_reranker" }
| EVAL _score = ROUND(_score, 5)
""";
@@ -131,7 +131,7 @@ public void testRerankWithPositionalParams() throws IOException {
String query = """
FROM rerank-test-index
| WHERE match(title, "exploration")
- | RERANK ? ON title WITH ?
+ | RERANK ? ON title WITH { "inference_id" : ? }
| EVAL _score = ROUND(_score, 5)
""";
@@ -150,7 +150,7 @@ public void testRerankWithNamedParams() throws IOException {
String query = """
FROM rerank-test-index
| WHERE match(title, ?queryText)
- | RERANK ?queryText ON title WITH ?inferenceId
+ | RERANK ?queryText ON title WITH { "inference_id" : ?inferenceId }
| EVAL _score = ROUND(_score, 5)
""";
@@ -169,7 +169,7 @@ public void testRerankWithMissingInferenceId() {
String query = """
FROM rerank-test-index
| WHERE match(title, "exploration")
- | RERANK "exploration" ON title WITH test_missing
+ | RERANK "exploration" ON title WITH { "inference_id" : "test_missing" }
| EVAL _score = ROUND(_score, 5)
""";
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestSampleTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestSampleTestCase.java
index 17b60ed94be20..4e7f207269721 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestSampleTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestSampleTestCase.java
@@ -38,7 +38,7 @@ public class RestSampleTestCase extends ESRestTestCase {
public void skipWhenSampleDisabled() throws IOException {
assumeTrue(
"Requires SAMPLE capability",
- EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.SAMPLE_V3.capabilityName()))
+ RestEsqlTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.SAMPLE_V3.capabilityName()))
);
}
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeForkRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeForkRestTest.java
index c3ba8c9036eea..30af8d4045d19 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeForkRestTest.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeForkRestTest.java
@@ -9,12 +9,12 @@
import org.elasticsearch.xpack.esql.CsvSpecReader;
import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase;
-import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode;
import java.io.IOException;
import java.util.List;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.*;
+import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.hasCapabilities;
/**
* Tests for FORK. We generate tests for FORK from existing CSV tests.
@@ -30,10 +30,9 @@ public GenerativeForkRestTest(
String testName,
Integer lineNumber,
CsvSpecReader.CsvTestCase testCase,
- String instructions,
- Mode mode
+ String instructions
) {
- super(fileName, groupName, testName, lineNumber, testCase, instructions, mode);
+ super(fileName, groupName, testName, lineNumber, testCase, instructions);
}
@Override
@@ -61,6 +60,6 @@ protected void shouldSkipTest(String testName) throws IOException {
testCase.requiredCapabilities.contains(IMPLICIT_CASTING_DATE_AND_DATE_NANOS.capabilityName())
);
- assumeTrue("Cluster needs to support FORK", hasCapabilities(client(), List.of(FORK_V9.capabilityName())));
+ assumeTrue("Cluster needs to support FORK", hasCapabilities(adminClient(), List.of(FORK_V9.capabilityName())));
}
}
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java
index 9677a7e06948f..053f026cd0eda 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java
@@ -70,7 +70,7 @@ public abstract class GenerativeRestTest extends ESRestTestCase {
@Before
public void setup() throws IOException {
if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) {
- loadDataSetIntoEs(client(), true, supportsSourceFieldMapping());
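+ // Generative tests don't use inference endpoints, so pass inferenceEnabled=false.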
+ loadDataSetIntoEs(client(), true, supportsSourceFieldMapping(), false);
}
}
@@ -209,7 +209,7 @@ private static List outputSchema(Map
}
private List availableIndices() throws IOException {
- return availableDatasetsForEs(client(), true, supportsSourceFieldMapping()).stream()
+ return availableDatasetsForEs(true, supportsSourceFieldMapping(), false).stream()
.filter(x -> x.requiresInferenceEndpoint() == false)
.map(x -> x.indexName())
.toList();
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java
index 8a92f02a6f2ec..c01141837fae3 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java
@@ -308,7 +308,7 @@ public static void main(String[] args) throws IOException {
}
try (RestClient client = builder.build()) {
- loadDataSetIntoEs(client, true, true, (restClient, indexName, indexMapping, indexSettings) -> {
+ loadDataSetIntoEs(client, true, true, false, (restClient, indexName, indexMapping, indexSettings) -> {
// don't use ESRestTestCase methods here or, if you do, test running the main method before making the change
StringBuilder jsonBody = new StringBuilder("{");
if (indexSettings != null && indexSettings.isEmpty() == false) {
@@ -328,12 +328,10 @@ public static void main(String[] args) throws IOException {
}
public static Set<TestDataset> availableDatasetsForEs(
- RestClient client,
boolean supportsIndexModeLookup,
- boolean supportsSourceFieldMapping
+ boolean supportsSourceFieldMapping,
+ boolean inferenceEnabled
) throws IOException {
- boolean inferenceEnabled = clusterHasSparseEmbeddingInferenceEndpoint(client);
-
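+ // Whether inference endpoints are available is now passed in by the caller instead of being probed from the cluster here.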
Set<TestDataset> testDataSets = new HashSet<>();
for (TestDataset dataset : CSV_DATASET_MAP.values()) {
@@ -363,12 +361,17 @@ private static boolean isSourceMappingDataset(TestDataset dataset) throws IOExce
return mappingNode.get("_source") != null;
}
- public static void loadDataSetIntoEs(RestClient client, boolean supportsIndexModeLookup, boolean supportsSourceFieldMapping)
- throws IOException {
+ public static void loadDataSetIntoEs(
+ RestClient client,
+ boolean supportsIndexModeLookup,
+ boolean supportsSourceFieldMapping,
+ boolean inferenceEnabled
+ ) throws IOException {
loadDataSetIntoEs(
client,
supportsIndexModeLookup,
supportsSourceFieldMapping,
+ inferenceEnabled,
(restClient, indexName, indexMapping, indexSettings) -> {
ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null);
}
@@ -379,12 +382,13 @@ private static void loadDataSetIntoEs(
RestClient client,
boolean supportsIndexModeLookup,
boolean supportsSourceFieldMapping,
+ boolean inferenceEnabled,
IndexCreator indexCreator
) throws IOException {
Logger logger = LogManager.getLogger(CsvTestsDataLoader.class);
Set<String> loadedDatasets = new HashSet<>();
- for (var dataset : availableDatasetsForEs(client, supportsIndexModeLookup, supportsSourceFieldMapping)) {
+ for (var dataset : availableDatasetsForEs(supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled)) {
load(client, dataset, logger, indexCreator);
loadedDatasets.add(dataset.indexName);
}
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/completion.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/completion.csv-spec
index 9f0cf627eb927..9e2f88fd99d42 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/completion.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/completion.csv-spec
@@ -6,7 +6,7 @@ completion using a ROW source operator
required_capability: completion
ROW prompt="Who is Victor Hugo?"
-| COMPLETION completion_output = prompt WITH test_completion
+| COMPLETION completion_output = prompt WITH { "inference_id" : "test_completion" }
;
prompt:keyword | completion_output:keyword
@@ -18,7 +18,7 @@ completion using a ROW source operator and prompt is a multi-valued field
required_capability: completion
ROW prompt=["Answer the following question:", "Who is Victor Hugo?"]
-| COMPLETION completion_output = prompt WITH test_completion
+| COMPLETION completion_output = prompt WITH { "inference_id" : "test_completion" }
;
prompt:keyword | completion_output:keyword
@@ -34,7 +34,7 @@ FROM books METADATA _score
| WHERE title:"war and peace" AND author:"Tolstoy"
| SORT _score DESC
| LIMIT 2
-| COMPLETION title WITH test_completion
+| COMPLETION title WITH { "inference_id" : "test_completion" }
| KEEP title, completion
;
@@ -51,7 +51,7 @@ FROM books METADATA _score
| WHERE title:"war and peace" AND author:"Tolstoy"
| SORT _score DESC
| LIMIT 2
-| COMPLETION CONCAT("This is a prompt: ", title) WITH test_completion
+| COMPLETION CONCAT("This is a prompt: ", title) WITH { "inference_id" : "test_completion" }
| KEEP title, completion
;
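The completion.csv-spec hunks above all make the same migration: the inference endpoint moves from a bare identifier after WITH into a named-parameters map. A minimal sketch of the new shape (summary is a hypothetical target field; test_completion is the fixture endpoint these specs already use):

    ROW prompt = "Who is Victor Hugo?"
    | COMPLETION summary = prompt WITH { "inference_id" : "test_completion" }
    | KEEP prompt, summary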
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/fork.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/fork.csv-spec
index 2c5b8a650a64a..191d58a547c20 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/fork.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/fork.csv-spec
@@ -809,7 +809,7 @@ FROM employees
| KEEP emp_no, first_name, last_name
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
-| COMPLETION x = CONCAT(first_name, " ", last_name) WITH test_completion
+| COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
| SORT _fork, emp_no
;
@@ -827,7 +827,7 @@ required_capability: completion
FROM employees
| KEEP emp_no, first_name, last_name
| FORK (WHERE emp_no == 10048 OR emp_no == 10081
- | COMPLETION x = CONCAT(first_name, " ", last_name) WITH test_completion)
+ | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" })
(WHERE emp_no == 10081 OR emp_no == 10087)
| SORT _fork, emp_no
;
@@ -845,7 +845,7 @@ required_capability: completion
FROM employees
| KEEP emp_no, first_name, last_name
-| COMPLETION x = CONCAT(first_name, " ", last_name) WITH test_completion
+| COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
| SORT _fork, emp_no
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/rerank.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/rerank.csv-spec
index e9f9c25e93ee7..1f5b5d8d8ecc5 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/rerank.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/rerank.csv-spec
@@ -10,7 +10,7 @@ required_capability: match_operator_colon
FROM books METADATA _score
| WHERE title:"war and peace" AND author:"Tolstoy"
| SORT _score DESC, book_no ASC
-| RERANK "war and peace" ON title WITH inferenceId=test_reranker
+| RERANK "war and peace" ON title WITH { "inference_id" : "test_reranker" }
| EVAL _score=ROUND(_score, 2)
| KEEP book_no, title, author, _score
;
@@ -29,7 +29,7 @@ required_capability: match_operator_colon
FROM books METADATA _score
| WHERE title:"war and peace" AND author:"Tolstoy"
| SORT _score DESC, book_no ASC
-| RERANK "war and peace" ON title WITH inferenceId=test_reranker, scoreColumn=rerank_score
+| RERANK rerank_score="war and peace" ON title WITH { "inference_id" : "test_reranker" }
| EVAL _score=ROUND(_score, 2), rerank_score=ROUND(rerank_score, 2)
| KEEP book_no, title, author, rerank_score
;
@@ -48,7 +48,7 @@ required_capability: match_operator_colon
FROM books METADATA _score
| WHERE title:"war and peace" AND author:"Tolstoy"
| SORT _score DESC
-| RERANK "war and peace" ON title WITH inferenceId=test_reranker, scoreColumn=rerank_score
+| RERANK rerank_score="war and peace" ON title WITH { "inference_id" : "test_reranker" }
| EVAL _score=ROUND(_score, 2), rerank_score=ROUND(rerank_score, 2)
| SORT rerank_score, _score ASC, book_no ASC
| KEEP book_no, title, author, rerank_score
@@ -68,7 +68,7 @@ required_capability: match_operator_colon
FROM books METADATA _score
| WHERE title:"war and peace" AND author:"Tolstoy"
-| RERANK "war and peace" ON title, author WITH inferenceId=test_reranker
+| RERANK "war and peace" ON title, author WITH { "inference_id" : "test_reranker" }
| EVAL _score=ROUND(_score, 2)
| SORT _score DESC, book_no ASC
| KEEP book_no, title, author, _score
@@ -90,7 +90,7 @@ FROM books METADATA _score
| WHERE title:"war and peace" AND author:"Tolstoy"
| SORT _score DESC, book_no ASC
| LIMIT 3
-| RERANK "war and peace" ON title WITH inferenceId=test_reranker
+| RERANK "war and peace" ON title WITH { "inference_id" : "test_reranker" }
| EVAL _score=ROUND(_score, 2)
| SORT _score DESC, book_no ASC
| KEEP book_no, title, author, _score
@@ -109,7 +109,7 @@ required_capability: match_operator_colon
FROM books METADATA _score
| WHERE title:"war and peace" AND author:"Tolstoy"
-| RERANK "war and peace" ON title WITH inferenceId=test_reranker
+| RERANK "war and peace" ON title WITH { "inference_id" : "test_reranker" }
| EVAL _score=ROUND(_score, 2)
| SORT _score DESC, book_no ASC
| KEEP book_no, title, author, _score
@@ -129,7 +129,7 @@ required_capability: match_operator_colon
FROM books
| WHERE title:"war and peace" AND author:"Tolstoy"
-| RERANK "war and peace" ON title WITH inferenceId=test_reranker
+| RERANK "war and peace" ON title WITH { "inference_id" : "test_reranker" }
| EVAL _score=ROUND(_score, 2)
| KEEP book_no, title, author, _score
| SORT author, title
@@ -153,7 +153,7 @@ FROM books METADATA _id, _index, _score
| FORK ( WHERE title:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
( WHERE author:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
| FUSE
-| RERANK "Tolkien" ON title WITH inferenceId=test_reranker
+| RERANK "Tolkien" ON title WITH { "inference_id" : "test_reranker" }
| EVAL _score=ROUND(_score, 2)
| SORT _score DESC, book_no ASC
| LIMIT 2
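The rerank.csv-spec hunks fold the old scoreColumn option into a target-field assignment, so RERANK now mirrors COMPLETION: an optional target = prefix names the output column, and the WITH map carries the remaining named parameters. A minimal sketch against the same fixtures (omitting the rerank_score = prefix writes the score to _score instead):

    FROM books METADATA _score
    | RERANK rerank_score = "war and peace" ON title WITH { "inference_id" : "test_reranker" }
    | KEEP book_no, title, rerank_score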
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java
index a79bf06faed2b..933360a19be18 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java
@@ -161,22 +161,20 @@ public static Iterable<Object[]> parameters() {
: Collections.emptyMap(),
Build.current().isSnapshot() ? Map.ofEntries(Map.entry("MAX", 1)) : Collections.emptyMap(),
Build.current().isSnapshot()
+ ) },
+ new Object[] {
+ new Test(
+ """
+ FROM idx
+ | EVAL ip = TO_IP(host), x = TO_STRING(host), y = TO_STRING(host)
+ | INLINESTATS MAX(id)
+ """,
+ Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1) : Collections.emptyMap(),
+ Build.current().isSnapshot()
+ ? Map.ofEntries(Map.entry("MAX", 1), Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2))
+ : Collections.emptyMap(),
+ Build.current().isSnapshot()
) }
- // awaits fix for https://github.com/elastic/elasticsearch/issues/116003
- // ,
- // new Object[] {
- // new Test(
- // """
- // FROM idx
- // | EVAL ip = to_ip(host), x = to_string(host), y = to_string(host)
- // | INLINESTATS max(id)
- // """,
- // Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1) : Collections.emptyMap(),
- // Build.current().isSnapshot()
- // ? Map.ofEntries(Map.entry("MAX", 1), Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2))
- // : Collections.emptyMap(),
- // Build.current().isSnapshot()
- // ) }
);
}
diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
index e4de10a2ef19e..f8de90387ab97 100644
--- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
+++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
@@ -219,23 +219,28 @@ renameClause:
;
dissectCommand
- : DISSECT primaryExpression string commandOptions?
+ : DISSECT primaryExpression string dissectCommandOptions?
;
-grokCommand
- : GROK primaryExpression string
+dissectCommandOptions
+ : dissectCommandOption (COMMA dissectCommandOption)*
;
-mvExpandCommand
- : MV_EXPAND qualifiedName
+dissectCommandOption
+ : identifier ASSIGN constant
;
-commandOptions
- : commandOption (COMMA commandOption)*
+
+commandNamedParameters
+ : (WITH mapExpression)?
;
-commandOption
- : identifier ASSIGN constant
+grokCommand
+ : GROK primaryExpression string
+ ;
+
+mvExpandCommand
+ : MV_EXPAND qualifiedName
;
explainCommand
@@ -293,7 +298,7 @@ forkSubQueryProcessingCommand
;
completionCommand
- : COMPLETION (targetField=qualifiedName ASSIGN)? prompt=primaryExpression WITH inferenceId=identifierOrParameter
+ : COMPLETION (targetField=qualifiedName ASSIGN)? prompt=primaryExpression commandNamedParameters
;
//
@@ -315,19 +320,6 @@ fuseCommand
: DEV_FUSE
;
-inferenceCommandOptions
- : inferenceCommandOption (COMMA inferenceCommandOption)*
- ;
-
-inferenceCommandOption
- : identifier ASSIGN inferenceCommandOptionValue
- ;
-
-inferenceCommandOptionValue
- : constant
- | identifier
- ;
-
rerankCommand
- : DEV_RERANK queryText=constant ON rerankFields (WITH inferenceCommandOptions)?
+ : DEV_RERANK (targetField=qualifiedName ASSIGN)? queryText=constant ON rerankFields commandNamedParameters
;
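After this grammar change, COMPLETION and RERANK share the single optional commandNamedParameters rule (WITH mapExpression), while DISSECT keeps its own identifier ASSIGN constant option list via dissectCommandOptions. A sketch exercising both shapes (the field names are hypothetical; APPEND_SEPARATOR is the existing DISSECT option):

    ROW line = "foo bar"
    | DISSECT line "%{a} %{b}" APPEND_SEPARATOR=","
    | COMPLETION a WITH { "inference_id" : "test_completion" }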
diff --git a/x-pack/plugin/esql/src/main/antlr/parser/Expression.g4 b/x-pack/plugin/esql/src/main/antlr/parser/Expression.g4
index 0462b2d6a67ee..fde700bed2f84 100644
--- a/x-pack/plugin/esql/src/main/antlr/parser/Expression.g4
+++ b/x-pack/plugin/esql/src/main/antlr/parser/Expression.g4
@@ -57,7 +57,7 @@ functionName
;
mapExpression
- : LEFT_BRACES entryExpression (COMMA entryExpression)* RIGHT_BRACES
+ : LEFT_BRACES (entryExpression (COMMA entryExpression)*)? RIGHT_BRACES
;
entryExpression
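Making the entry list optional means an empty map now parses, so a command can spell "no options" explicitly. Whether an empty map is semantically acceptable is left to each command's validation rather than to the parser; a sketch of a query the parser would now accept:

    ROW prompt = "Who is Victor Hugo?"
    | COMPLETION prompt WITH { }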
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java
index f7fd991a9ef16..a801a0bbaefa3 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java
@@ -8,6 +8,7 @@
import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.RemoteClusterActionType;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
@@ -15,10 +16,11 @@
import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
-import org.elasticsearch.client.internal.RemoteClusterClient;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.Task;
+import org.elasticsearch.transport.Transport;
+import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
/**
@@ -49,19 +51,18 @@ public EsqlResolveFieldsAction(
@Override
protected void doExecute(Task task, FieldCapabilitiesRequest request, final ActionListener<FieldCapabilitiesResponse> listener) {
- fieldCapsAction.executeRequest(task, request, this::executeRemoteRequest, listener);
+ fieldCapsAction.executeRequest(task, request, this::executeLinkedRequest, listener);
}
- void executeRemoteRequest(
- RemoteClusterClient remoteClient,
- FieldCapabilitiesRequest remoteRequest,
- ActionListener<FieldCapabilitiesResponse> remoteListener
+ void executeLinkedRequest(
+ TransportService transportService,
+ Transport.Connection conn,
+ FieldCapabilitiesRequest request,
+ ActionListenerResponseHandler<FieldCapabilitiesResponse> responseHandler
) {
- remoteClient.getConnection(remoteRequest, remoteListener.delegateFailure((l, conn) -> {
- var remoteAction = conn.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)
- ? RESOLVE_REMOTE_TYPE
- : TransportFieldCapabilitiesAction.REMOTE_TYPE;
- remoteClient.execute(conn, remoteAction, remoteRequest, l);
- }));
+ var remoteAction = conn.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)
+ ? RESOLVE_REMOTE_TYPE
+ : TransportFieldCapabilitiesAction.REMOTE_TYPE;
+ transportService.sendRequest(conn, remoteAction.name(), request, TransportRequestOptions.EMPTY, responseHandler);
}
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
index 44e4fd5a1bb3c..72a68663e41b5 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
@@ -1405,7 +1405,7 @@ private BitSet gatherPreAnalysisMetrics(LogicalPlan plan, BitSet b) {
private static class ImplicitCasting extends ParameterizedRule<LogicalPlan, LogicalPlan, AnalyzerContext> {
@Override
public LogicalPlan apply(LogicalPlan plan, AnalyzerContext context) {
- // do implicit casting for function arguments
+ // do implicit casting for named parameters
return plan.transformExpressionsUp(
org.elasticsearch.xpack.esql.core.expression.function.Function.class,
e -> ImplicitCasting.cast(e, context.functionRegistry().snapshotRegistry())
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp
index fe807dc62d367..648ed63ee9313 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp
@@ -323,10 +323,11 @@ dropCommand
renameCommand
renameClause
dissectCommand
+dissectCommandOptions
+dissectCommandOption
+commandNamedParameters
grokCommand
mvExpandCommand
-commandOptions
-commandOption
explainCommand
subqueryExpression
showCommand
@@ -345,9 +346,6 @@ lookupCommand
inlinestatsCommand
insistCommand
fuseCommand
-inferenceCommandOptions
-inferenceCommandOption
-inferenceCommandOptionValue
rerankCommand
booleanExpression
regexBooleanExpression
@@ -373,4 +371,4 @@ joinPredicate
atn:
-[4, 1, 139, 831, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 184, 8, 1, 10, 1, 12, 1, 187, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 196, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 225, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 5, 7, 238, 8, 7, 10, 7, 12, 7, 241, 9, 7, 1, 8, 1, 8, 1, 8, 3, 8, 246, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 253, 8, 9, 10, 9, 12, 9, 256, 9, 9, 1, 10, 1, 10, 1, 10, 3, 10, 261, 8, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 272, 8, 13, 10, 13, 12, 13, 275, 9, 13, 1, 13, 3, 13, 278, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 289, 8, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 303, 8, 19, 10, 19, 12, 19, 306, 9, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 313, 8, 21, 1, 21, 1, 21, 3, 21, 317, 8, 21, 1, 22, 1, 22, 1, 22, 5, 22, 322, 8, 22, 10, 22, 12, 22, 325, 9, 22, 1, 23, 1, 23, 1, 23, 3, 23, 330, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 335, 8, 24, 10, 24, 12, 24, 338, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 343, 8, 25, 10, 25, 12, 25, 346, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 351, 8, 26, 10, 26, 12, 26, 354, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 3, 28, 361, 8, 28, 1, 29, 1, 29, 3, 29, 365, 8, 29, 1, 30, 1, 30, 3, 30, 369, 8, 30, 1, 31, 1, 31, 1, 31, 3, 31, 374, 8, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 383, 8, 33, 10, 33, 12, 33, 386, 9, 33, 1, 34, 1, 34, 3, 34, 390, 8, 34, 1, 34, 1, 34, 3, 34, 394, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 406, 8, 37, 10, 37, 12, 37, 409, 9, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 419, 8, 38, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 425, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 5, 42, 437, 8, 42, 10, 42, 12, 42, 440, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 460, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 466, 8, 47, 10, 47, 12, 47, 469, 9, 47, 3, 47, 471, 8, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 3, 49, 478, 8, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 
50, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 489, 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 496, 8, 51, 1, 52, 1, 52, 1, 52, 1, 53, 4, 53, 502, 8, 53, 11, 53, 12, 53, 503, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 5, 55, 516, 8, 55, 10, 55, 12, 55, 519, 9, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 3, 57, 527, 8, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 3, 59, 542, 8, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 5, 62, 552, 8, 62, 10, 62, 12, 62, 555, 9, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 3, 64, 563, 8, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 571, 8, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 580, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 5, 66, 587, 8, 66, 10, 66, 12, 66, 590, 9, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 597, 8, 66, 1, 66, 1, 66, 1, 66, 3, 66, 602, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 5, 66, 610, 8, 66, 10, 66, 12, 66, 613, 9, 66, 1, 67, 1, 67, 3, 67, 617, 8, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 3, 67, 624, 8, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 3, 67, 631, 8, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 5, 67, 638, 8, 67, 10, 67, 12, 67, 641, 9, 67, 1, 67, 1, 67, 1, 67, 1, 67, 3, 67, 647, 8, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 5, 67, 654, 8, 67, 10, 67, 12, 67, 657, 9, 67, 1, 67, 1, 67, 3, 67, 661, 8, 67, 1, 68, 1, 68, 1, 68, 3, 68, 666, 8, 68, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 3, 69, 676, 8, 69, 1, 70, 1, 70, 1, 70, 1, 70, 3, 70, 682, 8, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 5, 70, 690, 8, 70, 10, 70, 12, 70, 693, 9, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 3, 71, 703, 8, 71, 1, 71, 1, 71, 1, 71, 5, 71, 708, 8, 71, 10, 71, 12, 71, 711, 9, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 5, 72, 719, 8, 72, 10, 72, 12, 72, 722, 9, 72, 1, 72, 1, 72, 3, 72, 726, 8, 72, 3, 72, 728, 8, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 5, 74, 738, 8, 74, 10, 74, 12, 74, 741, 9, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 5, 76, 762, 8, 76, 10, 76, 12, 76, 765, 9, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 5, 76, 773, 8, 76, 10, 76, 12, 76, 776, 9, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 5, 76, 784, 8, 76, 10, 76, 12, 76, 787, 9, 76, 1, 76, 1, 76, 3, 76, 791, 8, 76, 1, 77, 1, 77, 1, 78, 1, 78, 3, 78, 797, 8, 78, 1, 79, 3, 79, 800, 8, 79, 1, 79, 1, 79, 1, 80, 3, 80, 805, 8, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 5, 85, 824, 8, 85, 10, 85, 12, 85, 827, 9, 85, 1, 86, 1, 86, 1, 86, 0, 5, 2, 110, 132, 140, 142, 87, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 0, 10, 2, 0, 53, 53, 107, 107, 1, 0, 101, 102, 2, 0, 57, 57, 63, 63, 2, 0, 66, 66, 69, 69, 2, 0, 38, 38, 53, 53, 1, 0, 87, 88, 1, 0, 89, 91, 2, 0, 65, 65, 78, 78, 2, 0, 80, 80, 82, 86, 2, 0, 23, 23, 25, 26, 860, 0, 174, 1, 0, 0, 0, 2, 177, 1, 0, 0, 0, 4, 195, 1, 0, 0, 0, 6, 224, 1, 0, 0, 0, 8, 226, 1, 0, 0, 0, 10, 229, 1, 0, 0, 0, 12, 231, 1, 0, 0, 0, 14, 234, 1, 0, 0, 0, 16, 245, 1, 0, 0, 0, 18, 249, 1, 
0, 0, 0, 20, 257, 1, 0, 0, 0, 22, 262, 1, 0, 0, 0, 24, 265, 1, 0, 0, 0, 26, 268, 1, 0, 0, 0, 28, 288, 1, 0, 0, 0, 30, 290, 1, 0, 0, 0, 32, 292, 1, 0, 0, 0, 34, 294, 1, 0, 0, 0, 36, 296, 1, 0, 0, 0, 38, 298, 1, 0, 0, 0, 40, 307, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 331, 1, 0, 0, 0, 50, 339, 1, 0, 0, 0, 52, 347, 1, 0, 0, 0, 54, 355, 1, 0, 0, 0, 56, 360, 1, 0, 0, 0, 58, 364, 1, 0, 0, 0, 60, 368, 1, 0, 0, 0, 62, 373, 1, 0, 0, 0, 64, 375, 1, 0, 0, 0, 66, 378, 1, 0, 0, 0, 68, 387, 1, 0, 0, 0, 70, 395, 1, 0, 0, 0, 72, 398, 1, 0, 0, 0, 74, 401, 1, 0, 0, 0, 76, 418, 1, 0, 0, 0, 78, 420, 1, 0, 0, 0, 80, 426, 1, 0, 0, 0, 82, 430, 1, 0, 0, 0, 84, 433, 1, 0, 0, 0, 86, 441, 1, 0, 0, 0, 88, 445, 1, 0, 0, 0, 90, 448, 1, 0, 0, 0, 92, 452, 1, 0, 0, 0, 94, 455, 1, 0, 0, 0, 96, 472, 1, 0, 0, 0, 98, 477, 1, 0, 0, 0, 100, 481, 1, 0, 0, 0, 102, 484, 1, 0, 0, 0, 104, 497, 1, 0, 0, 0, 106, 501, 1, 0, 0, 0, 108, 505, 1, 0, 0, 0, 110, 509, 1, 0, 0, 0, 112, 520, 1, 0, 0, 0, 114, 522, 1, 0, 0, 0, 116, 532, 1, 0, 0, 0, 118, 537, 1, 0, 0, 0, 120, 543, 1, 0, 0, 0, 122, 546, 1, 0, 0, 0, 124, 548, 1, 0, 0, 0, 126, 556, 1, 0, 0, 0, 128, 562, 1, 0, 0, 0, 130, 564, 1, 0, 0, 0, 132, 601, 1, 0, 0, 0, 134, 660, 1, 0, 0, 0, 136, 662, 1, 0, 0, 0, 138, 675, 1, 0, 0, 0, 140, 681, 1, 0, 0, 0, 142, 702, 1, 0, 0, 0, 144, 712, 1, 0, 0, 0, 146, 731, 1, 0, 0, 0, 148, 733, 1, 0, 0, 0, 150, 744, 1, 0, 0, 0, 152, 790, 1, 0, 0, 0, 154, 792, 1, 0, 0, 0, 156, 796, 1, 0, 0, 0, 158, 799, 1, 0, 0, 0, 160, 804, 1, 0, 0, 0, 162, 808, 1, 0, 0, 0, 164, 810, 1, 0, 0, 0, 166, 812, 1, 0, 0, 0, 168, 817, 1, 0, 0, 0, 170, 819, 1, 0, 0, 0, 172, 828, 1, 0, 0, 0, 174, 175, 3, 2, 1, 0, 175, 176, 5, 0, 0, 1, 176, 1, 1, 0, 0, 0, 177, 178, 6, 1, -1, 0, 178, 179, 3, 4, 2, 0, 179, 185, 1, 0, 0, 0, 180, 181, 10, 1, 0, 0, 181, 182, 5, 52, 0, 0, 182, 184, 3, 6, 3, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 3, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 196, 3, 22, 11, 0, 189, 196, 3, 12, 6, 0, 190, 196, 3, 92, 46, 0, 191, 192, 4, 2, 1, 0, 192, 196, 3, 24, 12, 0, 193, 194, 4, 2, 2, 0, 194, 196, 3, 88, 44, 0, 195, 188, 1, 0, 0, 0, 195, 189, 1, 0, 0, 0, 195, 190, 1, 0, 0, 0, 195, 191, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 5, 1, 0, 0, 0, 197, 225, 3, 40, 20, 0, 198, 225, 3, 8, 4, 0, 199, 225, 3, 70, 35, 0, 200, 225, 3, 64, 32, 0, 201, 225, 3, 42, 21, 0, 202, 225, 3, 66, 33, 0, 203, 225, 3, 72, 36, 0, 204, 225, 3, 74, 37, 0, 205, 225, 3, 78, 39, 0, 206, 225, 3, 80, 40, 0, 207, 225, 3, 94, 47, 0, 208, 225, 3, 82, 41, 0, 209, 225, 3, 166, 83, 0, 210, 225, 3, 102, 51, 0, 211, 225, 3, 114, 57, 0, 212, 225, 3, 100, 50, 0, 213, 225, 3, 104, 52, 0, 214, 215, 4, 3, 3, 0, 215, 225, 3, 118, 59, 0, 216, 217, 4, 3, 4, 0, 217, 225, 3, 116, 58, 0, 218, 219, 4, 3, 5, 0, 219, 225, 3, 120, 60, 0, 220, 221, 4, 3, 6, 0, 221, 225, 3, 130, 65, 0, 222, 223, 4, 3, 7, 0, 223, 225, 3, 122, 61, 0, 224, 197, 1, 0, 0, 0, 224, 198, 1, 0, 0, 0, 224, 199, 1, 0, 0, 0, 224, 200, 1, 0, 0, 0, 224, 201, 1, 0, 0, 0, 224, 202, 1, 0, 0, 0, 224, 203, 1, 0, 0, 0, 224, 204, 1, 0, 0, 0, 224, 205, 1, 0, 0, 0, 224, 206, 1, 0, 0, 0, 224, 207, 1, 0, 0, 0, 224, 208, 1, 0, 0, 0, 224, 209, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 211, 1, 0, 0, 0, 224, 212, 1, 0, 0, 0, 224, 213, 1, 0, 0, 0, 224, 214, 1, 0, 0, 0, 224, 216, 1, 0, 0, 0, 224, 218, 1, 0, 0, 0, 224, 220, 1, 0, 0, 0, 224, 222, 1, 0, 0, 0, 225, 7, 1, 0, 0, 0, 226, 227, 5, 16, 0, 0, 227, 228, 3, 132, 66, 0, 228, 9, 1, 0, 0, 0, 229, 230, 3, 54, 27, 0, 230, 11, 1, 0, 0, 0, 231, 
232, 5, 12, 0, 0, 232, 233, 3, 14, 7, 0, 233, 13, 1, 0, 0, 0, 234, 239, 3, 16, 8, 0, 235, 236, 5, 62, 0, 0, 236, 238, 3, 16, 8, 0, 237, 235, 1, 0, 0, 0, 238, 241, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 15, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 243, 3, 48, 24, 0, 243, 244, 5, 58, 0, 0, 244, 246, 1, 0, 0, 0, 245, 242, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 3, 132, 66, 0, 248, 17, 1, 0, 0, 0, 249, 254, 3, 20, 10, 0, 250, 251, 5, 62, 0, 0, 251, 253, 3, 20, 10, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 19, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 260, 3, 48, 24, 0, 258, 259, 5, 58, 0, 0, 259, 261, 3, 132, 66, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 21, 1, 0, 0, 0, 262, 263, 5, 19, 0, 0, 263, 264, 3, 26, 13, 0, 264, 23, 1, 0, 0, 0, 265, 266, 5, 20, 0, 0, 266, 267, 3, 26, 13, 0, 267, 25, 1, 0, 0, 0, 268, 273, 3, 28, 14, 0, 269, 270, 5, 62, 0, 0, 270, 272, 3, 28, 14, 0, 271, 269, 1, 0, 0, 0, 272, 275, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 273, 274, 1, 0, 0, 0, 274, 277, 1, 0, 0, 0, 275, 273, 1, 0, 0, 0, 276, 278, 3, 38, 19, 0, 277, 276, 1, 0, 0, 0, 277, 278, 1, 0, 0, 0, 278, 27, 1, 0, 0, 0, 279, 280, 3, 30, 15, 0, 280, 281, 5, 61, 0, 0, 281, 282, 3, 34, 17, 0, 282, 289, 1, 0, 0, 0, 283, 284, 3, 34, 17, 0, 284, 285, 5, 60, 0, 0, 285, 286, 3, 32, 16, 0, 286, 289, 1, 0, 0, 0, 287, 289, 3, 36, 18, 0, 288, 279, 1, 0, 0, 0, 288, 283, 1, 0, 0, 0, 288, 287, 1, 0, 0, 0, 289, 29, 1, 0, 0, 0, 290, 291, 5, 107, 0, 0, 291, 31, 1, 0, 0, 0, 292, 293, 5, 107, 0, 0, 293, 33, 1, 0, 0, 0, 294, 295, 5, 107, 0, 0, 295, 35, 1, 0, 0, 0, 296, 297, 7, 0, 0, 0, 297, 37, 1, 0, 0, 0, 298, 299, 5, 106, 0, 0, 299, 304, 5, 107, 0, 0, 300, 301, 5, 62, 0, 0, 301, 303, 5, 107, 0, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 39, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 5, 9, 0, 0, 308, 309, 3, 14, 7, 0, 309, 41, 1, 0, 0, 0, 310, 312, 5, 15, 0, 0, 311, 313, 3, 44, 22, 0, 312, 311, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 316, 1, 0, 0, 0, 314, 315, 5, 59, 0, 0, 315, 317, 3, 14, 7, 0, 316, 314, 1, 0, 0, 0, 316, 317, 1, 0, 0, 0, 317, 43, 1, 0, 0, 0, 318, 323, 3, 46, 23, 0, 319, 320, 5, 62, 0, 0, 320, 322, 3, 46, 23, 0, 321, 319, 1, 0, 0, 0, 322, 325, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324, 45, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 326, 329, 3, 16, 8, 0, 327, 328, 5, 16, 0, 0, 328, 330, 3, 132, 66, 0, 329, 327, 1, 0, 0, 0, 329, 330, 1, 0, 0, 0, 330, 47, 1, 0, 0, 0, 331, 336, 3, 62, 31, 0, 332, 333, 5, 64, 0, 0, 333, 335, 3, 62, 31, 0, 334, 332, 1, 0, 0, 0, 335, 338, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 337, 49, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 339, 344, 3, 56, 28, 0, 340, 341, 5, 64, 0, 0, 341, 343, 3, 56, 28, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 51, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 352, 3, 50, 25, 0, 348, 349, 5, 62, 0, 0, 349, 351, 3, 50, 25, 0, 350, 348, 1, 0, 0, 0, 351, 354, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 53, 1, 0, 0, 0, 354, 352, 1, 0, 0, 0, 355, 356, 7, 1, 0, 0, 356, 55, 1, 0, 0, 0, 357, 361, 5, 128, 0, 0, 358, 361, 3, 58, 29, 0, 359, 361, 3, 60, 30, 0, 360, 357, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 360, 359, 1, 0, 0, 0, 361, 57, 1, 0, 0, 0, 362, 365, 5, 76, 0, 0, 363, 365, 5, 95, 0, 0, 364, 362, 1, 0, 0, 0, 364, 363, 1, 0, 0, 0, 365, 59, 1, 0, 0, 0, 366, 369, 5, 94, 0, 0, 367, 369, 5, 96, 0, 0, 368, 366, 1, 0, 0, 0, 368, 367, 1, 0, 0, 0, 369, 
61, 1, 0, 0, 0, 370, 374, 3, 54, 27, 0, 371, 374, 3, 58, 29, 0, 372, 374, 3, 60, 30, 0, 373, 370, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 373, 372, 1, 0, 0, 0, 374, 63, 1, 0, 0, 0, 375, 376, 5, 11, 0, 0, 376, 377, 3, 152, 76, 0, 377, 65, 1, 0, 0, 0, 378, 379, 5, 14, 0, 0, 379, 384, 3, 68, 34, 0, 380, 381, 5, 62, 0, 0, 381, 383, 3, 68, 34, 0, 382, 380, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 67, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 387, 389, 3, 132, 66, 0, 388, 390, 7, 2, 0, 0, 389, 388, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 392, 5, 73, 0, 0, 392, 394, 7, 3, 0, 0, 393, 391, 1, 0, 0, 0, 393, 394, 1, 0, 0, 0, 394, 69, 1, 0, 0, 0, 395, 396, 5, 30, 0, 0, 396, 397, 3, 52, 26, 0, 397, 71, 1, 0, 0, 0, 398, 399, 5, 29, 0, 0, 399, 400, 3, 52, 26, 0, 400, 73, 1, 0, 0, 0, 401, 402, 5, 32, 0, 0, 402, 407, 3, 76, 38, 0, 403, 404, 5, 62, 0, 0, 404, 406, 3, 76, 38, 0, 405, 403, 1, 0, 0, 0, 406, 409, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 75, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 410, 411, 3, 50, 25, 0, 411, 412, 5, 132, 0, 0, 412, 413, 3, 50, 25, 0, 413, 419, 1, 0, 0, 0, 414, 415, 3, 50, 25, 0, 415, 416, 5, 58, 0, 0, 416, 417, 3, 50, 25, 0, 417, 419, 1, 0, 0, 0, 418, 410, 1, 0, 0, 0, 418, 414, 1, 0, 0, 0, 419, 77, 1, 0, 0, 0, 420, 421, 5, 8, 0, 0, 421, 422, 3, 142, 71, 0, 422, 424, 3, 162, 81, 0, 423, 425, 3, 84, 42, 0, 424, 423, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 79, 1, 0, 0, 0, 426, 427, 5, 10, 0, 0, 427, 428, 3, 142, 71, 0, 428, 429, 3, 162, 81, 0, 429, 81, 1, 0, 0, 0, 430, 431, 5, 28, 0, 0, 431, 432, 3, 48, 24, 0, 432, 83, 1, 0, 0, 0, 433, 438, 3, 86, 43, 0, 434, 435, 5, 62, 0, 0, 435, 437, 3, 86, 43, 0, 436, 434, 1, 0, 0, 0, 437, 440, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 438, 439, 1, 0, 0, 0, 439, 85, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 441, 442, 3, 54, 27, 0, 442, 443, 5, 58, 0, 0, 443, 444, 3, 152, 76, 0, 444, 87, 1, 0, 0, 0, 445, 446, 5, 6, 0, 0, 446, 447, 3, 90, 45, 0, 447, 89, 1, 0, 0, 0, 448, 449, 5, 99, 0, 0, 449, 450, 3, 2, 1, 0, 450, 451, 5, 100, 0, 0, 451, 91, 1, 0, 0, 0, 452, 453, 5, 33, 0, 0, 453, 454, 5, 136, 0, 0, 454, 93, 1, 0, 0, 0, 455, 456, 5, 5, 0, 0, 456, 459, 3, 96, 48, 0, 457, 458, 5, 74, 0, 0, 458, 460, 3, 50, 25, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 470, 1, 0, 0, 0, 461, 462, 5, 79, 0, 0, 462, 467, 3, 98, 49, 0, 463, 464, 5, 62, 0, 0, 464, 466, 3, 98, 49, 0, 465, 463, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 471, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 470, 461, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 95, 1, 0, 0, 0, 472, 473, 7, 4, 0, 0, 473, 97, 1, 0, 0, 0, 474, 475, 3, 50, 25, 0, 475, 476, 5, 58, 0, 0, 476, 478, 1, 0, 0, 0, 477, 474, 1, 0, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 3, 50, 25, 0, 480, 99, 1, 0, 0, 0, 481, 482, 5, 13, 0, 0, 482, 483, 3, 152, 76, 0, 483, 101, 1, 0, 0, 0, 484, 485, 5, 4, 0, 0, 485, 488, 3, 48, 24, 0, 486, 487, 5, 74, 0, 0, 487, 489, 3, 48, 24, 0, 488, 486, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 495, 1, 0, 0, 0, 490, 491, 5, 132, 0, 0, 491, 492, 3, 48, 24, 0, 492, 493, 5, 62, 0, 0, 493, 494, 3, 48, 24, 0, 494, 496, 1, 0, 0, 0, 495, 490, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 103, 1, 0, 0, 0, 497, 498, 5, 21, 0, 0, 498, 499, 3, 106, 53, 0, 499, 105, 1, 0, 0, 0, 500, 502, 3, 108, 54, 0, 501, 500, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 501, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 107, 1, 0, 0, 0, 505, 506, 5, 99, 0, 0, 506, 507, 3, 110, 55, 0, 507, 508, 5, 100, 0, 0, 508, 109, 1, 0, 0, 0, 509, 510, 6, 55, -1, 
0, 510, 511, 3, 112, 56, 0, 511, 517, 1, 0, 0, 0, 512, 513, 10, 1, 0, 0, 513, 514, 5, 52, 0, 0, 514, 516, 3, 112, 56, 0, 515, 512, 1, 0, 0, 0, 516, 519, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 111, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 520, 521, 3, 6, 3, 0, 521, 113, 1, 0, 0, 0, 522, 526, 5, 7, 0, 0, 523, 524, 3, 48, 24, 0, 524, 525, 5, 58, 0, 0, 525, 527, 1, 0, 0, 0, 526, 523, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 3, 142, 71, 0, 529, 530, 5, 79, 0, 0, 530, 531, 3, 62, 31, 0, 531, 115, 1, 0, 0, 0, 532, 533, 5, 27, 0, 0, 533, 534, 3, 28, 14, 0, 534, 535, 5, 74, 0, 0, 535, 536, 3, 52, 26, 0, 536, 117, 1, 0, 0, 0, 537, 538, 5, 17, 0, 0, 538, 541, 3, 44, 22, 0, 539, 540, 5, 59, 0, 0, 540, 542, 3, 14, 7, 0, 541, 539, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 119, 1, 0, 0, 0, 543, 544, 5, 31, 0, 0, 544, 545, 3, 52, 26, 0, 545, 121, 1, 0, 0, 0, 546, 547, 5, 22, 0, 0, 547, 123, 1, 0, 0, 0, 548, 553, 3, 126, 63, 0, 549, 550, 5, 62, 0, 0, 550, 552, 3, 126, 63, 0, 551, 549, 1, 0, 0, 0, 552, 555, 1, 0, 0, 0, 553, 551, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 554, 125, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 556, 557, 3, 54, 27, 0, 557, 558, 5, 58, 0, 0, 558, 559, 3, 128, 64, 0, 559, 127, 1, 0, 0, 0, 560, 563, 3, 152, 76, 0, 561, 563, 3, 54, 27, 0, 562, 560, 1, 0, 0, 0, 562, 561, 1, 0, 0, 0, 563, 129, 1, 0, 0, 0, 564, 565, 5, 18, 0, 0, 565, 566, 3, 152, 76, 0, 566, 567, 5, 74, 0, 0, 567, 570, 3, 18, 9, 0, 568, 569, 5, 79, 0, 0, 569, 571, 3, 124, 62, 0, 570, 568, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 131, 1, 0, 0, 0, 572, 573, 6, 66, -1, 0, 573, 574, 5, 71, 0, 0, 574, 602, 3, 132, 66, 8, 575, 602, 3, 138, 69, 0, 576, 602, 3, 134, 67, 0, 577, 579, 3, 138, 69, 0, 578, 580, 5, 71, 0, 0, 579, 578, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 5, 67, 0, 0, 582, 583, 5, 99, 0, 0, 583, 588, 3, 138, 69, 0, 584, 585, 5, 62, 0, 0, 585, 587, 3, 138, 69, 0, 586, 584, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 591, 1, 0, 0, 0, 590, 588, 1, 0, 0, 0, 591, 592, 5, 100, 0, 0, 592, 602, 1, 0, 0, 0, 593, 594, 3, 138, 69, 0, 594, 596, 5, 68, 0, 0, 595, 597, 5, 71, 0, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 5, 72, 0, 0, 599, 602, 1, 0, 0, 0, 600, 602, 3, 136, 68, 0, 601, 572, 1, 0, 0, 0, 601, 575, 1, 0, 0, 0, 601, 576, 1, 0, 0, 0, 601, 577, 1, 0, 0, 0, 601, 593, 1, 0, 0, 0, 601, 600, 1, 0, 0, 0, 602, 611, 1, 0, 0, 0, 603, 604, 10, 5, 0, 0, 604, 605, 5, 56, 0, 0, 605, 610, 3, 132, 66, 6, 606, 607, 10, 4, 0, 0, 607, 608, 5, 75, 0, 0, 608, 610, 3, 132, 66, 5, 609, 603, 1, 0, 0, 0, 609, 606, 1, 0, 0, 0, 610, 613, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 133, 1, 0, 0, 0, 613, 611, 1, 0, 0, 0, 614, 616, 3, 138, 69, 0, 615, 617, 5, 71, 0, 0, 616, 615, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 5, 70, 0, 0, 619, 620, 3, 162, 81, 0, 620, 661, 1, 0, 0, 0, 621, 623, 3, 138, 69, 0, 622, 624, 5, 71, 0, 0, 623, 622, 1, 0, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 5, 77, 0, 0, 626, 627, 3, 162, 81, 0, 627, 661, 1, 0, 0, 0, 628, 630, 3, 138, 69, 0, 629, 631, 5, 71, 0, 0, 630, 629, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 633, 5, 70, 0, 0, 633, 634, 5, 99, 0, 0, 634, 639, 3, 162, 81, 0, 635, 636, 5, 62, 0, 0, 636, 638, 3, 162, 81, 0, 637, 635, 1, 0, 0, 0, 638, 641, 1, 0, 0, 0, 639, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 642, 1, 0, 0, 0, 641, 639, 1, 0, 0, 0, 642, 643, 5, 100, 0, 0, 643, 661, 1, 0, 0, 0, 644, 646, 3, 138, 69, 0, 
645, 647, 5, 71, 0, 0, 646, 645, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 648, 1, 0, 0, 0, 648, 649, 5, 77, 0, 0, 649, 650, 5, 99, 0, 0, 650, 655, 3, 162, 81, 0, 651, 652, 5, 62, 0, 0, 652, 654, 3, 162, 81, 0, 653, 651, 1, 0, 0, 0, 654, 657, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 658, 1, 0, 0, 0, 657, 655, 1, 0, 0, 0, 658, 659, 5, 100, 0, 0, 659, 661, 1, 0, 0, 0, 660, 614, 1, 0, 0, 0, 660, 621, 1, 0, 0, 0, 660, 628, 1, 0, 0, 0, 660, 644, 1, 0, 0, 0, 661, 135, 1, 0, 0, 0, 662, 665, 3, 48, 24, 0, 663, 664, 5, 60, 0, 0, 664, 666, 3, 10, 5, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 668, 5, 61, 0, 0, 668, 669, 3, 152, 76, 0, 669, 137, 1, 0, 0, 0, 670, 676, 3, 140, 70, 0, 671, 672, 3, 140, 70, 0, 672, 673, 3, 164, 82, 0, 673, 674, 3, 140, 70, 0, 674, 676, 1, 0, 0, 0, 675, 670, 1, 0, 0, 0, 675, 671, 1, 0, 0, 0, 676, 139, 1, 0, 0, 0, 677, 678, 6, 70, -1, 0, 678, 682, 3, 142, 71, 0, 679, 680, 7, 5, 0, 0, 680, 682, 3, 140, 70, 3, 681, 677, 1, 0, 0, 0, 681, 679, 1, 0, 0, 0, 682, 691, 1, 0, 0, 0, 683, 684, 10, 2, 0, 0, 684, 685, 7, 6, 0, 0, 685, 690, 3, 140, 70, 3, 686, 687, 10, 1, 0, 0, 687, 688, 7, 5, 0, 0, 688, 690, 3, 140, 70, 2, 689, 683, 1, 0, 0, 0, 689, 686, 1, 0, 0, 0, 690, 693, 1, 0, 0, 0, 691, 689, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 141, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 694, 695, 6, 71, -1, 0, 695, 703, 3, 152, 76, 0, 696, 703, 3, 48, 24, 0, 697, 703, 3, 144, 72, 0, 698, 699, 5, 99, 0, 0, 699, 700, 3, 132, 66, 0, 700, 701, 5, 100, 0, 0, 701, 703, 1, 0, 0, 0, 702, 694, 1, 0, 0, 0, 702, 696, 1, 0, 0, 0, 702, 697, 1, 0, 0, 0, 702, 698, 1, 0, 0, 0, 703, 709, 1, 0, 0, 0, 704, 705, 10, 1, 0, 0, 705, 706, 5, 60, 0, 0, 706, 708, 3, 10, 5, 0, 707, 704, 1, 0, 0, 0, 708, 711, 1, 0, 0, 0, 709, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 143, 1, 0, 0, 0, 711, 709, 1, 0, 0, 0, 712, 713, 3, 146, 73, 0, 713, 727, 5, 99, 0, 0, 714, 728, 5, 89, 0, 0, 715, 720, 3, 132, 66, 0, 716, 717, 5, 62, 0, 0, 717, 719, 3, 132, 66, 0, 718, 716, 1, 0, 0, 0, 719, 722, 1, 0, 0, 0, 720, 718, 1, 0, 0, 0, 720, 721, 1, 0, 0, 0, 721, 725, 1, 0, 0, 0, 722, 720, 1, 0, 0, 0, 723, 724, 5, 62, 0, 0, 724, 726, 3, 148, 74, 0, 725, 723, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 728, 1, 0, 0, 0, 727, 714, 1, 0, 0, 0, 727, 715, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 730, 5, 100, 0, 0, 730, 145, 1, 0, 0, 0, 731, 732, 3, 62, 31, 0, 732, 147, 1, 0, 0, 0, 733, 734, 5, 92, 0, 0, 734, 739, 3, 150, 75, 0, 735, 736, 5, 62, 0, 0, 736, 738, 3, 150, 75, 0, 737, 735, 1, 0, 0, 0, 738, 741, 1, 0, 0, 0, 739, 737, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 742, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 742, 743, 5, 93, 0, 0, 743, 149, 1, 0, 0, 0, 744, 745, 3, 162, 81, 0, 745, 746, 5, 61, 0, 0, 746, 747, 3, 152, 76, 0, 747, 151, 1, 0, 0, 0, 748, 791, 5, 72, 0, 0, 749, 750, 3, 160, 80, 0, 750, 751, 5, 101, 0, 0, 751, 791, 1, 0, 0, 0, 752, 791, 3, 158, 79, 0, 753, 791, 3, 160, 80, 0, 754, 791, 3, 154, 77, 0, 755, 791, 3, 58, 29, 0, 756, 791, 3, 162, 81, 0, 757, 758, 5, 97, 0, 0, 758, 763, 3, 156, 78, 0, 759, 760, 5, 62, 0, 0, 760, 762, 3, 156, 78, 0, 761, 759, 1, 0, 0, 0, 762, 765, 1, 0, 0, 0, 763, 761, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 766, 1, 0, 0, 0, 765, 763, 1, 0, 0, 0, 766, 767, 5, 98, 0, 0, 767, 791, 1, 0, 0, 0, 768, 769, 5, 97, 0, 0, 769, 774, 3, 154, 77, 0, 770, 771, 5, 62, 0, 0, 771, 773, 3, 154, 77, 0, 772, 770, 1, 0, 0, 0, 773, 776, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 777, 1, 0, 0, 0, 776, 774, 1, 0, 0, 0, 777, 778, 5, 98, 0, 0, 778, 791, 1, 0, 0, 0, 
779, 780, 5, 97, 0, 0, 780, 785, 3, 162, 81, 0, 781, 782, 5, 62, 0, 0, 782, 784, 3, 162, 81, 0, 783, 781, 1, 0, 0, 0, 784, 787, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 788, 1, 0, 0, 0, 787, 785, 1, 0, 0, 0, 788, 789, 5, 98, 0, 0, 789, 791, 1, 0, 0, 0, 790, 748, 1, 0, 0, 0, 790, 749, 1, 0, 0, 0, 790, 752, 1, 0, 0, 0, 790, 753, 1, 0, 0, 0, 790, 754, 1, 0, 0, 0, 790, 755, 1, 0, 0, 0, 790, 756, 1, 0, 0, 0, 790, 757, 1, 0, 0, 0, 790, 768, 1, 0, 0, 0, 790, 779, 1, 0, 0, 0, 791, 153, 1, 0, 0, 0, 792, 793, 7, 7, 0, 0, 793, 155, 1, 0, 0, 0, 794, 797, 3, 158, 79, 0, 795, 797, 3, 160, 80, 0, 796, 794, 1, 0, 0, 0, 796, 795, 1, 0, 0, 0, 797, 157, 1, 0, 0, 0, 798, 800, 7, 5, 0, 0, 799, 798, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 802, 5, 55, 0, 0, 802, 159, 1, 0, 0, 0, 803, 805, 7, 5, 0, 0, 804, 803, 1, 0, 0, 0, 804, 805, 1, 0, 0, 0, 805, 806, 1, 0, 0, 0, 806, 807, 5, 54, 0, 0, 807, 161, 1, 0, 0, 0, 808, 809, 5, 53, 0, 0, 809, 163, 1, 0, 0, 0, 810, 811, 7, 8, 0, 0, 811, 165, 1, 0, 0, 0, 812, 813, 7, 9, 0, 0, 813, 814, 5, 114, 0, 0, 814, 815, 3, 168, 84, 0, 815, 816, 3, 170, 85, 0, 816, 167, 1, 0, 0, 0, 817, 818, 3, 28, 14, 0, 818, 169, 1, 0, 0, 0, 819, 820, 5, 74, 0, 0, 820, 825, 3, 172, 86, 0, 821, 822, 5, 62, 0, 0, 822, 824, 3, 172, 86, 0, 823, 821, 1, 0, 0, 0, 824, 827, 1, 0, 0, 0, 825, 823, 1, 0, 0, 0, 825, 826, 1, 0, 0, 0, 826, 171, 1, 0, 0, 0, 827, 825, 1, 0, 0, 0, 828, 829, 3, 138, 69, 0, 829, 173, 1, 0, 0, 0, 74, 185, 195, 224, 239, 245, 254, 260, 273, 277, 288, 304, 312, 316, 323, 329, 336, 344, 352, 360, 364, 368, 373, 384, 389, 393, 407, 418, 424, 438, 459, 467, 470, 477, 488, 495, 503, 517, 526, 541, 553, 562, 570, 579, 588, 596, 601, 609, 611, 616, 623, 630, 639, 646, 655, 660, 665, 675, 681, 689, 691, 702, 709, 720, 725, 727, 739, 763, 774, 785, 790, 796, 799, 804, 825]
\ No newline at end of file
+[4, 1, 139, 819, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 180, 8, 1, 10, 1, 12, 1, 183, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 192, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 221, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 5, 7, 234, 8, 7, 10, 7, 12, 7, 237, 9, 7, 1, 8, 1, 8, 1, 8, 3, 8, 242, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 249, 8, 9, 10, 9, 12, 9, 252, 9, 9, 1, 10, 1, 10, 1, 10, 3, 10, 257, 8, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 268, 8, 13, 10, 13, 12, 13, 271, 9, 13, 1, 13, 3, 13, 274, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 285, 8, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 299, 8, 19, 10, 19, 12, 19, 302, 9, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 309, 8, 21, 1, 21, 1, 21, 3, 21, 313, 8, 21, 1, 22, 1, 22, 1, 22, 5, 22, 318, 8, 22, 10, 22, 12, 22, 321, 9, 22, 1, 23, 1, 23, 1, 23, 3, 23, 326, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 331, 8, 24, 10, 24, 12, 24, 334, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 339, 8, 25, 10, 25, 12, 25, 342, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 347, 8, 26, 10, 26, 12, 26, 350, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 3, 28, 357, 8, 28, 1, 29, 1, 29, 3, 29, 361, 8, 29, 1, 30, 1, 30, 3, 30, 365, 8, 30, 1, 31, 1, 31, 1, 31, 3, 31, 370, 8, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 379, 8, 33, 10, 33, 12, 33, 382, 9, 33, 1, 34, 1, 34, 3, 34, 386, 8, 34, 1, 34, 1, 34, 3, 34, 390, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 402, 8, 37, 10, 37, 12, 37, 405, 9, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 415, 8, 38, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 421, 8, 39, 1, 40, 1, 40, 1, 40, 5, 40, 426, 8, 40, 10, 40, 12, 40, 429, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 3, 42, 437, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 3, 48, 460, 8, 48, 1, 48, 1, 48, 1, 48, 1, 48, 5, 48, 466, 8, 48, 10, 48, 12, 48, 469, 9, 48, 3, 48, 471, 8, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 3, 50, 478, 8, 50, 1, 50, 1, 50, 1, 51, 1, 
51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 489, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 496, 8, 52, 1, 53, 1, 53, 1, 53, 1, 54, 4, 54, 502, 8, 54, 11, 54, 12, 54, 503, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 516, 8, 56, 10, 56, 12, 56, 519, 9, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 527, 8, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 3, 60, 541, 8, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 3, 63, 552, 8, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 3, 64, 566, 8, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 573, 8, 64, 10, 64, 12, 64, 576, 9, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 3, 64, 583, 8, 64, 1, 64, 1, 64, 1, 64, 3, 64, 588, 8, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 596, 8, 64, 10, 64, 12, 64, 599, 9, 64, 1, 65, 1, 65, 3, 65, 603, 8, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 610, 8, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 617, 8, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 5, 65, 624, 8, 65, 10, 65, 12, 65, 627, 9, 65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 633, 8, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 5, 65, 640, 8, 65, 10, 65, 12, 65, 643, 9, 65, 1, 65, 1, 65, 3, 65, 647, 8, 65, 1, 66, 1, 66, 1, 66, 3, 66, 652, 8, 66, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 3, 67, 662, 8, 67, 1, 68, 1, 68, 1, 68, 1, 68, 3, 68, 668, 8, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 5, 68, 676, 8, 68, 10, 68, 12, 68, 679, 9, 68, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 3, 69, 689, 8, 69, 1, 69, 1, 69, 1, 69, 5, 69, 694, 8, 69, 10, 69, 12, 69, 697, 9, 69, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 5, 70, 705, 8, 70, 10, 70, 12, 70, 708, 9, 70, 1, 70, 1, 70, 3, 70, 712, 8, 70, 3, 70, 714, 8, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 5, 72, 724, 8, 72, 10, 72, 12, 72, 727, 9, 72, 3, 72, 729, 8, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 5, 74, 750, 8, 74, 10, 74, 12, 74, 753, 9, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 5, 74, 761, 8, 74, 10, 74, 12, 74, 764, 9, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 5, 74, 772, 8, 74, 10, 74, 12, 74, 775, 9, 74, 1, 74, 1, 74, 3, 74, 779, 8, 74, 1, 75, 1, 75, 1, 76, 1, 76, 3, 76, 785, 8, 76, 1, 77, 3, 77, 788, 8, 77, 1, 77, 1, 77, 1, 78, 3, 78, 793, 8, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 5, 83, 812, 8, 83, 10, 83, 12, 83, 815, 9, 83, 1, 84, 1, 84, 1, 84, 0, 5, 2, 112, 128, 136, 138, 85, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 0, 10, 2, 0, 53, 53, 107, 107, 1, 0, 101, 102, 2, 0, 57, 57, 63, 63, 2, 0, 66, 66, 69, 69, 2, 0, 38, 38, 53, 53, 1, 0, 87, 88, 1, 0, 89, 91, 2, 0, 65, 65, 78, 78, 2, 0, 80, 80, 82, 86, 2, 0, 23, 23, 25, 26, 850, 0, 170, 1, 0, 0, 0, 2, 173, 1, 0, 0, 0, 4, 191, 1, 0, 0, 0, 6, 220, 1, 0, 0, 0, 8, 222, 1, 0, 0, 0, 10, 225, 1, 0, 0, 0, 12, 227, 1, 0, 0, 0, 14, 230, 1, 0, 0, 0, 16, 241, 1, 0, 0, 0, 18, 245, 1, 0, 0, 0, 20, 253, 1, 0, 0, 0, 22, 258, 1, 0, 0, 0, 24, 261, 1, 0, 0, 0, 26, 264, 1, 0, 0, 0, 28, 
284, 1, 0, 0, 0, 30, 286, 1, 0, 0, 0, 32, 288, 1, 0, 0, 0, 34, 290, 1, 0, 0, 0, 36, 292, 1, 0, 0, 0, 38, 294, 1, 0, 0, 0, 40, 303, 1, 0, 0, 0, 42, 306, 1, 0, 0, 0, 44, 314, 1, 0, 0, 0, 46, 322, 1, 0, 0, 0, 48, 327, 1, 0, 0, 0, 50, 335, 1, 0, 0, 0, 52, 343, 1, 0, 0, 0, 54, 351, 1, 0, 0, 0, 56, 356, 1, 0, 0, 0, 58, 360, 1, 0, 0, 0, 60, 364, 1, 0, 0, 0, 62, 369, 1, 0, 0, 0, 64, 371, 1, 0, 0, 0, 66, 374, 1, 0, 0, 0, 68, 383, 1, 0, 0, 0, 70, 391, 1, 0, 0, 0, 72, 394, 1, 0, 0, 0, 74, 397, 1, 0, 0, 0, 76, 414, 1, 0, 0, 0, 78, 416, 1, 0, 0, 0, 80, 422, 1, 0, 0, 0, 82, 430, 1, 0, 0, 0, 84, 436, 1, 0, 0, 0, 86, 438, 1, 0, 0, 0, 88, 442, 1, 0, 0, 0, 90, 445, 1, 0, 0, 0, 92, 448, 1, 0, 0, 0, 94, 452, 1, 0, 0, 0, 96, 455, 1, 0, 0, 0, 98, 472, 1, 0, 0, 0, 100, 477, 1, 0, 0, 0, 102, 481, 1, 0, 0, 0, 104, 484, 1, 0, 0, 0, 106, 497, 1, 0, 0, 0, 108, 501, 1, 0, 0, 0, 110, 505, 1, 0, 0, 0, 112, 509, 1, 0, 0, 0, 114, 520, 1, 0, 0, 0, 116, 522, 1, 0, 0, 0, 118, 531, 1, 0, 0, 0, 120, 536, 1, 0, 0, 0, 122, 542, 1, 0, 0, 0, 124, 545, 1, 0, 0, 0, 126, 547, 1, 0, 0, 0, 128, 587, 1, 0, 0, 0, 130, 646, 1, 0, 0, 0, 132, 648, 1, 0, 0, 0, 134, 661, 1, 0, 0, 0, 136, 667, 1, 0, 0, 0, 138, 688, 1, 0, 0, 0, 140, 698, 1, 0, 0, 0, 142, 717, 1, 0, 0, 0, 144, 719, 1, 0, 0, 0, 146, 732, 1, 0, 0, 0, 148, 778, 1, 0, 0, 0, 150, 780, 1, 0, 0, 0, 152, 784, 1, 0, 0, 0, 154, 787, 1, 0, 0, 0, 156, 792, 1, 0, 0, 0, 158, 796, 1, 0, 0, 0, 160, 798, 1, 0, 0, 0, 162, 800, 1, 0, 0, 0, 164, 805, 1, 0, 0, 0, 166, 807, 1, 0, 0, 0, 168, 816, 1, 0, 0, 0, 170, 171, 3, 2, 1, 0, 171, 172, 5, 0, 0, 1, 172, 1, 1, 0, 0, 0, 173, 174, 6, 1, -1, 0, 174, 175, 3, 4, 2, 0, 175, 181, 1, 0, 0, 0, 176, 177, 10, 1, 0, 0, 177, 178, 5, 52, 0, 0, 178, 180, 3, 6, 3, 0, 179, 176, 1, 0, 0, 0, 180, 183, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 3, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 184, 192, 3, 22, 11, 0, 185, 192, 3, 12, 6, 0, 186, 192, 3, 94, 47, 0, 187, 188, 4, 2, 1, 0, 188, 192, 3, 24, 12, 0, 189, 190, 4, 2, 2, 0, 190, 192, 3, 90, 45, 0, 191, 184, 1, 0, 0, 0, 191, 185, 1, 0, 0, 0, 191, 186, 1, 0, 0, 0, 191, 187, 1, 0, 0, 0, 191, 189, 1, 0, 0, 0, 192, 5, 1, 0, 0, 0, 193, 221, 3, 40, 20, 0, 194, 221, 3, 8, 4, 0, 195, 221, 3, 70, 35, 0, 196, 221, 3, 64, 32, 0, 197, 221, 3, 42, 21, 0, 198, 221, 3, 66, 33, 0, 199, 221, 3, 72, 36, 0, 200, 221, 3, 74, 37, 0, 201, 221, 3, 78, 39, 0, 202, 221, 3, 86, 43, 0, 203, 221, 3, 96, 48, 0, 204, 221, 3, 88, 44, 0, 205, 221, 3, 162, 81, 0, 206, 221, 3, 104, 52, 0, 207, 221, 3, 116, 58, 0, 208, 221, 3, 102, 51, 0, 209, 221, 3, 106, 53, 0, 210, 211, 4, 3, 3, 0, 211, 221, 3, 120, 60, 0, 212, 213, 4, 3, 4, 0, 213, 221, 3, 118, 59, 0, 214, 215, 4, 3, 5, 0, 215, 221, 3, 122, 61, 0, 216, 217, 4, 3, 6, 0, 217, 221, 3, 126, 63, 0, 218, 219, 4, 3, 7, 0, 219, 221, 3, 124, 62, 0, 220, 193, 1, 0, 0, 0, 220, 194, 1, 0, 0, 0, 220, 195, 1, 0, 0, 0, 220, 196, 1, 0, 0, 0, 220, 197, 1, 0, 0, 0, 220, 198, 1, 0, 0, 0, 220, 199, 1, 0, 0, 0, 220, 200, 1, 0, 0, 0, 220, 201, 1, 0, 0, 0, 220, 202, 1, 0, 0, 0, 220, 203, 1, 0, 0, 0, 220, 204, 1, 0, 0, 0, 220, 205, 1, 0, 0, 0, 220, 206, 1, 0, 0, 0, 220, 207, 1, 0, 0, 0, 220, 208, 1, 0, 0, 0, 220, 209, 1, 0, 0, 0, 220, 210, 1, 0, 0, 0, 220, 212, 1, 0, 0, 0, 220, 214, 1, 0, 0, 0, 220, 216, 1, 0, 0, 0, 220, 218, 1, 0, 0, 0, 221, 7, 1, 0, 0, 0, 222, 223, 5, 16, 0, 0, 223, 224, 3, 128, 64, 0, 224, 9, 1, 0, 0, 0, 225, 226, 3, 54, 27, 0, 226, 11, 1, 0, 0, 0, 227, 228, 5, 12, 0, 0, 228, 229, 3, 14, 7, 0, 229, 13, 1, 0, 0, 0, 230, 235, 3, 16, 8, 0, 231, 232, 5, 62, 0, 0, 232, 234, 3, 16, 8, 0, 233, 231, 
1, 0, 0, 0, 234, 237, 1, 0, 0, 0, 235, 233, 1, 0, 0, 0, 235, 236, 1, 0, 0, 0, 236, 15, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 238, 239, 3, 48, 24, 0, 239, 240, 5, 58, 0, 0, 240, 242, 1, 0, 0, 0, 241, 238, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 3, 128, 64, 0, 244, 17, 1, 0, 0, 0, 245, 250, 3, 20, 10, 0, 246, 247, 5, 62, 0, 0, 247, 249, 3, 20, 10, 0, 248, 246, 1, 0, 0, 0, 249, 252, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 19, 1, 0, 0, 0, 252, 250, 1, 0, 0, 0, 253, 256, 3, 48, 24, 0, 254, 255, 5, 58, 0, 0, 255, 257, 3, 128, 64, 0, 256, 254, 1, 0, 0, 0, 256, 257, 1, 0, 0, 0, 257, 21, 1, 0, 0, 0, 258, 259, 5, 19, 0, 0, 259, 260, 3, 26, 13, 0, 260, 23, 1, 0, 0, 0, 261, 262, 5, 20, 0, 0, 262, 263, 3, 26, 13, 0, 263, 25, 1, 0, 0, 0, 264, 269, 3, 28, 14, 0, 265, 266, 5, 62, 0, 0, 266, 268, 3, 28, 14, 0, 267, 265, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 274, 3, 38, 19, 0, 273, 272, 1, 0, 0, 0, 273, 274, 1, 0, 0, 0, 274, 27, 1, 0, 0, 0, 275, 276, 3, 30, 15, 0, 276, 277, 5, 61, 0, 0, 277, 278, 3, 34, 17, 0, 278, 285, 1, 0, 0, 0, 279, 280, 3, 34, 17, 0, 280, 281, 5, 60, 0, 0, 281, 282, 3, 32, 16, 0, 282, 285, 1, 0, 0, 0, 283, 285, 3, 36, 18, 0, 284, 275, 1, 0, 0, 0, 284, 279, 1, 0, 0, 0, 284, 283, 1, 0, 0, 0, 285, 29, 1, 0, 0, 0, 286, 287, 5, 107, 0, 0, 287, 31, 1, 0, 0, 0, 288, 289, 5, 107, 0, 0, 289, 33, 1, 0, 0, 0, 290, 291, 5, 107, 0, 0, 291, 35, 1, 0, 0, 0, 292, 293, 7, 0, 0, 0, 293, 37, 1, 0, 0, 0, 294, 295, 5, 106, 0, 0, 295, 300, 5, 107, 0, 0, 296, 297, 5, 62, 0, 0, 297, 299, 5, 107, 0, 0, 298, 296, 1, 0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 39, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 304, 5, 9, 0, 0, 304, 305, 3, 14, 7, 0, 305, 41, 1, 0, 0, 0, 306, 308, 5, 15, 0, 0, 307, 309, 3, 44, 22, 0, 308, 307, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 312, 1, 0, 0, 0, 310, 311, 5, 59, 0, 0, 311, 313, 3, 14, 7, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 43, 1, 0, 0, 0, 314, 319, 3, 46, 23, 0, 315, 316, 5, 62, 0, 0, 316, 318, 3, 46, 23, 0, 317, 315, 1, 0, 0, 0, 318, 321, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 45, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 322, 325, 3, 16, 8, 0, 323, 324, 5, 16, 0, 0, 324, 326, 3, 128, 64, 0, 325, 323, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 47, 1, 0, 0, 0, 327, 332, 3, 62, 31, 0, 328, 329, 5, 64, 0, 0, 329, 331, 3, 62, 31, 0, 330, 328, 1, 0, 0, 0, 331, 334, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 49, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 335, 340, 3, 56, 28, 0, 336, 337, 5, 64, 0, 0, 337, 339, 3, 56, 28, 0, 338, 336, 1, 0, 0, 0, 339, 342, 1, 0, 0, 0, 340, 338, 1, 0, 0, 0, 340, 341, 1, 0, 0, 0, 341, 51, 1, 0, 0, 0, 342, 340, 1, 0, 0, 0, 343, 348, 3, 50, 25, 0, 344, 345, 5, 62, 0, 0, 345, 347, 3, 50, 25, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 53, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 352, 7, 1, 0, 0, 352, 55, 1, 0, 0, 0, 353, 357, 5, 128, 0, 0, 354, 357, 3, 58, 29, 0, 355, 357, 3, 60, 30, 0, 356, 353, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 355, 1, 0, 0, 0, 357, 57, 1, 0, 0, 0, 358, 361, 5, 76, 0, 0, 359, 361, 5, 95, 0, 0, 360, 358, 1, 0, 0, 0, 360, 359, 1, 0, 0, 0, 361, 59, 1, 0, 0, 0, 362, 365, 5, 94, 0, 0, 363, 365, 5, 96, 0, 0, 364, 362, 1, 0, 0, 0, 364, 363, 1, 0, 0, 0, 365, 61, 1, 0, 0, 0, 366, 370, 3, 54, 27, 0, 367, 370, 3, 58, 29, 0, 368, 370, 3, 60, 30, 0, 369, 366, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 368, 
1, 0, 0, 0, 370, 63, 1, 0, 0, 0, 371, 372, 5, 11, 0, 0, 372, 373, 3, 148, 74, 0, 373, 65, 1, 0, 0, 0, 374, 375, 5, 14, 0, 0, 375, 380, 3, 68, 34, 0, 376, 377, 5, 62, 0, 0, 377, 379, 3, 68, 34, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 67, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 385, 3, 128, 64, 0, 384, 386, 7, 2, 0, 0, 385, 384, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 389, 1, 0, 0, 0, 387, 388, 5, 73, 0, 0, 388, 390, 7, 3, 0, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 69, 1, 0, 0, 0, 391, 392, 5, 30, 0, 0, 392, 393, 3, 52, 26, 0, 393, 71, 1, 0, 0, 0, 394, 395, 5, 29, 0, 0, 395, 396, 3, 52, 26, 0, 396, 73, 1, 0, 0, 0, 397, 398, 5, 32, 0, 0, 398, 403, 3, 76, 38, 0, 399, 400, 5, 62, 0, 0, 400, 402, 3, 76, 38, 0, 401, 399, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 75, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 406, 407, 3, 50, 25, 0, 407, 408, 5, 132, 0, 0, 408, 409, 3, 50, 25, 0, 409, 415, 1, 0, 0, 0, 410, 411, 3, 50, 25, 0, 411, 412, 5, 58, 0, 0, 412, 413, 3, 50, 25, 0, 413, 415, 1, 0, 0, 0, 414, 406, 1, 0, 0, 0, 414, 410, 1, 0, 0, 0, 415, 77, 1, 0, 0, 0, 416, 417, 5, 8, 0, 0, 417, 418, 3, 138, 69, 0, 418, 420, 3, 158, 79, 0, 419, 421, 3, 80, 40, 0, 420, 419, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 79, 1, 0, 0, 0, 422, 427, 3, 82, 41, 0, 423, 424, 5, 62, 0, 0, 424, 426, 3, 82, 41, 0, 425, 423, 1, 0, 0, 0, 426, 429, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 81, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 430, 431, 3, 54, 27, 0, 431, 432, 5, 58, 0, 0, 432, 433, 3, 148, 74, 0, 433, 83, 1, 0, 0, 0, 434, 435, 5, 79, 0, 0, 435, 437, 3, 144, 72, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 85, 1, 0, 0, 0, 438, 439, 5, 10, 0, 0, 439, 440, 3, 138, 69, 0, 440, 441, 3, 158, 79, 0, 441, 87, 1, 0, 0, 0, 442, 443, 5, 28, 0, 0, 443, 444, 3, 48, 24, 0, 444, 89, 1, 0, 0, 0, 445, 446, 5, 6, 0, 0, 446, 447, 3, 92, 46, 0, 447, 91, 1, 0, 0, 0, 448, 449, 5, 99, 0, 0, 449, 450, 3, 2, 1, 0, 450, 451, 5, 100, 0, 0, 451, 93, 1, 0, 0, 0, 452, 453, 5, 33, 0, 0, 453, 454, 5, 136, 0, 0, 454, 95, 1, 0, 0, 0, 455, 456, 5, 5, 0, 0, 456, 459, 3, 98, 49, 0, 457, 458, 5, 74, 0, 0, 458, 460, 3, 50, 25, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 470, 1, 0, 0, 0, 461, 462, 5, 79, 0, 0, 462, 467, 3, 100, 50, 0, 463, 464, 5, 62, 0, 0, 464, 466, 3, 100, 50, 0, 465, 463, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 471, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 470, 461, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 97, 1, 0, 0, 0, 472, 473, 7, 4, 0, 0, 473, 99, 1, 0, 0, 0, 474, 475, 3, 50, 25, 0, 475, 476, 5, 58, 0, 0, 476, 478, 1, 0, 0, 0, 477, 474, 1, 0, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 3, 50, 25, 0, 480, 101, 1, 0, 0, 0, 481, 482, 5, 13, 0, 0, 482, 483, 3, 148, 74, 0, 483, 103, 1, 0, 0, 0, 484, 485, 5, 4, 0, 0, 485, 488, 3, 48, 24, 0, 486, 487, 5, 74, 0, 0, 487, 489, 3, 48, 24, 0, 488, 486, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 495, 1, 0, 0, 0, 490, 491, 5, 132, 0, 0, 491, 492, 3, 48, 24, 0, 492, 493, 5, 62, 0, 0, 493, 494, 3, 48, 24, 0, 494, 496, 1, 0, 0, 0, 495, 490, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 105, 1, 0, 0, 0, 497, 498, 5, 21, 0, 0, 498, 499, 3, 108, 54, 0, 499, 107, 1, 0, 0, 0, 500, 502, 3, 110, 55, 0, 501, 500, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 501, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 109, 1, 0, 0, 0, 505, 506, 5, 99, 0, 0, 506, 507, 3, 112, 56, 0, 507, 508, 5, 100, 0, 0, 508, 111, 1, 0, 0, 0, 509, 510, 6, 56, -1, 0, 510, 511, 3, 114, 57, 0, 
511, 517, 1, 0, 0, 0, 512, 513, 10, 1, 0, 0, 513, 514, 5, 52, 0, 0, 514, 516, 3, 114, 57, 0, 515, 512, 1, 0, 0, 0, 516, 519, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 113, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 520, 521, 3, 6, 3, 0, 521, 115, 1, 0, 0, 0, 522, 526, 5, 7, 0, 0, 523, 524, 3, 48, 24, 0, 524, 525, 5, 58, 0, 0, 525, 527, 1, 0, 0, 0, 526, 523, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 3, 138, 69, 0, 529, 530, 3, 84, 42, 0, 530, 117, 1, 0, 0, 0, 531, 532, 5, 27, 0, 0, 532, 533, 3, 28, 14, 0, 533, 534, 5, 74, 0, 0, 534, 535, 3, 52, 26, 0, 535, 119, 1, 0, 0, 0, 536, 537, 5, 17, 0, 0, 537, 540, 3, 44, 22, 0, 538, 539, 5, 59, 0, 0, 539, 541, 3, 14, 7, 0, 540, 538, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 121, 1, 0, 0, 0, 542, 543, 5, 31, 0, 0, 543, 544, 3, 52, 26, 0, 544, 123, 1, 0, 0, 0, 545, 546, 5, 22, 0, 0, 546, 125, 1, 0, 0, 0, 547, 551, 5, 18, 0, 0, 548, 549, 3, 48, 24, 0, 549, 550, 5, 58, 0, 0, 550, 552, 1, 0, 0, 0, 551, 548, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 3, 148, 74, 0, 554, 555, 5, 74, 0, 0, 555, 556, 3, 18, 9, 0, 556, 557, 3, 84, 42, 0, 557, 127, 1, 0, 0, 0, 558, 559, 6, 64, -1, 0, 559, 560, 5, 71, 0, 0, 560, 588, 3, 128, 64, 8, 561, 588, 3, 134, 67, 0, 562, 588, 3, 130, 65, 0, 563, 565, 3, 134, 67, 0, 564, 566, 5, 71, 0, 0, 565, 564, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 567, 1, 0, 0, 0, 567, 568, 5, 67, 0, 0, 568, 569, 5, 99, 0, 0, 569, 574, 3, 134, 67, 0, 570, 571, 5, 62, 0, 0, 571, 573, 3, 134, 67, 0, 572, 570, 1, 0, 0, 0, 573, 576, 1, 0, 0, 0, 574, 572, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 577, 1, 0, 0, 0, 576, 574, 1, 0, 0, 0, 577, 578, 5, 100, 0, 0, 578, 588, 1, 0, 0, 0, 579, 580, 3, 134, 67, 0, 580, 582, 5, 68, 0, 0, 581, 583, 5, 71, 0, 0, 582, 581, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 584, 1, 0, 0, 0, 584, 585, 5, 72, 0, 0, 585, 588, 1, 0, 0, 0, 586, 588, 3, 132, 66, 0, 587, 558, 1, 0, 0, 0, 587, 561, 1, 0, 0, 0, 587, 562, 1, 0, 0, 0, 587, 563, 1, 0, 0, 0, 587, 579, 1, 0, 0, 0, 587, 586, 1, 0, 0, 0, 588, 597, 1, 0, 0, 0, 589, 590, 10, 5, 0, 0, 590, 591, 5, 56, 0, 0, 591, 596, 3, 128, 64, 6, 592, 593, 10, 4, 0, 0, 593, 594, 5, 75, 0, 0, 594, 596, 3, 128, 64, 5, 595, 589, 1, 0, 0, 0, 595, 592, 1, 0, 0, 0, 596, 599, 1, 0, 0, 0, 597, 595, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 129, 1, 0, 0, 0, 599, 597, 1, 0, 0, 0, 600, 602, 3, 134, 67, 0, 601, 603, 5, 71, 0, 0, 602, 601, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 605, 5, 70, 0, 0, 605, 606, 3, 158, 79, 0, 606, 647, 1, 0, 0, 0, 607, 609, 3, 134, 67, 0, 608, 610, 5, 71, 0, 0, 609, 608, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 612, 5, 77, 0, 0, 612, 613, 3, 158, 79, 0, 613, 647, 1, 0, 0, 0, 614, 616, 3, 134, 67, 0, 615, 617, 5, 71, 0, 0, 616, 615, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 5, 70, 0, 0, 619, 620, 5, 99, 0, 0, 620, 625, 3, 158, 79, 0, 621, 622, 5, 62, 0, 0, 622, 624, 3, 158, 79, 0, 623, 621, 1, 0, 0, 0, 624, 627, 1, 0, 0, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 628, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 628, 629, 5, 100, 0, 0, 629, 647, 1, 0, 0, 0, 630, 632, 3, 134, 67, 0, 631, 633, 5, 71, 0, 0, 632, 631, 1, 0, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 635, 5, 77, 0, 0, 635, 636, 5, 99, 0, 0, 636, 641, 3, 158, 79, 0, 637, 638, 5, 62, 0, 0, 638, 640, 3, 158, 79, 0, 639, 637, 1, 0, 0, 0, 640, 643, 1, 0, 0, 0, 641, 639, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 644, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 644, 645, 5, 100, 0, 0, 645, 647, 1, 0, 0, 0, 646, 600, 1, 
0, 0, 0, 646, 607, 1, 0, 0, 0, 646, 614, 1, 0, 0, 0, 646, 630, 1, 0, 0, 0, 647, 131, 1, 0, 0, 0, 648, 651, 3, 48, 24, 0, 649, 650, 5, 60, 0, 0, 650, 652, 3, 10, 5, 0, 651, 649, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 5, 61, 0, 0, 654, 655, 3, 148, 74, 0, 655, 133, 1, 0, 0, 0, 656, 662, 3, 136, 68, 0, 657, 658, 3, 136, 68, 0, 658, 659, 3, 160, 80, 0, 659, 660, 3, 136, 68, 0, 660, 662, 1, 0, 0, 0, 661, 656, 1, 0, 0, 0, 661, 657, 1, 0, 0, 0, 662, 135, 1, 0, 0, 0, 663, 664, 6, 68, -1, 0, 664, 668, 3, 138, 69, 0, 665, 666, 7, 5, 0, 0, 666, 668, 3, 136, 68, 3, 667, 663, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 677, 1, 0, 0, 0, 669, 670, 10, 2, 0, 0, 670, 671, 7, 6, 0, 0, 671, 676, 3, 136, 68, 3, 672, 673, 10, 1, 0, 0, 673, 674, 7, 5, 0, 0, 674, 676, 3, 136, 68, 2, 675, 669, 1, 0, 0, 0, 675, 672, 1, 0, 0, 0, 676, 679, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 677, 678, 1, 0, 0, 0, 678, 137, 1, 0, 0, 0, 679, 677, 1, 0, 0, 0, 680, 681, 6, 69, -1, 0, 681, 689, 3, 148, 74, 0, 682, 689, 3, 48, 24, 0, 683, 689, 3, 140, 70, 0, 684, 685, 5, 99, 0, 0, 685, 686, 3, 128, 64, 0, 686, 687, 5, 100, 0, 0, 687, 689, 1, 0, 0, 0, 688, 680, 1, 0, 0, 0, 688, 682, 1, 0, 0, 0, 688, 683, 1, 0, 0, 0, 688, 684, 1, 0, 0, 0, 689, 695, 1, 0, 0, 0, 690, 691, 10, 1, 0, 0, 691, 692, 5, 60, 0, 0, 692, 694, 3, 10, 5, 0, 693, 690, 1, 0, 0, 0, 694, 697, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 139, 1, 0, 0, 0, 697, 695, 1, 0, 0, 0, 698, 699, 3, 142, 71, 0, 699, 713, 5, 99, 0, 0, 700, 714, 5, 89, 0, 0, 701, 706, 3, 128, 64, 0, 702, 703, 5, 62, 0, 0, 703, 705, 3, 128, 64, 0, 704, 702, 1, 0, 0, 0, 705, 708, 1, 0, 0, 0, 706, 704, 1, 0, 0, 0, 706, 707, 1, 0, 0, 0, 707, 711, 1, 0, 0, 0, 708, 706, 1, 0, 0, 0, 709, 710, 5, 62, 0, 0, 710, 712, 3, 144, 72, 0, 711, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 714, 1, 0, 0, 0, 713, 700, 1, 0, 0, 0, 713, 701, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 715, 1, 0, 0, 0, 715, 716, 5, 100, 0, 0, 716, 141, 1, 0, 0, 0, 717, 718, 3, 62, 31, 0, 718, 143, 1, 0, 0, 0, 719, 728, 5, 92, 0, 0, 720, 725, 3, 146, 73, 0, 721, 722, 5, 62, 0, 0, 722, 724, 3, 146, 73, 0, 723, 721, 1, 0, 0, 0, 724, 727, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 729, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 728, 720, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 730, 1, 0, 0, 0, 730, 731, 5, 93, 0, 0, 731, 145, 1, 0, 0, 0, 732, 733, 3, 158, 79, 0, 733, 734, 5, 61, 0, 0, 734, 735, 3, 148, 74, 0, 735, 147, 1, 0, 0, 0, 736, 779, 5, 72, 0, 0, 737, 738, 3, 156, 78, 0, 738, 739, 5, 101, 0, 0, 739, 779, 1, 0, 0, 0, 740, 779, 3, 154, 77, 0, 741, 779, 3, 156, 78, 0, 742, 779, 3, 150, 75, 0, 743, 779, 3, 58, 29, 0, 744, 779, 3, 158, 79, 0, 745, 746, 5, 97, 0, 0, 746, 751, 3, 152, 76, 0, 747, 748, 5, 62, 0, 0, 748, 750, 3, 152, 76, 0, 749, 747, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 755, 5, 98, 0, 0, 755, 779, 1, 0, 0, 0, 756, 757, 5, 97, 0, 0, 757, 762, 3, 150, 75, 0, 758, 759, 5, 62, 0, 0, 759, 761, 3, 150, 75, 0, 760, 758, 1, 0, 0, 0, 761, 764, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 763, 765, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 765, 766, 5, 98, 0, 0, 766, 779, 1, 0, 0, 0, 767, 768, 5, 97, 0, 0, 768, 773, 3, 158, 79, 0, 769, 770, 5, 62, 0, 0, 770, 772, 3, 158, 79, 0, 771, 769, 1, 0, 0, 0, 772, 775, 1, 0, 0, 0, 773, 771, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 776, 1, 0, 0, 0, 775, 773, 1, 0, 0, 0, 776, 777, 5, 98, 0, 0, 777, 779, 1, 0, 0, 0, 778, 736, 1, 0, 0, 0, 778, 737, 1, 0, 0, 0, 778, 740, 1, 0, 0, 
0, 778, 741, 1, 0, 0, 0, 778, 742, 1, 0, 0, 0, 778, 743, 1, 0, 0, 0, 778, 744, 1, 0, 0, 0, 778, 745, 1, 0, 0, 0, 778, 756, 1, 0, 0, 0, 778, 767, 1, 0, 0, 0, 779, 149, 1, 0, 0, 0, 780, 781, 7, 7, 0, 0, 781, 151, 1, 0, 0, 0, 782, 785, 3, 154, 77, 0, 783, 785, 3, 156, 78, 0, 784, 782, 1, 0, 0, 0, 784, 783, 1, 0, 0, 0, 785, 153, 1, 0, 0, 0, 786, 788, 7, 5, 0, 0, 787, 786, 1, 0, 0, 0, 787, 788, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 790, 5, 55, 0, 0, 790, 155, 1, 0, 0, 0, 791, 793, 7, 5, 0, 0, 792, 791, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 795, 5, 54, 0, 0, 795, 157, 1, 0, 0, 0, 796, 797, 5, 53, 0, 0, 797, 159, 1, 0, 0, 0, 798, 799, 7, 8, 0, 0, 799, 161, 1, 0, 0, 0, 800, 801, 7, 9, 0, 0, 801, 802, 5, 114, 0, 0, 802, 803, 3, 164, 82, 0, 803, 804, 3, 166, 83, 0, 804, 163, 1, 0, 0, 0, 805, 806, 3, 28, 14, 0, 806, 165, 1, 0, 0, 0, 807, 808, 5, 74, 0, 0, 808, 813, 3, 168, 84, 0, 809, 810, 5, 62, 0, 0, 810, 812, 3, 168, 84, 0, 811, 809, 1, 0, 0, 0, 812, 815, 1, 0, 0, 0, 813, 811, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 167, 1, 0, 0, 0, 815, 813, 1, 0, 0, 0, 816, 817, 3, 134, 67, 0, 817, 169, 1, 0, 0, 0, 74, 181, 191, 220, 235, 241, 250, 256, 269, 273, 284, 300, 308, 312, 319, 325, 332, 340, 348, 356, 360, 364, 369, 380, 385, 389, 403, 414, 420, 427, 436, 459, 467, 470, 477, 488, 495, 503, 517, 526, 540, 551, 565, 574, 582, 587, 595, 597, 602, 609, 616, 625, 632, 641, 646, 651, 661, 667, 675, 677, 688, 695, 706, 711, 713, 725, 728, 751, 762, 773, 778, 784, 787, 792, 813]
\ No newline at end of file
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
index 8a87254e54a0b..0d4d4c9cbf698 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
@@ -67,23 +67,22 @@ public class EsqlBaseParser extends ParserConfig {
RULE_doubleParameter = 30, RULE_identifierOrParameter = 31, RULE_limitCommand = 32,
RULE_sortCommand = 33, RULE_orderExpression = 34, RULE_keepCommand = 35,
RULE_dropCommand = 36, RULE_renameCommand = 37, RULE_renameClause = 38,
- RULE_dissectCommand = 39, RULE_grokCommand = 40, RULE_mvExpandCommand = 41,
- RULE_commandOptions = 42, RULE_commandOption = 43, RULE_explainCommand = 44,
- RULE_subqueryExpression = 45, RULE_showCommand = 46, RULE_enrichCommand = 47,
- RULE_enrichPolicyName = 48, RULE_enrichWithClause = 49, RULE_sampleCommand = 50,
- RULE_changePointCommand = 51, RULE_forkCommand = 52, RULE_forkSubQueries = 53,
- RULE_forkSubQuery = 54, RULE_forkSubQueryCommand = 55, RULE_forkSubQueryProcessingCommand = 56,
- RULE_completionCommand = 57, RULE_lookupCommand = 58, RULE_inlinestatsCommand = 59,
- RULE_insistCommand = 60, RULE_fuseCommand = 61, RULE_inferenceCommandOptions = 62,
- RULE_inferenceCommandOption = 63, RULE_inferenceCommandOptionValue = 64,
- RULE_rerankCommand = 65, RULE_booleanExpression = 66, RULE_regexBooleanExpression = 67,
- RULE_matchBooleanExpression = 68, RULE_valueExpression = 69, RULE_operatorExpression = 70,
- RULE_primaryExpression = 71, RULE_functionExpression = 72, RULE_functionName = 73,
- RULE_mapExpression = 74, RULE_entryExpression = 75, RULE_constant = 76,
- RULE_booleanValue = 77, RULE_numericValue = 78, RULE_decimalValue = 79,
- RULE_integerValue = 80, RULE_string = 81, RULE_comparisonOperator = 82,
- RULE_joinCommand = 83, RULE_joinTarget = 84, RULE_joinCondition = 85,
- RULE_joinPredicate = 86;
+ RULE_dissectCommand = 39, RULE_dissectCommandOptions = 40, RULE_dissectCommandOption = 41,
+ RULE_commandNamedParameters = 42, RULE_grokCommand = 43, RULE_mvExpandCommand = 44,
+ RULE_explainCommand = 45, RULE_subqueryExpression = 46, RULE_showCommand = 47,
+ RULE_enrichCommand = 48, RULE_enrichPolicyName = 49, RULE_enrichWithClause = 50,
+ RULE_sampleCommand = 51, RULE_changePointCommand = 52, RULE_forkCommand = 53,
+ RULE_forkSubQueries = 54, RULE_forkSubQuery = 55, RULE_forkSubQueryCommand = 56,
+ RULE_forkSubQueryProcessingCommand = 57, RULE_completionCommand = 58,
+ RULE_lookupCommand = 59, RULE_inlinestatsCommand = 60, RULE_insistCommand = 61,
+ RULE_fuseCommand = 62, RULE_rerankCommand = 63, RULE_booleanExpression = 64,
+ RULE_regexBooleanExpression = 65, RULE_matchBooleanExpression = 66, RULE_valueExpression = 67,
+ RULE_operatorExpression = 68, RULE_primaryExpression = 69, RULE_functionExpression = 70,
+ RULE_functionName = 71, RULE_mapExpression = 72, RULE_entryExpression = 73,
+ RULE_constant = 74, RULE_booleanValue = 75, RULE_numericValue = 76, RULE_decimalValue = 77,
+ RULE_integerValue = 78, RULE_string = 79, RULE_comparisonOperator = 80,
+ RULE_joinCommand = 81, RULE_joinTarget = 82, RULE_joinCondition = 83,
+ RULE_joinPredicate = 84;
private static String[] makeRuleNames() {
return new String[] {
"singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand",
@@ -94,14 +93,14 @@ private static String[] makeRuleNames() {
"aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns",
"identifier", "identifierPattern", "parameter", "doubleParameter", "identifierOrParameter",
"limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand",
- "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand",
- "commandOptions", "commandOption", "explainCommand", "subqueryExpression",
- "showCommand", "enrichCommand", "enrichPolicyName", "enrichWithClause",
- "sampleCommand", "changePointCommand", "forkCommand", "forkSubQueries",
- "forkSubQuery", "forkSubQueryCommand", "forkSubQueryProcessingCommand",
- "completionCommand", "lookupCommand", "inlinestatsCommand", "insistCommand",
- "fuseCommand", "inferenceCommandOptions", "inferenceCommandOption", "inferenceCommandOptionValue",
- "rerankCommand", "booleanExpression", "regexBooleanExpression", "matchBooleanExpression",
+ "renameCommand", "renameClause", "dissectCommand", "dissectCommandOptions",
+ "dissectCommandOption", "commandNamedParameters", "grokCommand", "mvExpandCommand",
+ "explainCommand", "subqueryExpression", "showCommand", "enrichCommand",
+ "enrichPolicyName", "enrichWithClause", "sampleCommand", "changePointCommand",
+ "forkCommand", "forkSubQueries", "forkSubQuery", "forkSubQueryCommand",
+ "forkSubQueryProcessingCommand", "completionCommand", "lookupCommand",
+ "inlinestatsCommand", "insistCommand", "fuseCommand", "rerankCommand",
+ "booleanExpression", "regexBooleanExpression", "matchBooleanExpression",
"valueExpression", "operatorExpression", "primaryExpression", "functionExpression",
"functionName", "mapExpression", "entryExpression", "constant", "booleanValue",
"numericValue", "decimalValue", "integerValue", "string", "comparisonOperator",
@@ -245,9 +244,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio
try {
enterOuterAlt(_localctx, 1);
{
- setState(174);
+ setState(170);
query(0);
- setState(175);
+ setState(171);
match(EOF);
}
}
@@ -343,11 +342,11 @@ private QueryContext query(int _p) throws RecognitionException {
_ctx = _localctx;
_prevctx = _localctx;
- setState(178);
+ setState(174);
sourceCommand();
}
_ctx.stop = _input.LT(-1);
- setState(185);
+ setState(181);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,0,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
@@ -358,16 +357,16 @@ private QueryContext query(int _p) throws RecognitionException {
{
_localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_query);
- setState(180);
+ setState(176);
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
- setState(181);
+ setState(177);
match(PIPE);
- setState(182);
+ setState(178);
processingCommand();
}
}
}
- setState(187);
+ setState(183);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,0,_ctx);
}
@@ -425,45 +424,45 @@ public final SourceCommandContext sourceCommand() throws RecognitionException {
SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState());
enterRule(_localctx, 4, RULE_sourceCommand);
try {
- setState(195);
+ setState(191);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(188);
+ setState(184);
fromCommand();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(189);
+ setState(185);
rowCommand();
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
- setState(190);
+ setState(186);
showCommand();
}
break;
case 4:
enterOuterAlt(_localctx, 4);
{
- setState(191);
+ setState(187);
if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()");
- setState(192);
+ setState(188);
timeSeriesCommand();
}
break;
case 5:
enterOuterAlt(_localctx, 5);
{
- setState(193);
+ setState(189);
if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()");
- setState(194);
+ setState(190);
explainCommand();
}
break;
@@ -572,170 +571,170 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce
ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState());
enterRule(_localctx, 6, RULE_processingCommand);
try {
- setState(224);
+ setState(220);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(197);
+ setState(193);
evalCommand();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(198);
+ setState(194);
whereCommand();
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
- setState(199);
+ setState(195);
keepCommand();
}
break;
case 4:
enterOuterAlt(_localctx, 4);
{
- setState(200);
+ setState(196);
limitCommand();
}
break;
case 5:
enterOuterAlt(_localctx, 5);
{
- setState(201);
+ setState(197);
statsCommand();
}
break;
case 6:
enterOuterAlt(_localctx, 6);
{
- setState(202);
+ setState(198);
sortCommand();
}
break;
case 7:
enterOuterAlt(_localctx, 7);
{
- setState(203);
+ setState(199);
dropCommand();
}
break;
case 8:
enterOuterAlt(_localctx, 8);
{
- setState(204);
+ setState(200);
renameCommand();
}
break;
case 9:
enterOuterAlt(_localctx, 9);
{
- setState(205);
+ setState(201);
dissectCommand();
}
break;
case 10:
enterOuterAlt(_localctx, 10);
{
- setState(206);
+ setState(202);
grokCommand();
}
break;
case 11:
enterOuterAlt(_localctx, 11);
{
- setState(207);
+ setState(203);
enrichCommand();
}
break;
case 12:
enterOuterAlt(_localctx, 12);
{
- setState(208);
+ setState(204);
mvExpandCommand();
}
break;
case 13:
enterOuterAlt(_localctx, 13);
{
- setState(209);
+ setState(205);
joinCommand();
}
break;
case 14:
enterOuterAlt(_localctx, 14);
{
- setState(210);
+ setState(206);
changePointCommand();
}
break;
case 15:
enterOuterAlt(_localctx, 15);
{
- setState(211);
+ setState(207);
completionCommand();
}
break;
case 16:
enterOuterAlt(_localctx, 16);
{
- setState(212);
+ setState(208);
sampleCommand();
}
break;
case 17:
enterOuterAlt(_localctx, 17);
{
- setState(213);
+ setState(209);
forkCommand();
}
break;
case 18:
enterOuterAlt(_localctx, 18);
{
- setState(214);
+ setState(210);
if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()");
- setState(215);
+ setState(211);
inlinestatsCommand();
}
break;
case 19:
enterOuterAlt(_localctx, 19);
{
- setState(216);
+ setState(212);
if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()");
- setState(217);
+ setState(213);
lookupCommand();
}
break;
case 20:
enterOuterAlt(_localctx, 20);
{
- setState(218);
+ setState(214);
if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()");
- setState(219);
+ setState(215);
insistCommand();
}
break;
case 21:
enterOuterAlt(_localctx, 21);
{
- setState(220);
+ setState(216);
if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()");
- setState(221);
+ setState(217);
rerankCommand();
}
break;
case 22:
enterOuterAlt(_localctx, 22);
{
- setState(222);
+ setState(218);
if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()");
- setState(223);
+ setState(219);
fuseCommand();
}
break;
@@ -784,9 +783,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(226);
+ setState(222);
match(WHERE);
- setState(227);
+ setState(223);
booleanExpression(0);
}
}
@@ -844,7 +843,7 @@ public final DataTypeContext dataType() throws RecognitionException {
_localctx = new ToDataTypeContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(229);
+ setState(225);
identifier();
}
}
@@ -891,9 +890,9 @@ public final RowCommandContext rowCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(231);
+ setState(227);
match(ROW);
- setState(232);
+ setState(228);
fields();
}
}
@@ -947,23 +946,23 @@ public final FieldsContext fields() throws RecognitionException {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(234);
+ setState(230);
field();
- setState(239);
+ setState(235);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,3,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(235);
+ setState(231);
match(COMMA);
- setState(236);
+ setState(232);
field();
}
}
}
- setState(241);
+ setState(237);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,3,_ctx);
}
@@ -1015,19 +1014,19 @@ public final FieldContext field() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(245);
+ setState(241);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) {
case 1:
{
- setState(242);
+ setState(238);
qualifiedName();
- setState(243);
+ setState(239);
match(ASSIGN);
}
break;
}
- setState(247);
+ setState(243);
booleanExpression(0);
}
}
@@ -1081,23 +1080,23 @@ public final RerankFieldsContext rerankFields() throws RecognitionException {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(249);
+ setState(245);
rerankField();
- setState(254);
+ setState(250);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,5,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(250);
+ setState(246);
match(COMMA);
- setState(251);
+ setState(247);
rerankField();
}
}
}
- setState(256);
+ setState(252);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,5,_ctx);
}
@@ -1149,16 +1148,16 @@ public final RerankFieldContext rerankField() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(257);
+ setState(253);
qualifiedName();
- setState(260);
+ setState(256);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) {
case 1:
{
- setState(258);
+ setState(254);
match(ASSIGN);
- setState(259);
+ setState(255);
booleanExpression(0);
}
break;
@@ -1208,9 +1207,9 @@ public final FromCommandContext fromCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(262);
+ setState(258);
match(FROM);
- setState(263);
+ setState(259);
indexPatternAndMetadataFields();
}
}
@@ -1257,9 +1256,9 @@ public final TimeSeriesCommandContext timeSeriesCommand() throws RecognitionExce
try {
enterOuterAlt(_localctx, 1);
{
- setState(265);
+ setState(261);
match(DEV_TIME_SERIES);
- setState(266);
+ setState(262);
indexPatternAndMetadataFields();
}
}
@@ -1316,32 +1315,32 @@ public final IndexPatternAndMetadataFieldsContext indexPatternAndMetadataFields(
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(268);
+ setState(264);
indexPattern();
- setState(273);
+ setState(269);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,7,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(269);
+ setState(265);
match(COMMA);
- setState(270);
+ setState(266);
indexPattern();
}
}
}
- setState(275);
+ setState(271);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,7,_ctx);
}
- setState(277);
+ setState(273);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) {
case 1:
{
- setState(276);
+ setState(272);
metadata();
}
break;
@@ -1399,35 +1398,35 @@ public final IndexPatternContext indexPattern() throws RecognitionException {
IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState());
enterRule(_localctx, 28, RULE_indexPattern);
try {
- setState(288);
+ setState(284);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,9,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(279);
+ setState(275);
clusterString();
- setState(280);
+ setState(276);
match(COLON);
- setState(281);
+ setState(277);
unquotedIndexString();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(283);
+ setState(279);
unquotedIndexString();
- setState(284);
+ setState(280);
match(CAST_OP);
- setState(285);
+ setState(281);
selectorString();
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
- setState(287);
+ setState(283);
indexString();
}
break;
@@ -1473,7 +1472,7 @@ public final ClusterStringContext clusterString() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(290);
+ setState(286);
match(UNQUOTED_SOURCE);
}
}
@@ -1517,7 +1516,7 @@ public final SelectorStringContext selectorString() throws RecognitionException
try {
enterOuterAlt(_localctx, 1);
{
- setState(292);
+ setState(288);
match(UNQUOTED_SOURCE);
}
}
@@ -1561,7 +1560,7 @@ public final UnquotedIndexStringContext unquotedIndexString() throws Recognition
try {
enterOuterAlt(_localctx, 1);
{
- setState(294);
+ setState(290);
match(UNQUOTED_SOURCE);
}
}
@@ -1607,7 +1606,7 @@ public final IndexStringContext indexString() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(296);
+ setState(292);
_la = _input.LA(1);
if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) {
_errHandler.recoverInline(this);
@@ -1668,25 +1667,25 @@ public final MetadataContext metadata() throws RecognitionException {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(298);
+ setState(294);
match(METADATA);
- setState(299);
+ setState(295);
match(UNQUOTED_SOURCE);
- setState(304);
+ setState(300);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,10,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(300);
+ setState(296);
match(COMMA);
- setState(301);
+ setState(297);
match(UNQUOTED_SOURCE);
}
}
}
- setState(306);
+ setState(302);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,10,_ctx);
}
@@ -1735,9 +1734,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(307);
+ setState(303);
match(EVAL);
- setState(308);
+ setState(304);
fields();
}
}
@@ -1790,26 +1789,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(310);
+ setState(306);
match(STATS);
- setState(312);
+ setState(308);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) {
case 1:
{
- setState(311);
+ setState(307);
((StatsCommandContext)_localctx).stats = aggFields();
}
break;
}
- setState(316);
+ setState(312);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) {
case 1:
{
- setState(314);
+ setState(310);
match(BY);
- setState(315);
+ setState(311);
((StatsCommandContext)_localctx).grouping = fields();
}
break;
@@ -1866,23 +1865,23 @@ public final AggFieldsContext aggFields() throws RecognitionException {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(318);
+ setState(314);
aggField();
- setState(323);
+ setState(319);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,13,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(319);
+ setState(315);
match(COMMA);
- setState(320);
+ setState(316);
aggField();
}
}
}
- setState(325);
+ setState(321);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,13,_ctx);
}
@@ -1934,16 +1933,16 @@ public final AggFieldContext aggField() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(326);
+ setState(322);
field();
- setState(329);
+ setState(325);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) {
case 1:
{
- setState(327);
+ setState(323);
match(WHERE);
- setState(328);
+ setState(324);
booleanExpression(0);
}
break;
@@ -2000,23 +1999,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(331);
+ setState(327);
identifierOrParameter();
- setState(336);
+ setState(332);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,15,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(332);
+ setState(328);
match(DOT);
- setState(333);
+ setState(329);
identifierOrParameter();
}
}
}
- setState(338);
+ setState(334);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,15,_ctx);
}
@@ -2072,23 +2071,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(339);
+ setState(335);
identifierPattern();
- setState(344);
+ setState(340);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,16,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(340);
+ setState(336);
match(DOT);
- setState(341);
+ setState(337);
identifierPattern();
}
}
}
- setState(346);
+ setState(342);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,16,_ctx);
}
@@ -2144,23 +2143,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(347);
+ setState(343);
qualifiedNamePattern();
- setState(352);
+ setState(348);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,17,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(348);
+ setState(344);
match(COMMA);
- setState(349);
+ setState(345);
qualifiedNamePattern();
}
}
}
- setState(354);
+ setState(350);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,17,_ctx);
}
@@ -2208,7 +2207,7 @@ public final IdentifierContext identifier() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(355);
+ setState(351);
_la = _input.LA(1);
if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) {
_errHandler.recoverInline(this);
@@ -2264,13 +2263,13 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce
IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState());
enterRule(_localctx, 56, RULE_identifierPattern);
try {
- setState(360);
+ setState(356);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ID_PATTERN:
enterOuterAlt(_localctx, 1);
{
- setState(357);
+ setState(353);
match(ID_PATTERN);
}
break;
@@ -2278,7 +2277,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce
case NAMED_OR_POSITIONAL_PARAM:
enterOuterAlt(_localctx, 2);
{
- setState(358);
+ setState(354);
parameter();
}
break;
@@ -2286,7 +2285,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce
case NAMED_OR_POSITIONAL_DOUBLE_PARAMS:
enterOuterAlt(_localctx, 3);
{
- setState(359);
+ setState(355);
doubleParameter();
}
break;
@@ -2362,14 +2361,14 @@ public final ParameterContext parameter() throws RecognitionException {
ParameterContext _localctx = new ParameterContext(_ctx, getState());
enterRule(_localctx, 58, RULE_parameter);
try {
- setState(364);
+ setState(360);
_errHandler.sync(this);
switch (_input.LA(1)) {
case PARAM:
_localctx = new InputParamContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(362);
+ setState(358);
match(PARAM);
}
break;
@@ -2377,7 +2376,7 @@ public final ParameterContext parameter() throws RecognitionException {
_localctx = new InputNamedOrPositionalParamContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(363);
+ setState(359);
match(NAMED_OR_POSITIONAL_PARAM);
}
break;
@@ -2453,14 +2452,14 @@ public final DoubleParameterContext doubleParameter() throws RecognitionExceptio
DoubleParameterContext _localctx = new DoubleParameterContext(_ctx, getState());
enterRule(_localctx, 60, RULE_doubleParameter);
try {
- setState(368);
+ setState(364);
_errHandler.sync(this);
switch (_input.LA(1)) {
case DOUBLE_PARAMS:
_localctx = new InputDoubleParamsContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(366);
+ setState(362);
match(DOUBLE_PARAMS);
}
break;
@@ -2468,7 +2467,7 @@ public final DoubleParameterContext doubleParameter() throws RecognitionExceptio
_localctx = new InputNamedOrPositionalDoubleParamsContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(367);
+ setState(363);
match(NAMED_OR_POSITIONAL_DOUBLE_PARAMS);
}
break;
@@ -2522,14 +2521,14 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni
IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState());
enterRule(_localctx, 62, RULE_identifierOrParameter);
try {
- setState(373);
+ setState(369);
_errHandler.sync(this);
switch (_input.LA(1)) {
case UNQUOTED_IDENTIFIER:
case QUOTED_IDENTIFIER:
enterOuterAlt(_localctx, 1);
{
- setState(370);
+ setState(366);
identifier();
}
break;
@@ -2537,7 +2536,7 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni
case NAMED_OR_POSITIONAL_PARAM:
enterOuterAlt(_localctx, 2);
{
- setState(371);
+ setState(367);
parameter();
}
break;
@@ -2545,7 +2544,7 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni
case NAMED_OR_POSITIONAL_DOUBLE_PARAMS:
enterOuterAlt(_localctx, 3);
{
- setState(372);
+ setState(368);
doubleParameter();
}
break;
@@ -2596,9 +2595,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(375);
+ setState(371);
match(LIMIT);
- setState(376);
+ setState(372);
constant();
}
}
@@ -2653,25 +2652,25 @@ public final SortCommandContext sortCommand() throws RecognitionException {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(378);
+ setState(374);
match(SORT);
- setState(379);
+ setState(375);
orderExpression();
- setState(384);
+ setState(380);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,22,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(380);
+ setState(376);
match(COMMA);
- setState(381);
+ setState(377);
orderExpression();
}
}
}
- setState(386);
+ setState(382);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,22,_ctx);
}
@@ -2727,14 +2726,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio
try {
enterOuterAlt(_localctx, 1);
{
- setState(387);
+ setState(383);
booleanExpression(0);
- setState(389);
+ setState(385);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) {
case 1:
{
- setState(388);
+ setState(384);
((OrderExpressionContext)_localctx).ordering = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==ASC || _la==DESC) ) {
@@ -2748,14 +2747,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio
}
break;
}
- setState(393);
+ setState(389);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) {
case 1:
{
- setState(391);
+ setState(387);
match(NULLS);
- setState(392);
+ setState(388);
((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==FIRST || _la==LAST) ) {
@@ -2814,9 +2813,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(395);
+ setState(391);
match(KEEP);
- setState(396);
+ setState(392);
qualifiedNamePatterns();
}
}
@@ -2863,9 +2862,9 @@ public final DropCommandContext dropCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(398);
+ setState(394);
match(DROP);
- setState(399);
+ setState(395);
qualifiedNamePatterns();
}
}
@@ -2920,25 +2919,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(401);
+ setState(397);
match(RENAME);
- setState(402);
+ setState(398);
renameClause();
- setState(407);
+ setState(403);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,25,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(403);
+ setState(399);
match(COMMA);
- setState(404);
+ setState(400);
renameClause();
}
}
}
- setState(409);
+ setState(405);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,25,_ctx);
}
@@ -2991,28 +2990,28 @@ public final RenameClauseContext renameClause() throws RecognitionException {
RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState());
enterRule(_localctx, 76, RULE_renameClause);
try {
- setState(418);
+ setState(414);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(410);
+ setState(406);
((RenameClauseContext)_localctx).oldName = qualifiedNamePattern();
- setState(411);
+ setState(407);
match(AS);
- setState(412);
+ setState(408);
((RenameClauseContext)_localctx).newName = qualifiedNamePattern();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(414);
+ setState(410);
((RenameClauseContext)_localctx).newName = qualifiedNamePattern();
- setState(415);
+ setState(411);
match(ASSIGN);
- setState(416);
+ setState(412);
((RenameClauseContext)_localctx).oldName = qualifiedNamePattern();
}
break;
@@ -3038,8 +3037,8 @@ public PrimaryExpressionContext primaryExpression() {
public StringContext string() {
return getRuleContext(StringContext.class,0);
}
- public CommandOptionsContext commandOptions() {
- return getRuleContext(CommandOptionsContext.class,0);
+ public DissectCommandOptionsContext dissectCommandOptions() {
+ return getRuleContext(DissectCommandOptionsContext.class,0);
}
@SuppressWarnings("this-escape")
public DissectCommandContext(ParserRuleContext parent, int invokingState) {
@@ -3067,19 +3066,19 @@ public final DissectCommandContext dissectCommand() throws RecognitionException
try {
enterOuterAlt(_localctx, 1);
{
- setState(420);
+ setState(416);
match(DISSECT);
- setState(421);
+ setState(417);
primaryExpression(0);
- setState(422);
+ setState(418);
string();
- setState(424);
+ setState(420);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) {
case 1:
{
- setState(423);
- commandOptions();
+ setState(419);
+ dissectCommandOptions();
}
break;
}
@@ -3097,46 +3096,64 @@ public final DissectCommandContext dissectCommand() throws RecognitionException
}
@SuppressWarnings("CheckReturnValue")
- public static class GrokCommandContext extends ParserRuleContext {
- public TerminalNode GROK() { return getToken(EsqlBaseParser.GROK, 0); }
- public PrimaryExpressionContext primaryExpression() {
- return getRuleContext(PrimaryExpressionContext.class,0);
+ public static class DissectCommandOptionsContext extends ParserRuleContext {
+ public List<DissectCommandOptionContext> dissectCommandOption() {
+ return getRuleContexts(DissectCommandOptionContext.class);
}
- public StringContext string() {
- return getRuleContext(StringContext.class,0);
+ public DissectCommandOptionContext dissectCommandOption(int i) {
+ return getRuleContext(DissectCommandOptionContext.class,i);
+ }
+ public List<TerminalNode> COMMA() { return getTokens(EsqlBaseParser.COMMA); }
+ public TerminalNode COMMA(int i) {
+ return getToken(EsqlBaseParser.COMMA, i);
}
@SuppressWarnings("this-escape")
- public GrokCommandContext(ParserRuleContext parent, int invokingState) {
+ public DissectCommandOptionsContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_grokCommand; }
+ @Override public int getRuleIndex() { return RULE_dissectCommandOptions; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterGrokCommand(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterDissectCommandOptions(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitGrokCommand(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitDissectCommandOptions(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitGrokCommand(this);
+ if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitDissectCommandOptions(this);
else return visitor.visitChildren(this);
}
}
- public final GrokCommandContext grokCommand() throws RecognitionException {
- GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState());
- enterRule(_localctx, 80, RULE_grokCommand);
+ public final DissectCommandOptionsContext dissectCommandOptions() throws RecognitionException {
+ DissectCommandOptionsContext _localctx = new DissectCommandOptionsContext(_ctx, getState());
+ enterRule(_localctx, 80, RULE_dissectCommandOptions);
try {
+ int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(426);
- match(GROK);
+ setState(422);
+ dissectCommandOption();
setState(427);
- primaryExpression(0);
- setState(428);
- string();
+ _errHandler.sync(this);
+ _alt = getInterpreter().adaptivePredict(_input,28,_ctx);
+ while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
+ if ( _alt==1 ) {
+ {
+ {
+ setState(423);
+ match(COMMA);
+ setState(424);
+ dissectCommandOption();
+ }
+ }
+ }
+ setState(429);
+ _errHandler.sync(this);
+ _alt = getInterpreter().adaptivePredict(_input,28,_ctx);
+ }
}
}
catch (RecognitionException re) {
@@ -3151,41 +3168,46 @@ public final GrokCommandContext grokCommand() throws RecognitionException {
}
@SuppressWarnings("CheckReturnValue")
- public static class MvExpandCommandContext extends ParserRuleContext {
- public TerminalNode MV_EXPAND() { return getToken(EsqlBaseParser.MV_EXPAND, 0); }
- public QualifiedNameContext qualifiedName() {
- return getRuleContext(QualifiedNameContext.class,0);
+ public static class DissectCommandOptionContext extends ParserRuleContext {
+ public IdentifierContext identifier() {
+ return getRuleContext(IdentifierContext.class,0);
+ }
+ public TerminalNode ASSIGN() { return getToken(EsqlBaseParser.ASSIGN, 0); }
+ public ConstantContext constant() {
+ return getRuleContext(ConstantContext.class,0);
}
@SuppressWarnings("this-escape")
- public MvExpandCommandContext(ParserRuleContext parent, int invokingState) {
+ public DissectCommandOptionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_mvExpandCommand; }
+ @Override public int getRuleIndex() { return RULE_dissectCommandOption; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMvExpandCommand(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterDissectCommandOption(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMvExpandCommand(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitDissectCommandOption(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitMvExpandCommand(this);
+ if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitDissectCommandOption(this);
else return visitor.visitChildren(this);
}
}
- public final MvExpandCommandContext mvExpandCommand() throws RecognitionException {
- MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState());
- enterRule(_localctx, 82, RULE_mvExpandCommand);
+ public final DissectCommandOptionContext dissectCommandOption() throws RecognitionException {
+ DissectCommandOptionContext _localctx = new DissectCommandOptionContext(_ctx, getState());
+ enterRule(_localctx, 82, RULE_dissectCommandOption);
try {
enterOuterAlt(_localctx, 1);
{
setState(430);
- match(MV_EXPAND);
+ identifier();
setState(431);
- qualifiedName();
+ match(ASSIGN);
+ setState(432);
+ constant();
}
}
catch (RecognitionException re) {
@@ -3200,63 +3222,48 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio
}
@SuppressWarnings("CheckReturnValue")
- public static class CommandOptionsContext extends ParserRuleContext {
- public List<CommandOptionContext> commandOption() {
- return getRuleContexts(CommandOptionContext.class);
- }
- public CommandOptionContext commandOption(int i) {
- return getRuleContext(CommandOptionContext.class,i);
- }
- public List<TerminalNode> COMMA() { return getTokens(EsqlBaseParser.COMMA); }
- public TerminalNode COMMA(int i) {
- return getToken(EsqlBaseParser.COMMA, i);
+ public static class CommandNamedParametersContext extends ParserRuleContext {
+ public TerminalNode WITH() { return getToken(EsqlBaseParser.WITH, 0); }
+ public MapExpressionContext mapExpression() {
+ return getRuleContext(MapExpressionContext.class,0);
}
@SuppressWarnings("this-escape")
- public CommandOptionsContext(ParserRuleContext parent, int invokingState) {
+ public CommandNamedParametersContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_commandOptions; }
+ @Override public int getRuleIndex() { return RULE_commandNamedParameters; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterCommandOptions(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterCommandNamedParameters(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitCommandOptions(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitCommandNamedParameters(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitCommandOptions(this);
+ if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitCommandNamedParameters(this);
else return visitor.visitChildren(this);
}
}
- public final CommandOptionsContext commandOptions() throws RecognitionException {
- CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState());
- enterRule(_localctx, 84, RULE_commandOptions);
+ public final CommandNamedParametersContext commandNamedParameters() throws RecognitionException {
+ CommandNamedParametersContext _localctx = new CommandNamedParametersContext(_ctx, getState());
+ enterRule(_localctx, 84, RULE_commandNamedParameters);
try {
- int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(433);
- commandOption();
- setState(438);
+ setState(436);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,28,_ctx);
- while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
- if ( _alt==1 ) {
- {
- {
- setState(434);
- match(COMMA);
- setState(435);
- commandOption();
- }
- }
+ switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) {
+ case 1:
+ {
+ setState(434);
+ match(WITH);
+ setState(435);
+ mapExpression();
}
- setState(440);
- _errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,28,_ctx);
+ break;
}
}
}
@@ -3272,46 +3279,46 @@ public final CommandOptionsContext commandOptions() throws RecognitionException
}
@SuppressWarnings("CheckReturnValue")
- public static class CommandOptionContext extends ParserRuleContext {
- public IdentifierContext identifier() {
- return getRuleContext(IdentifierContext.class,0);
+ public static class GrokCommandContext extends ParserRuleContext {
+ public TerminalNode GROK() { return getToken(EsqlBaseParser.GROK, 0); }
+ public PrimaryExpressionContext primaryExpression() {
+ return getRuleContext(PrimaryExpressionContext.class,0);
}
- public TerminalNode ASSIGN() { return getToken(EsqlBaseParser.ASSIGN, 0); }
- public ConstantContext constant() {
- return getRuleContext(ConstantContext.class,0);
+ public StringContext string() {
+ return getRuleContext(StringContext.class,0);
}
@SuppressWarnings("this-escape")
- public CommandOptionContext(ParserRuleContext parent, int invokingState) {
+ public GrokCommandContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_commandOption; }
+ @Override public int getRuleIndex() { return RULE_grokCommand; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterCommandOption(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterGrokCommand(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitCommandOption(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitGrokCommand(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitCommandOption(this);
+ if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitGrokCommand(this);
else return visitor.visitChildren(this);
}
}
- public final CommandOptionContext commandOption() throws RecognitionException {
- CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState());
- enterRule(_localctx, 86, RULE_commandOption);
+ public final GrokCommandContext grokCommand() throws RecognitionException {
+ GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState());
+ enterRule(_localctx, 86, RULE_grokCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(441);
- identifier();
- setState(442);
- match(ASSIGN);
- setState(443);
- constant();
+ setState(438);
+ match(GROK);
+ setState(439);
+ primaryExpression(0);
+ setState(440);
+ string();
}
}
catch (RecognitionException re) {
@@ -3326,16 +3333,65 @@ public final CommandOptionContext commandOption() throws RecognitionException {
}
@SuppressWarnings("CheckReturnValue")
- public static class ExplainCommandContext extends ParserRuleContext {
- public TerminalNode DEV_EXPLAIN() { return getToken(EsqlBaseParser.DEV_EXPLAIN, 0); }
- public SubqueryExpressionContext subqueryExpression() {
- return getRuleContext(SubqueryExpressionContext.class,0);
+ public static class MvExpandCommandContext extends ParserRuleContext {
+ public TerminalNode MV_EXPAND() { return getToken(EsqlBaseParser.MV_EXPAND, 0); }
+ public QualifiedNameContext qualifiedName() {
+ return getRuleContext(QualifiedNameContext.class,0);
}
@SuppressWarnings("this-escape")
- public ExplainCommandContext(ParserRuleContext parent, int invokingState) {
+ public MvExpandCommandContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_explainCommand; }
+ @Override public int getRuleIndex() { return RULE_mvExpandCommand; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMvExpandCommand(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMvExpandCommand(this);
+ }
+ @Override
+ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+ if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitMvExpandCommand(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final MvExpandCommandContext mvExpandCommand() throws RecognitionException {
+ MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState());
+ enterRule(_localctx, 88, RULE_mvExpandCommand);
+ try {
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(442);
+ match(MV_EXPAND);
+ setState(443);
+ qualifiedName();
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ @SuppressWarnings("CheckReturnValue")
+ public static class ExplainCommandContext extends ParserRuleContext {
+ public TerminalNode DEV_EXPLAIN() { return getToken(EsqlBaseParser.DEV_EXPLAIN, 0); }
+ public SubqueryExpressionContext subqueryExpression() {
+ return getRuleContext(SubqueryExpressionContext.class,0);
+ }
+ @SuppressWarnings("this-escape")
+ public ExplainCommandContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_explainCommand; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterExplainCommand(this);
@@ -3353,7 +3409,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ExplainCommandContext explainCommand() throws RecognitionException {
ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState());
- enterRule(_localctx, 88, RULE_explainCommand);
+ enterRule(_localctx, 90, RULE_explainCommand);
try {
enterOuterAlt(_localctx, 1);
{
@@ -3403,7 +3459,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final SubqueryExpressionContext subqueryExpression() throws RecognitionException {
SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState());
- enterRule(_localctx, 90, RULE_subqueryExpression);
+ enterRule(_localctx, 92, RULE_subqueryExpression);
try {
enterOuterAlt(_localctx, 1);
{
@@ -3463,7 +3519,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ShowCommandContext showCommand() throws RecognitionException {
ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState());
- enterRule(_localctx, 92, RULE_showCommand);
+ enterRule(_localctx, 94, RULE_showCommand);
try {
_localctx = new ShowInfoContext(_localctx);
enterOuterAlt(_localctx, 1);
@@ -3530,7 +3586,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final EnrichCommandContext enrichCommand() throws RecognitionException {
EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState());
- enterRule(_localctx, 94, RULE_enrichCommand);
+ enterRule(_localctx, 96, RULE_enrichCommand);
try {
int _alt;
enterOuterAlt(_localctx, 1);
@@ -3541,7 +3597,7 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException {
((EnrichCommandContext)_localctx).policyName = enrichPolicyName();
setState(459);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) {
case 1:
{
setState(457);
@@ -3553,7 +3609,7 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException {
}
setState(470);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) {
case 1:
{
setState(461);
@@ -3562,7 +3618,7 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException {
enrichWithClause();
setState(467);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,30,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,31,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
@@ -3576,7 +3632,7 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException {
}
setState(469);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,30,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,31,_ctx);
}
}
break;
@@ -3620,7 +3676,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final EnrichPolicyNameContext enrichPolicyName() throws RecognitionException {
EnrichPolicyNameContext _localctx = new EnrichPolicyNameContext(_ctx, getState());
- enterRule(_localctx, 96, RULE_enrichPolicyName);
+ enterRule(_localctx, 98, RULE_enrichPolicyName);
int _la;
try {
enterOuterAlt(_localctx, 1);
@@ -3681,13 +3737,13 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final EnrichWithClauseContext enrichWithClause() throws RecognitionException {
EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState());
- enterRule(_localctx, 98, RULE_enrichWithClause);
+ enterRule(_localctx, 100, RULE_enrichWithClause);
try {
enterOuterAlt(_localctx, 1);
{
setState(477);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) {
case 1:
{
setState(474);
@@ -3741,7 +3797,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final SampleCommandContext sampleCommand() throws RecognitionException {
SampleCommandContext _localctx = new SampleCommandContext(_ctx, getState());
- enterRule(_localctx, 100, RULE_sampleCommand);
+ enterRule(_localctx, 102, RULE_sampleCommand);
try {
enterOuterAlt(_localctx, 1);
{
@@ -3800,7 +3856,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ChangePointCommandContext changePointCommand() throws RecognitionException {
ChangePointCommandContext _localctx = new ChangePointCommandContext(_ctx, getState());
- enterRule(_localctx, 102, RULE_changePointCommand);
+ enterRule(_localctx, 104, RULE_changePointCommand);
try {
enterOuterAlt(_localctx, 1);
{
@@ -3810,7 +3866,7 @@ public final ChangePointCommandContext changePointCommand() throws RecognitionEx
((ChangePointCommandContext)_localctx).value = qualifiedName();
setState(488);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) {
case 1:
{
setState(486);
@@ -3822,7 +3878,7 @@ public final ChangePointCommandContext changePointCommand() throws RecognitionEx
}
setState(495);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) {
case 1:
{
setState(490);
@@ -3877,7 +3933,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ForkCommandContext forkCommand() throws RecognitionException {
ForkCommandContext _localctx = new ForkCommandContext(_ctx, getState());
- enterRule(_localctx, 104, RULE_forkCommand);
+ enterRule(_localctx, 106, RULE_forkCommand);
try {
enterOuterAlt(_localctx, 1);
{
@@ -3928,7 +3984,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ForkSubQueriesContext forkSubQueries() throws RecognitionException {
ForkSubQueriesContext _localctx = new ForkSubQueriesContext(_ctx, getState());
- enterRule(_localctx, 106, RULE_forkSubQueries);
+ enterRule(_localctx, 108, RULE_forkSubQueries);
try {
int _alt;
enterOuterAlt(_localctx, 1);
@@ -3951,7 +4007,7 @@ public final ForkSubQueriesContext forkSubQueries() throws RecognitionException
}
setState(503);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,35,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,36,_ctx);
} while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER );
}
}
@@ -3995,7 +4051,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ForkSubQueryContext forkSubQuery() throws RecognitionException {
ForkSubQueryContext _localctx = new ForkSubQueryContext(_ctx, getState());
- enterRule(_localctx, 108, RULE_forkSubQuery);
+ enterRule(_localctx, 110, RULE_forkSubQuery);
try {
enterOuterAlt(_localctx, 1);
{
@@ -4088,8 +4144,8 @@ private ForkSubQueryCommandContext forkSubQueryCommand(int _p) throws Recognitio
int _parentState = getState();
ForkSubQueryCommandContext _localctx = new ForkSubQueryCommandContext(_ctx, _parentState);
ForkSubQueryCommandContext _prevctx = _localctx;
- int _startState = 110;
- enterRecursionRule(_localctx, 110, RULE_forkSubQueryCommand, _p);
+ int _startState = 112;
+ enterRecursionRule(_localctx, 112, RULE_forkSubQueryCommand, _p);
try {
int _alt;
enterOuterAlt(_localctx, 1);
@@ -4105,7 +4161,7 @@ private ForkSubQueryCommandContext forkSubQueryCommand(int _p) throws Recognitio
_ctx.stop = _input.LT(-1);
setState(517);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,36,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,37,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
@@ -4125,7 +4181,7 @@ private ForkSubQueryCommandContext forkSubQueryCommand(int _p) throws Recognitio
}
setState(519);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,36,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,37,_ctx);
}
}
}
@@ -4167,7 +4223,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ForkSubQueryProcessingCommandContext forkSubQueryProcessingCommand() throws RecognitionException {
ForkSubQueryProcessingCommandContext _localctx = new ForkSubQueryProcessingCommandContext(_ctx, getState());
- enterRule(_localctx, 112, RULE_forkSubQueryProcessingCommand);
+ enterRule(_localctx, 114, RULE_forkSubQueryProcessingCommand);
try {
enterOuterAlt(_localctx, 1);
{
@@ -4190,15 +4246,13 @@ public final ForkSubQueryProcessingCommandContext forkSubQueryProcessingCommand(
public static class CompletionCommandContext extends ParserRuleContext {
public QualifiedNameContext targetField;
public PrimaryExpressionContext prompt;
- public IdentifierOrParameterContext inferenceId;
public TerminalNode COMPLETION() { return getToken(EsqlBaseParser.COMPLETION, 0); }
- public TerminalNode WITH() { return getToken(EsqlBaseParser.WITH, 0); }
+ public CommandNamedParametersContext commandNamedParameters() {
+ return getRuleContext(CommandNamedParametersContext.class,0);
+ }
public PrimaryExpressionContext primaryExpression() {
return getRuleContext(PrimaryExpressionContext.class,0);
}
- public IdentifierOrParameterContext identifierOrParameter() {
- return getRuleContext(IdentifierOrParameterContext.class,0);
- }
public TerminalNode ASSIGN() { return getToken(EsqlBaseParser.ASSIGN, 0); }
public QualifiedNameContext qualifiedName() {
return getRuleContext(QualifiedNameContext.class,0);
@@ -4225,7 +4279,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final CompletionCommandContext completionCommand() throws RecognitionException {
CompletionCommandContext _localctx = new CompletionCommandContext(_ctx, getState());
- enterRule(_localctx, 114, RULE_completionCommand);
+ enterRule(_localctx, 116, RULE_completionCommand);
try {
enterOuterAlt(_localctx, 1);
{
@@ -4233,7 +4287,7 @@ public final CompletionCommandContext completionCommand() throws RecognitionExce
match(COMPLETION);
setState(526);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) {
case 1:
{
setState(523);
@@ -4246,9 +4300,7 @@ public final CompletionCommandContext completionCommand() throws RecognitionExce
setState(528);
((CompletionCommandContext)_localctx).prompt = primaryExpression(0);
setState(529);
- match(WITH);
- setState(530);
- ((CompletionCommandContext)_localctx).inferenceId = identifierOrParameter();
+ commandNamedParameters();
}
}
catch (RecognitionException re) {
@@ -4296,17 +4348,17 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final LookupCommandContext lookupCommand() throws RecognitionException {
LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState());
- enterRule(_localctx, 116, RULE_lookupCommand);
+ enterRule(_localctx, 118, RULE_lookupCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(532);
+ setState(531);
match(DEV_LOOKUP);
- setState(533);
+ setState(532);
((LookupCommandContext)_localctx).tableName = indexPattern();
- setState(534);
+ setState(533);
match(ON);
- setState(535);
+ setState(534);
((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns();
}
}
@@ -4355,22 +4407,22 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException {
InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState());
- enterRule(_localctx, 118, RULE_inlinestatsCommand);
+ enterRule(_localctx, 120, RULE_inlinestatsCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(537);
+ setState(536);
match(DEV_INLINESTATS);
- setState(538);
+ setState(537);
((InlinestatsCommandContext)_localctx).stats = aggFields();
- setState(541);
+ setState(540);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) {
case 1:
{
- setState(539);
+ setState(538);
match(BY);
- setState(540);
+ setState(539);
((InlinestatsCommandContext)_localctx).grouping = fields();
}
break;
@@ -4416,13 +4468,13 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final InsistCommandContext insistCommand() throws RecognitionException {
InsistCommandContext _localctx = new InsistCommandContext(_ctx, getState());
- enterRule(_localctx, 120, RULE_insistCommand);
+ enterRule(_localctx, 122, RULE_insistCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(543);
+ setState(542);
match(DEV_INSIST);
- setState(544);
+ setState(543);
qualifiedNamePatterns();
}
}
@@ -4462,11 +4514,11 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final FuseCommandContext fuseCommand() throws RecognitionException {
FuseCommandContext _localctx = new FuseCommandContext(_ctx, getState());
- enterRule(_localctx, 122, RULE_fuseCommand);
+ enterRule(_localctx, 124, RULE_fuseCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(546);
+ setState(545);
match(DEV_FUSE);
}
}
@@ -4481,221 +4533,24 @@ public final FuseCommandContext fuseCommand() throws RecognitionException {
return _localctx;
}
- @SuppressWarnings("CheckReturnValue")
- public static class InferenceCommandOptionsContext extends ParserRuleContext {
- public List<InferenceCommandOptionContext> inferenceCommandOption() {
- return getRuleContexts(InferenceCommandOptionContext.class);
- }
- public InferenceCommandOptionContext inferenceCommandOption(int i) {
- return getRuleContext(InferenceCommandOptionContext.class,i);
- }
- public List<TerminalNode> COMMA() { return getTokens(EsqlBaseParser.COMMA); }
- public TerminalNode COMMA(int i) {
- return getToken(EsqlBaseParser.COMMA, i);
- }
- @SuppressWarnings("this-escape")
- public InferenceCommandOptionsContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_inferenceCommandOptions; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterInferenceCommandOptions(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitInferenceCommandOptions(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitInferenceCommandOptions(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final InferenceCommandOptionsContext inferenceCommandOptions() throws RecognitionException {
- InferenceCommandOptionsContext _localctx = new InferenceCommandOptionsContext(_ctx, getState());
- enterRule(_localctx, 124, RULE_inferenceCommandOptions);
- try {
- int _alt;
- enterOuterAlt(_localctx, 1);
- {
- setState(548);
- inferenceCommandOption();
- setState(553);
- _errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,39,_ctx);
- while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
- if ( _alt==1 ) {
- {
- {
- setState(549);
- match(COMMA);
- setState(550);
- inferenceCommandOption();
- }
- }
- }
- setState(555);
- _errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,39,_ctx);
- }
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class InferenceCommandOptionContext extends ParserRuleContext {
- public IdentifierContext identifier() {
- return getRuleContext(IdentifierContext.class,0);
- }
- public TerminalNode ASSIGN() { return getToken(EsqlBaseParser.ASSIGN, 0); }
- public InferenceCommandOptionValueContext inferenceCommandOptionValue() {
- return getRuleContext(InferenceCommandOptionValueContext.class,0);
- }
- @SuppressWarnings("this-escape")
- public InferenceCommandOptionContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_inferenceCommandOption; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterInferenceCommandOption(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitInferenceCommandOption(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitInferenceCommandOption(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final InferenceCommandOptionContext inferenceCommandOption() throws RecognitionException {
- InferenceCommandOptionContext _localctx = new InferenceCommandOptionContext(_ctx, getState());
- enterRule(_localctx, 126, RULE_inferenceCommandOption);
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(556);
- identifier();
- setState(557);
- match(ASSIGN);
- setState(558);
- inferenceCommandOptionValue();
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class InferenceCommandOptionValueContext extends ParserRuleContext {
- public ConstantContext constant() {
- return getRuleContext(ConstantContext.class,0);
- }
- public IdentifierContext identifier() {
- return getRuleContext(IdentifierContext.class,0);
- }
- @SuppressWarnings("this-escape")
- public InferenceCommandOptionValueContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_inferenceCommandOptionValue; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterInferenceCommandOptionValue(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitInferenceCommandOptionValue(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitInferenceCommandOptionValue(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final InferenceCommandOptionValueContext inferenceCommandOptionValue() throws RecognitionException {
- InferenceCommandOptionValueContext _localctx = new InferenceCommandOptionValueContext(_ctx, getState());
- enterRule(_localctx, 128, RULE_inferenceCommandOptionValue);
- try {
- setState(562);
- _errHandler.sync(this);
- switch (_input.LA(1)) {
- case QUOTED_STRING:
- case INTEGER_LITERAL:
- case DECIMAL_LITERAL:
- case FALSE:
- case NULL:
- case PARAM:
- case TRUE:
- case PLUS:
- case MINUS:
- case NAMED_OR_POSITIONAL_PARAM:
- case OPENING_BRACKET:
- enterOuterAlt(_localctx, 1);
- {
- setState(560);
- constant();
- }
- break;
- case UNQUOTED_IDENTIFIER:
- case QUOTED_IDENTIFIER:
- enterOuterAlt(_localctx, 2);
- {
- setState(561);
- identifier();
- }
- break;
- default:
- throw new NoViableAltException(this);
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
@SuppressWarnings("CheckReturnValue")
public static class RerankCommandContext extends ParserRuleContext {
+ public QualifiedNameContext targetField;
public ConstantContext queryText;
public TerminalNode DEV_RERANK() { return getToken(EsqlBaseParser.DEV_RERANK, 0); }
public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); }
public RerankFieldsContext rerankFields() {
return getRuleContext(RerankFieldsContext.class,0);
}
+ public CommandNamedParametersContext commandNamedParameters() {
+ return getRuleContext(CommandNamedParametersContext.class,0);
+ }
public ConstantContext constant() {
return getRuleContext(ConstantContext.class,0);
}
- public TerminalNode WITH() { return getToken(EsqlBaseParser.WITH, 0); }
- public InferenceCommandOptionsContext inferenceCommandOptions() {
- return getRuleContext(InferenceCommandOptionsContext.class,0);
+ public TerminalNode ASSIGN() { return getToken(EsqlBaseParser.ASSIGN, 0); }
+ public QualifiedNameContext qualifiedName() {
+ return getRuleContext(QualifiedNameContext.class,0);
}
@SuppressWarnings("this-escape")
public RerankCommandContext(ParserRuleContext parent, int invokingState) {
@@ -4719,30 +4574,32 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final RerankCommandContext rerankCommand() throws RecognitionException {
RerankCommandContext _localctx = new RerankCommandContext(_ctx, getState());
- enterRule(_localctx, 130, RULE_rerankCommand);
+ enterRule(_localctx, 126, RULE_rerankCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(564);
+ setState(547);
match(DEV_RERANK);
- setState(565);
- ((RerankCommandContext)_localctx).queryText = constant();
- setState(566);
- match(ON);
- setState(567);
- rerankFields();
- setState(570);
+ setState(551);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) {
case 1:
{
- setState(568);
- match(WITH);
- setState(569);
- inferenceCommandOptions();
+ setState(548);
+ ((RerankCommandContext)_localctx).targetField = qualifiedName();
+ setState(549);
+ match(ASSIGN);
}
break;
}
+ setState(553);
+ ((RerankCommandContext)_localctx).queryText = constant();
+ setState(554);
+ match(ON);
+ setState(555);
+ rerankFields();
+ setState(556);
+ commandNamedParameters();
}
}
catch (RecognitionException re) {
@@ -4950,25 +4807,25 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
int _parentState = getState();
BooleanExpressionContext _localctx = new BooleanExpressionContext(_ctx, _parentState);
BooleanExpressionContext _prevctx = _localctx;
- int _startState = 132;
- enterRecursionRule(_localctx, 132, RULE_booleanExpression, _p);
+ int _startState = 128;
+ enterRecursionRule(_localctx, 128, RULE_booleanExpression, _p);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(601);
+ setState(587);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) {
case 1:
{
_localctx = new LogicalNotContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(573);
+ setState(559);
match(NOT);
- setState(574);
+ setState(560);
booleanExpression(8);
}
break;
@@ -4977,7 +4834,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new BooleanDefaultContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(575);
+ setState(561);
valueExpression();
}
break;
@@ -4986,7 +4843,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new RegexExpressionContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(576);
+ setState(562);
regexBooleanExpression();
}
break;
@@ -4995,41 +4852,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new LogicalInContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(577);
+ setState(563);
valueExpression();
- setState(579);
+ setState(565);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(578);
+ setState(564);
match(NOT);
}
}
- setState(581);
+ setState(567);
match(IN);
- setState(582);
+ setState(568);
match(LP);
- setState(583);
+ setState(569);
valueExpression();
- setState(588);
+ setState(574);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(584);
+ setState(570);
match(COMMA);
- setState(585);
+ setState(571);
valueExpression();
}
}
- setState(590);
+ setState(576);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(591);
+ setState(577);
match(RP);
}
break;
@@ -5038,21 +4895,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new IsNullContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(593);
+ setState(579);
valueExpression();
- setState(594);
+ setState(580);
match(IS);
- setState(596);
+ setState(582);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(595);
+ setState(581);
match(NOT);
}
}
- setState(598);
+ setState(584);
match(NULL);
}
break;
@@ -5061,33 +4918,33 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new MatchExpressionContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(600);
+ setState(586);
matchBooleanExpression();
}
break;
}
_ctx.stop = _input.LT(-1);
- setState(611);
+ setState(597);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,47,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,46,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
- setState(609);
+ setState(595);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) {
case 1:
{
_localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState));
((LogicalBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression);
- setState(603);
+ setState(589);
if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)");
- setState(604);
+ setState(590);
((LogicalBinaryContext)_localctx).operator = match(AND);
- setState(605);
+ setState(591);
((LogicalBinaryContext)_localctx).right = booleanExpression(6);
}
break;
@@ -5096,20 +4953,20 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState));
((LogicalBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression);
- setState(606);
+ setState(592);
if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)");
- setState(607);
+ setState(593);
((LogicalBinaryContext)_localctx).operator = match(OR);
- setState(608);
+ setState(594);
((LogicalBinaryContext)_localctx).right = booleanExpression(5);
}
break;
}
}
}
- setState(613);
+ setState(599);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,47,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,46,_ctx);
}
}
}
@@ -5263,31 +5120,31 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final RegexBooleanExpressionContext regexBooleanExpression() throws RecognitionException {
RegexBooleanExpressionContext _localctx = new RegexBooleanExpressionContext(_ctx, getState());
- enterRule(_localctx, 134, RULE_regexBooleanExpression);
+ enterRule(_localctx, 130, RULE_regexBooleanExpression);
int _la;
try {
- setState(660);
+ setState(646);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) {
case 1:
_localctx = new LikeExpressionContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(614);
+ setState(600);
valueExpression();
- setState(616);
+ setState(602);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(615);
+ setState(601);
match(NOT);
}
}
- setState(618);
+ setState(604);
match(LIKE);
- setState(619);
+ setState(605);
string();
}
break;
@@ -5295,21 +5152,21 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog
_localctx = new RlikeExpressionContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(621);
+ setState(607);
valueExpression();
- setState(623);
+ setState(609);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(622);
+ setState(608);
match(NOT);
}
}
- setState(625);
+ setState(611);
match(RLIKE);
- setState(626);
+ setState(612);
string();
}
break;
@@ -5317,41 +5174,41 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog
_localctx = new LikeListExpressionContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(628);
+ setState(614);
valueExpression();
- setState(630);
+ setState(616);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(629);
+ setState(615);
match(NOT);
}
}
- setState(632);
+ setState(618);
match(LIKE);
- setState(633);
+ setState(619);
match(LP);
- setState(634);
+ setState(620);
string();
- setState(639);
+ setState(625);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(635);
+ setState(621);
match(COMMA);
- setState(636);
+ setState(622);
string();
}
}
- setState(641);
+ setState(627);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(642);
+ setState(628);
match(RP);
}
break;
@@ -5359,41 +5216,41 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog
_localctx = new RlikeListExpressionContext(_localctx);
enterOuterAlt(_localctx, 4);
{
- setState(644);
+ setState(630);
valueExpression();
- setState(646);
+ setState(632);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(645);
+ setState(631);
match(NOT);
}
}
- setState(648);
+ setState(634);
match(RLIKE);
- setState(649);
+ setState(635);
match(LP);
- setState(650);
+ setState(636);
string();
- setState(655);
+ setState(641);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(651);
+ setState(637);
match(COMMA);
- setState(652);
+ setState(638);
string();
}
}
- setState(657);
+ setState(643);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(658);
+ setState(644);
match(RP);
}
break;
@@ -5448,28 +5305,28 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final MatchBooleanExpressionContext matchBooleanExpression() throws RecognitionException {
MatchBooleanExpressionContext _localctx = new MatchBooleanExpressionContext(_ctx, getState());
- enterRule(_localctx, 136, RULE_matchBooleanExpression);
+ enterRule(_localctx, 132, RULE_matchBooleanExpression);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(662);
+ setState(648);
((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName();
- setState(665);
+ setState(651);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==CAST_OP) {
{
- setState(663);
+ setState(649);
match(CAST_OP);
- setState(664);
+ setState(650);
((MatchBooleanExpressionContext)_localctx).fieldType = dataType();
}
}
- setState(667);
+ setState(653);
match(COLON);
- setState(668);
+ setState(654);
((MatchBooleanExpressionContext)_localctx).matchQuery = constant();
}
}
@@ -5551,16 +5408,16 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ValueExpressionContext valueExpression() throws RecognitionException {
ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState());
- enterRule(_localctx, 138, RULE_valueExpression);
+ enterRule(_localctx, 134, RULE_valueExpression);
try {
- setState(675);
+ setState(661);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) {
case 1:
_localctx = new ValueExpressionDefaultContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(670);
+ setState(656);
operatorExpression(0);
}
break;
@@ -5568,11 +5425,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio
_localctx = new ComparisonContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(671);
+ setState(657);
((ComparisonContext)_localctx).left = operatorExpression(0);
- setState(672);
+ setState(658);
comparisonOperator();
- setState(673);
+ setState(659);
((ComparisonContext)_localctx).right = operatorExpression(0);
}
break;
@@ -5690,23 +5547,23 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
int _parentState = getState();
OperatorExpressionContext _localctx = new OperatorExpressionContext(_ctx, _parentState);
OperatorExpressionContext _prevctx = _localctx;
- int _startState = 140;
- enterRecursionRule(_localctx, 140, RULE_operatorExpression, _p);
+ int _startState = 136;
+ enterRecursionRule(_localctx, 136, RULE_operatorExpression, _p);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(681);
+ setState(667);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) {
case 1:
{
_localctx = new OperatorExpressionDefaultContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(678);
+ setState(664);
primaryExpression(0);
}
break;
@@ -5715,7 +5572,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_localctx = new ArithmeticUnaryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(679);
+ setState(665);
((ArithmeticUnaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
@@ -5726,31 +5583,31 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_errHandler.reportMatch(this);
consume();
}
- setState(680);
+ setState(666);
operatorExpression(3);
}
break;
}
_ctx.stop = _input.LT(-1);
- setState(691);
+ setState(677);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,59,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,58,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
- setState(689);
+ setState(675);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) {
case 1:
{
_localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState));
((ArithmeticBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression);
- setState(683);
+ setState(669);
if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)");
- setState(684);
+ setState(670);
((ArithmeticBinaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !(((((_la - 89)) & ~0x3f) == 0 && ((1L << (_la - 89)) & 7L) != 0)) ) {
@@ -5761,7 +5618,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_errHandler.reportMatch(this);
consume();
}
- setState(685);
+ setState(671);
((ArithmeticBinaryContext)_localctx).right = operatorExpression(3);
}
break;
@@ -5770,9 +5627,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState));
((ArithmeticBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression);
- setState(686);
+ setState(672);
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
- setState(687);
+ setState(673);
((ArithmeticBinaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
@@ -5783,16 +5640,16 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_errHandler.reportMatch(this);
consume();
}
- setState(688);
+ setState(674);
((ArithmeticBinaryContext)_localctx).right = operatorExpression(2);
}
break;
}
}
}
- setState(693);
+ setState(679);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,59,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,58,_ctx);
}
}
}
@@ -5942,22 +5799,22 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc
int _parentState = getState();
PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, _parentState);
PrimaryExpressionContext _prevctx = _localctx;
- int _startState = 142;
- enterRecursionRule(_localctx, 142, RULE_primaryExpression, _p);
+ int _startState = 138;
+ enterRecursionRule(_localctx, 138, RULE_primaryExpression, _p);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(702);
+ setState(688);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) {
case 1:
{
_localctx = new ConstantDefaultContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(695);
+ setState(681);
constant();
}
break;
@@ -5966,7 +5823,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc
_localctx = new DereferenceContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(696);
+ setState(682);
qualifiedName();
}
break;
@@ -5975,7 +5832,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc
_localctx = new FunctionContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(697);
+ setState(683);
functionExpression();
}
break;
@@ -5984,19 +5841,19 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc
_localctx = new ParenthesizedExpressionContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(698);
+ setState(684);
match(LP);
- setState(699);
+ setState(685);
booleanExpression(0);
- setState(700);
+ setState(686);
match(RP);
}
break;
}
_ctx.stop = _input.LT(-1);
- setState(709);
+ setState(695);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,61,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,60,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
@@ -6005,18 +5862,18 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc
{
_localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression);
- setState(704);
+ setState(690);
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
- setState(705);
+ setState(691);
match(CAST_OP);
- setState(706);
+ setState(692);
dataType();
}
}
}
- setState(711);
+ setState(697);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,61,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,60,_ctx);
}
}
}
@@ -6074,22 +5931,22 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final FunctionExpressionContext functionExpression() throws RecognitionException {
FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState());
- enterRule(_localctx, 144, RULE_functionExpression);
+ enterRule(_localctx, 140, RULE_functionExpression);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(712);
+ setState(698);
functionName();
- setState(713);
+ setState(699);
match(LP);
- setState(727);
+ setState(713);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ASTERISK:
{
- setState(714);
+ setState(700);
match(ASTERISK);
}
break;
@@ -6112,34 +5969,34 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx
case QUOTED_IDENTIFIER:
{
{
- setState(715);
+ setState(701);
booleanExpression(0);
- setState(720);
+ setState(706);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,62,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,61,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(716);
+ setState(702);
match(COMMA);
- setState(717);
+ setState(703);
booleanExpression(0);
}
}
}
- setState(722);
+ setState(708);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,62,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,61,_ctx);
}
- setState(725);
+ setState(711);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==COMMA) {
{
- setState(723);
+ setState(709);
match(COMMA);
- setState(724);
+ setState(710);
mapExpression();
}
}
@@ -6152,7 +6009,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx
default:
break;
}
- setState(729);
+ setState(715);
match(RP);
}
}
@@ -6194,11 +6051,11 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final FunctionNameContext functionName() throws RecognitionException {
FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState());
- enterRule(_localctx, 146, RULE_functionName);
+ enterRule(_localctx, 142, RULE_functionName);
try {
enterOuterAlt(_localctx, 1);
{
- setState(731);
+ setState(717);
identifierOrParameter();
}
}
@@ -6216,13 +6073,13 @@ public final FunctionNameContext functionName() throws RecognitionException {
@SuppressWarnings("CheckReturnValue")
public static class MapExpressionContext extends ParserRuleContext {
public TerminalNode LEFT_BRACES() { return getToken(EsqlBaseParser.LEFT_BRACES, 0); }
+ public TerminalNode RIGHT_BRACES() { return getToken(EsqlBaseParser.RIGHT_BRACES, 0); }
public List entryExpression() {
return getRuleContexts(EntryExpressionContext.class);
}
public EntryExpressionContext entryExpression(int i) {
return getRuleContext(EntryExpressionContext.class,i);
}
- public TerminalNode RIGHT_BRACES() { return getToken(EsqlBaseParser.RIGHT_BRACES, 0); }
public List COMMA() { return getTokens(EsqlBaseParser.COMMA); }
public TerminalNode COMMA(int i) {
return getToken(EsqlBaseParser.COMMA, i);
@@ -6249,32 +6106,40 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final MapExpressionContext mapExpression() throws RecognitionException {
MapExpressionContext _localctx = new MapExpressionContext(_ctx, getState());
- enterRule(_localctx, 148, RULE_mapExpression);
+ enterRule(_localctx, 144, RULE_mapExpression);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(733);
+ setState(719);
match(LEFT_BRACES);
- setState(734);
- entryExpression();
- setState(739);
+ setState(728);
_errHandler.sync(this);
_la = _input.LA(1);
- while (_la==COMMA) {
- {
+ if (_la==QUOTED_STRING) {
{
- setState(735);
- match(COMMA);
- setState(736);
+ setState(720);
entryExpression();
- }
- }
- setState(741);
+ setState(725);
_errHandler.sync(this);
_la = _input.LA(1);
+ while (_la==COMMA) {
+ {
+ {
+ setState(721);
+ match(COMMA);
+ setState(722);
+ entryExpression();
+ }
+ }
+ setState(727);
+ _errHandler.sync(this);
+ _la = _input.LA(1);
+ }
+ }
}
- setState(742);
+
+ setState(730);
match(RIGHT_BRACES);
}
}
@@ -6322,15 +6187,15 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final EntryExpressionContext entryExpression() throws RecognitionException {
EntryExpressionContext _localctx = new EntryExpressionContext(_ctx, getState());
- enterRule(_localctx, 150, RULE_entryExpression);
+ enterRule(_localctx, 146, RULE_entryExpression);
try {
enterOuterAlt(_localctx, 1);
{
- setState(744);
+ setState(732);
((EntryExpressionContext)_localctx).key = string();
- setState(745);
+ setState(733);
match(COLON);
- setState(746);
+ setState(734);
((EntryExpressionContext)_localctx).value = constant();
}
}
@@ -6598,17 +6463,17 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ConstantContext constant() throws RecognitionException {
ConstantContext _localctx = new ConstantContext(_ctx, getState());
- enterRule(_localctx, 152, RULE_constant);
+ enterRule(_localctx, 148, RULE_constant);
int _la;
try {
- setState(790);
+ setState(778);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,69,_ctx) ) {
case 1:
_localctx = new NullLiteralContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(748);
+ setState(736);
match(NULL);
}
break;
@@ -6616,9 +6481,9 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new QualifiedIntegerLiteralContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(749);
+ setState(737);
integerValue();
- setState(750);
+ setState(738);
match(UNQUOTED_IDENTIFIER);
}
break;
@@ -6626,7 +6491,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new DecimalLiteralContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(752);
+ setState(740);
decimalValue();
}
break;
@@ -6634,7 +6499,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new IntegerLiteralContext(_localctx);
enterOuterAlt(_localctx, 4);
{
- setState(753);
+ setState(741);
integerValue();
}
break;
@@ -6642,7 +6507,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new BooleanLiteralContext(_localctx);
enterOuterAlt(_localctx, 5);
{
- setState(754);
+ setState(742);
booleanValue();
}
break;
@@ -6650,7 +6515,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new InputParameterContext(_localctx);
enterOuterAlt(_localctx, 6);
{
- setState(755);
+ setState(743);
parameter();
}
break;
@@ -6658,7 +6523,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new StringLiteralContext(_localctx);
enterOuterAlt(_localctx, 7);
{
- setState(756);
+ setState(744);
string();
}
break;
@@ -6666,27 +6531,27 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new NumericArrayLiteralContext(_localctx);
enterOuterAlt(_localctx, 8);
{
- setState(757);
+ setState(745);
match(OPENING_BRACKET);
- setState(758);
+ setState(746);
numericValue();
- setState(763);
+ setState(751);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(759);
+ setState(747);
match(COMMA);
- setState(760);
+ setState(748);
numericValue();
}
}
- setState(765);
+ setState(753);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(766);
+ setState(754);
match(CLOSING_BRACKET);
}
break;
@@ -6694,27 +6559,27 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new BooleanArrayLiteralContext(_localctx);
enterOuterAlt(_localctx, 9);
{
- setState(768);
+ setState(756);
match(OPENING_BRACKET);
- setState(769);
+ setState(757);
booleanValue();
- setState(774);
+ setState(762);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(770);
+ setState(758);
match(COMMA);
- setState(771);
+ setState(759);
booleanValue();
}
}
- setState(776);
+ setState(764);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(777);
+ setState(765);
match(CLOSING_BRACKET);
}
break;
@@ -6722,27 +6587,27 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new StringArrayLiteralContext(_localctx);
enterOuterAlt(_localctx, 10);
{
- setState(779);
+ setState(767);
match(OPENING_BRACKET);
- setState(780);
+ setState(768);
string();
- setState(785);
+ setState(773);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(781);
+ setState(769);
match(COMMA);
- setState(782);
+ setState(770);
string();
}
}
- setState(787);
+ setState(775);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(788);
+ setState(776);
match(CLOSING_BRACKET);
}
break;
@@ -6785,12 +6650,12 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final BooleanValueContext booleanValue() throws RecognitionException {
BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState());
- enterRule(_localctx, 154, RULE_booleanValue);
+ enterRule(_localctx, 150, RULE_booleanValue);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(792);
+ setState(780);
_la = _input.LA(1);
if ( !(_la==FALSE || _la==TRUE) ) {
_errHandler.recoverInline(this);
@@ -6843,22 +6708,22 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final NumericValueContext numericValue() throws RecognitionException {
NumericValueContext _localctx = new NumericValueContext(_ctx, getState());
- enterRule(_localctx, 156, RULE_numericValue);
+ enterRule(_localctx, 152, RULE_numericValue);
try {
- setState(796);
+ setState(784);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,70,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(794);
+ setState(782);
decimalValue();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(795);
+ setState(783);
integerValue();
}
break;
@@ -6902,17 +6767,17 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final DecimalValueContext decimalValue() throws RecognitionException {
DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState());
- enterRule(_localctx, 158, RULE_decimalValue);
+ enterRule(_localctx, 154, RULE_decimalValue);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(799);
+ setState(787);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==PLUS || _la==MINUS) {
{
- setState(798);
+ setState(786);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
_errHandler.recoverInline(this);
@@ -6925,7 +6790,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException {
}
}
- setState(801);
+ setState(789);
match(DECIMAL_LITERAL);
}
}
@@ -6967,17 +6832,17 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final IntegerValueContext integerValue() throws RecognitionException {
IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState());
- enterRule(_localctx, 160, RULE_integerValue);
+ enterRule(_localctx, 156, RULE_integerValue);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(804);
+ setState(792);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==PLUS || _la==MINUS) {
{
- setState(803);
+ setState(791);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
_errHandler.recoverInline(this);
@@ -6990,7 +6855,7 @@ public final IntegerValueContext integerValue() throws RecognitionException {
}
}
- setState(806);
+ setState(794);
match(INTEGER_LITERAL);
}
}
@@ -7030,11 +6895,11 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final StringContext string() throws RecognitionException {
StringContext _localctx = new StringContext(_ctx, getState());
- enterRule(_localctx, 162, RULE_string);
+ enterRule(_localctx, 158, RULE_string);
try {
enterOuterAlt(_localctx, 1);
{
- setState(808);
+ setState(796);
match(QUOTED_STRING);
}
}
@@ -7079,12 +6944,12 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final ComparisonOperatorContext comparisonOperator() throws RecognitionException {
ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState());
- enterRule(_localctx, 164, RULE_comparisonOperator);
+ enterRule(_localctx, 160, RULE_comparisonOperator);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(810);
+ setState(798);
_la = _input.LA(1);
if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & 125L) != 0)) ) {
_errHandler.recoverInline(this);
@@ -7142,12 +7007,12 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final JoinCommandContext joinCommand() throws RecognitionException {
JoinCommandContext _localctx = new JoinCommandContext(_ctx, getState());
- enterRule(_localctx, 166, RULE_joinCommand);
+ enterRule(_localctx, 162, RULE_joinCommand);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(812);
+ setState(800);
((JoinCommandContext)_localctx).type = _input.LT(1);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 109051904L) != 0)) ) {
@@ -7158,11 +7023,11 @@ public final JoinCommandContext joinCommand() throws RecognitionException {
_errHandler.reportMatch(this);
consume();
}
- setState(813);
+ setState(801);
match(JOIN);
- setState(814);
+ setState(802);
joinTarget();
- setState(815);
+ setState(803);
joinCondition();
}
}
@@ -7205,11 +7070,11 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final JoinTargetContext joinTarget() throws RecognitionException {
JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState());
- enterRule(_localctx, 168, RULE_joinTarget);
+ enterRule(_localctx, 164, RULE_joinTarget);
try {
enterOuterAlt(_localctx, 1);
{
- setState(817);
+ setState(805);
((JoinTargetContext)_localctx).index = indexPattern();
}
}
@@ -7259,30 +7124,30 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final JoinConditionContext joinCondition() throws RecognitionException {
JoinConditionContext _localctx = new JoinConditionContext(_ctx, getState());
- enterRule(_localctx, 170, RULE_joinCondition);
+ enterRule(_localctx, 166, RULE_joinCondition);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(819);
+ setState(807);
match(ON);
- setState(820);
+ setState(808);
joinPredicate();
- setState(825);
+ setState(813);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,73,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(821);
+ setState(809);
match(COMMA);
- setState(822);
+ setState(810);
joinPredicate();
}
}
}
- setState(827);
+ setState(815);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,73,_ctx);
}
@@ -7326,11 +7191,11 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final JoinPredicateContext joinPredicate() throws RecognitionException {
JoinPredicateContext _localctx = new JoinPredicateContext(_ctx, getState());
- enterRule(_localctx, 172, RULE_joinPredicate);
+ enterRule(_localctx, 168, RULE_joinPredicate);
try {
enterOuterAlt(_localctx, 1);
{
- setState(828);
+ setState(816);
valueExpression();
}
}
@@ -7353,13 +7218,13 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
return sourceCommand_sempred((SourceCommandContext)_localctx, predIndex);
case 3:
return processingCommand_sempred((ProcessingCommandContext)_localctx, predIndex);
- case 55:
+ case 56:
return forkSubQueryCommand_sempred((ForkSubQueryCommandContext)_localctx, predIndex);
- case 66:
+ case 64:
return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex);
- case 70:
+ case 68:
return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex);
- case 71:
+ case 69:
return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex);
}
return true;
@@ -7429,7 +7294,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in
}
public static final String _serializedATN =
- "\u0004\u0001\u008b\u033f\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+
+ "\u0004\u0001\u008b\u0333\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+
"\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+
"\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+
"\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+
@@ -7450,494 +7315,489 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in
"E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007"+
"J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007"+
"O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007"+
- "T\u0002U\u0007U\u0002V\u0007V\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+
- "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005"+
- "\u0001\u00b8\b\u0001\n\u0001\f\u0001\u00bb\t\u0001\u0001\u0002\u0001\u0002"+
- "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002"+
- "\u00c4\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
+ "T\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001"+
+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u00b4\b\u0001\n\u0001"+
+ "\f\u0001\u00b7\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+
+ "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u00c0\b\u0002\u0001\u0003"+
+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
"\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
"\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
"\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
- "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00e1\b\u0003"+
- "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006"+
- "\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0005\u0007"+
- "\u00ee\b\u0007\n\u0007\f\u0007\u00f1\t\u0007\u0001\b\u0001\b\u0001\b\u0003"+
- "\b\u00f6\b\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0005\t\u00fd\b\t"+
- "\n\t\f\t\u0100\t\t\u0001\n\u0001\n\u0001\n\u0003\n\u0105\b\n\u0001\u000b"+
- "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001"+
- "\r\u0005\r\u0110\b\r\n\r\f\r\u0113\t\r\u0001\r\u0003\r\u0116\b\r\u0001"+
- "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+
- "\u000e\u0001\u000e\u0001\u000e\u0003\u000e\u0121\b\u000e\u0001\u000f\u0001"+
- "\u000f\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0012\u0001"+
- "\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u012f"+
- "\b\u0013\n\u0013\f\u0013\u0132\t\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+
- "\u0001\u0015\u0001\u0015\u0003\u0015\u0139\b\u0015\u0001\u0015\u0001\u0015"+
- "\u0003\u0015\u013d\b\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016"+
- "\u0142\b\u0016\n\u0016\f\u0016\u0145\t\u0016\u0001\u0017\u0001\u0017\u0001"+
- "\u0017\u0003\u0017\u014a\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0005"+
- "\u0018\u014f\b\u0018\n\u0018\f\u0018\u0152\t\u0018\u0001\u0019\u0001\u0019"+
- "\u0001\u0019\u0005\u0019\u0157\b\u0019\n\u0019\f\u0019\u015a\t\u0019\u0001"+
- "\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u015f\b\u001a\n\u001a\f\u001a"+
- "\u0162\t\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+
- "\u0003\u001c\u0169\b\u001c\u0001\u001d\u0001\u001d\u0003\u001d\u016d\b"+
- "\u001d\u0001\u001e\u0001\u001e\u0003\u001e\u0171\b\u001e\u0001\u001f\u0001"+
- "\u001f\u0001\u001f\u0003\u001f\u0176\b\u001f\u0001 \u0001 \u0001 \u0001"+
- "!\u0001!\u0001!\u0001!\u0005!\u017f\b!\n!\f!\u0182\t!\u0001\"\u0001\""+
- "\u0003\"\u0186\b\"\u0001\"\u0001\"\u0003\"\u018a\b\"\u0001#\u0001#\u0001"+
- "#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001%\u0005%\u0196\b%\n%"+
- "\f%\u0199\t%\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003"+
- "&\u01a3\b&\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u01a9\b\'\u0001(\u0001"+
- "(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0005*\u01b5"+
- "\b*\n*\f*\u01b8\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001"+
- "-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001"+
- "/\u0003/\u01cc\b/\u0001/\u0001/\u0001/\u0001/\u0005/\u01d2\b/\n/\f/\u01d5"+
- "\t/\u0003/\u01d7\b/\u00010\u00010\u00011\u00011\u00011\u00031\u01de\b"+
- "1\u00011\u00011\u00012\u00012\u00012\u00013\u00013\u00013\u00013\u0003"+
- "3\u01e9\b3\u00013\u00013\u00013\u00013\u00013\u00033\u01f0\b3\u00014\u0001"+
- "4\u00014\u00015\u00045\u01f6\b5\u000b5\f5\u01f7\u00016\u00016\u00016\u0001"+
- "6\u00017\u00017\u00017\u00017\u00017\u00017\u00057\u0204\b7\n7\f7\u0207"+
- "\t7\u00018\u00018\u00019\u00019\u00019\u00019\u00039\u020f\b9\u00019\u0001"+
- "9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001"+
- ";\u0001;\u0003;\u021e\b;\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001"+
- ">\u0001>\u0005>\u0228\b>\n>\f>\u022b\t>\u0001?\u0001?\u0001?\u0001?\u0001"+
- "@\u0001@\u0003@\u0233\b@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001A\u0003"+
- "A\u023b\bA\u0001B\u0001B\u0001B\u0001B\u0001B\u0001B\u0001B\u0003B\u0244"+
- "\bB\u0001B\u0001B\u0001B\u0001B\u0001B\u0005B\u024b\bB\nB\fB\u024e\tB"+
- "\u0001B\u0001B\u0001B\u0001B\u0001B\u0003B\u0255\bB\u0001B\u0001B\u0001"+
- "B\u0003B\u025a\bB\u0001B\u0001B\u0001B\u0001B\u0001B\u0001B\u0005B\u0262"+
- "\bB\nB\fB\u0265\tB\u0001C\u0001C\u0003C\u0269\bC\u0001C\u0001C\u0001C"+
- "\u0001C\u0001C\u0003C\u0270\bC\u0001C\u0001C\u0001C\u0001C\u0001C\u0003"+
- "C\u0277\bC\u0001C\u0001C\u0001C\u0001C\u0001C\u0005C\u027e\bC\nC\fC\u0281"+
- "\tC\u0001C\u0001C\u0001C\u0001C\u0003C\u0287\bC\u0001C\u0001C\u0001C\u0001"+
- "C\u0001C\u0005C\u028e\bC\nC\fC\u0291\tC\u0001C\u0001C\u0003C\u0295\bC"+
- "\u0001D\u0001D\u0001D\u0003D\u029a\bD\u0001D\u0001D\u0001D\u0001E\u0001"+
- "E\u0001E\u0001E\u0001E\u0003E\u02a4\bE\u0001F\u0001F\u0001F\u0001F\u0003"+
- "F\u02aa\bF\u0001F\u0001F\u0001F\u0001F\u0001F\u0001F\u0005F\u02b2\bF\n"+
- "F\fF\u02b5\tF\u0001G\u0001G\u0001G\u0001G\u0001G\u0001G\u0001G\u0001G"+
- "\u0003G\u02bf\bG\u0001G\u0001G\u0001G\u0005G\u02c4\bG\nG\fG\u02c7\tG\u0001"+
- "H\u0001H\u0001H\u0001H\u0001H\u0001H\u0005H\u02cf\bH\nH\fH\u02d2\tH\u0001"+
- "H\u0001H\u0003H\u02d6\bH\u0003H\u02d8\bH\u0001H\u0001H\u0001I\u0001I\u0001"+
- "J\u0001J\u0001J\u0001J\u0005J\u02e2\bJ\nJ\fJ\u02e5\tJ\u0001J\u0001J\u0001"+
- "K\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001L\u0001"+
- "L\u0001L\u0001L\u0001L\u0001L\u0001L\u0001L\u0005L\u02fa\bL\nL\fL\u02fd"+
- "\tL\u0001L\u0001L\u0001L\u0001L\u0001L\u0001L\u0005L\u0305\bL\nL\fL\u0308"+
- "\tL\u0001L\u0001L\u0001L\u0001L\u0001L\u0001L\u0005L\u0310\bL\nL\fL\u0313"+
- "\tL\u0001L\u0001L\u0003L\u0317\bL\u0001M\u0001M\u0001N\u0001N\u0003N\u031d"+
- "\bN\u0001O\u0003O\u0320\bO\u0001O\u0001O\u0001P\u0003P\u0325\bP\u0001"+
- "P\u0001P\u0001Q\u0001Q\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001"+
- "S\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0005U\u0338\bU\nU\fU\u033b"+
- "\tU\u0001V\u0001V\u0001V\u0000\u0005\u0002n\u0084\u008c\u008eW\u0000\u0002"+
- "\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e"+
- " \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086"+
- "\u0088\u008a\u008c\u008e\u0090\u0092\u0094\u0096\u0098\u009a\u009c\u009e"+
- "\u00a0\u00a2\u00a4\u00a6\u00a8\u00aa\u00ac\u0000\n\u0002\u000055kk\u0001"+
- "\u0000ef\u0002\u000099??\u0002\u0000BBEE\u0002\u0000&&55\u0001\u0000W"+
- "X\u0001\u0000Y[\u0002\u0000AANN\u0002\u0000PPRV\u0002\u0000\u0017\u0017"+
- "\u0019\u001a\u035c\u0000\u00ae\u0001\u0000\u0000\u0000\u0002\u00b1\u0001"+
- "\u0000\u0000\u0000\u0004\u00c3\u0001\u0000\u0000\u0000\u0006\u00e0\u0001"+
- "\u0000\u0000\u0000\b\u00e2\u0001\u0000\u0000\u0000\n\u00e5\u0001\u0000"+
- "\u0000\u0000\f\u00e7\u0001\u0000\u0000\u0000\u000e\u00ea\u0001\u0000\u0000"+
- "\u0000\u0010\u00f5\u0001\u0000\u0000\u0000\u0012\u00f9\u0001\u0000\u0000"+
- "\u0000\u0014\u0101\u0001\u0000\u0000\u0000\u0016\u0106\u0001\u0000\u0000"+
- "\u0000\u0018\u0109\u0001\u0000\u0000\u0000\u001a\u010c\u0001\u0000\u0000"+
- "\u0000\u001c\u0120\u0001\u0000\u0000\u0000\u001e\u0122\u0001\u0000\u0000"+
- "\u0000 \u0124\u0001\u0000\u0000\u0000\"\u0126\u0001\u0000\u0000\u0000"+
- "$\u0128\u0001\u0000\u0000\u0000&\u012a\u0001\u0000\u0000\u0000(\u0133"+
- "\u0001\u0000\u0000\u0000*\u0136\u0001\u0000\u0000\u0000,\u013e\u0001\u0000"+
- "\u0000\u0000.\u0146\u0001\u0000\u0000\u00000\u014b\u0001\u0000\u0000\u0000"+
- "2\u0153\u0001\u0000\u0000\u00004\u015b\u0001\u0000\u0000\u00006\u0163"+
- "\u0001\u0000\u0000\u00008\u0168\u0001\u0000\u0000\u0000:\u016c\u0001\u0000"+
- "\u0000\u0000<\u0170\u0001\u0000\u0000\u0000>\u0175\u0001\u0000\u0000\u0000"+
- "@\u0177\u0001\u0000\u0000\u0000B\u017a\u0001\u0000\u0000\u0000D\u0183"+
- "\u0001\u0000\u0000\u0000F\u018b\u0001\u0000\u0000\u0000H\u018e\u0001\u0000"+
- "\u0000\u0000J\u0191\u0001\u0000\u0000\u0000L\u01a2\u0001\u0000\u0000\u0000"+
- "N\u01a4\u0001\u0000\u0000\u0000P\u01aa\u0001\u0000\u0000\u0000R\u01ae"+
- "\u0001\u0000\u0000\u0000T\u01b1\u0001\u0000\u0000\u0000V\u01b9\u0001\u0000"+
- "\u0000\u0000X\u01bd\u0001\u0000\u0000\u0000Z\u01c0\u0001\u0000\u0000\u0000"+
- "\\\u01c4\u0001\u0000\u0000\u0000^\u01c7\u0001\u0000\u0000\u0000`\u01d8"+
- "\u0001\u0000\u0000\u0000b\u01dd\u0001\u0000\u0000\u0000d\u01e1\u0001\u0000"+
- "\u0000\u0000f\u01e4\u0001\u0000\u0000\u0000h\u01f1\u0001\u0000\u0000\u0000"+
- "j\u01f5\u0001\u0000\u0000\u0000l\u01f9\u0001\u0000\u0000\u0000n\u01fd"+
- "\u0001\u0000\u0000\u0000p\u0208\u0001\u0000\u0000\u0000r\u020a\u0001\u0000"+
- "\u0000\u0000t\u0214\u0001\u0000\u0000\u0000v\u0219\u0001\u0000\u0000\u0000"+
- "x\u021f\u0001\u0000\u0000\u0000z\u0222\u0001\u0000\u0000\u0000|\u0224"+
- "\u0001\u0000\u0000\u0000~\u022c\u0001\u0000\u0000\u0000\u0080\u0232\u0001"+
- "\u0000\u0000\u0000\u0082\u0234\u0001\u0000\u0000\u0000\u0084\u0259\u0001"+
- "\u0000\u0000\u0000\u0086\u0294\u0001\u0000\u0000\u0000\u0088\u0296\u0001"+
- "\u0000\u0000\u0000\u008a\u02a3\u0001\u0000\u0000\u0000\u008c\u02a9\u0001"+
- "\u0000\u0000\u0000\u008e\u02be\u0001\u0000\u0000\u0000\u0090\u02c8\u0001"+
- "\u0000\u0000\u0000\u0092\u02db\u0001\u0000\u0000\u0000\u0094\u02dd\u0001"+
- "\u0000\u0000\u0000\u0096\u02e8\u0001\u0000\u0000\u0000\u0098\u0316\u0001"+
- "\u0000\u0000\u0000\u009a\u0318\u0001\u0000\u0000\u0000\u009c\u031c\u0001"+
- "\u0000\u0000\u0000\u009e\u031f\u0001\u0000\u0000\u0000\u00a0\u0324\u0001"+
- "\u0000\u0000\u0000\u00a2\u0328\u0001\u0000\u0000\u0000\u00a4\u032a\u0001"+
- "\u0000\u0000\u0000\u00a6\u032c\u0001\u0000\u0000\u0000\u00a8\u0331\u0001"+
- "\u0000\u0000\u0000\u00aa\u0333\u0001\u0000\u0000\u0000\u00ac\u033c\u0001"+
- "\u0000\u0000\u0000\u00ae\u00af\u0003\u0002\u0001\u0000\u00af\u00b0\u0005"+
- "\u0000\u0000\u0001\u00b0\u0001\u0001\u0000\u0000\u0000\u00b1\u00b2\u0006"+
- "\u0001\uffff\uffff\u0000\u00b2\u00b3\u0003\u0004\u0002\u0000\u00b3\u00b9"+
- "\u0001\u0000\u0000\u0000\u00b4\u00b5\n\u0001\u0000\u0000\u00b5\u00b6\u0005"+
- "4\u0000\u0000\u00b6\u00b8\u0003\u0006\u0003\u0000\u00b7\u00b4\u0001\u0000"+
- "\u0000\u0000\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000"+
- "\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u0003\u0001\u0000"+
- "\u0000\u0000\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00c4\u0003\u0016"+
- "\u000b\u0000\u00bd\u00c4\u0003\f\u0006\u0000\u00be\u00c4\u0003\\.\u0000"+
- "\u00bf\u00c0\u0004\u0002\u0001\u0000\u00c0\u00c4\u0003\u0018\f\u0000\u00c1"+
- "\u00c2\u0004\u0002\u0002\u0000\u00c2\u00c4\u0003X,\u0000\u00c3\u00bc\u0001"+
- "\u0000\u0000\u0000\u00c3\u00bd\u0001\u0000\u0000\u0000\u00c3\u00be\u0001"+
- "\u0000\u0000\u0000\u00c3\u00bf\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001"+
- "\u0000\u0000\u0000\u00c4\u0005\u0001\u0000\u0000\u0000\u00c5\u00e1\u0003"+
- "(\u0014\u0000\u00c6\u00e1\u0003\b\u0004\u0000\u00c7\u00e1\u0003F#\u0000"+
- "\u00c8\u00e1\u0003@ \u0000\u00c9\u00e1\u0003*\u0015\u0000\u00ca\u00e1"+
- "\u0003B!\u0000\u00cb\u00e1\u0003H$\u0000\u00cc\u00e1\u0003J%\u0000\u00cd"+
- "\u00e1\u0003N\'\u0000\u00ce\u00e1\u0003P(\u0000\u00cf\u00e1\u0003^/\u0000"+
- "\u00d0\u00e1\u0003R)\u0000\u00d1\u00e1\u0003\u00a6S\u0000\u00d2\u00e1"+
- "\u0003f3\u0000\u00d3\u00e1\u0003r9\u0000\u00d4\u00e1\u0003d2\u0000\u00d5"+
- "\u00e1\u0003h4\u0000\u00d6\u00d7\u0004\u0003\u0003\u0000\u00d7\u00e1\u0003"+
- "v;\u0000\u00d8\u00d9\u0004\u0003\u0004\u0000\u00d9\u00e1\u0003t:\u0000"+
- "\u00da\u00db\u0004\u0003\u0005\u0000\u00db\u00e1\u0003x<\u0000\u00dc\u00dd"+
- "\u0004\u0003\u0006\u0000\u00dd\u00e1\u0003\u0082A\u0000\u00de\u00df\u0004"+
- "\u0003\u0007\u0000\u00df\u00e1\u0003z=\u0000\u00e0\u00c5\u0001\u0000\u0000"+
- "\u0000\u00e0\u00c6\u0001\u0000\u0000\u0000\u00e0\u00c7\u0001\u0000\u0000"+
- "\u0000\u00e0\u00c8\u0001\u0000\u0000\u0000\u00e0\u00c9\u0001\u0000\u0000"+
- "\u0000\u00e0\u00ca\u0001\u0000\u0000\u0000\u00e0\u00cb\u0001\u0000\u0000"+
- "\u0000\u00e0\u00cc\u0001\u0000\u0000\u0000\u00e0\u00cd\u0001\u0000\u0000"+
- "\u0000\u00e0\u00ce\u0001\u0000\u0000\u0000\u00e0\u00cf\u0001\u0000\u0000"+
- "\u0000\u00e0\u00d0\u0001\u0000\u0000\u0000\u00e0\u00d1\u0001\u0000\u0000"+
- "\u0000\u00e0\u00d2\u0001\u0000\u0000\u0000\u00e0\u00d3\u0001\u0000\u0000"+
- "\u0000\u00e0\u00d4\u0001\u0000\u0000\u0000\u00e0\u00d5\u0001\u0000\u0000"+
- "\u0000\u00e0\u00d6\u0001\u0000\u0000\u0000\u00e0\u00d8\u0001\u0000\u0000"+
- "\u0000\u00e0\u00da\u0001\u0000\u0000\u0000\u00e0\u00dc\u0001\u0000\u0000"+
- "\u0000\u00e0\u00de\u0001\u0000\u0000\u0000\u00e1\u0007\u0001\u0000\u0000"+
- "\u0000\u00e2\u00e3\u0005\u0010\u0000\u0000\u00e3\u00e4\u0003\u0084B\u0000"+
- "\u00e4\t\u0001\u0000\u0000\u0000\u00e5\u00e6\u00036\u001b\u0000\u00e6"+
- "\u000b\u0001\u0000\u0000\u0000\u00e7\u00e8\u0005\f\u0000\u0000\u00e8\u00e9"+
- "\u0003\u000e\u0007\u0000\u00e9\r\u0001\u0000\u0000\u0000\u00ea\u00ef\u0003"+
- "\u0010\b\u0000\u00eb\u00ec\u0005>\u0000\u0000\u00ec\u00ee\u0003\u0010"+
- "\b\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00f1\u0001\u0000\u0000"+
- "\u0000\u00ef\u00ed\u0001\u0000\u0000\u0000\u00ef\u00f0\u0001\u0000\u0000"+
- "\u0000\u00f0\u000f\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000"+
- "\u0000\u00f2\u00f3\u00030\u0018\u0000\u00f3\u00f4\u0005:\u0000\u0000\u00f4"+
- "\u00f6\u0001\u0000\u0000\u0000\u00f5\u00f2\u0001\u0000\u0000\u0000\u00f5"+
- "\u00f6\u0001\u0000\u0000\u0000\u00f6\u00f7\u0001\u0000\u0000\u0000\u00f7"+
- "\u00f8\u0003\u0084B\u0000\u00f8\u0011\u0001\u0000\u0000\u0000\u00f9\u00fe"+
- "\u0003\u0014\n\u0000\u00fa\u00fb\u0005>\u0000\u0000\u00fb\u00fd\u0003"+
- "\u0014\n\u0000\u00fc\u00fa\u0001\u0000\u0000\u0000\u00fd\u0100\u0001\u0000"+
- "\u0000\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000"+
- "\u0000\u0000\u00ff\u0013\u0001\u0000\u0000\u0000\u0100\u00fe\u0001\u0000"+
- "\u0000\u0000\u0101\u0104\u00030\u0018\u0000\u0102\u0103\u0005:\u0000\u0000"+
- "\u0103\u0105\u0003\u0084B\u0000\u0104\u0102\u0001\u0000\u0000\u0000\u0104"+
- "\u0105\u0001\u0000\u0000\u0000\u0105\u0015\u0001\u0000\u0000\u0000\u0106"+
- "\u0107\u0005\u0013\u0000\u0000\u0107\u0108\u0003\u001a\r\u0000\u0108\u0017"+
- "\u0001\u0000\u0000\u0000\u0109\u010a\u0005\u0014\u0000\u0000\u010a\u010b"+
- "\u0003\u001a\r\u0000\u010b\u0019\u0001\u0000\u0000\u0000\u010c\u0111\u0003"+
- "\u001c\u000e\u0000\u010d\u010e\u0005>\u0000\u0000\u010e\u0110\u0003\u001c"+
- "\u000e\u0000\u010f\u010d\u0001\u0000\u0000\u0000\u0110\u0113\u0001\u0000"+
- "\u0000\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0111\u0112\u0001\u0000"+
- "\u0000\u0000\u0112\u0115\u0001\u0000\u0000\u0000\u0113\u0111\u0001\u0000"+
- "\u0000\u0000\u0114\u0116\u0003&\u0013\u0000\u0115\u0114\u0001\u0000\u0000"+
- "\u0000\u0115\u0116\u0001\u0000\u0000\u0000\u0116\u001b\u0001\u0000\u0000"+
- "\u0000\u0117\u0118\u0003\u001e\u000f\u0000\u0118\u0119\u0005=\u0000\u0000"+
- "\u0119\u011a\u0003\"\u0011\u0000\u011a\u0121\u0001\u0000\u0000\u0000\u011b"+
- "\u011c\u0003\"\u0011\u0000\u011c\u011d\u0005<\u0000\u0000\u011d\u011e"+
- "\u0003 \u0010\u0000\u011e\u0121\u0001\u0000\u0000\u0000\u011f\u0121\u0003"+
- "$\u0012\u0000\u0120\u0117\u0001\u0000\u0000\u0000\u0120\u011b\u0001\u0000"+
- "\u0000\u0000\u0120\u011f\u0001\u0000\u0000\u0000\u0121\u001d\u0001\u0000"+
- "\u0000\u0000\u0122\u0123\u0005k\u0000\u0000\u0123\u001f\u0001\u0000\u0000"+
- "\u0000\u0124\u0125\u0005k\u0000\u0000\u0125!\u0001\u0000\u0000\u0000\u0126"+
- "\u0127\u0005k\u0000\u0000\u0127#\u0001\u0000\u0000\u0000\u0128\u0129\u0007"+
- "\u0000\u0000\u0000\u0129%\u0001\u0000\u0000\u0000\u012a\u012b\u0005j\u0000"+
- "\u0000\u012b\u0130\u0005k\u0000\u0000\u012c\u012d\u0005>\u0000\u0000\u012d"+
- "\u012f\u0005k\u0000\u0000\u012e\u012c\u0001\u0000\u0000\u0000\u012f\u0132"+
- "\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000\u0000\u0000\u0130\u0131"+
- "\u0001\u0000\u0000\u0000\u0131\'\u0001\u0000\u0000\u0000\u0132\u0130\u0001"+
- "\u0000\u0000\u0000\u0133\u0134\u0005\t\u0000\u0000\u0134\u0135\u0003\u000e"+
- "\u0007\u0000\u0135)\u0001\u0000\u0000\u0000\u0136\u0138\u0005\u000f\u0000"+
- "\u0000\u0137\u0139\u0003,\u0016\u0000\u0138\u0137\u0001\u0000\u0000\u0000"+
- "\u0138\u0139\u0001\u0000\u0000\u0000\u0139\u013c\u0001\u0000\u0000\u0000"+
- "\u013a\u013b\u0005;\u0000\u0000\u013b\u013d\u0003\u000e\u0007\u0000\u013c"+
- "\u013a\u0001\u0000\u0000\u0000\u013c\u013d\u0001\u0000\u0000\u0000\u013d"+
- "+\u0001\u0000\u0000\u0000\u013e\u0143\u0003.\u0017\u0000\u013f\u0140\u0005"+
- ">\u0000\u0000\u0140\u0142\u0003.\u0017\u0000\u0141\u013f\u0001\u0000\u0000"+
- "\u0000\u0142\u0145\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000"+
- "\u0000\u0143\u0144\u0001\u0000\u0000\u0000\u0144-\u0001\u0000\u0000\u0000"+
- "\u0145\u0143\u0001\u0000\u0000\u0000\u0146\u0149\u0003\u0010\b\u0000\u0147"+
- "\u0148\u0005\u0010\u0000\u0000\u0148\u014a\u0003\u0084B\u0000\u0149\u0147"+
- "\u0001\u0000\u0000\u0000\u0149\u014a\u0001\u0000\u0000\u0000\u014a/\u0001"+
- "\u0000\u0000\u0000\u014b\u0150\u0003>\u001f\u0000\u014c\u014d\u0005@\u0000"+
- "\u0000\u014d\u014f\u0003>\u001f\u0000\u014e\u014c\u0001\u0000\u0000\u0000"+
- "\u014f\u0152\u0001\u0000\u0000\u0000\u0150\u014e\u0001\u0000\u0000\u0000"+
- "\u0150\u0151\u0001\u0000\u0000\u0000\u01511\u0001\u0000\u0000\u0000\u0152"+
- "\u0150\u0001\u0000\u0000\u0000\u0153\u0158\u00038\u001c\u0000\u0154\u0155"+
- "\u0005@\u0000\u0000\u0155\u0157\u00038\u001c\u0000\u0156\u0154\u0001\u0000"+
- "\u0000\u0000\u0157\u015a\u0001\u0000\u0000\u0000\u0158\u0156\u0001\u0000"+
- "\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u01593\u0001\u0000\u0000"+
- "\u0000\u015a\u0158\u0001\u0000\u0000\u0000\u015b\u0160\u00032\u0019\u0000"+
- "\u015c\u015d\u0005>\u0000\u0000\u015d\u015f\u00032\u0019\u0000\u015e\u015c"+
- "\u0001\u0000\u0000\u0000\u015f\u0162\u0001\u0000\u0000\u0000\u0160\u015e"+
- "\u0001\u0000\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000\u01615\u0001"+
- "\u0000\u0000\u0000\u0162\u0160\u0001\u0000\u0000\u0000\u0163\u0164\u0007"+
- "\u0001\u0000\u0000\u01647\u0001\u0000\u0000\u0000\u0165\u0169\u0005\u0080"+
- "\u0000\u0000\u0166\u0169\u0003:\u001d\u0000\u0167\u0169\u0003<\u001e\u0000"+
- "\u0168\u0165\u0001\u0000\u0000\u0000\u0168\u0166\u0001\u0000\u0000\u0000"+
- "\u0168\u0167\u0001\u0000\u0000\u0000\u01699\u0001\u0000\u0000\u0000\u016a"+
- "\u016d\u0005L\u0000\u0000\u016b\u016d\u0005_\u0000\u0000\u016c\u016a\u0001"+
- "\u0000\u0000\u0000\u016c\u016b\u0001\u0000\u0000\u0000\u016d;\u0001\u0000"+
- "\u0000\u0000\u016e\u0171\u0005^\u0000\u0000\u016f\u0171\u0005`\u0000\u0000"+
- "\u0170\u016e\u0001\u0000\u0000\u0000\u0170\u016f\u0001\u0000\u0000\u0000"+
- "\u0171=\u0001\u0000\u0000\u0000\u0172\u0176\u00036\u001b\u0000\u0173\u0176"+
- "\u0003:\u001d\u0000\u0174\u0176\u0003<\u001e\u0000\u0175\u0172\u0001\u0000"+
- "\u0000\u0000\u0175\u0173\u0001\u0000\u0000\u0000\u0175\u0174\u0001\u0000"+
- "\u0000\u0000\u0176?\u0001\u0000\u0000\u0000\u0177\u0178\u0005\u000b\u0000"+
- "\u0000\u0178\u0179\u0003\u0098L\u0000\u0179A\u0001\u0000\u0000\u0000\u017a"+
- "\u017b\u0005\u000e\u0000\u0000\u017b\u0180\u0003D\"\u0000\u017c\u017d"+
- "\u0005>\u0000\u0000\u017d\u017f\u0003D\"\u0000\u017e\u017c\u0001\u0000"+
- "\u0000\u0000\u017f\u0182\u0001\u0000\u0000\u0000\u0180\u017e\u0001\u0000"+
- "\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u0181C\u0001\u0000\u0000"+
- "\u0000\u0182\u0180\u0001\u0000\u0000\u0000\u0183\u0185\u0003\u0084B\u0000"+
- "\u0184\u0186\u0007\u0002\u0000\u0000\u0185\u0184\u0001\u0000\u0000\u0000"+
- "\u0185\u0186\u0001\u0000\u0000\u0000\u0186\u0189\u0001\u0000\u0000\u0000"+
- "\u0187\u0188\u0005I\u0000\u0000\u0188\u018a\u0007\u0003\u0000\u0000\u0189"+
- "\u0187\u0001\u0000\u0000\u0000\u0189\u018a\u0001\u0000\u0000\u0000\u018a"+
- "E\u0001\u0000\u0000\u0000\u018b\u018c\u0005\u001e\u0000\u0000\u018c\u018d"+
- "\u00034\u001a\u0000\u018dG\u0001\u0000\u0000\u0000\u018e\u018f\u0005\u001d"+
- "\u0000\u0000\u018f\u0190\u00034\u001a\u0000\u0190I\u0001\u0000\u0000\u0000"+
- "\u0191\u0192\u0005 \u0000\u0000\u0192\u0197\u0003L&\u0000\u0193\u0194"+
- "\u0005>\u0000\u0000\u0194\u0196\u0003L&\u0000\u0195\u0193\u0001\u0000"+
- "\u0000\u0000\u0196\u0199\u0001\u0000\u0000\u0000\u0197\u0195\u0001\u0000"+
- "\u0000\u0000\u0197\u0198\u0001\u0000\u0000\u0000\u0198K\u0001\u0000\u0000"+
- "\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u019a\u019b\u00032\u0019\u0000"+
- "\u019b\u019c\u0005\u0084\u0000\u0000\u019c\u019d\u00032\u0019\u0000\u019d"+
- "\u01a3\u0001\u0000\u0000\u0000\u019e\u019f\u00032\u0019\u0000\u019f\u01a0"+
- "\u0005:\u0000\u0000\u01a0\u01a1\u00032\u0019\u0000\u01a1\u01a3\u0001\u0000"+
- "\u0000\u0000\u01a2\u019a\u0001\u0000\u0000\u0000\u01a2\u019e\u0001\u0000"+
- "\u0000\u0000\u01a3M\u0001\u0000\u0000\u0000\u01a4\u01a5\u0005\b\u0000"+
- "\u0000\u01a5\u01a6\u0003\u008eG\u0000\u01a6\u01a8\u0003\u00a2Q\u0000\u01a7"+
- "\u01a9\u0003T*\u0000\u01a8\u01a7\u0001\u0000\u0000\u0000\u01a8\u01a9\u0001"+
- "\u0000\u0000\u0000\u01a9O\u0001\u0000\u0000\u0000\u01aa\u01ab\u0005\n"+
- "\u0000\u0000\u01ab\u01ac\u0003\u008eG\u0000\u01ac\u01ad\u0003\u00a2Q\u0000"+
- "\u01adQ\u0001\u0000\u0000\u0000\u01ae\u01af\u0005\u001c\u0000\u0000\u01af"+
- "\u01b0\u00030\u0018\u0000\u01b0S\u0001\u0000\u0000\u0000\u01b1\u01b6\u0003"+
- "V+\u0000\u01b2\u01b3\u0005>\u0000\u0000\u01b3\u01b5\u0003V+\u0000\u01b4"+
- "\u01b2\u0001\u0000\u0000\u0000\u01b5\u01b8\u0001\u0000\u0000\u0000\u01b6"+
- "\u01b4\u0001\u0000\u0000\u0000\u01b6\u01b7\u0001\u0000\u0000\u0000\u01b7"+
- "U\u0001\u0000\u0000\u0000\u01b8\u01b6\u0001\u0000\u0000\u0000\u01b9\u01ba"+
- "\u00036\u001b\u0000\u01ba\u01bb\u0005:\u0000\u0000\u01bb\u01bc\u0003\u0098"+
- "L\u0000\u01bcW\u0001\u0000\u0000\u0000\u01bd\u01be\u0005\u0006\u0000\u0000"+
- "\u01be\u01bf\u0003Z-\u0000\u01bfY\u0001\u0000\u0000\u0000\u01c0\u01c1"+
+ "\u0001\u0003\u0001\u0003\u0003\u0003\u00dd\b\u0003\u0001\u0004\u0001\u0004"+
+ "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006"+
+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0005\u0007\u00ea\b\u0007\n\u0007"+
+ "\f\u0007\u00ed\t\u0007\u0001\b\u0001\b\u0001\b\u0003\b\u00f2\b\b\u0001"+
+ "\b\u0001\b\u0001\t\u0001\t\u0001\t\u0005\t\u00f9\b\t\n\t\f\t\u00fc\t\t"+
+ "\u0001\n\u0001\n\u0001\n\u0003\n\u0101\b\n\u0001\u000b\u0001\u000b\u0001"+
+ "\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0005\r\u010c\b"+
+ "\r\n\r\f\r\u010f\t\r\u0001\r\u0003\r\u0112\b\r\u0001\u000e\u0001\u000e"+
+ "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+
+ "\u0001\u000e\u0003\u000e\u011d\b\u000e\u0001\u000f\u0001\u000f\u0001\u0010"+
+ "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0013"+
+ "\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u012b\b\u0013\n\u0013"+
+ "\f\u0013\u012e\t\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015"+
+ "\u0001\u0015\u0003\u0015\u0135\b\u0015\u0001\u0015\u0001\u0015\u0003\u0015"+
+ "\u0139\b\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u013e\b"+
+ "\u0016\n\u0016\f\u0016\u0141\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017"+
+ "\u0003\u0017\u0146\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018"+
+ "\u014b\b\u0018\n\u0018\f\u0018\u014e\t\u0018\u0001\u0019\u0001\u0019\u0001"+
+ "\u0019\u0005\u0019\u0153\b\u0019\n\u0019\f\u0019\u0156\t\u0019\u0001\u001a"+
+ "\u0001\u001a\u0001\u001a\u0005\u001a\u015b\b\u001a\n\u001a\f\u001a\u015e"+
+ "\t\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0003"+
+ "\u001c\u0165\b\u001c\u0001\u001d\u0001\u001d\u0003\u001d\u0169\b\u001d"+
+ "\u0001\u001e\u0001\u001e\u0003\u001e\u016d\b\u001e\u0001\u001f\u0001\u001f"+
+ "\u0001\u001f\u0003\u001f\u0172\b\u001f\u0001 \u0001 \u0001 \u0001!\u0001"+
+ "!\u0001!\u0001!\u0005!\u017b\b!\n!\f!\u017e\t!\u0001\"\u0001\"\u0003\""+
+ "\u0182\b\"\u0001\"\u0001\"\u0003\"\u0186\b\"\u0001#\u0001#\u0001#\u0001"+
+ "$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001%\u0005%\u0192\b%\n%\f%\u0195"+
+ "\t%\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u019f"+
+ "\b&\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u01a5\b\'\u0001(\u0001(\u0001"+
+ "(\u0005(\u01aa\b(\n(\f(\u01ad\t(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001"+
+ "*\u0003*\u01b5\b*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001"+
+ "-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001"+
+ "0\u00010\u00010\u00010\u00030\u01cc\b0\u00010\u00010\u00010\u00010\u0005"+
+ "0\u01d2\b0\n0\f0\u01d5\t0\u00030\u01d7\b0\u00011\u00011\u00012\u00012"+
+ "\u00012\u00032\u01de\b2\u00012\u00012\u00013\u00013\u00013\u00014\u0001"+
+ "4\u00014\u00014\u00034\u01e9\b4\u00014\u00014\u00014\u00014\u00014\u0003"+
+ "4\u01f0\b4\u00015\u00015\u00015\u00016\u00046\u01f6\b6\u000b6\f6\u01f7"+
+ "\u00017\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u0001"+
+ "8\u00058\u0204\b8\n8\f8\u0207\t8\u00019\u00019\u0001:\u0001:\u0001:\u0001"+
+ ":\u0003:\u020f\b:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001"+
+ ";\u0001<\u0001<\u0001<\u0001<\u0003<\u021d\b<\u0001=\u0001=\u0001=\u0001"+
+ ">\u0001>\u0001?\u0001?\u0001?\u0001?\u0003?\u0228\b?\u0001?\u0001?\u0001"+
+ "?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001@\u0001@\u0001@\u0001@\u0003"+
+ "@\u0236\b@\u0001@\u0001@\u0001@\u0001@\u0001@\u0005@\u023d\b@\n@\f@\u0240"+
+ "\t@\u0001@\u0001@\u0001@\u0001@\u0001@\u0003@\u0247\b@\u0001@\u0001@\u0001"+
+ "@\u0003@\u024c\b@\u0001@\u0001@\u0001@\u0001@\u0001@\u0001@\u0005@\u0254"+
+ "\b@\n@\f@\u0257\t@\u0001A\u0001A\u0003A\u025b\bA\u0001A\u0001A\u0001A"+
+ "\u0001A\u0001A\u0003A\u0262\bA\u0001A\u0001A\u0001A\u0001A\u0001A\u0003"+
+ "A\u0269\bA\u0001A\u0001A\u0001A\u0001A\u0001A\u0005A\u0270\bA\nA\fA\u0273"+
+ "\tA\u0001A\u0001A\u0001A\u0001A\u0003A\u0279\bA\u0001A\u0001A\u0001A\u0001"+
+ "A\u0001A\u0005A\u0280\bA\nA\fA\u0283\tA\u0001A\u0001A\u0003A\u0287\bA"+
+ "\u0001B\u0001B\u0001B\u0003B\u028c\bB\u0001B\u0001B\u0001B\u0001C\u0001"+
+ "C\u0001C\u0001C\u0001C\u0003C\u0296\bC\u0001D\u0001D\u0001D\u0001D\u0003"+
+ "D\u029c\bD\u0001D\u0001D\u0001D\u0001D\u0001D\u0001D\u0005D\u02a4\bD\n"+
+ "D\fD\u02a7\tD\u0001E\u0001E\u0001E\u0001E\u0001E\u0001E\u0001E\u0001E"+
+ "\u0003E\u02b1\bE\u0001E\u0001E\u0001E\u0005E\u02b6\bE\nE\fE\u02b9\tE\u0001"+
+ "F\u0001F\u0001F\u0001F\u0001F\u0001F\u0005F\u02c1\bF\nF\fF\u02c4\tF\u0001"+
+ "F\u0001F\u0003F\u02c8\bF\u0003F\u02ca\bF\u0001F\u0001F\u0001G\u0001G\u0001"+
+ "H\u0001H\u0001H\u0001H\u0005H\u02d4\bH\nH\fH\u02d7\tH\u0003H\u02d9\bH"+
+ "\u0001H\u0001H\u0001I\u0001I\u0001I\u0001I\u0001J\u0001J\u0001J\u0001"+
+ "J\u0001J\u0001J\u0001J\u0001J\u0001J\u0001J\u0001J\u0001J\u0001J\u0005"+
+ "J\u02ee\bJ\nJ\fJ\u02f1\tJ\u0001J\u0001J\u0001J\u0001J\u0001J\u0001J\u0005"+
+ "J\u02f9\bJ\nJ\fJ\u02fc\tJ\u0001J\u0001J\u0001J\u0001J\u0001J\u0001J\u0005"+
+ "J\u0304\bJ\nJ\fJ\u0307\tJ\u0001J\u0001J\u0003J\u030b\bJ\u0001K\u0001K"+
+ "\u0001L\u0001L\u0003L\u0311\bL\u0001M\u0003M\u0314\bM\u0001M\u0001M\u0001"+
+ "N\u0003N\u0319\bN\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001"+
+ "Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0005"+
+ "S\u032c\bS\nS\fS\u032f\tS\u0001T\u0001T\u0001T\u0000\u0005\u0002p\u0080"+
+ "\u0088\u008aU\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016"+
+ "\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprt"+
+ "vxz|~\u0080\u0082\u0084\u0086\u0088\u008a\u008c\u008e\u0090\u0092\u0094"+
+ "\u0096\u0098\u009a\u009c\u009e\u00a0\u00a2\u00a4\u00a6\u00a8\u0000\n\u0002"+
+ "\u000055kk\u0001\u0000ef\u0002\u000099??\u0002\u0000BBEE\u0002\u0000&"+
+ "&55\u0001\u0000WX\u0001\u0000Y[\u0002\u0000AANN\u0002\u0000PPRV\u0002"+
+ "\u0000\u0017\u0017\u0019\u001a\u0352\u0000\u00aa\u0001\u0000\u0000\u0000"+
+ "\u0002\u00ad\u0001\u0000\u0000\u0000\u0004\u00bf\u0001\u0000\u0000\u0000"+
+ "\u0006\u00dc\u0001\u0000\u0000\u0000\b\u00de\u0001\u0000\u0000\u0000\n"+
+ "\u00e1\u0001\u0000\u0000\u0000\f\u00e3\u0001\u0000\u0000\u0000\u000e\u00e6"+
+ "\u0001\u0000\u0000\u0000\u0010\u00f1\u0001\u0000\u0000\u0000\u0012\u00f5"+
+ "\u0001\u0000\u0000\u0000\u0014\u00fd\u0001\u0000\u0000\u0000\u0016\u0102"+
+ "\u0001\u0000\u0000\u0000\u0018\u0105\u0001\u0000\u0000\u0000\u001a\u0108"+
+ "\u0001\u0000\u0000\u0000\u001c\u011c\u0001\u0000\u0000\u0000\u001e\u011e"+
+ "\u0001\u0000\u0000\u0000 \u0120\u0001\u0000\u0000\u0000\"\u0122\u0001"+
+ "\u0000\u0000\u0000$\u0124\u0001\u0000\u0000\u0000&\u0126\u0001\u0000\u0000"+
+ "\u0000(\u012f\u0001\u0000\u0000\u0000*\u0132\u0001\u0000\u0000\u0000,"+
+ "\u013a\u0001\u0000\u0000\u0000.\u0142\u0001\u0000\u0000\u00000\u0147\u0001"+
+ "\u0000\u0000\u00002\u014f\u0001\u0000\u0000\u00004\u0157\u0001\u0000\u0000"+
+ "\u00006\u015f\u0001\u0000\u0000\u00008\u0164\u0001\u0000\u0000\u0000:"+
+ "\u0168\u0001\u0000\u0000\u0000<\u016c\u0001\u0000\u0000\u0000>\u0171\u0001"+
+ "\u0000\u0000\u0000@\u0173\u0001\u0000\u0000\u0000B\u0176\u0001\u0000\u0000"+
+ "\u0000D\u017f\u0001\u0000\u0000\u0000F\u0187\u0001\u0000\u0000\u0000H"+
+ "\u018a\u0001\u0000\u0000\u0000J\u018d\u0001\u0000\u0000\u0000L\u019e\u0001"+
+ "\u0000\u0000\u0000N\u01a0\u0001\u0000\u0000\u0000P\u01a6\u0001\u0000\u0000"+
+ "\u0000R\u01ae\u0001\u0000\u0000\u0000T\u01b4\u0001\u0000\u0000\u0000V"+
+ "\u01b6\u0001\u0000\u0000\u0000X\u01ba\u0001\u0000\u0000\u0000Z\u01bd\u0001"+
+ "\u0000\u0000\u0000\\\u01c0\u0001\u0000\u0000\u0000^\u01c4\u0001\u0000"+
+ "\u0000\u0000`\u01c7\u0001\u0000\u0000\u0000b\u01d8\u0001\u0000\u0000\u0000"+
+ "d\u01dd\u0001\u0000\u0000\u0000f\u01e1\u0001\u0000\u0000\u0000h\u01e4"+
+ "\u0001\u0000\u0000\u0000j\u01f1\u0001\u0000\u0000\u0000l\u01f5\u0001\u0000"+
+ "\u0000\u0000n\u01f9\u0001\u0000\u0000\u0000p\u01fd\u0001\u0000\u0000\u0000"+
+ "r\u0208\u0001\u0000\u0000\u0000t\u020a\u0001\u0000\u0000\u0000v\u0213"+
+ "\u0001\u0000\u0000\u0000x\u0218\u0001\u0000\u0000\u0000z\u021e\u0001\u0000"+
+ "\u0000\u0000|\u0221\u0001\u0000\u0000\u0000~\u0223\u0001\u0000\u0000\u0000"+
+ "\u0080\u024b\u0001\u0000\u0000\u0000\u0082\u0286\u0001\u0000\u0000\u0000"+
+ "\u0084\u0288\u0001\u0000\u0000\u0000\u0086\u0295\u0001\u0000\u0000\u0000"+
+ "\u0088\u029b\u0001\u0000\u0000\u0000\u008a\u02b0\u0001\u0000\u0000\u0000"+
+ "\u008c\u02ba\u0001\u0000\u0000\u0000\u008e\u02cd\u0001\u0000\u0000\u0000"+
+ "\u0090\u02cf\u0001\u0000\u0000\u0000\u0092\u02dc\u0001\u0000\u0000\u0000"+
+ "\u0094\u030a\u0001\u0000\u0000\u0000\u0096\u030c\u0001\u0000\u0000\u0000"+
+ "\u0098\u0310\u0001\u0000\u0000\u0000\u009a\u0313\u0001\u0000\u0000\u0000"+
+ "\u009c\u0318\u0001\u0000\u0000\u0000\u009e\u031c\u0001\u0000\u0000\u0000"+
+ "\u00a0\u031e\u0001\u0000\u0000\u0000\u00a2\u0320\u0001\u0000\u0000\u0000"+
+ "\u00a4\u0325\u0001\u0000\u0000\u0000\u00a6\u0327\u0001\u0000\u0000\u0000"+
+ "\u00a8\u0330\u0001\u0000\u0000\u0000\u00aa\u00ab\u0003\u0002\u0001\u0000"+
+ "\u00ab\u00ac\u0005\u0000\u0000\u0001\u00ac\u0001\u0001\u0000\u0000\u0000"+
+ "\u00ad\u00ae\u0006\u0001\uffff\uffff\u0000\u00ae\u00af\u0003\u0004\u0002"+
+ "\u0000\u00af\u00b5\u0001\u0000\u0000\u0000\u00b0\u00b1\n\u0001\u0000\u0000"+
+ "\u00b1\u00b2\u00054\u0000\u0000\u00b2\u00b4\u0003\u0006\u0003\u0000\u00b3"+
+ "\u00b0\u0001\u0000\u0000\u0000\u00b4\u00b7\u0001\u0000\u0000\u0000\u00b5"+
+ "\u00b3\u0001\u0000\u0000\u0000\u00b5\u00b6\u0001\u0000\u0000\u0000\u00b6"+
+ "\u0003\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b8"+
+ "\u00c0\u0003\u0016\u000b\u0000\u00b9\u00c0\u0003\f\u0006\u0000\u00ba\u00c0"+
+ "\u0003^/\u0000\u00bb\u00bc\u0004\u0002\u0001\u0000\u00bc\u00c0\u0003\u0018"+
+ "\f\u0000\u00bd\u00be\u0004\u0002\u0002\u0000\u00be\u00c0\u0003Z-\u0000"+
+ "\u00bf\u00b8\u0001\u0000\u0000\u0000\u00bf\u00b9\u0001\u0000\u0000\u0000"+
+ "\u00bf\u00ba\u0001\u0000\u0000\u0000\u00bf\u00bb\u0001\u0000\u0000\u0000"+
+ "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u0005\u0001\u0000\u0000\u0000"+
+ "\u00c1\u00dd\u0003(\u0014\u0000\u00c2\u00dd\u0003\b\u0004\u0000\u00c3"+
+ "\u00dd\u0003F#\u0000\u00c4\u00dd\u0003@ \u0000\u00c5\u00dd\u0003*\u0015"+
+ "\u0000\u00c6\u00dd\u0003B!\u0000\u00c7\u00dd\u0003H$\u0000\u00c8\u00dd"+
+ "\u0003J%\u0000\u00c9\u00dd\u0003N\'\u0000\u00ca\u00dd\u0003V+\u0000\u00cb"+
+ "\u00dd\u0003`0\u0000\u00cc\u00dd\u0003X,\u0000\u00cd\u00dd\u0003\u00a2"+
+ "Q\u0000\u00ce\u00dd\u0003h4\u0000\u00cf\u00dd\u0003t:\u0000\u00d0\u00dd"+
+ "\u0003f3\u0000\u00d1\u00dd\u0003j5\u0000\u00d2\u00d3\u0004\u0003\u0003"+
+ "\u0000\u00d3\u00dd\u0003x<\u0000\u00d4\u00d5\u0004\u0003\u0004\u0000\u00d5"+
+ "\u00dd\u0003v;\u0000\u00d6\u00d7\u0004\u0003\u0005\u0000\u00d7\u00dd\u0003"+
+ "z=\u0000\u00d8\u00d9\u0004\u0003\u0006\u0000\u00d9\u00dd\u0003~?\u0000"+
+ "\u00da\u00db\u0004\u0003\u0007\u0000\u00db\u00dd\u0003|>\u0000\u00dc\u00c1"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00c2\u0001\u0000\u0000\u0000\u00dc\u00c3"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00c4\u0001\u0000\u0000\u0000\u00dc\u00c5"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00c6\u0001\u0000\u0000\u0000\u00dc\u00c7"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00c8\u0001\u0000\u0000\u0000\u00dc\u00c9"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00ca\u0001\u0000\u0000\u0000\u00dc\u00cb"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00cc\u0001\u0000\u0000\u0000\u00dc\u00cd"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00ce\u0001\u0000\u0000\u0000\u00dc\u00cf"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00d0\u0001\u0000\u0000\u0000\u00dc\u00d1"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00d2\u0001\u0000\u0000\u0000\u00dc\u00d4"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00d6\u0001\u0000\u0000\u0000\u00dc\u00d8"+
+ "\u0001\u0000\u0000\u0000\u00dc\u00da\u0001\u0000\u0000\u0000\u00dd\u0007"+
+ "\u0001\u0000\u0000\u0000\u00de\u00df\u0005\u0010\u0000\u0000\u00df\u00e0"+
+ "\u0003\u0080@\u0000\u00e0\t\u0001\u0000\u0000\u0000\u00e1\u00e2\u0003"+
+ "6\u001b\u0000\u00e2\u000b\u0001\u0000\u0000\u0000\u00e3\u00e4\u0005\f"+
+ "\u0000\u0000\u00e4\u00e5\u0003\u000e\u0007\u0000\u00e5\r\u0001\u0000\u0000"+
+ "\u0000\u00e6\u00eb\u0003\u0010\b\u0000\u00e7\u00e8\u0005>\u0000\u0000"+
+ "\u00e8\u00ea\u0003\u0010\b\u0000\u00e9\u00e7\u0001\u0000\u0000\u0000\u00ea"+
+ "\u00ed\u0001\u0000\u0000\u0000\u00eb\u00e9\u0001\u0000\u0000\u0000\u00eb"+
+ "\u00ec\u0001\u0000\u0000\u0000\u00ec\u000f\u0001\u0000\u0000\u0000\u00ed"+
+ "\u00eb\u0001\u0000\u0000\u0000\u00ee\u00ef\u00030\u0018\u0000\u00ef\u00f0"+
+ "\u0005:\u0000\u0000\u00f0\u00f2\u0001\u0000\u0000\u0000\u00f1\u00ee\u0001"+
+ "\u0000\u0000\u0000\u00f1\u00f2\u0001\u0000\u0000\u0000\u00f2\u00f3\u0001"+
+ "\u0000\u0000\u0000\u00f3\u00f4\u0003\u0080@\u0000\u00f4\u0011\u0001\u0000"+
+ "\u0000\u0000\u00f5\u00fa\u0003\u0014\n\u0000\u00f6\u00f7\u0005>\u0000"+
+ "\u0000\u00f7\u00f9\u0003\u0014\n\u0000\u00f8\u00f6\u0001\u0000\u0000\u0000"+
+ "\u00f9\u00fc\u0001\u0000\u0000\u0000\u00fa\u00f8\u0001\u0000\u0000\u0000"+
+ "\u00fa\u00fb\u0001\u0000\u0000\u0000\u00fb\u0013\u0001\u0000\u0000\u0000"+
+ "\u00fc\u00fa\u0001\u0000\u0000\u0000\u00fd\u0100\u00030\u0018\u0000\u00fe"+
+ "\u00ff\u0005:\u0000\u0000\u00ff\u0101\u0003\u0080@\u0000\u0100\u00fe\u0001"+
+ "\u0000\u0000\u0000\u0100\u0101\u0001\u0000\u0000\u0000\u0101\u0015\u0001"+
+ "\u0000\u0000\u0000\u0102\u0103\u0005\u0013\u0000\u0000\u0103\u0104\u0003"+
+ "\u001a\r\u0000\u0104\u0017\u0001\u0000\u0000\u0000\u0105\u0106\u0005\u0014"+
+ "\u0000\u0000\u0106\u0107\u0003\u001a\r\u0000\u0107\u0019\u0001\u0000\u0000"+
+ "\u0000\u0108\u010d\u0003\u001c\u000e\u0000\u0109\u010a\u0005>\u0000\u0000"+
+ "\u010a\u010c\u0003\u001c\u000e\u0000\u010b\u0109\u0001\u0000\u0000\u0000"+
+ "\u010c\u010f\u0001\u0000\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000"+
+ "\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u0111\u0001\u0000\u0000\u0000"+
+ "\u010f\u010d\u0001\u0000\u0000\u0000\u0110\u0112\u0003&\u0013\u0000\u0111"+
+ "\u0110\u0001\u0000\u0000\u0000\u0111\u0112\u0001\u0000\u0000\u0000\u0112"+
+ "\u001b\u0001\u0000\u0000\u0000\u0113\u0114\u0003\u001e\u000f\u0000\u0114"+
+ "\u0115\u0005=\u0000\u0000\u0115\u0116\u0003\"\u0011\u0000\u0116\u011d"+
+ "\u0001\u0000\u0000\u0000\u0117\u0118\u0003\"\u0011\u0000\u0118\u0119\u0005"+
+ "<\u0000\u0000\u0119\u011a\u0003 \u0010\u0000\u011a\u011d\u0001\u0000\u0000"+
+ "\u0000\u011b\u011d\u0003$\u0012\u0000\u011c\u0113\u0001\u0000\u0000\u0000"+
+ "\u011c\u0117\u0001\u0000\u0000\u0000\u011c\u011b\u0001\u0000\u0000\u0000"+
+ "\u011d\u001d\u0001\u0000\u0000\u0000\u011e\u011f\u0005k\u0000\u0000\u011f"+
+ "\u001f\u0001\u0000\u0000\u0000\u0120\u0121\u0005k\u0000\u0000\u0121!\u0001"+
+ "\u0000\u0000\u0000\u0122\u0123\u0005k\u0000\u0000\u0123#\u0001\u0000\u0000"+
+ "\u0000\u0124\u0125\u0007\u0000\u0000\u0000\u0125%\u0001\u0000\u0000\u0000"+
+ "\u0126\u0127\u0005j\u0000\u0000\u0127\u012c\u0005k\u0000\u0000\u0128\u0129"+
+ "\u0005>\u0000\u0000\u0129\u012b\u0005k\u0000\u0000\u012a\u0128\u0001\u0000"+
+ "\u0000\u0000\u012b\u012e\u0001\u0000\u0000\u0000\u012c\u012a\u0001\u0000"+
+ "\u0000\u0000\u012c\u012d\u0001\u0000\u0000\u0000\u012d\'\u0001\u0000\u0000"+
+ "\u0000\u012e\u012c\u0001\u0000\u0000\u0000\u012f\u0130\u0005\t\u0000\u0000"+
+ "\u0130\u0131\u0003\u000e\u0007\u0000\u0131)\u0001\u0000\u0000\u0000\u0132"+
+ "\u0134\u0005\u000f\u0000\u0000\u0133\u0135\u0003,\u0016\u0000\u0134\u0133"+
+ "\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000\u0000\u0135\u0138"+
+ "\u0001\u0000\u0000\u0000\u0136\u0137\u0005;\u0000\u0000\u0137\u0139\u0003"+
+ "\u000e\u0007\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0138\u0139\u0001"+
+ "\u0000\u0000\u0000\u0139+\u0001\u0000\u0000\u0000\u013a\u013f\u0003.\u0017"+
+ "\u0000\u013b\u013c\u0005>\u0000\u0000\u013c\u013e\u0003.\u0017\u0000\u013d"+
+ "\u013b\u0001\u0000\u0000\u0000\u013e\u0141\u0001\u0000\u0000\u0000\u013f"+
+ "\u013d\u0001\u0000\u0000\u0000\u013f\u0140\u0001\u0000\u0000\u0000\u0140"+
+ "-\u0001\u0000\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0142\u0145"+
+ "\u0003\u0010\b\u0000\u0143\u0144\u0005\u0010\u0000\u0000\u0144\u0146\u0003"+
+ "\u0080@\u0000\u0145\u0143\u0001\u0000\u0000\u0000\u0145\u0146\u0001\u0000"+
+ "\u0000\u0000\u0146/\u0001\u0000\u0000\u0000\u0147\u014c\u0003>\u001f\u0000"+
+ "\u0148\u0149\u0005@\u0000\u0000\u0149\u014b\u0003>\u001f\u0000\u014a\u0148"+
+ "\u0001\u0000\u0000\u0000\u014b\u014e\u0001\u0000\u0000\u0000\u014c\u014a"+
+ "\u0001\u0000\u0000\u0000\u014c\u014d\u0001\u0000\u0000\u0000\u014d1\u0001"+
+ "\u0000\u0000\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014f\u0154\u0003"+
+ "8\u001c\u0000\u0150\u0151\u0005@\u0000\u0000\u0151\u0153\u00038\u001c"+
+ "\u0000\u0152\u0150\u0001\u0000\u0000\u0000\u0153\u0156\u0001\u0000\u0000"+
+ "\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0154\u0155\u0001\u0000\u0000"+
+ "\u0000\u01553\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000"+
+ "\u0157\u015c\u00032\u0019\u0000\u0158\u0159\u0005>\u0000\u0000\u0159\u015b"+
+ "\u00032\u0019\u0000\u015a\u0158\u0001\u0000\u0000\u0000\u015b\u015e\u0001"+
+ "\u0000\u0000\u0000\u015c\u015a\u0001\u0000\u0000\u0000\u015c\u015d\u0001"+
+ "\u0000\u0000\u0000\u015d5\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000"+
+ "\u0000\u0000\u015f\u0160\u0007\u0001\u0000\u0000\u01607\u0001\u0000\u0000"+
+ "\u0000\u0161\u0165\u0005\u0080\u0000\u0000\u0162\u0165\u0003:\u001d\u0000"+
+ "\u0163\u0165\u0003<\u001e\u0000\u0164\u0161\u0001\u0000\u0000\u0000\u0164"+
+ "\u0162\u0001\u0000\u0000\u0000\u0164\u0163\u0001\u0000\u0000\u0000\u0165"+
+ "9\u0001\u0000\u0000\u0000\u0166\u0169\u0005L\u0000\u0000\u0167\u0169\u0005"+
+ "_\u0000\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0168\u0167\u0001\u0000"+
+ "\u0000\u0000\u0169;\u0001\u0000\u0000\u0000\u016a\u016d\u0005^\u0000\u0000"+
+ "\u016b\u016d\u0005`\u0000\u0000\u016c\u016a\u0001\u0000\u0000\u0000\u016c"+
+ "\u016b\u0001\u0000\u0000\u0000\u016d=\u0001\u0000\u0000\u0000\u016e\u0172"+
+ "\u00036\u001b\u0000\u016f\u0172\u0003:\u001d\u0000\u0170\u0172\u0003<"+
+ "\u001e\u0000\u0171\u016e\u0001\u0000\u0000\u0000\u0171\u016f\u0001\u0000"+
+ "\u0000\u0000\u0171\u0170\u0001\u0000\u0000\u0000\u0172?\u0001\u0000\u0000"+
+ "\u0000\u0173\u0174\u0005\u000b\u0000\u0000\u0174\u0175\u0003\u0094J\u0000"+
+ "\u0175A\u0001\u0000\u0000\u0000\u0176\u0177\u0005\u000e\u0000\u0000\u0177"+
+ "\u017c\u0003D\"\u0000\u0178\u0179\u0005>\u0000\u0000\u0179\u017b\u0003"+
+ "D\"\u0000\u017a\u0178\u0001\u0000\u0000\u0000\u017b\u017e\u0001\u0000"+
+ "\u0000\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017c\u017d\u0001\u0000"+
+ "\u0000\u0000\u017dC\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000"+
+ "\u0000\u017f\u0181\u0003\u0080@\u0000\u0180\u0182\u0007\u0002\u0000\u0000"+
+ "\u0181\u0180\u0001\u0000\u0000\u0000\u0181\u0182\u0001\u0000\u0000\u0000"+
+ "\u0182\u0185\u0001\u0000\u0000\u0000\u0183\u0184\u0005I\u0000\u0000\u0184"+
+ "\u0186\u0007\u0003\u0000\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0185"+
+ "\u0186\u0001\u0000\u0000\u0000\u0186E\u0001\u0000\u0000\u0000\u0187\u0188"+
+ "\u0005\u001e\u0000\u0000\u0188\u0189\u00034\u001a\u0000\u0189G\u0001\u0000"+
+ "\u0000\u0000\u018a\u018b\u0005\u001d\u0000\u0000\u018b\u018c\u00034\u001a"+
+ "\u0000\u018cI\u0001\u0000\u0000\u0000\u018d\u018e\u0005 \u0000\u0000\u018e"+
+ "\u0193\u0003L&\u0000\u018f\u0190\u0005>\u0000\u0000\u0190\u0192\u0003"+
+ "L&\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0195\u0001\u0000\u0000"+
+ "\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0193\u0194\u0001\u0000\u0000"+
+ "\u0000\u0194K\u0001\u0000\u0000\u0000\u0195\u0193\u0001\u0000\u0000\u0000"+
+ "\u0196\u0197\u00032\u0019\u0000\u0197\u0198\u0005\u0084\u0000\u0000\u0198"+
+ "\u0199\u00032\u0019\u0000\u0199\u019f\u0001\u0000\u0000\u0000\u019a\u019b"+
+ "\u00032\u0019\u0000\u019b\u019c\u0005:\u0000\u0000\u019c\u019d\u00032"+
+ "\u0019\u0000\u019d\u019f\u0001\u0000\u0000\u0000\u019e\u0196\u0001\u0000"+
+ "\u0000\u0000\u019e\u019a\u0001\u0000\u0000\u0000\u019fM\u0001\u0000\u0000"+
+ "\u0000\u01a0\u01a1\u0005\b\u0000\u0000\u01a1\u01a2\u0003\u008aE\u0000"+
+ "\u01a2\u01a4\u0003\u009eO\u0000\u01a3\u01a5\u0003P(\u0000\u01a4\u01a3"+
+ "\u0001\u0000\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5O\u0001"+
+ "\u0000\u0000\u0000\u01a6\u01ab\u0003R)\u0000\u01a7\u01a8\u0005>\u0000"+
+ "\u0000\u01a8\u01aa\u0003R)\u0000\u01a9\u01a7\u0001\u0000\u0000\u0000\u01aa"+
+ "\u01ad\u0001\u0000\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab"+
+ "\u01ac\u0001\u0000\u0000\u0000\u01acQ\u0001\u0000\u0000\u0000\u01ad\u01ab"+
+ "\u0001\u0000\u0000\u0000\u01ae\u01af\u00036\u001b\u0000\u01af\u01b0\u0005"+
+ ":\u0000\u0000\u01b0\u01b1\u0003\u0094J\u0000\u01b1S\u0001\u0000\u0000"+
+ "\u0000\u01b2\u01b3\u0005O\u0000\u0000\u01b3\u01b5\u0003\u0090H\u0000\u01b4"+
+ "\u01b2\u0001\u0000\u0000\u0000\u01b4\u01b5\u0001\u0000\u0000\u0000\u01b5"+
+ "U\u0001\u0000\u0000\u0000\u01b6\u01b7\u0005\n\u0000\u0000\u01b7\u01b8"+
+ "\u0003\u008aE\u0000\u01b8\u01b9\u0003\u009eO\u0000\u01b9W\u0001\u0000"+
+ "\u0000\u0000\u01ba\u01bb\u0005\u001c\u0000\u0000\u01bb\u01bc\u00030\u0018"+
+ "\u0000\u01bcY\u0001\u0000\u0000\u0000\u01bd\u01be\u0005\u0006\u0000\u0000"+
+ "\u01be\u01bf\u0003\\.\u0000\u01bf[\u0001\u0000\u0000\u0000\u01c0\u01c1"+
"\u0005c\u0000\u0000\u01c1\u01c2\u0003\u0002\u0001\u0000\u01c2\u01c3\u0005"+
- "d\u0000\u0000\u01c3[\u0001\u0000\u0000\u0000\u01c4\u01c5\u0005!\u0000"+
- "\u0000\u01c5\u01c6\u0005\u0088\u0000\u0000\u01c6]\u0001\u0000\u0000\u0000"+
- "\u01c7\u01c8\u0005\u0005\u0000\u0000\u01c8\u01cb\u0003`0\u0000\u01c9\u01ca"+
+ "d\u0000\u0000\u01c3]\u0001\u0000\u0000\u0000\u01c4\u01c5\u0005!\u0000"+
+ "\u0000\u01c5\u01c6\u0005\u0088\u0000\u0000\u01c6_\u0001\u0000\u0000\u0000"+
+ "\u01c7\u01c8\u0005\u0005\u0000\u0000\u01c8\u01cb\u0003b1\u0000\u01c9\u01ca"+
"\u0005J\u0000\u0000\u01ca\u01cc\u00032\u0019\u0000\u01cb\u01c9\u0001\u0000"+
"\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc\u01d6\u0001\u0000"+
- "\u0000\u0000\u01cd\u01ce\u0005O\u0000\u0000\u01ce\u01d3\u0003b1\u0000"+
- "\u01cf\u01d0\u0005>\u0000\u0000\u01d0\u01d2\u0003b1\u0000\u01d1\u01cf"+
+ "\u0000\u0000\u01cd\u01ce\u0005O\u0000\u0000\u01ce\u01d3\u0003d2\u0000"+
+ "\u01cf\u01d0\u0005>\u0000\u0000\u01d0\u01d2\u0003d2\u0000\u01d1\u01cf"+
"\u0001\u0000\u0000\u0000\u01d2\u01d5\u0001\u0000\u0000\u0000\u01d3\u01d1"+
"\u0001\u0000\u0000\u0000\u01d3\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d7"+
"\u0001\u0000\u0000\u0000\u01d5\u01d3\u0001\u0000\u0000\u0000\u01d6\u01cd"+
- "\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7_\u0001"+
- "\u0000\u0000\u0000\u01d8\u01d9\u0007\u0004\u0000\u0000\u01d9a\u0001\u0000"+
+ "\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7a\u0001"+
+ "\u0000\u0000\u0000\u01d8\u01d9\u0007\u0004\u0000\u0000\u01d9c\u0001\u0000"+
"\u0000\u0000\u01da\u01db\u00032\u0019\u0000\u01db\u01dc\u0005:\u0000\u0000"+
"\u01dc\u01de\u0001\u0000\u0000\u0000\u01dd\u01da\u0001\u0000\u0000\u0000"+
"\u01dd\u01de\u0001\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000"+
- "\u01df\u01e0\u00032\u0019\u0000\u01e0c\u0001\u0000\u0000\u0000\u01e1\u01e2"+
- "\u0005\r\u0000\u0000\u01e2\u01e3\u0003\u0098L\u0000\u01e3e\u0001\u0000"+
+ "\u01df\u01e0\u00032\u0019\u0000\u01e0e\u0001\u0000\u0000\u0000\u01e1\u01e2"+
+ "\u0005\r\u0000\u0000\u01e2\u01e3\u0003\u0094J\u0000\u01e3g\u0001\u0000"+
"\u0000\u0000\u01e4\u01e5\u0005\u0004\u0000\u0000\u01e5\u01e8\u00030\u0018"+
"\u0000\u01e6\u01e7\u0005J\u0000\u0000\u01e7\u01e9\u00030\u0018\u0000\u01e8"+
"\u01e6\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9"+
"\u01ef\u0001\u0000\u0000\u0000\u01ea\u01eb\u0005\u0084\u0000\u0000\u01eb"+
"\u01ec\u00030\u0018\u0000\u01ec\u01ed\u0005>\u0000\u0000\u01ed\u01ee\u0003"+
"0\u0018\u0000\u01ee\u01f0\u0001\u0000\u0000\u0000\u01ef\u01ea\u0001\u0000"+
- "\u0000\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0g\u0001\u0000\u0000"+
- "\u0000\u01f1\u01f2\u0005\u0015\u0000\u0000\u01f2\u01f3\u0003j5\u0000\u01f3"+
- "i\u0001\u0000\u0000\u0000\u01f4\u01f6\u0003l6\u0000\u01f5\u01f4\u0001"+
+ "\u0000\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0i\u0001\u0000\u0000"+
+ "\u0000\u01f1\u01f2\u0005\u0015\u0000\u0000\u01f2\u01f3\u0003l6\u0000\u01f3"+
+ "k\u0001\u0000\u0000\u0000\u01f4\u01f6\u0003n7\u0000\u01f5\u01f4\u0001"+
"\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000\u0000\u01f7\u01f5\u0001"+
- "\u0000\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8k\u0001\u0000"+
- "\u0000\u0000\u01f9\u01fa\u0005c\u0000\u0000\u01fa\u01fb\u0003n7\u0000"+
- "\u01fb\u01fc\u0005d\u0000\u0000\u01fcm\u0001\u0000\u0000\u0000\u01fd\u01fe"+
- "\u00067\uffff\uffff\u0000\u01fe\u01ff\u0003p8\u0000\u01ff\u0205\u0001"+
+ "\u0000\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8m\u0001\u0000"+
+ "\u0000\u0000\u01f9\u01fa\u0005c\u0000\u0000\u01fa\u01fb\u0003p8\u0000"+
+ "\u01fb\u01fc\u0005d\u0000\u0000\u01fco\u0001\u0000\u0000\u0000\u01fd\u01fe"+
+ "\u00068\uffff\uffff\u0000\u01fe\u01ff\u0003r9\u0000\u01ff\u0205\u0001"+
"\u0000\u0000\u0000\u0200\u0201\n\u0001\u0000\u0000\u0201\u0202\u00054"+
- "\u0000\u0000\u0202\u0204\u0003p8\u0000\u0203\u0200\u0001\u0000\u0000\u0000"+
+ "\u0000\u0000\u0202\u0204\u0003r9\u0000\u0203\u0200\u0001\u0000\u0000\u0000"+
"\u0204\u0207\u0001\u0000\u0000\u0000\u0205\u0203\u0001\u0000\u0000\u0000"+
- "\u0205\u0206\u0001\u0000\u0000\u0000\u0206o\u0001\u0000\u0000\u0000\u0207"+
+ "\u0205\u0206\u0001\u0000\u0000\u0000\u0206q\u0001\u0000\u0000\u0000\u0207"+
"\u0205\u0001\u0000\u0000\u0000\u0208\u0209\u0003\u0006\u0003\u0000\u0209"+
- "q\u0001\u0000\u0000\u0000\u020a\u020e\u0005\u0007\u0000\u0000\u020b\u020c"+
+ "s\u0001\u0000\u0000\u0000\u020a\u020e\u0005\u0007\u0000\u0000\u020b\u020c"+
"\u00030\u0018\u0000\u020c\u020d\u0005:\u0000\u0000\u020d\u020f\u0001\u0000"+
"\u0000\u0000\u020e\u020b\u0001\u0000\u0000\u0000\u020e\u020f\u0001\u0000"+
- "\u0000\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210\u0211\u0003\u008e"+
- "G\u0000\u0211\u0212\u0005O\u0000\u0000\u0212\u0213\u0003>\u001f\u0000"+
- "\u0213s\u0001\u0000\u0000\u0000\u0214\u0215\u0005\u001b\u0000\u0000\u0215"+
- "\u0216\u0003\u001c\u000e\u0000\u0216\u0217\u0005J\u0000\u0000\u0217\u0218"+
- "\u00034\u001a\u0000\u0218u\u0001\u0000\u0000\u0000\u0219\u021a\u0005\u0011"+
- "\u0000\u0000\u021a\u021d\u0003,\u0016\u0000\u021b\u021c\u0005;\u0000\u0000"+
- "\u021c\u021e\u0003\u000e\u0007\u0000\u021d\u021b\u0001\u0000\u0000\u0000"+
- "\u021d\u021e\u0001\u0000\u0000\u0000\u021ew\u0001\u0000\u0000\u0000\u021f"+
- "\u0220\u0005\u001f\u0000\u0000\u0220\u0221\u00034\u001a\u0000\u0221y\u0001"+
- "\u0000\u0000\u0000\u0222\u0223\u0005\u0016\u0000\u0000\u0223{\u0001\u0000"+
- "\u0000\u0000\u0224\u0229\u0003~?\u0000\u0225\u0226\u0005>\u0000\u0000"+
- "\u0226\u0228\u0003~?\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0228\u022b"+
- "\u0001\u0000\u0000\u0000\u0229\u0227\u0001\u0000\u0000\u0000\u0229\u022a"+
- "\u0001\u0000\u0000\u0000\u022a}\u0001\u0000\u0000\u0000\u022b\u0229\u0001"+
- "\u0000\u0000\u0000\u022c\u022d\u00036\u001b\u0000\u022d\u022e\u0005:\u0000"+
- "\u0000\u022e\u022f\u0003\u0080@\u0000\u022f\u007f\u0001\u0000\u0000\u0000"+
- "\u0230\u0233\u0003\u0098L\u0000\u0231\u0233\u00036\u001b\u0000\u0232\u0230"+
- "\u0001\u0000\u0000\u0000\u0232\u0231\u0001\u0000\u0000\u0000\u0233\u0081"+
- "\u0001\u0000\u0000\u0000\u0234\u0235\u0005\u0012\u0000\u0000\u0235\u0236"+
- "\u0003\u0098L\u0000\u0236\u0237\u0005J\u0000\u0000\u0237\u023a\u0003\u0012"+
- "\t\u0000\u0238\u0239\u0005O\u0000\u0000\u0239\u023b\u0003|>\u0000\u023a"+
- "\u0238\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000\u023b"+
- "\u0083\u0001\u0000\u0000\u0000\u023c\u023d\u0006B\uffff\uffff\u0000\u023d"+
- "\u023e\u0005G\u0000\u0000\u023e\u025a\u0003\u0084B\b\u023f\u025a\u0003"+
- "\u008aE\u0000\u0240\u025a\u0003\u0086C\u0000\u0241\u0243\u0003\u008aE"+
- "\u0000\u0242\u0244\u0005G\u0000\u0000\u0243\u0242\u0001\u0000\u0000\u0000"+
- "\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000\u0000\u0000"+
- "\u0245\u0246\u0005C\u0000\u0000\u0246\u0247\u0005c\u0000\u0000\u0247\u024c"+
- "\u0003\u008aE\u0000\u0248\u0249\u0005>\u0000\u0000\u0249\u024b\u0003\u008a"+
- "E\u0000\u024a\u0248\u0001\u0000\u0000\u0000\u024b\u024e\u0001\u0000\u0000"+
- "\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000"+
- "\u0000\u024d\u024f\u0001\u0000\u0000\u0000\u024e\u024c\u0001\u0000\u0000"+
- "\u0000\u024f\u0250\u0005d\u0000\u0000\u0250\u025a\u0001\u0000\u0000\u0000"+
- "\u0251\u0252\u0003\u008aE\u0000\u0252\u0254\u0005D\u0000\u0000\u0253\u0255"+
- "\u0005G\u0000\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0254\u0255\u0001"+
- "\u0000\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0005"+
- "H\u0000\u0000\u0257\u025a\u0001\u0000\u0000\u0000\u0258\u025a\u0003\u0088"+
- "D\u0000\u0259\u023c\u0001\u0000\u0000\u0000\u0259\u023f\u0001\u0000\u0000"+
- "\u0000\u0259\u0240\u0001\u0000\u0000\u0000\u0259\u0241\u0001\u0000\u0000"+
- "\u0000\u0259\u0251\u0001\u0000\u0000\u0000\u0259\u0258\u0001\u0000\u0000"+
- "\u0000\u025a\u0263\u0001\u0000\u0000\u0000\u025b\u025c\n\u0005\u0000\u0000"+
- "\u025c\u025d\u00058\u0000\u0000\u025d\u0262\u0003\u0084B\u0006\u025e\u025f"+
- "\n\u0004\u0000\u0000\u025f\u0260\u0005K\u0000\u0000\u0260\u0262\u0003"+
- "\u0084B\u0005\u0261\u025b\u0001\u0000\u0000\u0000\u0261\u025e\u0001\u0000"+
- "\u0000\u0000\u0262\u0265\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000"+
- "\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0085\u0001\u0000"+
- "\u0000\u0000\u0265\u0263\u0001\u0000\u0000\u0000\u0266\u0268\u0003\u008a"+
- "E\u0000\u0267\u0269\u0005G\u0000\u0000\u0268\u0267\u0001\u0000\u0000\u0000"+
- "\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a\u0001\u0000\u0000\u0000"+
- "\u026a\u026b\u0005F\u0000\u0000\u026b\u026c\u0003\u00a2Q\u0000\u026c\u0295"+
- "\u0001\u0000\u0000\u0000\u026d\u026f\u0003\u008aE\u0000\u026e\u0270\u0005"+
- "G\u0000\u0000\u026f\u026e\u0001\u0000\u0000\u0000\u026f\u0270\u0001\u0000"+
- "\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u0272\u0005M\u0000"+
- "\u0000\u0272\u0273\u0003\u00a2Q\u0000\u0273\u0295\u0001\u0000\u0000\u0000"+
- "\u0274\u0276\u0003\u008aE\u0000\u0275\u0277\u0005G\u0000\u0000\u0276\u0275"+
- "\u0001\u0000\u0000\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0278"+
- "\u0001\u0000\u0000\u0000\u0278\u0279\u0005F\u0000\u0000\u0279\u027a\u0005"+
- "c\u0000\u0000\u027a\u027f\u0003\u00a2Q\u0000\u027b\u027c\u0005>\u0000"+
- "\u0000\u027c\u027e\u0003\u00a2Q\u0000\u027d\u027b\u0001\u0000\u0000\u0000"+
- "\u027e\u0281\u0001\u0000\u0000\u0000\u027f\u027d\u0001\u0000\u0000\u0000"+
- "\u027f\u0280\u0001\u0000\u0000\u0000\u0280\u0282\u0001\u0000\u0000\u0000"+
- "\u0281\u027f\u0001\u0000\u0000\u0000\u0282\u0283\u0005d\u0000\u0000\u0283"+
- "\u0295\u0001\u0000\u0000\u0000\u0284\u0286\u0003\u008aE\u0000\u0285\u0287"+
- "\u0005G\u0000\u0000\u0286\u0285\u0001\u0000\u0000\u0000\u0286\u0287\u0001"+
- "\u0000\u0000\u0000\u0287\u0288\u0001\u0000\u0000\u0000\u0288\u0289\u0005"+
- "M\u0000\u0000\u0289\u028a\u0005c\u0000\u0000\u028a\u028f\u0003\u00a2Q"+
- "\u0000\u028b\u028c\u0005>\u0000\u0000\u028c\u028e\u0003\u00a2Q\u0000\u028d"+
- "\u028b\u0001\u0000\u0000\u0000\u028e\u0291\u0001\u0000\u0000\u0000\u028f"+
- "\u028d\u0001\u0000\u0000\u0000\u028f\u0290\u0001\u0000\u0000\u0000\u0290"+
- "\u0292\u0001\u0000\u0000\u0000\u0291\u028f\u0001\u0000\u0000\u0000\u0292"+
- "\u0293\u0005d\u0000\u0000\u0293\u0295\u0001\u0000\u0000\u0000\u0294\u0266"+
- "\u0001\u0000\u0000\u0000\u0294\u026d\u0001\u0000\u0000\u0000\u0294\u0274"+
- "\u0001\u0000\u0000\u0000\u0294\u0284\u0001\u0000\u0000\u0000\u0295\u0087"+
- "\u0001\u0000\u0000\u0000\u0296\u0299\u00030\u0018\u0000\u0297\u0298\u0005"+
- "<\u0000\u0000\u0298\u029a\u0003\n\u0005\u0000\u0299\u0297\u0001\u0000"+
- "\u0000\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029b\u0001\u0000"+
- "\u0000\u0000\u029b\u029c\u0005=\u0000\u0000\u029c\u029d\u0003\u0098L\u0000"+
- "\u029d\u0089\u0001\u0000\u0000\u0000\u029e\u02a4\u0003\u008cF\u0000\u029f"+
- "\u02a0\u0003\u008cF\u0000\u02a0\u02a1\u0003\u00a4R\u0000\u02a1\u02a2\u0003"+
- "\u008cF\u0000\u02a2\u02a4\u0001\u0000\u0000\u0000\u02a3\u029e\u0001\u0000"+
- "\u0000\u0000\u02a3\u029f\u0001\u0000\u0000\u0000\u02a4\u008b\u0001\u0000"+
- "\u0000\u0000\u02a5\u02a6\u0006F\uffff\uffff\u0000\u02a6\u02aa\u0003\u008e"+
- "G\u0000\u02a7\u02a8\u0007\u0005\u0000\u0000\u02a8\u02aa\u0003\u008cF\u0003"+
- "\u02a9\u02a5\u0001\u0000\u0000\u0000\u02a9\u02a7\u0001\u0000\u0000\u0000"+
- "\u02aa\u02b3\u0001\u0000\u0000\u0000\u02ab\u02ac\n\u0002\u0000\u0000\u02ac"+
- "\u02ad\u0007\u0006\u0000\u0000\u02ad\u02b2\u0003\u008cF\u0003\u02ae\u02af"+
- "\n\u0001\u0000\u0000\u02af\u02b0\u0007\u0005\u0000\u0000\u02b0\u02b2\u0003"+
- "\u008cF\u0002\u02b1\u02ab\u0001\u0000\u0000\u0000\u02b1\u02ae\u0001\u0000"+
- "\u0000\u0000\u02b2\u02b5\u0001\u0000\u0000\u0000\u02b3\u02b1\u0001\u0000"+
- "\u0000\u0000\u02b3\u02b4\u0001\u0000\u0000\u0000\u02b4\u008d\u0001\u0000"+
- "\u0000\u0000\u02b5\u02b3\u0001\u0000\u0000\u0000\u02b6\u02b7\u0006G\uffff"+
- "\uffff\u0000\u02b7\u02bf\u0003\u0098L\u0000\u02b8\u02bf\u00030\u0018\u0000"+
- "\u02b9\u02bf\u0003\u0090H\u0000\u02ba\u02bb\u0005c\u0000\u0000\u02bb\u02bc"+
- "\u0003\u0084B\u0000\u02bc\u02bd\u0005d\u0000\u0000\u02bd\u02bf\u0001\u0000"+
- "\u0000\u0000\u02be\u02b6\u0001\u0000\u0000\u0000\u02be\u02b8\u0001\u0000"+
- "\u0000\u0000\u02be\u02b9\u0001\u0000\u0000\u0000\u02be\u02ba\u0001\u0000"+
- "\u0000\u0000\u02bf\u02c5\u0001\u0000\u0000\u0000\u02c0\u02c1\n\u0001\u0000"+
- "\u0000\u02c1\u02c2\u0005<\u0000\u0000\u02c2\u02c4\u0003\n\u0005\u0000"+
- "\u02c3\u02c0\u0001\u0000\u0000\u0000\u02c4\u02c7\u0001\u0000\u0000\u0000"+
- "\u02c5\u02c3\u0001\u0000\u0000\u0000\u02c5\u02c6\u0001\u0000\u0000\u0000"+
- "\u02c6\u008f\u0001\u0000\u0000\u0000\u02c7\u02c5\u0001\u0000\u0000\u0000"+
- "\u02c8\u02c9\u0003\u0092I\u0000\u02c9\u02d7\u0005c\u0000\u0000\u02ca\u02d8"+
- "\u0005Y\u0000\u0000\u02cb\u02d0\u0003\u0084B\u0000\u02cc\u02cd\u0005>"+
- "\u0000\u0000\u02cd\u02cf\u0003\u0084B\u0000\u02ce\u02cc\u0001\u0000\u0000"+
- "\u0000\u02cf\u02d2\u0001\u0000\u0000\u0000\u02d0\u02ce\u0001\u0000\u0000"+
- "\u0000\u02d0\u02d1\u0001\u0000\u0000\u0000\u02d1\u02d5\u0001\u0000\u0000"+
- "\u0000\u02d2\u02d0\u0001\u0000\u0000\u0000\u02d3\u02d4\u0005>\u0000\u0000"+
- "\u02d4\u02d6\u0003\u0094J\u0000\u02d5\u02d3\u0001\u0000\u0000\u0000\u02d5"+
- "\u02d6\u0001\u0000\u0000\u0000\u02d6\u02d8\u0001\u0000\u0000\u0000\u02d7"+
- "\u02ca\u0001\u0000\u0000\u0000\u02d7\u02cb\u0001\u0000\u0000\u0000\u02d7"+
- "\u02d8\u0001\u0000\u0000\u0000\u02d8\u02d9\u0001\u0000\u0000\u0000\u02d9"+
- "\u02da\u0005d\u0000\u0000\u02da\u0091\u0001\u0000\u0000\u0000\u02db\u02dc"+
- "\u0003>\u001f\u0000\u02dc\u0093\u0001\u0000\u0000\u0000\u02dd\u02de\u0005"+
- "\\\u0000\u0000\u02de\u02e3\u0003\u0096K\u0000\u02df\u02e0\u0005>\u0000"+
- "\u0000\u02e0\u02e2\u0003\u0096K\u0000\u02e1\u02df\u0001\u0000\u0000\u0000"+
- "\u02e2\u02e5\u0001\u0000\u0000\u0000\u02e3\u02e1\u0001\u0000\u0000\u0000"+
- "\u02e3\u02e4\u0001\u0000\u0000\u0000\u02e4\u02e6\u0001\u0000\u0000\u0000"+
- "\u02e5\u02e3\u0001\u0000\u0000\u0000\u02e6\u02e7\u0005]\u0000\u0000\u02e7"+
- "\u0095\u0001\u0000\u0000\u0000\u02e8\u02e9\u0003\u00a2Q\u0000\u02e9\u02ea"+
- "\u0005=\u0000\u0000\u02ea\u02eb\u0003\u0098L\u0000\u02eb\u0097\u0001\u0000"+
- "\u0000\u0000\u02ec\u0317\u0005H\u0000\u0000\u02ed\u02ee\u0003\u00a0P\u0000"+
- "\u02ee\u02ef\u0005e\u0000\u0000\u02ef\u0317\u0001\u0000\u0000\u0000\u02f0"+
- "\u0317\u0003\u009eO\u0000\u02f1\u0317\u0003\u00a0P\u0000\u02f2\u0317\u0003"+
- "\u009aM\u0000\u02f3\u0317\u0003:\u001d\u0000\u02f4\u0317\u0003\u00a2Q"+
- "\u0000\u02f5\u02f6\u0005a\u0000\u0000\u02f6\u02fb\u0003\u009cN\u0000\u02f7"+
- "\u02f8\u0005>\u0000\u0000\u02f8\u02fa\u0003\u009cN\u0000\u02f9\u02f7\u0001"+
- "\u0000\u0000\u0000\u02fa\u02fd\u0001\u0000\u0000\u0000\u02fb\u02f9\u0001"+
- "\u0000\u0000\u0000\u02fb\u02fc\u0001\u0000\u0000\u0000\u02fc\u02fe\u0001"+
- "\u0000\u0000\u0000\u02fd\u02fb\u0001\u0000\u0000\u0000\u02fe\u02ff\u0005"+
- "b\u0000\u0000\u02ff\u0317\u0001\u0000\u0000\u0000\u0300\u0301\u0005a\u0000"+
- "\u0000\u0301\u0306\u0003\u009aM\u0000\u0302\u0303\u0005>\u0000\u0000\u0303"+
- "\u0305\u0003\u009aM\u0000\u0304\u0302\u0001\u0000\u0000\u0000\u0305\u0308"+
- "\u0001\u0000\u0000\u0000\u0306\u0304\u0001\u0000\u0000\u0000\u0306\u0307"+
- "\u0001\u0000\u0000\u0000\u0307\u0309\u0001\u0000\u0000\u0000\u0308\u0306"+
- "\u0001\u0000\u0000\u0000\u0309\u030a\u0005b\u0000\u0000\u030a\u0317\u0001"+
- "\u0000\u0000\u0000\u030b\u030c\u0005a\u0000\u0000\u030c\u0311\u0003\u00a2"+
- "Q\u0000\u030d\u030e\u0005>\u0000\u0000\u030e\u0310\u0003\u00a2Q\u0000"+
- "\u030f\u030d\u0001\u0000\u0000\u0000\u0310\u0313\u0001\u0000\u0000\u0000"+
- "\u0311\u030f\u0001\u0000\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000"+
- "\u0312\u0314\u0001\u0000\u0000\u0000\u0313\u0311\u0001\u0000\u0000\u0000"+
- "\u0314\u0315\u0005b\u0000\u0000\u0315\u0317\u0001\u0000\u0000\u0000\u0316"+
- "\u02ec\u0001\u0000\u0000\u0000\u0316\u02ed\u0001\u0000\u0000\u0000\u0316"+
- "\u02f0\u0001\u0000\u0000\u0000\u0316\u02f1\u0001\u0000\u0000\u0000\u0316"+
- "\u02f2\u0001\u0000\u0000\u0000\u0316\u02f3\u0001\u0000\u0000\u0000\u0316"+
- "\u02f4\u0001\u0000\u0000\u0000\u0316\u02f5\u0001\u0000\u0000\u0000\u0316"+
- "\u0300\u0001\u0000\u0000\u0000\u0316\u030b\u0001\u0000\u0000\u0000\u0317"+
- "\u0099\u0001\u0000\u0000\u0000\u0318\u0319\u0007\u0007\u0000\u0000\u0319"+
- "\u009b\u0001\u0000\u0000\u0000\u031a\u031d\u0003\u009eO\u0000\u031b\u031d"+
- "\u0003\u00a0P\u0000\u031c\u031a\u0001\u0000\u0000\u0000\u031c\u031b\u0001"+
- "\u0000\u0000\u0000\u031d\u009d\u0001\u0000\u0000\u0000\u031e\u0320\u0007"+
- "\u0005\u0000\u0000\u031f\u031e\u0001\u0000\u0000\u0000\u031f\u0320\u0001"+
- "\u0000\u0000\u0000\u0320\u0321\u0001\u0000\u0000\u0000\u0321\u0322\u0005"+
- "7\u0000\u0000\u0322\u009f\u0001\u0000\u0000\u0000\u0323\u0325\u0007\u0005"+
- "\u0000\u0000\u0324\u0323\u0001\u0000\u0000\u0000\u0324\u0325\u0001\u0000"+
- "\u0000\u0000\u0325\u0326\u0001\u0000\u0000\u0000\u0326\u0327\u00056\u0000"+
- "\u0000\u0327\u00a1\u0001\u0000\u0000\u0000\u0328\u0329\u00055\u0000\u0000"+
- "\u0329\u00a3\u0001\u0000\u0000\u0000\u032a\u032b\u0007\b\u0000\u0000\u032b"+
- "\u00a5\u0001\u0000\u0000\u0000\u032c\u032d\u0007\t\u0000\u0000\u032d\u032e"+
- "\u0005r\u0000\u0000\u032e\u032f\u0003\u00a8T\u0000\u032f\u0330\u0003\u00aa"+
- "U\u0000\u0330\u00a7\u0001\u0000\u0000\u0000\u0331\u0332\u0003\u001c\u000e"+
- "\u0000\u0332\u00a9\u0001\u0000\u0000\u0000\u0333\u0334\u0005J\u0000\u0000"+
- "\u0334\u0339\u0003\u00acV\u0000\u0335\u0336\u0005>\u0000\u0000\u0336\u0338"+
- "\u0003\u00acV\u0000\u0337\u0335\u0001\u0000\u0000\u0000\u0338\u033b\u0001"+
- "\u0000\u0000\u0000\u0339\u0337\u0001\u0000\u0000\u0000\u0339\u033a\u0001"+
- "\u0000\u0000\u0000\u033a\u00ab\u0001\u0000\u0000\u0000\u033b\u0339\u0001"+
- "\u0000\u0000\u0000\u033c\u033d\u0003\u008aE\u0000\u033d\u00ad\u0001\u0000"+
- "\u0000\u0000J\u00b9\u00c3\u00e0\u00ef\u00f5\u00fe\u0104\u0111\u0115\u0120"+
- "\u0130\u0138\u013c\u0143\u0149\u0150\u0158\u0160\u0168\u016c\u0170\u0175"+
- "\u0180\u0185\u0189\u0197\u01a2\u01a8\u01b6\u01cb\u01d3\u01d6\u01dd\u01e8"+
- "\u01ef\u01f7\u0205\u020e\u021d\u0229\u0232\u023a\u0243\u024c\u0254\u0259"+
- "\u0261\u0263\u0268\u026f\u0276\u027f\u0286\u028f\u0294\u0299\u02a3\u02a9"+
- "\u02b1\u02b3\u02be\u02c5\u02d0\u02d5\u02d7\u02e3\u02fb\u0306\u0311\u0316"+
- "\u031c\u031f\u0324\u0339";
+ "\u0000\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210\u0211\u0003\u008a"+
+ "E\u0000\u0211\u0212\u0003T*\u0000\u0212u\u0001\u0000\u0000\u0000\u0213"+
+ "\u0214\u0005\u001b\u0000\u0000\u0214\u0215\u0003\u001c\u000e\u0000\u0215"+
+ "\u0216\u0005J\u0000\u0000\u0216\u0217\u00034\u001a\u0000\u0217w\u0001"+
+ "\u0000\u0000\u0000\u0218\u0219\u0005\u0011\u0000\u0000\u0219\u021c\u0003"+
+ ",\u0016\u0000\u021a\u021b\u0005;\u0000\u0000\u021b\u021d\u0003\u000e\u0007"+
+ "\u0000\u021c\u021a\u0001\u0000\u0000\u0000\u021c\u021d\u0001\u0000\u0000"+
+ "\u0000\u021dy\u0001\u0000\u0000\u0000\u021e\u021f\u0005\u001f\u0000\u0000"+
+ "\u021f\u0220\u00034\u001a\u0000\u0220{\u0001\u0000\u0000\u0000\u0221\u0222"+
+ "\u0005\u0016\u0000\u0000\u0222}\u0001\u0000\u0000\u0000\u0223\u0227\u0005"+
+ "\u0012\u0000\u0000\u0224\u0225\u00030\u0018\u0000\u0225\u0226\u0005:\u0000"+
+ "\u0000\u0226\u0228\u0001\u0000\u0000\u0000\u0227\u0224\u0001\u0000\u0000"+
+ "\u0000\u0227\u0228\u0001\u0000\u0000\u0000\u0228\u0229\u0001\u0000\u0000"+
+ "\u0000\u0229\u022a\u0003\u0094J\u0000\u022a\u022b\u0005J\u0000\u0000\u022b"+
+ "\u022c\u0003\u0012\t\u0000\u022c\u022d\u0003T*\u0000\u022d\u007f\u0001"+
+ "\u0000\u0000\u0000\u022e\u022f\u0006@\uffff\uffff\u0000\u022f\u0230\u0005"+
+ "G\u0000\u0000\u0230\u024c\u0003\u0080@\b\u0231\u024c\u0003\u0086C\u0000"+
+ "\u0232\u024c\u0003\u0082A\u0000\u0233\u0235\u0003\u0086C\u0000\u0234\u0236"+
+ "\u0005G\u0000\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0235\u0236\u0001"+
+ "\u0000\u0000\u0000\u0236\u0237\u0001\u0000\u0000\u0000\u0237\u0238\u0005"+
+ "C\u0000\u0000\u0238\u0239\u0005c\u0000\u0000\u0239\u023e\u0003\u0086C"+
+ "\u0000\u023a\u023b\u0005>\u0000\u0000\u023b\u023d\u0003\u0086C\u0000\u023c"+
+ "\u023a\u0001\u0000\u0000\u0000\u023d\u0240\u0001\u0000\u0000\u0000\u023e"+
+ "\u023c\u0001\u0000\u0000\u0000\u023e\u023f\u0001\u0000\u0000\u0000\u023f"+
+ "\u0241\u0001\u0000\u0000\u0000\u0240\u023e\u0001\u0000\u0000\u0000\u0241"+
+ "\u0242\u0005d\u0000\u0000\u0242\u024c\u0001\u0000\u0000\u0000\u0243\u0244"+
+ "\u0003\u0086C\u0000\u0244\u0246\u0005D\u0000\u0000\u0245\u0247\u0005G"+
+ "\u0000\u0000\u0246\u0245\u0001\u0000\u0000\u0000\u0246\u0247\u0001\u0000"+
+ "\u0000\u0000\u0247\u0248\u0001\u0000\u0000\u0000\u0248\u0249\u0005H\u0000"+
+ "\u0000\u0249\u024c\u0001\u0000\u0000\u0000\u024a\u024c\u0003\u0084B\u0000"+
+ "\u024b\u022e\u0001\u0000\u0000\u0000\u024b\u0231\u0001\u0000\u0000\u0000"+
+ "\u024b\u0232\u0001\u0000\u0000\u0000\u024b\u0233\u0001\u0000\u0000\u0000"+
+ "\u024b\u0243\u0001\u0000\u0000\u0000\u024b\u024a\u0001\u0000\u0000\u0000"+
+ "\u024c\u0255\u0001\u0000\u0000\u0000\u024d\u024e\n\u0005\u0000\u0000\u024e"+
+ "\u024f\u00058\u0000\u0000\u024f\u0254\u0003\u0080@\u0006\u0250\u0251\n"+
+ "\u0004\u0000\u0000\u0251\u0252\u0005K\u0000\u0000\u0252\u0254\u0003\u0080"+
+ "@\u0005\u0253\u024d\u0001\u0000\u0000\u0000\u0253\u0250\u0001\u0000\u0000"+
+ "\u0000\u0254\u0257\u0001\u0000\u0000\u0000\u0255\u0253\u0001\u0000\u0000"+
+ "\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0081\u0001\u0000\u0000"+
+ "\u0000\u0257\u0255\u0001\u0000\u0000\u0000\u0258\u025a\u0003\u0086C\u0000"+
+ "\u0259\u025b\u0005G\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025a"+
+ "\u025b\u0001\u0000\u0000\u0000\u025b\u025c\u0001\u0000\u0000\u0000\u025c"+
+ "\u025d\u0005F\u0000\u0000\u025d\u025e\u0003\u009eO\u0000\u025e\u0287\u0001"+
+ "\u0000\u0000\u0000\u025f\u0261\u0003\u0086C\u0000\u0260\u0262\u0005G\u0000"+
+ "\u0000\u0261\u0260\u0001\u0000\u0000\u0000\u0261\u0262\u0001\u0000\u0000"+
+ "\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0264\u0005M\u0000\u0000"+
+ "\u0264\u0265\u0003\u009eO\u0000\u0265\u0287\u0001\u0000\u0000\u0000\u0266"+
+ "\u0268\u0003\u0086C\u0000\u0267\u0269\u0005G\u0000\u0000\u0268\u0267\u0001"+
+ "\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a\u0001"+
+ "\u0000\u0000\u0000\u026a\u026b\u0005F\u0000\u0000\u026b\u026c\u0005c\u0000"+
+ "\u0000\u026c\u0271\u0003\u009eO\u0000\u026d\u026e\u0005>\u0000\u0000\u026e"+
+ "\u0270\u0003\u009eO\u0000\u026f\u026d\u0001\u0000\u0000\u0000\u0270\u0273"+
+ "\u0001\u0000\u0000\u0000\u0271\u026f\u0001\u0000\u0000\u0000\u0271\u0272"+
+ "\u0001\u0000\u0000\u0000\u0272\u0274\u0001\u0000\u0000\u0000\u0273\u0271"+
+ "\u0001\u0000\u0000\u0000\u0274\u0275\u0005d\u0000\u0000\u0275\u0287\u0001"+
+ "\u0000\u0000\u0000\u0276\u0278\u0003\u0086C\u0000\u0277\u0279\u0005G\u0000"+
+ "\u0000\u0278\u0277\u0001\u0000\u0000\u0000\u0278\u0279\u0001\u0000\u0000"+
+ "\u0000\u0279\u027a\u0001\u0000\u0000\u0000\u027a\u027b\u0005M\u0000\u0000"+
+ "\u027b\u027c\u0005c\u0000\u0000\u027c\u0281\u0003\u009eO\u0000\u027d\u027e"+
+ "\u0005>\u0000\u0000\u027e\u0280\u0003\u009eO\u0000\u027f\u027d\u0001\u0000"+
+ "\u0000\u0000\u0280\u0283\u0001\u0000\u0000\u0000\u0281\u027f\u0001\u0000"+
+ "\u0000\u0000\u0281\u0282\u0001\u0000\u0000\u0000\u0282\u0284\u0001\u0000"+
+ "\u0000\u0000\u0283\u0281\u0001\u0000\u0000\u0000\u0284\u0285\u0005d\u0000"+
+ "\u0000\u0285\u0287\u0001\u0000\u0000\u0000\u0286\u0258\u0001\u0000\u0000"+
+ "\u0000\u0286\u025f\u0001\u0000\u0000\u0000\u0286\u0266\u0001\u0000\u0000"+
+ "\u0000\u0286\u0276\u0001\u0000\u0000\u0000\u0287\u0083\u0001\u0000\u0000"+
+ "\u0000\u0288\u028b\u00030\u0018\u0000\u0289\u028a\u0005<\u0000\u0000\u028a"+
+ "\u028c\u0003\n\u0005\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028b\u028c"+
+ "\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000\u0000\u028d\u028e"+
+ "\u0005=\u0000\u0000\u028e\u028f\u0003\u0094J\u0000\u028f\u0085\u0001\u0000"+
+ "\u0000\u0000\u0290\u0296\u0003\u0088D\u0000\u0291\u0292\u0003\u0088D\u0000"+
+ "\u0292\u0293\u0003\u00a0P\u0000\u0293\u0294\u0003\u0088D\u0000\u0294\u0296"+
+ "\u0001\u0000\u0000\u0000\u0295\u0290\u0001\u0000\u0000\u0000\u0295\u0291"+
+ "\u0001\u0000\u0000\u0000\u0296\u0087\u0001\u0000\u0000\u0000\u0297\u0298"+
+ "\u0006D\uffff\uffff\u0000\u0298\u029c\u0003\u008aE\u0000\u0299\u029a\u0007"+
+ "\u0005\u0000\u0000\u029a\u029c\u0003\u0088D\u0003\u029b\u0297\u0001\u0000"+
+ "\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029c\u02a5\u0001\u0000"+
+ "\u0000\u0000\u029d\u029e\n\u0002\u0000\u0000\u029e\u029f\u0007\u0006\u0000"+
+ "\u0000\u029f\u02a4\u0003\u0088D\u0003\u02a0\u02a1\n\u0001\u0000\u0000"+
+ "\u02a1\u02a2\u0007\u0005\u0000\u0000\u02a2\u02a4\u0003\u0088D\u0002\u02a3"+
+ "\u029d\u0001\u0000\u0000\u0000\u02a3\u02a0\u0001\u0000\u0000\u0000\u02a4"+
+ "\u02a7\u0001\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a5"+
+ "\u02a6\u0001\u0000\u0000\u0000\u02a6\u0089\u0001\u0000\u0000\u0000\u02a7"+
+ "\u02a5\u0001\u0000\u0000\u0000\u02a8\u02a9\u0006E\uffff\uffff\u0000\u02a9"+
+ "\u02b1\u0003\u0094J\u0000\u02aa\u02b1\u00030\u0018\u0000\u02ab\u02b1\u0003"+
+ "\u008cF\u0000\u02ac\u02ad\u0005c\u0000\u0000\u02ad\u02ae\u0003\u0080@"+
+ "\u0000\u02ae\u02af\u0005d\u0000\u0000\u02af\u02b1\u0001\u0000\u0000\u0000"+
+ "\u02b0\u02a8\u0001\u0000\u0000\u0000\u02b0\u02aa\u0001\u0000\u0000\u0000"+
+ "\u02b0\u02ab\u0001\u0000\u0000\u0000\u02b0\u02ac\u0001\u0000\u0000\u0000"+
+ "\u02b1\u02b7\u0001\u0000\u0000\u0000\u02b2\u02b3\n\u0001\u0000\u0000\u02b3"+
+ "\u02b4\u0005<\u0000\u0000\u02b4\u02b6\u0003\n\u0005\u0000\u02b5\u02b2"+
+ "\u0001\u0000\u0000\u0000\u02b6\u02b9\u0001\u0000\u0000\u0000\u02b7\u02b5"+
+ "\u0001\u0000\u0000\u0000\u02b7\u02b8\u0001\u0000\u0000\u0000\u02b8\u008b"+
+ "\u0001\u0000\u0000\u0000\u02b9\u02b7\u0001\u0000\u0000\u0000\u02ba\u02bb"+
+ "\u0003\u008eG\u0000\u02bb\u02c9\u0005c\u0000\u0000\u02bc\u02ca\u0005Y"+
+ "\u0000\u0000\u02bd\u02c2\u0003\u0080@\u0000\u02be\u02bf\u0005>\u0000\u0000"+
+ "\u02bf\u02c1\u0003\u0080@\u0000\u02c0\u02be\u0001\u0000\u0000\u0000\u02c1"+
+ "\u02c4\u0001\u0000\u0000\u0000\u02c2\u02c0\u0001\u0000\u0000\u0000\u02c2"+
+ "\u02c3\u0001\u0000\u0000\u0000\u02c3\u02c7\u0001\u0000\u0000\u0000\u02c4"+
+ "\u02c2\u0001\u0000\u0000\u0000\u02c5\u02c6\u0005>\u0000\u0000\u02c6\u02c8"+
+ "\u0003\u0090H\u0000\u02c7\u02c5\u0001\u0000\u0000\u0000\u02c7\u02c8\u0001"+
+ "\u0000\u0000\u0000\u02c8\u02ca\u0001\u0000\u0000\u0000\u02c9\u02bc\u0001"+
+ "\u0000\u0000\u0000\u02c9\u02bd\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+
+ "\u0000\u0000\u0000\u02ca\u02cb\u0001\u0000\u0000\u0000\u02cb\u02cc\u0005"+
+ "d\u0000\u0000\u02cc\u008d\u0001\u0000\u0000\u0000\u02cd\u02ce\u0003>\u001f"+
+ "\u0000\u02ce\u008f\u0001\u0000\u0000\u0000\u02cf\u02d8\u0005\\\u0000\u0000"+
+ "\u02d0\u02d5\u0003\u0092I\u0000\u02d1\u02d2\u0005>\u0000\u0000\u02d2\u02d4"+
+ "\u0003\u0092I\u0000\u02d3\u02d1\u0001\u0000\u0000\u0000\u02d4\u02d7\u0001"+
+ "\u0000\u0000\u0000\u02d5\u02d3\u0001\u0000\u0000\u0000\u02d5\u02d6\u0001"+
+ "\u0000\u0000\u0000\u02d6\u02d9\u0001\u0000\u0000\u0000\u02d7\u02d5\u0001"+
+ "\u0000\u0000\u0000\u02d8\u02d0\u0001\u0000\u0000\u0000\u02d8\u02d9\u0001"+
+ "\u0000\u0000\u0000\u02d9\u02da\u0001\u0000\u0000\u0000\u02da\u02db\u0005"+
+ "]\u0000\u0000\u02db\u0091\u0001\u0000\u0000\u0000\u02dc\u02dd\u0003\u009e"+
+ "O\u0000\u02dd\u02de\u0005=\u0000\u0000\u02de\u02df\u0003\u0094J\u0000"+
+ "\u02df\u0093\u0001\u0000\u0000\u0000\u02e0\u030b\u0005H\u0000\u0000\u02e1"+
+ "\u02e2\u0003\u009cN\u0000\u02e2\u02e3\u0005e\u0000\u0000\u02e3\u030b\u0001"+
+ "\u0000\u0000\u0000\u02e4\u030b\u0003\u009aM\u0000\u02e5\u030b\u0003\u009c"+
+ "N\u0000\u02e6\u030b\u0003\u0096K\u0000\u02e7\u030b\u0003:\u001d\u0000"+
+ "\u02e8\u030b\u0003\u009eO\u0000\u02e9\u02ea\u0005a\u0000\u0000\u02ea\u02ef"+
+ "\u0003\u0098L\u0000\u02eb\u02ec\u0005>\u0000\u0000\u02ec\u02ee\u0003\u0098"+
+ "L\u0000\u02ed\u02eb\u0001\u0000\u0000\u0000\u02ee\u02f1\u0001\u0000\u0000"+
+ "\u0000\u02ef\u02ed\u0001\u0000\u0000\u0000\u02ef\u02f0\u0001\u0000\u0000"+
+ "\u0000\u02f0\u02f2\u0001\u0000\u0000\u0000\u02f1\u02ef\u0001\u0000\u0000"+
+ "\u0000\u02f2\u02f3\u0005b\u0000\u0000\u02f3\u030b\u0001\u0000\u0000\u0000"+
+ "\u02f4\u02f5\u0005a\u0000\u0000\u02f5\u02fa\u0003\u0096K\u0000\u02f6\u02f7"+
+ "\u0005>\u0000\u0000\u02f7\u02f9\u0003\u0096K\u0000\u02f8\u02f6\u0001\u0000"+
+ "\u0000\u0000\u02f9\u02fc\u0001\u0000\u0000\u0000\u02fa\u02f8\u0001\u0000"+
+ "\u0000\u0000\u02fa\u02fb\u0001\u0000\u0000\u0000\u02fb\u02fd\u0001\u0000"+
+ "\u0000\u0000\u02fc\u02fa\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005b\u0000"+
+ "\u0000\u02fe\u030b\u0001\u0000\u0000\u0000\u02ff\u0300\u0005a\u0000\u0000"+
+ "\u0300\u0305\u0003\u009eO\u0000\u0301\u0302\u0005>\u0000\u0000\u0302\u0304"+
+ "\u0003\u009eO\u0000\u0303\u0301\u0001\u0000\u0000\u0000\u0304\u0307\u0001"+
+ "\u0000\u0000\u0000\u0305\u0303\u0001\u0000\u0000\u0000\u0305\u0306\u0001"+
+ "\u0000\u0000\u0000\u0306\u0308\u0001\u0000\u0000\u0000\u0307\u0305\u0001"+
+ "\u0000\u0000\u0000\u0308\u0309\u0005b\u0000\u0000\u0309\u030b\u0001\u0000"+
+ "\u0000\u0000\u030a\u02e0\u0001\u0000\u0000\u0000\u030a\u02e1\u0001\u0000"+
+ "\u0000\u0000\u030a\u02e4\u0001\u0000\u0000\u0000\u030a\u02e5\u0001\u0000"+
+ "\u0000\u0000\u030a\u02e6\u0001\u0000\u0000\u0000\u030a\u02e7\u0001\u0000"+
+ "\u0000\u0000\u030a\u02e8\u0001\u0000\u0000\u0000\u030a\u02e9\u0001\u0000"+
+ "\u0000\u0000\u030a\u02f4\u0001\u0000\u0000\u0000\u030a\u02ff\u0001\u0000"+
+ "\u0000\u0000\u030b\u0095\u0001\u0000\u0000\u0000\u030c\u030d\u0007\u0007"+
+ "\u0000\u0000\u030d\u0097\u0001\u0000\u0000\u0000\u030e\u0311\u0003\u009a"+
+ "M\u0000\u030f\u0311\u0003\u009cN\u0000\u0310\u030e\u0001\u0000\u0000\u0000"+
+ "\u0310\u030f\u0001\u0000\u0000\u0000\u0311\u0099\u0001\u0000\u0000\u0000"+
+ "\u0312\u0314\u0007\u0005\u0000\u0000\u0313\u0312\u0001\u0000\u0000\u0000"+
+ "\u0313\u0314\u0001\u0000\u0000\u0000\u0314\u0315\u0001\u0000\u0000\u0000"+
+ "\u0315\u0316\u00057\u0000\u0000\u0316\u009b\u0001\u0000\u0000\u0000\u0317"+
+ "\u0319\u0007\u0005\u0000\u0000\u0318\u0317\u0001\u0000\u0000\u0000\u0318"+
+ "\u0319\u0001\u0000\u0000\u0000\u0319\u031a\u0001\u0000\u0000\u0000\u031a"+
+ "\u031b\u00056\u0000\u0000\u031b\u009d\u0001\u0000\u0000\u0000\u031c\u031d"+
+ "\u00055\u0000\u0000\u031d\u009f\u0001\u0000\u0000\u0000\u031e\u031f\u0007"+
+ "\b\u0000\u0000\u031f\u00a1\u0001\u0000\u0000\u0000\u0320\u0321\u0007\t"+
+ "\u0000\u0000\u0321\u0322\u0005r\u0000\u0000\u0322\u0323\u0003\u00a4R\u0000"+
+ "\u0323\u0324\u0003\u00a6S\u0000\u0324\u00a3\u0001\u0000\u0000\u0000\u0325"+
+ "\u0326\u0003\u001c\u000e\u0000\u0326\u00a5\u0001\u0000\u0000\u0000\u0327"+
+ "\u0328\u0005J\u0000\u0000\u0328\u032d\u0003\u00a8T\u0000\u0329\u032a\u0005"+
+ ">\u0000\u0000\u032a\u032c\u0003\u00a8T\u0000\u032b\u0329\u0001\u0000\u0000"+
+ "\u0000\u032c\u032f\u0001\u0000\u0000\u0000\u032d\u032b\u0001\u0000\u0000"+
+ "\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e\u00a7\u0001\u0000\u0000"+
+ "\u0000\u032f\u032d\u0001\u0000\u0000\u0000\u0330\u0331\u0003\u0086C\u0000"+
+ "\u0331\u00a9\u0001\u0000\u0000\u0000J\u00b5\u00bf\u00dc\u00eb\u00f1\u00fa"+
+ "\u0100\u010d\u0111\u011c\u012c\u0134\u0138\u013f\u0145\u014c\u0154\u015c"+
+ "\u0164\u0168\u016c\u0171\u017c\u0181\u0185\u0193\u019e\u01a4\u01ab\u01b4"+
+ "\u01cb\u01d3\u01d6\u01dd\u01e8\u01ef\u01f7\u0205\u020e\u021c\u0227\u0235"+
+ "\u023e\u0246\u024b\u0253\u0255\u025a\u0261\u0268\u0271\u0278\u0281\u0286"+
+ "\u028b\u0295\u029b\u02a3\u02a5\u02b0\u02b7\u02c2\u02c7\u02c9\u02d5\u02d8"+
+ "\u02ef\u02fa\u0305\u030a\u0310\u0313\u0318\u032d";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java
index b6d5af9d90870..adce3bca00991 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java
@@ -541,49 +541,61 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener {
*
* The default implementation does nothing.
*/
- @Override public void enterGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { }
+ @Override public void enterDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { }
+ @Override public void exitDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { }
+ @Override public void enterDissectCommandOption(EsqlBaseParser.DissectCommandOptionContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { }
+ @Override public void exitDissectCommandOption(EsqlBaseParser.DissectCommandOptionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterCommandOptions(EsqlBaseParser.CommandOptionsContext ctx) { }
+ @Override public void enterGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitCommandOptions(EsqlBaseParser.CommandOptionsContext ctx) { }
+ @Override public void exitGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterCommandOption(EsqlBaseParser.CommandOptionContext ctx) { }
+ @Override public void enterMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitCommandOption(EsqlBaseParser.CommandOptionContext ctx) { }
+ @Override public void exitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { }
/**
* {@inheritDoc}
*
@@ -812,42 +824,6 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener {
* The default implementation does nothing.
*/
@Override public void exitFuseCommand(EsqlBaseParser.FuseCommandContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterInferenceCommandOptions(EsqlBaseParser.InferenceCommandOptionsContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitInferenceCommandOptions(EsqlBaseParser.InferenceCommandOptionsContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterInferenceCommandOption(EsqlBaseParser.InferenceCommandOptionContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitInferenceCommandOption(EsqlBaseParser.InferenceCommandOptionContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterInferenceCommandOptionValue(EsqlBaseParser.InferenceCommandOptionValueContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitInferenceCommandOptionValue(EsqlBaseParser.InferenceCommandOptionValueContext ctx) { }
/**
* {@inheritDoc}
*
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
index ab8ed34810ddf..3de999920b23c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
@@ -327,28 +327,35 @@ public class EsqlBaseParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> im
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { return visitChildren(ctx); }
+ @Override public T visitDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { return visitChildren(ctx); }
+ @Override public T visitDissectCommandOption(EsqlBaseParser.DissectCommandOptionContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitCommandOptions(EsqlBaseParser.CommandOptionsContext ctx) { return visitChildren(ctx); }
+ @Override public T visitGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitCommandOption(EsqlBaseParser.CommandOptionContext ctx) { return visitChildren(ctx); }
+ @Override public T visitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@@ -482,27 +489,6 @@ public class EsqlBaseParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> im
* {@link #visitChildren} on {@code ctx}.
*/
@Override public T visitFuseCommand(EsqlBaseParser.FuseCommandContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitInferenceCommandOptions(EsqlBaseParser.InferenceCommandOptionsContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitInferenceCommandOption(EsqlBaseParser.InferenceCommandOptionContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitInferenceCommandOptionValue(EsqlBaseParser.InferenceCommandOptionValueContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
index 0ab831b10b68e..3cf9768afa515 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
@@ -460,45 +460,55 @@ public interface EsqlBaseParserListener extends ParseTreeListener {
*/
void exitDissectCommand(EsqlBaseParser.DissectCommandContext ctx);
/**
- * Enter a parse tree produced by {@link EsqlBaseParser#grokCommand}.
+ * Enter a parse tree produced by {@link EsqlBaseParser#dissectCommandOptions}.
* @param ctx the parse tree
*/
- void enterGrokCommand(EsqlBaseParser.GrokCommandContext ctx);
+ void enterDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx);
/**
- * Exit a parse tree produced by {@link EsqlBaseParser#grokCommand}.
+ * Exit a parse tree produced by {@link EsqlBaseParser#dissectCommandOptions}.
* @param ctx the parse tree
*/
- void exitGrokCommand(EsqlBaseParser.GrokCommandContext ctx);
+ void exitDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx);
/**
- * Enter a parse tree produced by {@link EsqlBaseParser#mvExpandCommand}.
+ * Enter a parse tree produced by {@link EsqlBaseParser#dissectCommandOption}.
* @param ctx the parse tree
*/
- void enterMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx);
+ void enterDissectCommandOption(EsqlBaseParser.DissectCommandOptionContext ctx);
/**
- * Exit a parse tree produced by {@link EsqlBaseParser#mvExpandCommand}.
+ * Exit a parse tree produced by {@link EsqlBaseParser#dissectCommandOption}.
* @param ctx the parse tree
*/
- void exitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx);
+ void exitDissectCommandOption(EsqlBaseParser.DissectCommandOptionContext ctx);
+ /**
+ * Enter a parse tree produced by {@link EsqlBaseParser#commandNamedParameters}.
+ * @param ctx the parse tree
+ */
+ void enterCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx);
/**
- * Enter a parse tree produced by {@link EsqlBaseParser#commandOptions}.
+ * Exit a parse tree produced by {@link EsqlBaseParser#commandNamedParameters}.
+ * @param ctx the parse tree
+ */
+ void exitCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx);
+ /**
+ * Enter a parse tree produced by {@link EsqlBaseParser#grokCommand}.
* @param ctx the parse tree
*/
- void enterCommandOptions(EsqlBaseParser.CommandOptionsContext ctx);
+ void enterGrokCommand(EsqlBaseParser.GrokCommandContext ctx);
/**
- * Exit a parse tree produced by {@link EsqlBaseParser#commandOptions}.
+ * Exit a parse tree produced by {@link EsqlBaseParser#grokCommand}.
* @param ctx the parse tree
*/
- void exitCommandOptions(EsqlBaseParser.CommandOptionsContext ctx);
+ void exitGrokCommand(EsqlBaseParser.GrokCommandContext ctx);
/**
- * Enter a parse tree produced by {@link EsqlBaseParser#commandOption}.
+ * Enter a parse tree produced by {@link EsqlBaseParser#mvExpandCommand}.
* @param ctx the parse tree
*/
- void enterCommandOption(EsqlBaseParser.CommandOptionContext ctx);
+ void enterMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx);
/**
- * Exit a parse tree produced by {@link EsqlBaseParser#commandOption}.
+ * Exit a parse tree produced by {@link EsqlBaseParser#mvExpandCommand}.
* @param ctx the parse tree
*/
- void exitCommandOption(EsqlBaseParser.CommandOptionContext ctx);
+ void exitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx);
/**
* Enter a parse tree produced by {@link EsqlBaseParser#explainCommand}.
* @param ctx the parse tree
@@ -695,36 +705,6 @@ public interface EsqlBaseParserListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitFuseCommand(EsqlBaseParser.FuseCommandContext ctx);
- /**
- * Enter a parse tree produced by {@link EsqlBaseParser#inferenceCommandOptions}.
- * @param ctx the parse tree
- */
- void enterInferenceCommandOptions(EsqlBaseParser.InferenceCommandOptionsContext ctx);
- /**
- * Exit a parse tree produced by {@link EsqlBaseParser#inferenceCommandOptions}.
- * @param ctx the parse tree
- */
- void exitInferenceCommandOptions(EsqlBaseParser.InferenceCommandOptionsContext ctx);
- /**
- * Enter a parse tree produced by {@link EsqlBaseParser#inferenceCommandOption}.
- * @param ctx the parse tree
- */
- void enterInferenceCommandOption(EsqlBaseParser.InferenceCommandOptionContext ctx);
- /**
- * Exit a parse tree produced by {@link EsqlBaseParser#inferenceCommandOption}.
- * @param ctx the parse tree
- */
- void exitInferenceCommandOption(EsqlBaseParser.InferenceCommandOptionContext ctx);
- /**
- * Enter a parse tree produced by {@link EsqlBaseParser#inferenceCommandOptionValue}.
- * @param ctx the parse tree
- */
- void enterInferenceCommandOptionValue(EsqlBaseParser.InferenceCommandOptionValueContext ctx);
- /**
- * Exit a parse tree produced by {@link EsqlBaseParser#inferenceCommandOptionValue}.
- * @param ctx the parse tree
- */
- void exitInferenceCommandOptionValue(EsqlBaseParser.InferenceCommandOptionValueContext ctx);
/**
* Enter a parse tree produced by {@link EsqlBaseParser#rerankCommand}.
* @param ctx the parse tree
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
index b9d15fcd37b76..58291d1326c3a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
@@ -284,29 +284,35 @@ public interface EsqlBaseParserVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitDissectCommand(EsqlBaseParser.DissectCommandContext ctx);
/**
- * Visit a parse tree produced by {@link EsqlBaseParser#grokCommand}.
+ * Visit a parse tree produced by {@link EsqlBaseParser#dissectCommandOptions}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitGrokCommand(EsqlBaseParser.GrokCommandContext ctx);
+ T visitDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx);
/**
- * Visit a parse tree produced by {@link EsqlBaseParser#mvExpandCommand}.
+ * Visit a parse tree produced by {@link EsqlBaseParser#dissectCommandOption}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx);
+ T visitDissectCommandOption(EsqlBaseParser.DissectCommandOptionContext ctx);
+ /**
+ * Visit a parse tree produced by {@link EsqlBaseParser#commandNamedParameters}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx);
/**
- * Visit a parse tree produced by {@link EsqlBaseParser#commandOptions}.
+ * Visit a parse tree produced by {@link EsqlBaseParser#grokCommand}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitCommandOptions(EsqlBaseParser.CommandOptionsContext ctx);
+ T visitGrokCommand(EsqlBaseParser.GrokCommandContext ctx);
/**
- * Visit a parse tree produced by {@link EsqlBaseParser#commandOption}.
+ * Visit a parse tree produced by {@link EsqlBaseParser#mvExpandCommand}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitCommandOption(EsqlBaseParser.CommandOptionContext ctx);
+ T visitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx);
/**
* Visit a parse tree produced by {@link EsqlBaseParser#explainCommand}.
* @param ctx the parse tree
@@ -424,24 +430,6 @@ public interface EsqlBaseParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitFuseCommand(EsqlBaseParser.FuseCommandContext ctx);
- /**
- * Visit a parse tree produced by {@link EsqlBaseParser#inferenceCommandOptions}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitInferenceCommandOptions(EsqlBaseParser.InferenceCommandOptionsContext ctx);
- /**
- * Visit a parse tree produced by {@link EsqlBaseParser#inferenceCommandOption}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitInferenceCommandOption(EsqlBaseParser.InferenceCommandOptionContext ctx);
- /**
- * Visit a parse tree produced by {@link EsqlBaseParser#inferenceCommandOptionValue}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitInferenceCommandOptionValue(EsqlBaseParser.InferenceCommandOptionValueContext ctx);
/**
* Visit a parse tree produced by {@link EsqlBaseParser#rerankCommand}.
* @param ctx the parse tree
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
index dc60a6dbbfa0a..acb19740fe6da 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
@@ -261,8 +261,12 @@ public Literal visitString(EsqlBaseParser.StringContext ctx) {
@Override
public UnresolvedAttribute visitQualifiedName(EsqlBaseParser.QualifiedNameContext ctx) {
+ return visitQualifiedName(ctx, null);
+ }
+
+ public UnresolvedAttribute visitQualifiedName(EsqlBaseParser.QualifiedNameContext ctx, UnresolvedAttribute defaultValue) {
if (ctx == null) {
- return null;
+ return defaultValue;
}
List<Object> items = visitList(this, ctx.identifierOrParameter(), Object.class);
List<String> strings = new ArrayList<>(items.size());
@@ -647,35 +651,32 @@ public MapExpression visitMapExpression(EsqlBaseParser.MapExpressionContext ctx)
EsqlBaseParser.StringContext stringCtx = entry.string();
String key = unquote(stringCtx.QUOTED_STRING().getText()); // key is case-sensitive
if (key.isBlank()) {
- throw new ParsingException(
- source(ctx),
- "Invalid named function argument [{}], empty key is not supported",
- entry.getText()
- );
+ throw new ParsingException(source(ctx), "Invalid named parameter [{}], empty key is not supported", entry.getText());
}
if (names.contains(key)) {
- throw new ParsingException(source(ctx), "Duplicated function arguments with the same name [{}] is not supported", key);
+ throw new ParsingException(source(ctx), "Duplicated named parameters with the same name [{}] is not supported", key);
}
Expression value = expression(entry.constant());
String entryText = entry.getText();
if (value instanceof Literal l) {
if (l.dataType() == NULL) {
- throw new ParsingException(source(ctx), "Invalid named function argument [{}], NULL is not supported", entryText);
+ throw new ParsingException(source(ctx), "Invalid named parameter [{}], NULL is not supported", entryText);
}
namedArgs.add(Literal.keyword(source(stringCtx), key));
namedArgs.add(l);
names.add(key);
} else {
- throw new ParsingException(
- source(ctx),
- "Invalid named function argument [{}], only constant value is supported",
- entryText
- );
+ throw new ParsingException(source(ctx), "Invalid named parameter [{}], only constant value is supported", entryText);
}
}
return new MapExpression(Source.EMPTY, namedArgs);
}
+ @Override
+ public MapExpression visitCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx) {
+ return ctx == null || ctx.mapExpression() == null ? null : visitMapExpression(ctx.mapExpression());
+ }
+
@Override
public String visitIdentifierOrParameter(EsqlBaseParser.IdentifierOrParameterContext ctx) {
if (ctx.identifier() != null) {
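
For illustration only (not part of this change set): the map-expression handling above rejects blank keys, duplicate keys, NULL values, and non-constant values. A minimal, self-contained sketch of those rules, using plain Java collections as stand-ins for the ESQL parser types:

// Hedged sketch: plain-Java stand-ins for the parser's named-parameter checks.
// Key order is preserved, mirroring how namedArgs accumulates entries above.
import java.util.AbstractMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class NamedParametersSketch {
    static Map<String, Object> build(List<Map.Entry<String, Object>> entries) {
        Map<String, Object> out = new LinkedHashMap<>();
        for (Map.Entry<String, Object> e : entries) {
            if (e.getKey().isBlank()) {
                throw new IllegalArgumentException("empty key is not supported");
            }
            if (out.containsKey(e.getKey())) {
                throw new IllegalArgumentException("duplicated key [" + e.getKey() + "]");
            }
            if (e.getValue() == null) {
                throw new IllegalArgumentException("NULL is not supported");
            }
            out.put(e.getKey(), e.getValue()); // the real parser also requires constant values
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(build(List.of(new AbstractMap.SimpleEntry<>("inference_id", "my-endpoint"))));
    }
}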
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
index cfb431ab0a293..3602861cf8f02 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
@@ -31,6 +31,7 @@
import org.elasticsearch.xpack.esql.core.expression.Expressions;
import org.elasticsearch.xpack.esql.core.expression.FoldContext;
import org.elasticsearch.xpack.esql.core.expression.Literal;
+import org.elasticsearch.xpack.esql.core.expression.MapExpression;
import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
import org.elasticsearch.xpack.esql.core.expression.NameId;
import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
@@ -71,6 +72,7 @@
import org.elasticsearch.xpack.esql.plan.logical.TimeSeriesAggregate;
import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation;
import org.elasticsearch.xpack.esql.plan.logical.inference.Completion;
+import org.elasticsearch.xpack.esql.plan.logical.inference.InferencePlan;
import org.elasticsearch.xpack.esql.plan.logical.inference.Rerank;
import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin;
import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo;
@@ -89,7 +91,6 @@
import java.util.function.Function;
import static java.util.Collections.emptyList;
-import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD;
import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD;
import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions;
import static org.elasticsearch.xpack.esql.parser.ParserUtils.source;
@@ -212,7 +213,7 @@ private void validateGrokPattern(Source source, Grok.Parser grokParser, String p
public PlanFactory visitDissectCommand(EsqlBaseParser.DissectCommandContext ctx) {
return p -> {
String pattern = BytesRefs.toString(visitString(ctx.string()).fold(FoldContext.small() /* TODO remove me */));
- Map<String, Object> options = visitCommandOptions(ctx.commandOptions());
+ Map<String, Object> options = visitDissectCommandOptions(ctx.dissectCommandOptions());
String appendSeparator = "";
for (Map.Entry<String, Object> item : options.entrySet()) {
if (item.getKey().equalsIgnoreCase("append_separator") == false) {
@@ -260,12 +261,12 @@ public PlanFactory visitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ct
}
@Override
- public Map<String, Object> visitCommandOptions(EsqlBaseParser.CommandOptionsContext ctx) {
+ public Map<String, Object> visitDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx) {
if (ctx == null) {
return Map.of();
}
Map<String, Object> result = new HashMap<>();
- for (EsqlBaseParser.CommandOptionContext option : ctx.commandOption()) {
+ for (EsqlBaseParser.DissectCommandOptionContext option : ctx.dissectCommandOption()) {
result.put(visitIdentifier(option.identifier()), expression(option.constant()).fold(FoldContext.small() /* TODO remove me */));
}
return result;
@@ -755,137 +756,106 @@ public PlanFactory visitRerankCommand(EsqlBaseParser.RerankCommandContext ctx) {
Source source = source(ctx);
List<Alias> rerankFields = visitRerankFields(ctx.rerankFields());
Expression queryText = expression(ctx.queryText);
+ Attribute scoreAttribute = visitQualifiedName(ctx.targetField, new UnresolvedAttribute(source, MetadataAttribute.SCORE));
if (queryText instanceof Literal queryTextLiteral && DataType.isString(queryText.dataType())) {
if (queryTextLiteral.value() == null) {
- throw new ParsingException(
- source(ctx.queryText),
- "Query text cannot be null or undefined in RERANK",
- ctx.queryText.getText()
- );
+ throw new ParsingException(source(ctx.queryText), "Query cannot be null or undefined in RERANK", ctx.queryText.getText());
}
} else {
throw new ParsingException(
source(ctx.queryText),
- "RERANK only support string as query text but [{}] cannot be used as string",
+ "Query must be a valid string in RERANK, found [{}]",
ctx.queryText.getText()
);
}
return p -> {
checkForRemoteClusters(p, source, "RERANK");
- return visitRerankOptions(new Rerank(source, p, queryText, rerankFields), ctx.inferenceCommandOptions());
+ return applyRerankOptions(new Rerank(source, p, queryText, rerankFields, scoreAttribute), ctx.commandNamedParameters());
};
}
- private Rerank visitRerankOptions(Rerank rerank, EsqlBaseParser.InferenceCommandOptionsContext ctx) {
- if (ctx == null) {
- return rerank;
- }
-
- Rerank.Builder rerankBuilder = new Rerank.Builder(rerank);
+ private Rerank applyRerankOptions(Rerank rerank, EsqlBaseParser.CommandNamedParametersContext ctx) {
+ MapExpression optionExpression = visitCommandNamedParameters(ctx);
- for (var option : ctx.inferenceCommandOption()) {
- String optionName = visitIdentifier(option.identifier());
- EsqlBaseParser.InferenceCommandOptionValueContext optionValue = option.inferenceCommandOptionValue();
- if (optionName.equals(Rerank.INFERENCE_ID_OPTION_NAME)) {
- rerankBuilder.withInferenceId(visitInferenceId(optionValue));
- } else if (optionName.equals(Rerank.SCORE_COLUMN_OPTION_NAME)) {
- rerankBuilder.withScoreAttribute(visitRerankScoreAttribute(optionName, optionValue));
- } else {
- throw new ParsingException(
- source(option.identifier()),
- "Unknowm parameter [{}] in RERANK command",
- option.identifier().getText()
- );
- }
+ if (optionExpression == null) {
+ return rerank;
}
- return rerankBuilder.build();
- }
+ Map<String, Expression> optionsMap = optionExpression.keyFoldedMap();
+ Expression inferenceId = optionsMap.remove(Rerank.INFERENCE_ID_OPTION_NAME);
- private UnresolvedAttribute visitRerankScoreAttribute(String optionName, EsqlBaseParser.InferenceCommandOptionValueContext ctx) {
- if (ctx.constant() == null && ctx.identifier() == null) {
- throw new ParsingException(source(ctx), "Parameter [{}] is null or undefined", optionName);
+ if (inferenceId != null) {
+ rerank = applyInferenceId(rerank, inferenceId);
}
- Expression optionValue = ctx.identifier() != null
- ? Literal.keyword(source(ctx.identifier()), visitIdentifier(ctx.identifier()))
- : expression(ctx.constant());
-
- if (optionValue instanceof UnresolvedAttribute scoreAttribute) {
- return scoreAttribute;
- } else if (optionValue instanceof Literal literal) {
- if (literal.value() == null) {
- throw new ParsingException(optionValue.source(), "Parameter [{}] is null or undefined", optionName);
- }
-
- if (literal.value() instanceof BytesRef attributeName) {
- return new UnresolvedAttribute(literal.source(), BytesRefs.toString(attributeName));
- }
+ if (optionsMap.isEmpty() == false) {
+ throw new ParsingException(
+ source(ctx),
+ "Inavalid option [{}] in RERANK, expected one of [{}]",
+ optionsMap.keySet().stream().findAny().get(),
+ rerank.validOptionNames()
+ );
}
- throw new ParsingException(
- source(ctx),
- "Option [{}] expects a valid attribute in RERANK command. [{}] provided.",
- optionName,
- ctx.constant().getText()
- );
+ return rerank;
}
- @Override
public PlanFactory visitCompletionCommand(EsqlBaseParser.CompletionCommandContext ctx) {
Source source = source(ctx);
Expression prompt = expression(ctx.prompt);
- Literal inferenceId = visitInferenceId(ctx.inferenceId);
- Attribute targetField = ctx.targetField == null
- ? new UnresolvedAttribute(source, Completion.DEFAULT_OUTPUT_FIELD_NAME)
- : visitQualifiedName(ctx.targetField);
+ Attribute targetField = visitQualifiedName(ctx.targetField, new UnresolvedAttribute(source, Completion.DEFAULT_OUTPUT_FIELD_NAME));
return p -> {
checkForRemoteClusters(p, source, "COMPLETION");
- return new Completion(source, p, inferenceId, prompt, targetField);
+ return applyCompletionOptions(new Completion(source, p, prompt, targetField), ctx.commandNamedParameters());
};
}
- private Literal visitInferenceId(EsqlBaseParser.IdentifierOrParameterContext ctx) {
- if (ctx.identifier() != null) {
- return Literal.keyword(source(ctx), visitIdentifier(ctx.identifier()));
+ private Completion applyCompletionOptions(Completion completion, EsqlBaseParser.CommandNamedParametersContext ctx) {
+ MapExpression optionsExpression = visitCommandNamedParameters(ctx);
+
+ if (optionsExpression == null || optionsExpression.containsKey(Completion.INFERENCE_ID_OPTION_NAME) == false) {
+ // Having a mandatory named parameter for inference_id is an antipattern, but it will become optional once we have a
+ // default LLM. It is better to keep inference_id as a named parameter now and relax the syntax when it becomes
+ // optional than to change the syntax entirely later.
+ throw new ParsingException(source(ctx), "Missing mandatory option [{}] in COMPLETION", Completion.INFERENCE_ID_OPTION_NAME);
}
- return visitInferenceId(expression(ctx.parameter()));
- }
+ Map<String, Expression> optionsMap = optionsExpression.keyFoldedMap();
- private Literal visitInferenceId(EsqlBaseParser.InferenceCommandOptionValueContext ctx) {
- if (ctx.identifier() != null) {
- return Literal.keyword(source(ctx), visitIdentifier(ctx.identifier()));
+ Expression inferenceId = optionsMap.remove(Completion.INFERENCE_ID_OPTION_NAME);
+ if (inferenceId != null) {
+ completion = applyInferenceId(completion, inferenceId);
}
- return visitInferenceId(expression(ctx.constant()));
- }
+ if (optionsMap.isEmpty() == false) {
+ throw new ParsingException(
+ source(ctx),
+ "Inavalid option [{}] in COMPLETION, expected one of [{}]",
+ optionsMap.keySet().stream().findAny().get(),
+ completion.validOptionNames()
+ );
+ }
- private Literal visitInferenceId(Expression expression) {
- if (expression instanceof Literal literal) {
- if (literal.value() == null) {
- throw new ParsingException(
- expression.source(),
- "Parameter [{}] is null or undefined and cannot be used as inference id",
- expression.source().text()
- );
- }
+ return completion;
+ }
- return literal;
- } else if (expression instanceof UnresolvedAttribute attribute) {
- // Support for unquoted inference id
- return new Literal(expression.source(), attribute.name(), KEYWORD);
+ private <InferencePlanType extends InferencePlan<InferencePlanType>> InferencePlanType applyInferenceId(
+ InferencePlanType inferencePlan,
+ Expression inferenceId
+ ) {
+ if ((inferenceId instanceof Literal && DataType.isString(inferenceId.dataType())) == false) {
+ throw new ParsingException(
+ inferenceId.source(),
+ "Option [{}] must be a valid string, found [{}]",
+ Completion.INFERENCE_ID_OPTION_NAME,
+ inferenceId.source().text()
+ );
}
- throw new ParsingException(
- expression.source(),
- "Query parameter [{}] is not a string and cannot be used as inference id [{}]",
- expression.source().text(),
- expression.getClass()
- );
+ return inferencePlan.withInferenceId(inferenceId);
}
public PlanFactory visitSampleCommand(EsqlBaseParser.SampleCommandContext ctx) {
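
For illustration only (not part of this change set): the applyRerankOptions/applyCompletionOptions pattern above folds the map expression into a mutable map, remove()s each recognized option, and reports anything left over as invalid. A minimal sketch of that consume-and-check loop, with plain Java types standing in for MapExpression and ParsingException:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class CommandOptionsSketch {
    // Consume recognized options; any remaining key is reported as invalid.
    static String parse(Map<String, Object> folded, List<String> validNames) {
        Map<String, Object> remaining = new HashMap<>(folded);
        Object inferenceId = remaining.remove("inference_id");
        if (remaining.isEmpty() == false) {
            throw new IllegalArgumentException(
                "Invalid option [" + remaining.keySet().iterator().next() + "], expected one of " + validNames
            );
        }
        return inferenceId == null ? null : inferenceId.toString();
    }

    public static void main(String[] args) {
        System.out.println(parse(Map.of("inference_id", "my-endpoint"), List.of("inference_id"))); // my-endpoint
    }
}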
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/Completion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/Completion.java
index c4115caf111d1..191664bea9a81 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/Completion.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/Completion.java
@@ -17,6 +17,7 @@
import org.elasticsearch.xpack.esql.core.expression.Attribute;
import org.elasticsearch.xpack.esql.core.expression.AttributeSet;
import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.expression.Literal;
import org.elasticsearch.xpack.esql.core.expression.NameId;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -45,6 +46,10 @@ public class Completion extends InferencePlan<Completion> implements TelemetryAw
private final Attribute targetField;
private List<Attribute> lazyOutput;
+ public Completion(Source source, LogicalPlan p, Expression prompt, Attribute targetField) {
+ this(source, p, Literal.keyword(Source.EMPTY, DEFAULT_OUTPUT_FIELD_NAME), prompt, targetField);
+ }
+
public Completion(Source source, LogicalPlan child, Expression inferenceId, Expression prompt, Attribute targetField) {
super(source, child, inferenceId);
this.prompt = prompt;
@@ -78,6 +83,10 @@ public Attribute targetField() {
@Override
public Completion withInferenceId(Expression newInferenceId) {
+ if (inferenceId().equals(newInferenceId)) {
+ return this;
+ }
+
return new Completion(source(), child(), newInferenceId, prompt, targetField);
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/InferencePlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/InferencePlan.java
index 620e8726865d6..b4c7d21f3e364 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/InferencePlan.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/InferencePlan.java
@@ -18,6 +18,7 @@
import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;
import java.io.IOException;
+import java.util.List;
import java.util.Objects;
public abstract class InferencePlan<PlanType extends InferencePlan<PlanType>> extends UnaryPlan
@@ -25,6 +26,9 @@ public abstract class InferencePlan<PlanType extends InferencePlan<PlanType>> ex
SortAgnostic,
GeneratingPlan<PlanType> {
+ public static final String INFERENCE_ID_OPTION_NAME = "inference_id";
+ public static final List<String> VALID_INFERENCE_OPTION_NAMES = List.of(INFERENCE_ID_OPTION_NAME);
+
private final Expression inferenceId;
protected InferencePlan(Source source, LogicalPlan child, Expression inferenceId) {
@@ -69,4 +73,8 @@ public int hashCode() {
public PlanType withInferenceResolutionError(String inferenceId, String error) {
return withInferenceId(new UnresolvedAttribute(inferenceId().source(), inferenceId, error));
}
+
+ public List<String> validOptionNames() {
+ return VALID_INFERENCE_OPTION_NAMES;
+ }
}
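
For illustration only (not part of this change set): validOptionNames() gives subclasses a single override point for widening the option vocabulary the parser checks leftovers against. A hypothetical sketch; the "timeout" option and class names are invented:

import java.util.List;

abstract class InferenceNodeSketch {
    static final String INFERENCE_ID = "inference_id";

    List<String> validOptionNames() {
        return List.of(INFERENCE_ID);
    }
}

// Invented subclass: a plan that also accepted a "timeout" option would only
// need to override validOptionNames(); the leftover check stays unchanged.
class TimedInferenceNodeSketch extends InferenceNodeSketch {
    @Override
    List<String> validOptionNames() {
        return List.of(INFERENCE_ID, "timeout");
    }
}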
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/Rerank.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/Rerank.java
index 308bdccfc0a01..ae1ba84eeddac 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/Rerank.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/inference/Rerank.java
@@ -19,9 +19,7 @@
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.expression.Expressions;
import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
import org.elasticsearch.xpack.esql.core.expression.NameId;
-import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
@@ -34,29 +32,19 @@
import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes;
import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes;
-import static org.elasticsearch.xpack.esql.parser.ParserUtils.source;
public class Rerank extends InferencePlan<Rerank> implements TelemetryAware {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Rerank", Rerank::new);
public static final String DEFAULT_INFERENCE_ID = ".rerank-v1-elasticsearch";
- public static final String INFERENCE_ID_OPTION_NAME = "inferenceId";
- public static final String SCORE_COLUMN_OPTION_NAME = "scoreColumn";
private final Attribute scoreAttribute;
private final Expression queryText;
private final List<Alias> rerankFields;
private List<Attribute> lazyOutput;
- public Rerank(Source source, LogicalPlan child, Expression queryText, List<Alias> rerankFields) {
- this(
- source,
- child,
- Literal.keyword(Source.EMPTY, DEFAULT_INFERENCE_ID),
- queryText,
- rerankFields,
- new UnresolvedAttribute(Source.EMPTY, MetadataAttribute.SCORE)
- );
+ public Rerank(Source source, LogicalPlan child, Expression queryText, List<Alias> rerankFields, Attribute scoreAttribute) {
+ this(source, child, Literal.keyword(Source.EMPTY, DEFAULT_INFERENCE_ID), queryText, rerankFields, scoreAttribute);
}
public Rerank(
@@ -111,14 +99,25 @@ public TaskType taskType() {
@Override
public Rerank withInferenceId(Expression newInferenceId) {
+ if (inferenceId().equals(newInferenceId)) {
+ return this;
+ }
return new Rerank(source(), child(), newInferenceId, queryText, rerankFields, scoreAttribute);
}
public Rerank withRerankFields(List<Alias> newRerankFields) {
+ if (rerankFields.equals(newRerankFields)) {
+ return this;
+ }
+
return new Rerank(source(), child(), inferenceId(), queryText, newRerankFields, scoreAttribute);
}
public Rerank withScoreAttribute(Attribute newScoreAttribute) {
+ if (scoreAttribute.equals(newScoreAttribute)) {
+ return this;
+ }
+
return new Rerank(source(), child(), inferenceId(), queryText, rerankFields, newScoreAttribute);
}
@@ -193,26 +192,4 @@ public List<Attribute> output() {
}
return lazyOutput;
}
-
- public static class Builder {
- private Rerank rerank;
-
- public Builder(Rerank rerank) {
- this.rerank = rerank;
- }
-
- public Rerank build() {
- return rerank;
- }
-
- public Builder withInferenceId(Expression inferenceId) {
- this.rerank = this.rerank.withInferenceId(inferenceId);
- return this;
- }
-
- public Builder withScoreAttribute(Attribute scoreAttribute) {
- this.rerank = this.rerank.withScoreAttribute(scoreAttribute);
- return this;
- }
- }
}
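
For illustration only (not part of this change set): the early-return guards added to withInferenceId/withRerankFields/withScoreAttribute follow a common immutable-tree idiom: return this when nothing changes, so tree-rewrite rules can detect a fixed point by reference equality instead of allocating identical nodes. A minimal sketch with an invented node type:

final class ImmutableNodeSketch {
    private final String inferenceId;

    ImmutableNodeSketch(String inferenceId) {
        this.inferenceId = inferenceId;
    }

    ImmutableNodeSketch withInferenceId(String newId) {
        if (inferenceId.equals(newId)) {
            return this; // no allocation; callers can compare by reference
        }
        return new ImmutableNodeSketch(newId);
    }
}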
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
index 4dab485326b99..b19426a0e914f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
@@ -3549,23 +3549,20 @@ public void testResolveRerankInferenceId() {
assumeTrue("Requires RERANK command", EsqlCapabilities.Cap.RERANK.isEnabled());
{
- LogicalPlan plan = analyze(
- "FROM books METADATA _score | RERANK \"italian food recipe\" ON title WITH inferenceId=`reranking-inference-id`",
- "mapping-books.json"
- );
+ LogicalPlan plan = analyze("""
+ FROM books METADATA _score
+ | RERANK "italian food recipe" ON title WITH { "inference_id" : "reranking-inference-id" }
+ """, "mapping-books.json");
Rerank rerank = as(as(plan, Limit.class).child(), Rerank.class);
assertThat(rerank.inferenceId(), equalTo(string("reranking-inference-id")));
}
{
- VerificationException ve = expectThrows(
- VerificationException.class,
- () -> analyze(
- "FROM books METADATA _score | RERANK \"italian food recipe\" ON title WITH inferenceId=`completion-inference-id`",
- "mapping-books.json"
- )
+ VerificationException ve = expectThrows(VerificationException.class, () -> analyze("""
+ FROM books METADATA _score
+ | RERANK "italian food recipe" ON title WITH { "inference_id" : "completion-inference-id" }
+ """, "mapping-books.json"));
- );
assertThat(
ve.getMessage(),
containsString(
@@ -3576,26 +3573,19 @@ public void testResolveRerankInferenceId() {
}
{
- VerificationException ve = expectThrows(
- VerificationException.class,
- () -> analyze(
- "FROM books METADATA _score | RERANK \"italian food recipe\" ON title WITH inferenceId=`error-inference-id`",
- "mapping-books.json"
- )
+ VerificationException ve = expectThrows(VerificationException.class, () -> analyze("""
+ FROM books METADATA _score
+ | RERANK "italian food recipe" ON title WITH { "inference_id" : "error-inference-id" }
+ """, "mapping-books.json"));
- );
assertThat(ve.getMessage(), containsString("error with inference resolution"));
}
{
- VerificationException ve = expectThrows(
- VerificationException.class,
- () -> analyze(
- "FROM books METADATA _score | RERANK \"italian food recipe\" ON title WITH inferenceId=`unknown-inference-id`",
- "mapping-books.json"
- )
-
- );
+ VerificationException ve = expectThrows(VerificationException.class, () -> analyze("""
+ FROM books METADATA _score
+ | RERANK "italian food recipe" ON title WITH { "inference_id" : "unknown-inference-id" }
+ """, "mapping-books.json"));
assertThat(ve.getMessage(), containsString("unresolved inference [unknown-inference-id]"));
}
}
@@ -3610,7 +3600,7 @@ public void testResolveRerankFields() {
| WHERE title:"italian food recipe" OR description:"italian food recipe"
| KEEP description, title, year, _score
| DROP description
- | RERANK "italian food recipe" ON title WITH inferenceId=`reranking-inference-id`
+ | RERANK "italian food recipe" ON title WITH { "inference_id" : "reranking-inference-id" }
""", "mapping-books.json");
Limit limit = as(plan, Limit.class); // Implicit limit added by AddImplicitLimit rule.
@@ -3635,7 +3625,7 @@ public void testResolveRerankFields() {
FROM books METADATA _score
| WHERE title:"food"
| RERANK "food" ON title, description=SUBSTRING(description, 0, 100), yearRenamed=year
- WITH inferenceId=`reranking-inference-id`
+ WITH { "inference_id" : "reranking-inference-id" }
""", "mapping-books.json");
Limit limit = as(plan, Limit.class); // Implicit limit added by AddImplicitLimit rule.
@@ -3667,29 +3657,13 @@ public void testResolveRerankFields() {
assertThat(rerank.scoreAttribute(), equalTo(getAttributeByName(relation.output(), MetadataAttribute.SCORE)));
}
- {
- // Unnamed field.
- try {
- LogicalPlan plan = analyze("""
- FROM books METADATA _score
- | WHERE title:"food"
- | RERANK "food" ON title, SUBSTRING(description, 0, 100), yearRenamed=year WITH inferenceId=`reranking-inference-id`
- """, "mapping-books.json");
- } catch (ParsingException ex) {
- assertThat(
- ex.getMessage(),
- containsString("line 3:36: mismatched input '(' expecting {, '|', '=', ',', '.', 'with'}")
- );
- }
- }
-
{
VerificationException ve = expectThrows(
VerificationException.class,
- () -> analyze(
- "FROM books METADATA _score | RERANK \"italian food recipe\" ON missingField WITH inferenceId=`reranking-inference-id`",
- "mapping-books.json"
- )
+ () -> analyze("""
+ FROM books METADATA _score
+ | RERANK \"italian food recipe\" ON missingField WITH { "inference_id" : "reranking-inference-id" }
+ """, "mapping-books.json")
);
assertThat(ve.getMessage(), containsString("Unknown column [missingField]"));
@@ -3704,7 +3678,7 @@ public void testResolveRerankScoreField() {
LogicalPlan plan = analyze("""
FROM books METADATA _score
| WHERE title:"italian food recipe" OR description:"italian food recipe"
- | RERANK "italian food recipe" ON title WITH inferenceId=`reranking-inference-id`
+ | RERANK "italian food recipe" ON title WITH { "inference_id" : "reranking-inference-id" }
""", "mapping-books.json");
Limit limit = as(plan, Limit.class); // Implicit limit added by AddImplicitLimit rule.
@@ -3722,7 +3696,7 @@ public void testResolveRerankScoreField() {
LogicalPlan plan = analyze("""
FROM books
| WHERE title:"italian food recipe" OR description:"italian food recipe"
- | RERANK "italian food recipe" ON title WITH inferenceId=`reranking-inference-id`
+ | RERANK "italian food recipe" ON title WITH { "inference_id" : "reranking-inference-id" }
""", "mapping-books.json");
Limit limit = as(plan, Limit.class); // Implicit limit added by AddImplicitLimit rule.
@@ -3740,7 +3714,7 @@ public void testResolveRerankScoreField() {
LogicalPlan plan = analyze("""
FROM books METADATA _score
| WHERE title:"italian food recipe" OR description:"italian food recipe"
- | RERANK "italian food recipe" ON title WITH inferenceId=`reranking-inference-id`, scoreColumn=rerank_score
+ | RERANK rerank_score = "italian food recipe" ON title WITH { "inference_id" : "reranking-inference-id" }
""", "mapping-books.json");
Limit limit = as(plan, Limit.class); // Implicit limit added by AddImplicitLimit rule.
@@ -3758,7 +3732,7 @@ public void testResolveRerankScoreField() {
FROM books METADATA _score
| WHERE title:"italian food recipe" OR description:"italian food recipe"
| EVAL rerank_score = _score
- | RERANK "italian food recipe" ON title WITH inferenceId=`reranking-inference-id`, scoreColumn=rerank_score
+ | RERANK rerank_score = "italian food recipe" ON title WITH { "inference_id" : "reranking-inference-id" }
""", "mapping-books.json");
Limit limit = as(plan, Limit.class); // Implicit limit added by AddImplicitLimit rule.
@@ -3775,8 +3749,9 @@ public void testResolveRerankScoreField() {
public void testResolveCompletionInferenceId() {
LogicalPlan plan = analyze("""
FROM books METADATA _score
- | COMPLETION CONCAT("Translate the following text in French\\n", description) WITH `completion-inference-id`
+ | COMPLETION CONCAT("Translate this text in French\\n", description) WITH { "inference_id" : "completion-inference-id" }
""", "mapping-books.json");
+
Completion completion = as(as(plan, Limit.class).child(), Completion.class);
assertThat(completion.inferenceId(), equalTo(string("completion-inference-id")));
}
@@ -3785,7 +3760,7 @@ public void testResolveCompletionInferenceIdInvalidTaskType() {
assertError(
"""
FROM books METADATA _score
- | COMPLETION CONCAT("Translate the following text in French\\n", description) WITH `reranking-inference-id`
+ | COMPLETION CONCAT("Translate this text in French\\n", description) WITH { "inference_id" : "reranking-inference-id" }
""",
"mapping-books.json",
new QueryParams(),
@@ -3797,21 +3772,22 @@ public void testResolveCompletionInferenceIdInvalidTaskType() {
public void testResolveCompletionInferenceMissingInferenceId() {
assertError("""
FROM books METADATA _score
- | COMPLETION CONCAT("Translate the following text in French\\n", description) WITH `unknown-inference-id`
+ | COMPLETION CONCAT("Translate the following text in French\\n", description) WITH { "inference_id" : "unknown-inference-id" }
""", "mapping-books.json", new QueryParams(), "unresolved inference [unknown-inference-id]");
}
public void testResolveCompletionInferenceIdResolutionError() {
assertError("""
FROM books METADATA _score
- | COMPLETION CONCAT("Translate the following text in French\\n", description) WITH `error-inference-id`
+ | COMPLETION CONCAT("Translate the following text in French\\n", description) WITH { "inference_id" : "error-inference-id" }
""", "mapping-books.json", new QueryParams(), "error with inference resolution");
}
public void testResolveCompletionTargetField() {
LogicalPlan plan = analyze("""
FROM books METADATA _score
- | COMPLETION translation=CONCAT("Translate the following text in French\\n", description) WITH `completion-inference-id`
+ | COMPLETION translation = CONCAT("Translate the following text in French\\n", description)
+ WITH { "inference_id" : "completion-inference-id" }
""", "mapping-books.json");
Completion completion = as(as(plan, Limit.class).child(), Completion.class);
@@ -3821,7 +3797,7 @@ public void testResolveCompletionTargetField() {
public void testResolveCompletionDefaultTargetField() {
LogicalPlan plan = analyze("""
FROM books METADATA _score
- | COMPLETION CONCAT("Translate the following text in French\\n", description) WITH `completion-inference-id`
+ | COMPLETION CONCAT("Translate this text in French\\n", description) WITH { "inference_id" : "completion-inference-id" }
""", "mapping-books.json");
Completion completion = as(as(plan, Limit.class).child(), Completion.class);
@@ -3831,7 +3807,7 @@ public void testResolveCompletionDefaultTargetField() {
public void testResolveCompletionPrompt() {
LogicalPlan plan = analyze("""
FROM books METADATA _score
- | COMPLETION CONCAT("Translate the following text in French\\n", description) WITH `completion-inference-id`
+ | COMPLETION CONCAT("Translate this text in French\\n", description) WITH { "inference_id" : "completion-inference-id" }
""", "mapping-books.json");
Completion completion = as(as(plan, Limit.class).child(), Completion.class);
@@ -3839,21 +3815,22 @@ public void testResolveCompletionPrompt() {
assertThat(
as(completion.prompt(), Concat.class).children(),
- equalTo(List.of(string("Translate the following text in French\n"), getAttributeByName(esRelation.output(), "description")))
+ equalTo(List.of(string("Translate this text in French\n"), getAttributeByName(esRelation.output(), "description")))
);
}
public void testResolveCompletionPromptInvalidType() {
assertError("""
FROM books METADATA _score
- | COMPLETION LENGTH(description) WITH `completion-inference-id`
+ | COMPLETION LENGTH(description) WITH { "inference_id" : "completion-inference-id" }
""", "mapping-books.json", new QueryParams(), "prompt must be of type [text] but is [integer]");
}
- public void testResolveCompletionOutputField() {
+ public void testResolveCompletionOutputFieldOverwriteInputField() {
LogicalPlan plan = analyze("""
FROM books METADATA _score
- | COMPLETION description=CONCAT("Translate the following text in French\\n", description) WITH `completion-inference-id`
+ | COMPLETION description = CONCAT("Translate the following text in French\\n", description)
+ WITH { "inference_id" : "completion-inference-id" }
""", "mapping-books.json");
Completion completion = as(as(plan, Limit.class).child(), Completion.class);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java
index 409bb5bcba6fb..a429abf721739 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java
@@ -61,7 +61,7 @@ private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier
welfordAlgorithm.add(value);
}
var result = welfordAlgorithm.evaluate();
- var expected = Double.isInfinite(result) ? null : result;
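+ // Use isFinite so that NaN results, not only infinities, map to a null expected value.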
+ var expected = Double.isFinite(result) ? result : null;
return new TestCaseSupplier.TestCase(
List.of(fieldTypedData),
"StdDev[field=Attribute[channel=0]]",
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
index a658ab567a83e..4c707ca977501 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
@@ -5515,7 +5515,7 @@ record PushdownShadowingGeneratingPlanTestCase(
),
new PushDownEnrich()
),
- // | COMPLETION y=CONCAT(some text, x) WITH inferenceID
+ // | COMPLETION y = CONCAT(some text, x) WITH { "inference_id" : "inferenceID" }
new PushdownShadowingGeneratingPlanTestCase(
(plan, attr) -> new Completion(
EMPTY,
@@ -5526,7 +5526,7 @@ record PushdownShadowingGeneratingPlanTestCase(
),
new PushDownInferencePlan()
),
- // | RERANK "some text" ON x WITH inferenceID=inferenceID, scoreColumn=y
+ // | RERANK "some text" ON x INTO y WITH { "inference_id" : "inferenceID" }
new PushdownShadowingGeneratingPlanTestCase(
(plan, attr) -> new Rerank(
EMPTY,
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java
index ca975e1e09954..f3d1107983628 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java
@@ -247,8 +247,11 @@ public void testSelectivelyPushDownFilterPastFunctionAgg() {
assertEquals(expected, new PushDownAndCombineFilters().apply(fb));
}
- // from ... | where a > 1 | COMPLETION completion="some prompt" WITH inferenceId | where b < 2 and match(completion, some text)
- // => ... | where a > 1 AND b < 2| COMPLETION completion="some prompt" WITH inferenceId | where match(completion, some text)
+ // from ... | where a > 1 | COMPLETION completion = "some prompt" WITH { "inference_id" : "inferenceId" }
+ // | where b < 2 and match(completion, some text)
+ // => ... | where a > 1 AND b < 2 | COMPLETION completion = "some prompt" WITH { "inference_id" : "inferenceId" }
+ // | where match(completion, some text)
public void testPushDownFilterPastCompletion() {
FieldAttribute a = getFieldAttribute("a");
FieldAttribute b = getFieldAttribute("b");
@@ -284,8 +287,8 @@ public void testPushDownFilterPastCompletion() {
assertEquals(expectedOptimizedPlan, new PushDownAndCombineFilters().apply(filterB));
}
- // from ... | where a > 1 | RERANK "query" ON title WITH inferenceId | where b < 2 and _score > 1
- // => ... | where a > 1 AND b < 2| RERANK "query" ON title WITH inferenceId | where _score > 1
+ // from ... | where a > 1 | RERANK "query" ON title WITH { "inference_id" : "inferenceId" } | where b < 2 and _score > 1
+ // => ... | where a > 1 AND b < 2 | RERANK "query" ON title WITH { "inference_id" : "inferenceId" } | where _score > 1
public void testPushDownFilterPastRerank() {
FieldAttribute a = getFieldAttribute("a");
FieldAttribute b = getFieldAttribute("b");
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
index b7f4968cf725e..bf66bfb8345eb 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
@@ -2604,11 +2604,11 @@ public void testFailingMetadataWithSquareBrackets() {
);
}
- public void testNamedFunctionArgumentInMap() {
+ public void testFunctionNamedParameterInMap() {
// functions can be scalar, grouping and aggregation
// functions can be in eval/where/stats/sort/dissect/grok commands, commands in snapshot are not covered
// positive
- // In eval and where clause as function arguments
+ // In eval and where clause as function named parameters
LinkedHashMap<String, Object> expectedMap1 = new LinkedHashMap<>(4);
expectedMap1.put("option1", "string");
expectedMap1.put("option2", 1);
@@ -2652,7 +2652,7 @@ public void testNamedFunctionArgumentInMap() {
""")
);
- // In stats, by and sort as function arguments
+ // In stats, by and sort as function named parameters
assertEquals(
new OrderBy(
EMPTY,
@@ -2688,7 +2688,7 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt
""")
);
- // In dissect and grok as function arguments
+ // In dissect and grok as function named parameters
LogicalPlan plan = statement("""
from test
| dissect fn1(f1, f2, {"option1":"string", "option2":1,"option3":[2.0,3.0,4.0],"option4":[true,false]}) "%{bar}"
@@ -2707,7 +2707,7 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt
assertEquals(ur, relation("test"));
}
- public void testNamedFunctionArgumentInMapWithNamedParameters() {
+ public void testFunctionNamedParameterInMapWithNamedParameters() {
// map entry values provided in named parameter, arrays are not supported by named parameters yet
LinkedHashMap<String, Object> expectedMap1 = new LinkedHashMap<>(4);
expectedMap1.put("option1", "string");
@@ -2850,7 +2850,7 @@ public void testNamedFunctionArgumentInMapWithNamedParameters() {
assertEquals(ur, relation("test"));
}
- public void testNamedFunctionArgumentWithCaseSensitiveKeys() {
+ public void testFunctionNamedParameterWithCaseSensitiveKeys() {
LinkedHashMap<String, Object> expectedMap1 = new LinkedHashMap<>(3);
expectedMap1.put("option", "string");
expectedMap1.put("Option", 1);
@@ -2888,7 +2888,7 @@ public void testNamedFunctionArgumentWithCaseSensitiveKeys() {
);
}
- public void testMultipleNamedFunctionArgumentsNotAllowed() {
+ public void testMultipleFunctionNamedParametersNotAllowed() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "41"),
Map.entry("where {}", "38"),
@@ -2912,7 +2912,7 @@ public void testMultipleNamedFunctionArgumentsNotAllowed() {
}
}
- public void testNamedFunctionArgumentNotInMap() {
+ public void testFunctionNamedParameterNotInMap() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "38"),
Map.entry("where {}", "35"),
@@ -2936,7 +2936,7 @@ public void testNamedFunctionArgumentNotInMap() {
}
}
- public void testNamedFunctionArgumentNotConstant() {
+ public void testFunctionNamedParameterNotConstant() {
Map<String, String[]> commands = Map.ofEntries(
Map.entry("eval x = {}", new String[] { "31", "35" }),
Map.entry("where {}", new String[] { "28", "32" }),
@@ -2952,7 +2952,7 @@ public void testNamedFunctionArgumentNotConstant() {
String error1 = command.getValue()[0];
String error2 = command.getValue()[1];
String errorMessage1 = cmd.startsWith("dissect") || cmd.startsWith("grok")
- ? "mismatched input '1' expecting QUOTED_STRING"
+ ? "mismatched input '1' expecting {QUOTED_STRING"
: "no viable alternative at input 'fn(f1, { 1'";
String errorMessage2 = cmd.startsWith("dissect") || cmd.startsWith("grok")
? "mismatched input 'string' expecting {QUOTED_STRING"
@@ -2968,7 +2968,7 @@ public void testNamedFunctionArgumentNotConstant() {
}
}
- public void testNamedFunctionArgumentEmptyMap() {
+ public void testFunctionNamedParameterEmptyMap() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "30"),
Map.entry("where {}", "27"),
@@ -2982,17 +2982,12 @@ public void testNamedFunctionArgumentEmptyMap() {
for (Map.Entry<String, String> command : commands.entrySet()) {
String cmd = command.getKey();
String error = command.getValue();
- String errorMessage = cmd.startsWith("dissect") || cmd.startsWith("grok")
- ? "mismatched input '}' expecting QUOTED_STRING"
- : "no viable alternative at input 'fn(f1, {}'";
- expectError(
- LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {}})"),
- LoggerMessageFormat.format(null, "line 1:{}: {}", error, errorMessage)
- );
+
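+ // An empty map is now valid syntax, so the statement must parse without error.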
+ statement(LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {})"));
}
}
- public void testNamedFunctionArgumentMapWithNULL() {
+ public void testFunctionNamedParameterMapWithNULL() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "29"),
Map.entry("where {}", "26"),
@@ -3008,17 +3003,12 @@ public void testNamedFunctionArgumentMapWithNULL() {
String error = command.getValue();
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option\":null})"),
- LoggerMessageFormat.format(
- null,
- "line 1:{}: {}",
- error,
- "Invalid named function argument [\"option\":null], NULL is not supported"
- )
+ LoggerMessageFormat.format(null, "line 1:{}: {}", error, "Invalid named parameter [\"option\":null], NULL is not supported")
);
}
}
- public void testNamedFunctionArgumentMapWithEmptyKey() {
+ public void testFunctionNamedParameterMapWithEmptyKey() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "29"),
Map.entry("where {}", "26"),
@@ -3034,26 +3024,16 @@ public void testNamedFunctionArgumentMapWithEmptyKey() {
String error = command.getValue();
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"\":1})"),
- LoggerMessageFormat.format(
- null,
- "line 1:{}: {}",
- error,
- "Invalid named function argument [\"\":1], empty key is not supported"
- )
+ LoggerMessageFormat.format(null, "line 1:{}: {}", error, "Invalid named parameter [\"\":1], empty key is not supported")
);
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\" \":1})"),
- LoggerMessageFormat.format(
- null,
- "line 1:{}: {}",
- error,
- "Invalid named function argument [\" \":1], empty key is not supported"
- )
+ LoggerMessageFormat.format(null, "line 1:{}: {}", error, "Invalid named parameter [\" \":1], empty key is not supported")
);
}
}
- public void testNamedFunctionArgumentMapWithDuplicatedKey() {
+ public void testFunctionNamedParameterMapWithDuplicatedKey() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "29"),
Map.entry("where {}", "26"),
@@ -3073,13 +3053,13 @@ public void testNamedFunctionArgumentMapWithDuplicatedKey() {
null,
"line 1:{}: {}",
error,
- "Duplicated function arguments with the same name [dup] is not supported"
+ "Duplicated named parameters with the same name [dup] is not supported"
)
);
}
}
- public void testNamedFunctionArgumentInInvalidPositions() {
+ public void testFunctionNamedParameterInInvalidPositions() {
// negative, named arguments are not supported outside of a functionExpression where booleanExpression or indexPattern is supported
String map = "{\"option1\":\"string\", \"option2\":1}";
@@ -3108,7 +3088,7 @@ public void testNamedFunctionArgumentInInvalidPositions() {
}
}
- public void testNamedFunctionArgumentWithUnsupportedNamedParameterTypes() {
+ public void testFunctionNamedParameterWithUnsupportedNamedParameterTypes() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "29"),
Map.entry("where {}", "26"),
@@ -3129,7 +3109,7 @@ public void testNamedFunctionArgumentWithUnsupportedNamedParameterTypes() {
null,
"line 1:{}: {}",
error,
- "Invalid named function argument [\"option1\":?n1], only constant value is supported"
+ "Invalid named parameter [\"option1\":?n1], only constant value is supported"
)
);
expectError(
@@ -3139,7 +3119,7 @@ public void testNamedFunctionArgumentWithUnsupportedNamedParameterTypes() {
null,
"line 1:{}: {}",
error,
- "Invalid named function argument [\"option1\":?n1], only constant value is supported"
+ "Invalid named parameter [\"option1\":?n1], only constant value is supported"
)
);
}
@@ -3577,7 +3557,7 @@ public void testForkAllReleasedCommands() {
( LOOKUP JOIN idx2 ON f1 )
( ENRICH idx2 on f1 with f2 = f3 )
( FORK ( WHERE a:"baz" ) ( EVAL x = [ 1, 2, 3 ] ) )
- ( COMPLETION a = b WITH c )
+ ( COMPLETION a = b WITH { "inference_id": "c" } )
| KEEP a
""";
@@ -3614,7 +3594,7 @@ public void testForkAllCommands() {
( LOOKUP JOIN idx2 ON f1 )
( ENRICH idx2 on f1 with f2 = f3 )
( FORK ( WHERE a:"baz" ) ( EVAL x = [ 1, 2, 3 ] ) )
- ( COMPLETION a = b WITH c )
+ ( COMPLETION a = b WITH { "inference_id": "c" } )
( SAMPLE 0.99 )
| KEEP a
""";
@@ -3719,22 +3699,22 @@ public void testRerankDefaultInferenceIdAndScoreAttribute() {
assertThat(rerank.rerankFields(), equalTo(List.of(alias("title", attribute("title")))));
}
- public void testRerankInferenceId() {
+ public void testRerankEmptyOptions() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var plan = processingCommand("RERANK \"query text\" ON title WITH inferenceId=inferenceId");
+ var plan = processingCommand("RERANK \"query text\" ON title WITH {}");
var rerank = as(plan, Rerank.class);
- assertThat(rerank.inferenceId(), equalTo(literalString("inferenceId")));
+ assertThat(rerank.inferenceId(), equalTo(literalString(".rerank-v1-elasticsearch")));
+ assertThat(rerank.scoreAttribute(), equalTo(attribute("_score")));
assertThat(rerank.queryText(), equalTo(literalString("query text")));
assertThat(rerank.rerankFields(), equalTo(List.of(alias("title", attribute("title")))));
- assertThat(rerank.scoreAttribute(), equalTo(attribute("_score")));
}
- public void testRerankQuotedInferenceId() {
+ public void testRerankInferenceId() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var plan = processingCommand("RERANK \"query text\" ON title WITH inferenceId=\"inferenceId\"");
+ var plan = processingCommand("RERANK \"query text\" ON title WITH { \"inference_id\" : \"inferenceId\" }");
var rerank = as(plan, Rerank.class);
assertThat(rerank.inferenceId(), equalTo(literalString("inferenceId")));
@@ -3746,19 +3726,7 @@ public void testRerankQuotedInferenceId() {
public void testRerankScoreAttribute() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var plan = processingCommand("RERANK \"query text\" ON title WITH scoreColumn=rerank_score");
- var rerank = as(plan, Rerank.class);
-
- assertThat(rerank.inferenceId(), equalTo(literalString(".rerank-v1-elasticsearch")));
- assertThat(rerank.scoreAttribute(), equalTo(attribute("rerank_score")));
- assertThat(rerank.queryText(), equalTo(literalString("query text")));
- assertThat(rerank.rerankFields(), equalTo(List.of(alias("title", attribute("title")))));
- }
-
- public void testRerankQuotedScoreAttribute() {
- assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
-
- var plan = processingCommand("RERANK \"query text\" ON title WITH scoreColumn=\"rerank_score\"");
+ var plan = processingCommand("RERANK rerank_score=\"query text\" ON title");
var rerank = as(plan, Rerank.class);
assertThat(rerank.inferenceId(), equalTo(literalString(".rerank-v1-elasticsearch")));
@@ -3770,7 +3738,7 @@ public void testRerankQuotedScoreAttribute() {
public void testRerankInferenceIdAnddScoreAttribute() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var plan = processingCommand("RERANK \"query text\" ON title WITH inferenceId=inferenceId, scoreColumn=rerank_score");
+ var plan = processingCommand("RERANK rerank_score=\"query text\" ON title WITH { \"inference_id\" : \"inferenceId\" }");
var rerank = as(plan, Rerank.class);
assertThat(rerank.inferenceId(), equalTo(literalString("inferenceId")));
@@ -3782,7 +3750,7 @@ public void testRerankInferenceIdAnddScoreAttribute() {
public void testRerankSingleField() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var plan = processingCommand("RERANK \"query text\" ON title WITH inferenceId=inferenceID");
+ var plan = processingCommand("RERANK \"query text\" ON title WITH { \"inference_id\" : \"inferenceID\" }");
var rerank = as(plan, Rerank.class);
assertThat(rerank.queryText(), equalTo(literalString("query text")));
@@ -3794,7 +3762,9 @@ public void testRerankSingleField() {
public void testRerankMultipleFields() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var plan = processingCommand("RERANK \"query text\" ON title, description, authors_renamed=authors WITH inferenceId=inferenceID");
+ var plan = processingCommand(
+ "RERANK \"query text\" ON title, description, authors_renamed=authors WITH { \"inference_id\" : \"inferenceID\" }"
+ );
var rerank = as(plan, Rerank.class);
assertThat(rerank.queryText(), equalTo(literalString("query text")));
@@ -3815,9 +3785,9 @@ public void testRerankMultipleFields() {
public void testRerankComputedFields() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var plan = processingCommand(
- "RERANK \"query text\" ON title, short_description = SUBSTRING(description, 0, 100) WITH inferenceId=inferenceID"
- );
+ var plan = processingCommand("""
+ RERANK "query text" ON title, short_description = SUBSTRING(description, 0, 100) WITH { "inference_id": "inferenceID" }
+ """);
var rerank = as(plan, Rerank.class);
assertThat(rerank.queryText(), equalTo(literalString("query text")));
@@ -3834,14 +3804,25 @@ public void testRerankComputedFields() {
assertThat(rerank.scoreAttribute(), equalTo(attribute("_score")));
}
+ public void testRerankComputedFieldsWithoutName() {
+ assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
+ // Unnamed aliases are forbidden
+ expectError(
+ "FROM books METADATA _score | RERANK \"food\" ON title, SUBSTRING(description, 0, 100), yearRenamed=year",
+ "line 1:63: mismatched input '(' expecting {<EOF>, '|', '=', ',', '.', 'with'}"
+ );
+ }
+
public void testRerankWithPositionalParameters() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var queryParams = new QueryParams(
- List.of(paramAsConstant(null, "query text"), paramAsConstant(null, "reranker"), paramAsConstant(null, "rerank_score"))
- );
+ var queryParams = new QueryParams(List.of(paramAsConstant(null, "query text"), paramAsConstant(null, "reranker")));
var rerank = as(
- parser.createStatement("row a = 1 | RERANK ? ON title WITH inferenceId=?, scoreColumn=? ", queryParams, EsqlTestUtils.TEST_CFG),
+ parser.createStatement(
+ "row a = 1 | RERANK rerank_score = ? ON title WITH { \"inference_id\" : ? }",
+ queryParams,
+ EsqlTestUtils.TEST_CFG
+ ),
Rerank.class
);
@@ -3854,16 +3835,10 @@ public void testRerankWithPositionalParameters() {
public void testRerankWithNamedParameters() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- var queryParams = new QueryParams(
- List.of(
- paramAsConstant("queryText", "query text"),
- paramAsConstant("inferenceId", "reranker"),
- paramAsConstant("scoreColumnName", "rerank_score")
- )
- );
+ var queryParams = new QueryParams(List.of(paramAsConstant("queryText", "query text"), paramAsConstant("inferenceId", "reranker")));
var rerank = as(
parser.createStatement(
- "row a = 1 | RERANK ?queryText ON title WITH inferenceId=?inferenceId, scoreColumn=?scoreColumnName",
+ "row a = 1 | RERANK rerank_score=?queryText ON title WITH { \"inference_id\": ?inferenceId }",
queryParams,
EsqlTestUtils.TEST_CFG
),
@@ -3873,22 +3848,46 @@ public void testRerankWithNamedParameters() {
assertThat(rerank.queryText(), equalTo(literalString("query text")));
assertThat(rerank.inferenceId(), equalTo(literalString("reranker")));
assertThat(rerank.rerankFields(), equalTo(List.of(alias("title", attribute("title")))));
+ assertThat(rerank.scoreAttribute(), equalTo(attribute("rerank_score")));
}
public void testInvalidRerank() {
assumeTrue("RERANK requires corresponding capability", EsqlCapabilities.Cap.RERANK.isEnabled());
- expectError("FROM foo* | RERANK ON title WITH inferenceId", "line 1:20: mismatched input 'ON' expecting {QUOTED_STRING");
+ expectError(
+ "FROM foo* | RERANK \"query text\" ON title WITH { \"inference_id\": 3 }",
+ "line 1:65: Option [inference_id] must be a valid string, found [3]"
+ );
+ expectError(
+ "FROM foo* | RERANK \"query text\" ON title WITH { \"inference_id\": \"inferenceId\", \"unknown_option\": 3 }",
+ "line 1:42: Inavalid option [unknown_option] in RERANK, expected one of [[inference_id]]"
+ );
+ expectError("FROM foo* | RERANK 45 ON title", "Query must be a valid string in RERANK, found [45]");
+ expectError("FROM foo* | RERANK ON title WITH inferenceId", "line 1:20: extraneous input 'ON' expecting {QUOTED_STRING");
expectError("FROM foo* | RERANK \"query text\" WITH inferenceId", "line 1:33: mismatched input 'WITH' expecting 'on'");
var fromPatterns = randomIndexPatterns(CROSS_CLUSTER);
expectError(
- "FROM " + fromPatterns + " | RERANK \"query text\" ON title WITH inferenceId=inferenceId",
+ "FROM " + fromPatterns + " | RERANK \"query text\" ON title WITH { \"inference_id\" : \"inference_id\" }",
"invalid index pattern [" + unquoteIndexPattern(fromPatterns) + "], remote clusters are not supported with RERANK"
);
}
+ public void testCompletionMissingOptions() {
+ expectError("FROM foo* | COMPLETION targetField = prompt", "line 1:44: Missing mandatory option [inference_id] in COMPLETION");
+ }
+
+ public void testCompletionEmptyOptions() {
+ expectError(
+ "FROM foo* | COMPLETION targetField = prompt WITH { }",
+ "line 1:45: Missing mandatory option [inference_id] in COMPLETION"
+ );
+ }
+
public void testCompletionUsingFieldAsPrompt() {
- var plan = as(processingCommand("COMPLETION targetField=prompt_field WITH inferenceID"), Completion.class);
+ var plan = as(
+ processingCommand("COMPLETION targetField=prompt_field WITH{ \"inference_id\" : \"inferenceID\" }"),
+ Completion.class
+ );
assertThat(plan.prompt(), equalTo(attribute("prompt_field")));
assertThat(plan.inferenceId(), equalTo(literalString("inferenceID")));
@@ -3896,7 +3895,10 @@ public void testCompletionUsingFieldAsPrompt() {
}
public void testCompletionUsingFunctionAsPrompt() {
- var plan = as(processingCommand("COMPLETION targetField=CONCAT(fieldA, fieldB) WITH inferenceID"), Completion.class);
+ var plan = as(
+ processingCommand("COMPLETION targetField=CONCAT(fieldA, fieldB) WITH { \"inference_id\" : \"inferenceID\" }"),
+ Completion.class
+ );
assertThat(plan.prompt(), equalTo(function("CONCAT", List.of(attribute("fieldA"), attribute("fieldB")))));
assertThat(plan.inferenceId(), equalTo(literalString("inferenceID")));
@@ -3904,7 +3906,7 @@ public void testCompletionUsingFunctionAsPrompt() {
}
public void testCompletionDefaultFieldName() {
- var plan = as(processingCommand("COMPLETION prompt_field WITH inferenceID"), Completion.class);
+ var plan = as(processingCommand("COMPLETION prompt_field WITH{ \"inference_id\" : \"inferenceID\" }"), Completion.class);
assertThat(plan.prompt(), equalTo(attribute("prompt_field")));
assertThat(plan.inferenceId(), equalTo(literalString("inferenceID")));
@@ -3914,7 +3916,11 @@ public void testCompletionDefaultFieldName() {
public void testCompletionWithPositionalParameters() {
var queryParams = new QueryParams(List.of(paramAsConstant(null, "inferenceId")));
var plan = as(
- parser.createStatement("row a = 1 | COMPLETION prompt_field WITH ?", queryParams, EsqlTestUtils.TEST_CFG),
+ parser.createStatement(
+ "row a = 1 | COMPLETION prompt_field WITH { \"inference_id\" : ? }",
+ queryParams,
+ EsqlTestUtils.TEST_CFG
+ ),
Completion.class
);
@@ -3926,7 +3932,11 @@ public void testCompletionWithPositionalParameters() {
public void testCompletionWithNamedParameters() {
var queryParams = new QueryParams(List.of(paramAsConstant("inferenceId", "myInference")));
var plan = as(
- parser.createStatement("row a = 1 | COMPLETION prompt_field WITH ?inferenceId", queryParams, EsqlTestUtils.TEST_CFG),
+ parser.createStatement(
+ "row a = 1 | COMPLETION prompt_field WITH { \"inference_id\" : ?inferenceId }",
+ queryParams,
+ EsqlTestUtils.TEST_CFG
+ ),
Completion.class
);
@@ -3936,15 +3946,22 @@ public void testCompletionWithNamedParameters() {
}
public void testInvalidCompletion() {
- expectError("FROM foo* | COMPLETION WITH inferenceId", "line 1:24: extraneous input 'WITH' expecting {");
+ expectError(
+ "FROM foo* | COMPLETION prompt WITH { \"inference_id\": 3 }",
+ "line 1:54: Option [inference_id] must be a valid string, found [3]"
+ );
+ expectError(
+ "FROM foo* | COMPLETION prompt WITH { \"inference_id\": \"inferenceId\", \"unknown_option\": 3 }",
+ "line 1:31: Inavalid option [unknown_option] in COMPLETION, expected one of [[inference_id]]"
+ );
- expectError("FROM foo* | COMPLETION completion=prompt WITH", "line 1:46: mismatched input '' expecting {");
+ expectError("FROM foo* | COMPLETION WITH inferenceId", "line 1:24: extraneous input 'WITH' expecting {");
- expectError("FROM foo* | COMPLETION completion=prompt", "line 1:41: mismatched input '' expecting {");
+ expectError("FROM foo* | COMPLETION completion=prompt WITH", "ine 1:46: mismatched input '' expecting '{'");
var fromPatterns = randomIndexPatterns(CROSS_CLUSTER);
expectError(
- "FROM " + fromPatterns + " | COMPLETION prompt_field WITH inferenceId",
+ "FROM " + fromPatterns + " | COMPLETION prompt_field WITH { \"inference_id\" : \"inference_id\" }",
"invalid index pattern [" + unquoteIndexPattern(fromPatterns) + "], remote clusters are not supported with COMPLETION"
);
}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java
index 13887fbd1740c..62c4812d157e5 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java
@@ -24,7 +24,6 @@
import java.util.Set;
public class JoinTests extends ESTestCase {
- @AwaitsFix(bugUrl = "Test needs updating to the new JOIN planning")
public void testExpressionsAndReferences() {
int numMatchFields = between(1, 10);
diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java
index fb6b48f33fc16..c4574a3115624 100644
--- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java
+++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java
@@ -78,7 +78,7 @@ public void testTooManyPartitions() throws Exception {
// Assert we haven't violated the limit too much
GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
- assertThat(modelSizeStats.getModelBytes(), lessThan(32000000L));
+ assertThat(modelSizeStats.getModelBytes(), lessThan(50300000L));
assertThat(modelSizeStats.getModelBytes(), greaterThan(24000000L));
assertThat(
modelSizeStats.getMemoryStatus(),
@@ -125,7 +125,7 @@ public void testTooManyByFields() throws Exception {
// Assert we haven't violated the limit too much
GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
- assertThat(modelSizeStats.getModelBytes(), lessThan(35000000L));
+ assertThat(modelSizeStats.getModelBytes(), lessThan(45000000L));
assertThat(modelSizeStats.getModelBytes(), greaterThan(25000000L));
assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
}
@@ -176,7 +176,7 @@ public void testTooManyByAndOverFields() throws Exception {
// Assert we haven't violated the limit too much
GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
- assertThat(modelSizeStats.getModelBytes(), lessThan(33000000L));
+ assertThat(modelSizeStats.getModelBytes(), lessThan(72000000L));
assertThat(modelSizeStats.getModelBytes(), greaterThan(24000000L));
assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
}
@@ -226,7 +226,7 @@ public void testManyDistinctOverFields() throws Exception {
GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
assertThat(modelSizeStats.getModelBytes(), lessThan(120500000L));
- assertThat(modelSizeStats.getModelBytes(), greaterThan(90000000L));
+ assertThat(modelSizeStats.getModelBytes(), greaterThan(70000000L));
assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
index 0cc19c3fd0d2c..7a97a74cf44b8 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
@@ -2313,11 +2313,6 @@ public void testCacheCleanupOnProjectDeletion() throws Exception {
doCallRealMethod().when(fileRolesStore).accept(anySet(), anyActionListener());
when(fileRolesStore.roleDescriptors(anySet())).thenReturn(Set.of(roleDescriptor));
- final NativeRolesStore nativeRolesStore = mock(NativeRolesStore.class);
- doCallRealMethod().when(nativeRolesStore).accept(anySet(), anyActionListener());
- final ReservedRolesStore reservedRolesStore = mock(ReservedRolesStore.class);
- doCallRealMethod().when(reservedRolesStore).accept(anySet(), anyActionListener());
-
final AtomicReference<ProjectId> projectId = new AtomicReference<>(ProjectId.DEFAULT);
final ProjectResolver projectResolver = TestProjectResolvers.singleProject(projectId::get);
@@ -2325,8 +2320,8 @@ public void testCacheCleanupOnProjectDeletion() throws Exception {
Settings.EMPTY,
clusterService,
fileRolesStore,
- nativeRolesStore,
- reservedRolesStore,
+ null, // nativeRolesStore
+ null, // reservedRolesStore
null,
null,
null,
diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java
index 0929561909963..bb03585a8fdfe 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java
+++ b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java
@@ -33,6 +33,11 @@ public class MinioRepositoryAnalysisRestIT extends AbstractRepositoryAnalysisRes
.keystore("s3.client.repository_test_kit.secret_key", "s3_test_secret_key")
.setting("s3.client.repository_test_kit.endpoint", minioFixture::getAddress)
.setting("xpack.security.enabled", "false")
+ // Skip listing of pre-existing uploads during a CAS because MinIO sometimes leaks them; also reduce the delay before proceeding
+ // TODO do not set these if running a MinIO version in which https://github.com/minio/minio/issues/21189
+ // and https://github.com/minio/minio/issues/21456 are both fixed
+ .setting("repository_s3.compare_and_exchange.time_to_live", "-1")
+ .setting("repository_s3.compare_and_exchange.anti_contention_delay", "100ms")
.setting("xpack.ml.enabled", "false")
.build();
diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractRepositoryAnalysisRestTestCase.java b/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractRepositoryAnalysisRestTestCase.java
index a971772975128..69ab919ec3392 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractRepositoryAnalysisRestTestCase.java
+++ b/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractRepositoryAnalysisRestTestCase.java
@@ -7,9 +7,12 @@
package org.elasticsearch.repositories.blobstore.testkit.analyze;
+import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpPost;
import org.elasticsearch.client.Request;
+import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.TimeValue;
import org.elasticsearch.test.rest.ESRestTestCase;
public abstract class AbstractRepositoryAnalysisRestTestCase extends ESRestTestCase {
@@ -26,12 +29,18 @@ public void testRepositoryAnalysis() throws Exception {
logger.info("creating repository [{}] of type [{}]", repository, repositoryType);
registerRepository(repository, repositoryType, true, repositorySettings);
+ final TimeValue timeout = TimeValue.timeValueSeconds(120);
final Request request = new Request(HttpPost.METHOD_NAME, "/_snapshot/" + repository + "/_analyze");
request.addParameter("blob_count", "10");
request.addParameter("concurrency", "4");
request.addParameter("max_blob_size", randomFrom("1mb", "10mb"));
- request.addParameter("timeout", "120s");
+ request.addParameter("timeout", timeout.getStringRep());
request.addParameter("seed", Long.toString(randomLong()));
+ request.setOptions(
+ RequestOptions.DEFAULT.toBuilder()
+ .setRequestConfig(RequestConfig.custom().setSocketTimeout(Math.toIntExact(timeout.millis() + 10_000)).build())
+ );
+
assertOK(client().performRequest(request));
}
diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java
index a2f8dd91176e2..bd1178937b463 100644
--- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java
+++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java
@@ -37,7 +37,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry {
// The stack template registry version. This number must be incremented when we make changes
// to built-in templates.
- public static final int REGISTRY_VERSION = 16;
+ public static final int REGISTRY_VERSION = 17;
public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version";
public static final Setting<Boolean> STACK_TEMPLATES_ENABLED = Setting.boolSetting(
diff --git a/x-pack/plugin/stack/src/yamlRestTest/resources/rest-api-spec/test/stack/10_basic.yml b/x-pack/plugin/stack/src/yamlRestTest/resources/rest-api-spec/test/stack/10_basic.yml
index 3b8bfa9dfb7c8..8d6bf14e419a0 100644
--- a/x-pack/plugin/stack/src/yamlRestTest/resources/rest-api-spec/test/stack/10_basic.yml
+++ b/x-pack/plugin/stack/src/yamlRestTest/resources/rest-api-spec/test/stack/10_basic.yml
@@ -276,7 +276,6 @@ setup:
data_stream.namespace: "namespace1"
- do:
- catch: bad_request
index:
index: logs-dataset0-namespace1
body:
@@ -284,6 +283,7 @@ setup:
data_stream.type: "metrics"
data_stream.dataset: "dataset0"
data_stream.namespace: "namespace1"
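+ # The type mismatch is no longer rejected outright; the document should be redirected to the failure store.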
+ - match: { failure_store: used }
- do:
catch: bad_request