
Commit 68665f4

Merge branch 'new-tv-logic' of github.com:JVerwolf/elasticsearch into new-tv-logic

2 parents: 0e36d2b + fd02122

File tree: 436 files changed, +10859 −5220 lines


README.asciidoc

Lines changed: 1 addition & 1 deletion

@@ -275,7 +275,7 @@ For the complete Elasticsearch documentation visit
 https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html[elastic.co].

 For information about our documentation processes, see the
-xref:docs/README.asciidoc[docs README].
+xref:https://github.com/elastic/elasticsearch/blob/main/docs/README.md[docs README].

 [[examples]]
 == Examples and guides

benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/QueryPlanningBenchmark.java

Lines changed: 3 additions & 3 deletions

@@ -70,11 +70,11 @@ public class QueryPlanningBenchmark {
     private EsqlParser defaultParser;
     private Analyzer manyFieldsAnalyzer;
     private LogicalPlanOptimizer defaultOptimizer;
+    private Configuration config;

     @Setup
     public void setup() {
-
-        var config = new Configuration(
+        this.config = new Configuration(
             DateUtils.UTC,
             Locale.US,
             null,
@@ -116,7 +116,7 @@ public void setup() {
     }

     private LogicalPlan plan(EsqlParser parser, Analyzer analyzer, LogicalPlanOptimizer optimizer, String query) {
-        var parsed = parser.createStatement(query, new QueryParams(), telemetry);
+        var parsed = parser.createStatement(query, new QueryParams(), telemetry, config);
         var analyzed = analyzer.analyze(parsed);
         var optimized = optimizer.optimize(analyzed);
         return optimized;
benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java renamed to benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmark.java

Lines changed: 15 additions & 7 deletions

@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */

-package org.elasticsearch.benchmark.compute.operator;
+package org.elasticsearch.benchmark._nightly.esql;

 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -85,10 +85,18 @@
 @State(Scope.Thread)
 @Fork(1)
 public class ValuesSourceReaderBenchmark {
+    private static final String[] SUPPORTED_LAYOUTS = new String[] { "in_order", "shuffled", "shuffled_singles" };
+    private static final String[] SUPPORTED_NAMES = new String[] {
+        "long",
+        "int",
+        "double",
+        "keyword",
+        "stored_keyword",
+        "3_stored_keywords" };
+
     private static final int BLOCK_LENGTH = 16 * 1024;
     private static final int INDEX_SIZE = 10 * BLOCK_LENGTH;
     private static final int COMMIT_INTERVAL = 500;
-    private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE;
     private static final BlockFactory blockFactory = BlockFactory.getInstance(
         new NoopCircuitBreaker("noop"),
         BigArrays.NON_RECYCLING_INSTANCE
@@ -104,8 +112,8 @@ static void selfTest() {
         ValuesSourceReaderBenchmark benchmark = new ValuesSourceReaderBenchmark();
         benchmark.setupIndex();
         try {
-            for (String layout : ValuesSourceReaderBenchmark.class.getField("layout").getAnnotationsByType(Param.class)[0].value()) {
-                for (String name : ValuesSourceReaderBenchmark.class.getField("name").getAnnotationsByType(Param.class)[0].value()) {
+            for (String layout : ValuesSourceReaderBenchmark.SUPPORTED_LAYOUTS) {
+                for (String name : ValuesSourceReaderBenchmark.SUPPORTED_NAMES) {
                     benchmark.layout = layout;
                     benchmark.name = name;
                     try {
@@ -119,7 +127,7 @@ static void selfTest() {
             } finally {
                 benchmark.teardownIndex();
             }
-        } catch (IOException | NoSuchFieldException e) {
+        } catch (IOException e) {
             throw new AssertionError(e);
         }
     }
@@ -321,10 +329,10 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
      * each page has a single document rather than {@code BLOCK_SIZE} docs.</li>
      * </ul>
      */
-    @Param({ "in_order", "shuffled", "shuffled_singles" })
+    @Param({ "in_order", "shuffled" })
     public String layout;

-    @Param({ "long", "int", "double", "keyword", "stored_keyword", "3_stored_keywords" })
+    @Param({ "long", "keyword", "stored_keyword" })
     public String name;

     private Directory directory;
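The selfTest hunk above stops reflecting over the @Param annotations and iterates the new SUPPORTED_LAYOUTS / SUPPORTED_NAMES constants instead, which is why NoSuchFieldException drops out of the catch clause; it also lets the @Param lists shrink for the nightly run while the self test keeps covering every variant. A minimal standalone sketch of the two enumeration styles, using a hypothetical class name and assuming only jmh-core on the classpath:

import org.openjdk.jmh.annotations.Param;

public class ParamEnumerationSketch {

    // Full value set kept in a constant that both the benchmark and the self test can share.
    static final String[] SUPPORTED_LAYOUTS = { "in_order", "shuffled", "shuffled_singles" };

    // The JMH run may deliberately cover fewer values than the self test.
    @Param({ "in_order", "shuffled" })
    public String layout;

    public static void main(String[] args) throws NoSuchFieldException {
        // Old style: read the annotation reflectively; forces NoSuchFieldException handling.
        String[] fromAnnotation = ParamEnumerationSketch.class.getField("layout")
            .getAnnotationsByType(Param.class)[0]
            .value();

        // New style: read the shared constant directly; no reflection, no checked exception.
        String[] fromConstant = SUPPORTED_LAYOUTS;

        System.out.println(fromAnnotation.length + " values via reflection, " + fromConstant.length + " via the constant");
    }
}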

benchmarks/src/main/java/org/elasticsearch/benchmark/vector/Int4ScorerBenchmark.java

Lines changed: 72 additions & 6 deletions

@@ -8,12 +8,14 @@
  */
 package org.elasticsearch.benchmark.vector;

+import org.apache.lucene.index.VectorSimilarityFunction;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.MMapDirectory;
 import org.apache.lucene.util.VectorUtil;
+import org.apache.lucene.util.quantization.OptimizedScalarQuantizer;
 import org.elasticsearch.common.logging.LogConfigurator;
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.simdvec.ES91Int4VectorsScorer;
@@ -52,20 +54,26 @@ public class Int4ScorerBenchmark {
         LogConfigurator.configureESLogging(); // native access requires logging to be initialized
     }

-    @Param({ "384", "702", "1024" })
+    @Param({ "384", "782", "1024" })
     int dims;

-    int numVectors = 200;
-    int numQueries = 10;
+    int numVectors = 20 * ES91Int4VectorsScorer.BULK_SIZE;
+    int numQueries = 5;

     byte[] scratch;
     byte[][] binaryVectors;
     byte[][] binaryQueries;
+    float[] scores = new float[ES91Int4VectorsScorer.BULK_SIZE];
+
+    float[] scratchFloats = new float[3];

     ES91Int4VectorsScorer scorer;
     Directory dir;
     IndexInput in;

+    OptimizedScalarQuantizer.QuantizationResult queryCorrections;
+    float centroidDp;
+
     @Setup
     public void setup() throws IOException {
         binaryVectors = new byte[numVectors][dims];
@@ -77,9 +85,19 @@ public void setup() throws IOException {
                     binaryVector[i] = (byte) ThreadLocalRandom.current().nextInt(16);
                 }
                 out.writeBytes(binaryVector, 0, binaryVector.length);
+                ThreadLocalRandom.current().nextBytes(binaryVector);
+                out.writeBytes(binaryVector, 0, 14); // corrections
             }
         }

+        queryCorrections = new OptimizedScalarQuantizer.QuantizationResult(
+            ThreadLocalRandom.current().nextFloat(),
+            ThreadLocalRandom.current().nextFloat(),
+            ThreadLocalRandom.current().nextFloat(),
+            Short.toUnsignedInt((short) ThreadLocalRandom.current().nextInt())
+        );
+        centroidDp = ThreadLocalRandom.current().nextFloat();
+
         in = dir.openInput("vectors", IOContext.DEFAULT);
         binaryQueries = new byte[numVectors][dims];
         for (byte[] binaryVector : binaryVectors) {
@@ -105,18 +123,66 @@ public void scoreFromArray(Blackhole bh) throws IOException {
             in.seek(0);
             for (int i = 0; i < numVectors; i++) {
                 in.readBytes(scratch, 0, dims);
-                bh.consume(VectorUtil.int4DotProduct(binaryQueries[j], scratch));
+                int dp = VectorUtil.int4DotProduct(binaryQueries[j], scratch);
+                in.readFloats(scratchFloats, 0, 3);
+                float score = scorer.applyCorrections(
+                    queryCorrections.lowerInterval(),
+                    queryCorrections.upperInterval(),
+                    queryCorrections.quantizedComponentSum(),
+                    queryCorrections.additionalCorrection(),
+                    VectorSimilarityFunction.EUCLIDEAN,
+                    centroidDp, // assuming no centroid dot product for this benchmark
+                    scratchFloats[0],
+                    scratchFloats[1],
+                    Short.toUnsignedInt(in.readShort()),
+                    scratchFloats[2],
+                    dp
+                );
+                bh.consume(score);
             }
         }
     }

     @Benchmark
     @Fork(jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" })
-    public void scoreFromMemorySegmentOnlyVector(Blackhole bh) throws IOException {
+    public void scoreFromMemorySegment(Blackhole bh) throws IOException {
         for (int j = 0; j < numQueries; j++) {
             in.seek(0);
             for (int i = 0; i < numVectors; i++) {
-                bh.consume(scorer.int4DotProduct(binaryQueries[j]));
+                bh.consume(
+                    scorer.score(
+                        binaryQueries[j],
+                        queryCorrections.lowerInterval(),
+                        queryCorrections.upperInterval(),
+                        queryCorrections.quantizedComponentSum(),
+                        queryCorrections.additionalCorrection(),
+                        VectorSimilarityFunction.EUCLIDEAN,
+                        centroidDp
+                    )
+                );
+            }
+        }
+    }
+
+    @Benchmark
+    @Fork(jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" })
+    public void scoreFromMemorySegmentBulk(Blackhole bh) throws IOException {
+        for (int j = 0; j < numQueries; j++) {
+            in.seek(0);
+            for (int i = 0; i < numVectors; i += ES91Int4VectorsScorer.BULK_SIZE) {
+                scorer.scoreBulk(
+                    binaryQueries[j],
+                    queryCorrections.lowerInterval(),
+                    queryCorrections.upperInterval(),
+                    queryCorrections.quantizedComponentSum(),
+                    queryCorrections.additionalCorrection(),
+                    VectorSimilarityFunction.EUCLIDEAN,
+                    centroidDp,
+                    scores
+                );
+                for (float score : scores) {
+                    bh.consume(score);
+                }
             }
         }
     }
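One small arithmetic detail from the hunks above that is easy to miss: the 14 bytes of per-vector corrections written in setup() are exactly what scoreFromArray later reads back, three floats followed by one unsigned short. Spelled out as a trivially runnable check:

public class CorrectionBytesCheck {
    public static void main(String[] args) {
        // Three floats (in.readFloats(scratchFloats, 0, 3)) plus one short (in.readShort()) per vector.
        int correctionBytes = 3 * Float.BYTES + Short.BYTES;
        System.out.println(correctionBytes); // 14, matching out.writeBytes(binaryVector, 0, 14)
    }
}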

benchmarks/src/test/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmarkTests.java renamed to benchmarks/src/test/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmarkTests.java

Lines changed: 1 addition & 1 deletion

@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */

-package org.elasticsearch.benchmark.compute.operator;
+package org.elasticsearch.benchmark._nightly.esql;

 import org.elasticsearch.test.ESTestCase;


build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/LocateTransportVersionsPlugin.java

Lines changed: 1 addition & 0 deletions

@@ -17,6 +17,7 @@

 public class LocateTransportVersionsPlugin implements Plugin<Project> {
     public static final String TRANSPORT_VERSION_NAMES_FILE = "generated-transport-info/transport-version-set-names.txt";
+
     @Override
     public void apply(Project project) {
         // TODO figure out what the classpath needs to be to be able to scan the server classes

build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/ValidateTransportVersionsTask.java

Lines changed: 1 addition & 1 deletion

@@ -10,6 +10,7 @@
 package org.elasticsearch.gradle.internal.transport;

 import groovy.json.JsonSlurper;
+
 import org.gradle.api.DefaultTask;
 import org.gradle.api.file.RegularFileProperty;
 import org.gradle.api.tasks.InputDirectory;
@@ -43,7 +44,6 @@ public void validateTransportVersions() throws IOException {
         var dataFileDirectory = getDataFileDirectory();
         var tvDataDir = dataFileDirectory.getAsFile().get();

-
         Set<String> tvSetNamesInDataFiles = new HashSet<>();
         for (var tvDataFile : Objects.requireNonNull(tvDataDir.listFiles())) {
             if (tvDataFile.getName().endsWith("-LATEST.json") == false) {

distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/MachineDependentHeap.java

Lines changed: 8 additions & 1 deletion

@@ -40,6 +40,8 @@ public class MachineDependentHeap {

     private static final FeatureFlag NEW_ML_MEMORY_COMPUTATION_FEATURE_FLAG = new FeatureFlag("new_ml_memory_computation");

+    private boolean useNewMlMemoryComputation = false;
+
     public MachineDependentHeap() {}

     /**
@@ -55,6 +57,11 @@ public final List<String> determineHeapSettings(
         SystemMemoryInfo systemMemoryInfo,
         List<String> userDefinedJvmOptions
     ) throws IOException, InterruptedException {
+        if (userDefinedJvmOptions.contains("-Des.new_ml_memory_computation_feature_flag_enabled=true")
+            || NEW_ML_MEMORY_COMPUTATION_FEATURE_FLAG.isEnabled()) {
+            useNewMlMemoryComputation = true;
+        }
+
         // TODO: this could be more efficient, to only parse final options once
         final Map<String, JvmOption> finalJvmOptions = JvmOption.findFinalOptions(userDefinedJvmOptions);
         if (isMaxHeapSpecified(finalJvmOptions) || isMinHeapSpecified(finalJvmOptions) || isInitialHeapSpecified(finalJvmOptions)) {
@@ -107,7 +114,7 @@ protected int getHeapSizeMb(Settings nodeSettings, MachineNodeRole role, long av
             case ML_ONLY -> {
                 double heapFractionBelow16GB = 0.4;
                 double heapFractionAbove16GB = 0.1;
-                if (NEW_ML_MEMORY_COMPUTATION_FEATURE_FLAG.isEnabled()) {
+                if (useNewMlMemoryComputation) {
                     heapFractionBelow16GB = 0.4 / (1.0 + JvmErgonomics.DIRECT_MEMORY_TO_HEAP_FACTOR);
                     heapFractionAbove16GB = 0.1 / (1.0 + JvmErgonomics.DIRECT_MEMORY_TO_HEAP_FACTOR);
                 }

docs/changelog/129013.yaml

Lines changed: 13 additions & 0 deletions

@@ -0,0 +1,13 @@
+pr: 129013
+summary: "Add remote index support to LOOKUP JOIN"
+area: ES|QL
+type: feature
+issues: [ ]
+highlight:
+  title: Add remote index support to LOOKUP JOIN
+  body: |-
+    Queries containing LOOKUP JOIN can now be performed on cross-cluster indices, for example:
+    [source,yaml]
+    ----------------------------
+    FROM logs-*, remote:logs-* | LOOKUP JOIN clients on ip | SORT timestamp | LIMIT 100
+    ----------------------------

docs/changelog/130427.yaml

Lines changed: 15 additions & 3 deletions

@@ -1,5 +1,17 @@
 pr: 130427
-summary: Disallow brackets in unquoted index pattersn
+summary: Disallow brackets in unquoted index patterns
 area: ES|QL
-type: bug
-issues: []
+type: breaking
+issues:
+ - 130378
+breaking:
+  title: Unquoted index patterns do not allow `(` and `)` characters
+  area: ES|QL
+  details: >-
+    Previously, ES|QL accepted unquoted index patterns containing brackets, such as `FROM index(1) | ENRICH policy(2)`.
+
+    This query syntax is no longer valid because it could conflict with subquery syntax, where brackets are used as delimiters.
+
+    Brackets are now only allowed in quoted index patterns. For example: `FROM "index(1)" | ENRICH "policy(2)"`.
+  impact: "This affects existing queries containing brackets in index or policy names, i.e. in FROM, ENRICH, and LOOKUP JOIN commands."
+  notable: false
