Commit 25e6a0d

Merge branch 'main' into tests/knn-fix-search-with-no-dimensions
2 parents 6641302 + cfdf316

68 files changed: +1491 -248 lines changed
Lines changed: 15 additions & 7 deletions

@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */

-package org.elasticsearch.benchmark.compute.operator;
+package org.elasticsearch.benchmark._nightly.esql;

 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -85,10 +85,18 @@
 @State(Scope.Thread)
 @Fork(1)
 public class ValuesSourceReaderBenchmark {
+    private static final String[] SUPPORTED_LAYOUTS = new String[] { "in_order", "shuffled", "shuffled_singles" };
+    private static final String[] SUPPORTED_NAMES = new String[] {
+        "long",
+        "int",
+        "double",
+        "keyword",
+        "stored_keyword",
+        "3_stored_keywords" };
+
     private static final int BLOCK_LENGTH = 16 * 1024;
     private static final int INDEX_SIZE = 10 * BLOCK_LENGTH;
     private static final int COMMIT_INTERVAL = 500;
-    private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE;
     private static final BlockFactory blockFactory = BlockFactory.getInstance(
         new NoopCircuitBreaker("noop"),
         BigArrays.NON_RECYCLING_INSTANCE
@@ -104,8 +112,8 @@ static void selfTest() {
             ValuesSourceReaderBenchmark benchmark = new ValuesSourceReaderBenchmark();
             benchmark.setupIndex();
             try {
-                for (String layout : ValuesSourceReaderBenchmark.class.getField("layout").getAnnotationsByType(Param.class)[0].value()) {
-                    for (String name : ValuesSourceReaderBenchmark.class.getField("name").getAnnotationsByType(Param.class)[0].value()) {
+                for (String layout : ValuesSourceReaderBenchmark.SUPPORTED_LAYOUTS) {
+                    for (String name : ValuesSourceReaderBenchmark.SUPPORTED_NAMES) {
                         benchmark.layout = layout;
                         benchmark.name = name;
                         try {
@@ -119,7 +127,7 @@ static void selfTest() {
            } finally {
                benchmark.teardownIndex();
            }
-        } catch (IOException | NoSuchFieldException e) {
+        } catch (IOException e) {
            throw new AssertionError(e);
        }
    }
@@ -321,10 +329,10 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
     * each page has a single document rather than {@code BLOCK_SIZE} docs.</li>
     * </ul>
     */
-    @Param({ "in_order", "shuffled", "shuffled_singles" })
+    @Param({ "in_order", "shuffled" })
    public String layout;

-    @Param({ "long", "int", "double", "keyword", "stored_keyword", "3_stored_keywords" })
+    @Param({ "long", "keyword", "stored_keyword" })
    public String name;

    private Directory directory;
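
The selfTest change above stops reading the `@Param` values through reflection and iterates over the new static arrays instead, which is why `NoSuchFieldException` disappears from the catch clause. A standalone sketch of the same pattern, with a made-up benchmark class rather than the real one:

```java
import java.io.IOException;

/** Illustrative stand-in for a JMH-style benchmark with string parameters. */
class ExampleBenchmark {
    // Mirror the @Param values in plain static arrays so a self test can
    // iterate over them without reflection.
    static final String[] SUPPORTED_LAYOUTS = { "in_order", "shuffled" };
    static final String[] SUPPORTED_NAMES = { "long", "keyword" };

    String layout;
    String name;

    void runOnce() throws IOException {
        // A real benchmark would read an index here; this stub only prints.
        System.out.println(layout + "/" + name);
    }

    /** Self test: exercise every parameter combination once. */
    static void selfTest() {
        ExampleBenchmark benchmark = new ExampleBenchmark();
        try {
            for (String layout : SUPPORTED_LAYOUTS) {
                for (String name : SUPPORTED_NAMES) {
                    benchmark.layout = layout;
                    benchmark.name = name;
                    benchmark.runOnce();
                }
            }
        } catch (IOException e) {
            // No reflection means no NoSuchFieldException to handle.
            throw new AssertionError(e);
        }
    }

    public static void main(String[] args) {
        selfTest();
    }
}
```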
Lines changed: 1 addition & 1 deletion

@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */

-package org.elasticsearch.benchmark.compute.operator;
+package org.elasticsearch.benchmark._nightly.esql;

 import org.elasticsearch.test.ESTestCase;

docs/changelog/130427.yaml

Lines changed: 15 additions & 3 deletions

@@ -1,5 +1,17 @@
 pr: 130427
-summary: Disallow brackets in unquoted index pattersn
+summary: Disallow brackets in unquoted index patterns
 area: ES|QL
-type: bug
-issues: []
+type: breaking
+issues:
+ - 130378
+breaking:
+  title: Unquoted index patterns do not allow `(` and `)` characters
+  area: ES|QL
+  details: >-
+    Previously, ES|QL accepted unquoted index patterns containing brackets, such as `FROM index(1) | ENRICH policy(2)`.
+
+    This query syntax is no longer valid because it could conflict with subquery syntax, where brackets are used as delimiters.
+
+    Brackets are now only allowed in quoted index patterns. For example: `FROM "index(1)" | ENRICH "policy(2)"`.
+  impact: "This affects existing queries containing brackets in index or policy names, i.e. in FROM, ENRICH, and LOOKUP JOIN commands."
+  notable: false
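
Client code that assembles ES|QL query strings now has to add the quotes itself when an index or policy name contains brackets. A minimal sketch, using only the example names from the changelog entry above (the `quote` helper is illustrative, not an Elasticsearch API, and does not handle names that themselves contain double quotes):

```java
public class EsqlIndexPatternQuoting {

    // Wrap a name in double quotes; required under the new grammar when the
    // name contains `(` or `)`. Illustrative only: no escaping of embedded quotes.
    static String quote(String name) {
        return "\"" + name + "\"";
    }

    public static void main(String[] args) {
        String query = "FROM " + quote("index(1)") + " | ENRICH " + quote("policy(2)");
        System.out.println(query); // FROM "index(1)" | ENRICH "policy(2)"
    }
}
```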

docs/changelog/130939.yaml

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+pr: 130939
+summary: Expose HTTP connection metrics to telemetry
+area: Network
+type: enhancement
+issues: []

docs/changelog/131032.yaml

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+pr: 131032
+summary: "Fix: `GET _synonyms` returns synonyms with empty rules"
+area: Relevance
+type: bug
+issues: []

docs/reference/query-languages/esql/limitations.md

Lines changed: 3 additions & 0 deletions

@@ -250,3 +250,6 @@ Work around this limitation by converting the field to single value with one of
 * CSV export from Discover shows no more than 10,000 rows. This limit only applies to the number of rows that are retrieved by the query and displayed in Discover. Queries and aggregations run on the full data set.
 * Querying many indices at once without any filters can cause an error in kibana which looks like `[esql] > Unexpected error from Elasticsearch: The content length (536885793) is bigger than the maximum allowed string (536870888)`. The response from {{esql}} is too long. Use [`DROP`](/reference/query-languages/esql/commands/processing-commands.md#esql-drop) or [`KEEP`](/reference/query-languages/esql/commands/processing-commands.md#esql-keep) to limit the number of fields returned.

+## Known issues [esql-known-issues]
+
+Refer to [Known issues](/release-notes/known-issues.md) for a list of known issues for {{esql}}.

docs/release-notes/known-issues.md

Lines changed: 20 additions & 2 deletions

@@ -8,10 +8,19 @@ mapped_pages:
 Known issues are significant defects or limitations that may impact your implementation. These issues are actively being worked on and will be addressed in a future release. Review the Elasticsearch known issues to help you make informed decisions, such as upgrading to a new version.

 ## 9.0.3 [elasticsearch-9.0.3-known-issues]
-A bug in the merge scheduler in Elasticsearch 9.0.3 may prevent shards from closing when there isn’t enough disk space to complete a merge. As a result, operations such as closing or relocating an index may hang until sufficient disk space becomes available.
+* A bug in the merge scheduler in Elasticsearch 9.0.3 may prevent shards from closing when there isn’t enough disk space to complete a merge. As a result, operations such as closing or relocating an index may hang until sufficient disk space becomes available.
 To mitigate this issue, the disk space checker is disabled by default in 9.0.3 by setting `indices.merge.disk.check_interval` to `0` seconds. Manually enabling this setting is not recommended.

-This issue is planned to be fixed in future patch release [#129613](https://github.com/elastic/elasticsearch/pull/129613)
+  This issue is planned to be fixed in future patch release [#129613](https://github.com/elastic/elasticsearch/pull/129613)
+
+* A bug in the ES|QL STATS command may yield incorrect results. The bug only happens in very specific cases that follow this pattern: `STATS ... BY keyword1, keyword2`, i.e. the command must have exactly two grouping fields, both keywords, where the first field has high cardinality (more than 65k distinct values).
+
+  The bug is described in detail in [this issue](https://github.com/elastic/elasticsearch/issues/130644).
+  The problem was introduced in 8.16.0 and [fixed](https://github.com/elastic/elasticsearch/pull/130705) in 8.17.9, 8.18.7, 9.0.4.
+
+  Possible workarounds include:
+  * switching the order of the grouping keys (eg. `STATS ... BY keyword2, keyword1`, if the `keyword2` has a lower cardinality)
+  * reducing the grouping key cardinality, by filtering out values before STATS

 ## 9.0.0 [elasticsearch-9.0.0-known-issues]
 * Elasticsearch on Windows might fail to start, or might forbid some file-related operations, when referencing paths with a case different from the one stored by the filesystem. Windows treats paths as case-insensitive, but the filesystem stores them with case. Entitlements, the new security system used by Elasticsearch, treat all paths as case-sensitive, and can therefore prevent access to a path that should be accessible.
@@ -40,3 +49,12 @@ This issue will be fixed in a future patch release (see [PR #126990](https://git
     DELETE _index_template/.watches
     POST /_watcher/_start
     ```
+
+* A bug in the ES|QL STATS command may yield incorrect results. The bug only happens in very specific cases that follow this pattern: `STATS ... BY keyword1, keyword2`, i.e. the command must have exactly two grouping fields, both keywords, where the first field has high cardinality (more than 65k distinct values).
+
+  The bug is described in detail in [this issue](https://github.com/elastic/elasticsearch/issues/130644).
+  The problem was introduced in 8.16.0 and [fixed](https://github.com/elastic/elasticsearch/pull/130705) in 8.17.9, 8.18.7, 9.0.4.
+
+  Possible workarounds include:
+  * switching the order of the grouping keys (eg. `STATS ... BY keyword2, keyword1`, if the `keyword2` has a lower cardinality)
+  * reducing the grouping key cardinality, by filtering out values before STATS

libs/x-content/src/main/java/org/elasticsearch/xcontent/AbstractObjectParser.java

Lines changed: 7 additions & 0 deletions

@@ -10,6 +10,7 @@
 package org.elasticsearch.xcontent;

 import org.elasticsearch.core.CheckedFunction;
+import org.elasticsearch.core.UpdateForV10;
 import org.elasticsearch.xcontent.ObjectParser.NamedObjectParser;
 import org.elasticsearch.xcontent.ObjectParser.ValueType;

@@ -230,11 +231,13 @@ public void declareDoubleOrNull(BiConsumer<Value, Double> consumer, double nullV
         );
     }

+    @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // https://github.com/elastic/elasticsearch/issues/130797
     public void declareLong(BiConsumer<Value, Long> consumer, ParseField field) {
         // Using a method reference here angers some compilers
         declareField(consumer, p -> p.longValue(), field, ValueType.LONG);
     }

+    @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // https://github.com/elastic/elasticsearch/issues/130797
     public void declareLongOrNull(BiConsumer<Value, Long> consumer, long nullValue, ParseField field) {
         // Using a method reference here angers some compilers
         declareField(
@@ -245,6 +248,7 @@ public void declareLongOrNull(BiConsumer<Value, Long> consumer, long nullValue,
         );
     }

+    @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // https://github.com/elastic/elasticsearch/issues/130797
     public void declareInt(BiConsumer<Value, Integer> consumer, ParseField field) {
         // Using a method reference here angers some compilers
         declareField(consumer, p -> p.intValue(), field, ValueType.INT);
@@ -253,6 +257,7 @@ public void declareInt(BiConsumer<Value, Integer> consumer, ParseField field) {
     /**
      * Declare an integer field that parses explicit {@code null}s in the json to a default value.
      */
+    @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // https://github.com/elastic/elasticsearch/issues/130797
     public void declareIntOrNull(BiConsumer<Value, Integer> consumer, int nullValue, ParseField field) {
         declareField(
             consumer,
@@ -320,10 +325,12 @@ public void declareFloatArray(BiConsumer<Value, List<Float>> consumer, ParseFiel
         declareFieldArray(consumer, (p, c) -> p.floatValue(), field, ValueType.FLOAT_ARRAY);
     }

+    @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // https://github.com/elastic/elasticsearch/issues/130797
     public void declareLongArray(BiConsumer<Value, List<Long>> consumer, ParseField field) {
         declareFieldArray(consumer, (p, c) -> p.longValue(), field, ValueType.LONG_ARRAY);
     }

+    @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // https://github.com/elastic/elasticsearch/issues/130797
     public void declareIntArray(BiConsumer<Value, List<Integer>> consumer, ParseField field) {
         declareFieldArray(consumer, (p, c) -> p.intValue(), field, ValueType.INT_ARRAY);
     }
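
The declarers being annotated above are how callers register numeric fields on an `ObjectParser`. A minimal sketch of typical usage, assuming only the signatures visible in this diff; the `Point` class and the `x`/`y` field names are invented for illustration and are not part of this commit:

```java
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;

// Hypothetical value class, not from the Elasticsearch code base.
class Point {
    long x;
    int y;

    void setX(long x) { this.x = x; }
    void setY(int y) { this.y = y; }
}

class PointParser {
    // declareLong/declareInt (the methods gaining @UpdateForV10 above) register
    // a setter plus a field name on the parser.
    static final ObjectParser<Point, Void> PARSER = new ObjectParser<>("point", Point::new);
    static {
        PARSER.declareLong(Point::setX, new ParseField("x"));
        PARSER.declareInt(Point::setY, new ParseField("y"));
    }

    static Point parse(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
```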

modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexBasicTests.java

Lines changed: 2 additions & 0 deletions

@@ -23,6 +23,7 @@
 import java.util.Map;
 import java.util.stream.Collectors;

+import static org.elasticsearch.index.IndexSettings.SYNTHETIC_VECTORS;
 import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
@@ -181,6 +182,7 @@ public void testReindexFromComplexDateMathIndexName() throws Exception {
     }

     public void testReindexIncludeVectors() throws Exception {
+        assumeTrue("This test requires synthetic vectors to be enabled", SYNTHETIC_VECTORS);
         var resp1 = prepareCreate("test").setSettings(
             Settings.builder().put(IndexSettings.INDEX_MAPPING_SOURCE_SYNTHETIC_VECTORS_SETTING.getKey(), true).build()
         ).setMapping("foo", "type=dense_vector,similarity=l2_norm", "bar", "type=sparse_vector").get();

modules/reindex/src/test/java/org/elasticsearch/reindex/UpdateByQueryBasicTests.java

Lines changed: 2 additions & 0 deletions

@@ -24,6 +24,7 @@
 import java.util.Map;
 import java.util.stream.Collectors;

+import static org.elasticsearch.index.IndexSettings.SYNTHETIC_VECTORS;
 import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
@@ -157,6 +158,7 @@ public void testMissingSources() {
     }

     public void testUpdateByQueryIncludeVectors() throws Exception {
+        assumeTrue("This test requires synthetic vectors to be enabled", SYNTHETIC_VECTORS);
         var resp1 = prepareCreate("test").setSettings(
             Settings.builder().put(IndexSettings.INDEX_MAPPING_SOURCE_SYNTHETIC_VECTORS_SETTING.getKey(), true).build()
         ).setMapping("foo", "type=dense_vector,similarity=l2_norm", "bar", "type=sparse_vector").get();
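
Both test changes follow the same pattern: gate the vector round-trip tests on the `SYNTHETIC_VECTORS` flag so they are reported as skipped, rather than failed, when the feature is disabled. A standalone JUnit 4 sketch of that pattern; the flag and the system property it reads here are illustrative stand-ins, not the Elasticsearch ones:

```java
import static org.junit.Assume.assumeTrue;

import org.junit.Test;

public class FeatureGatedTest {

    // Illustrative stand-in for a build-time feature flag such as SYNTHETIC_VECTORS,
    // read from a system property so the example is self-contained.
    private static final boolean SYNTHETIC_VECTORS = Boolean.getBoolean("tests.synthetic_vectors");

    @Test
    public void testIncludeVectors() {
        // With the flag off, JUnit reports the test as skipped instead of failed.
        assumeTrue("This test requires synthetic vectors to be enabled", SYNTHETIC_VECTORS);
        // ... exercise the synthetic-vectors behaviour here ...
    }
}
```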
