
Commit 98d34e3

Merge branch '9.1' into backport/9.1/pr-130924

2 parents: 3465647 + 9e054f5

File tree: 20 files changed, +127 −59 lines
Lines changed: 15 additions & 7 deletions

@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-package org.elasticsearch.benchmark.compute.operator;
+package org.elasticsearch.benchmark._nightly.esql;
 
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -85,10 +85,18 @@
 @State(Scope.Thread)
 @Fork(1)
 public class ValuesSourceReaderBenchmark {
+    private static final String[] SUPPORTED_LAYOUTS = new String[] { "in_order", "shuffled", "shuffled_singles" };
+    private static final String[] SUPPORTED_NAMES = new String[] {
+        "long",
+        "int",
+        "double",
+        "keyword",
+        "stored_keyword",
+        "3_stored_keywords" };
+
     private static final int BLOCK_LENGTH = 16 * 1024;
     private static final int INDEX_SIZE = 10 * BLOCK_LENGTH;
     private static final int COMMIT_INTERVAL = 500;
-    private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE;
     private static final BlockFactory blockFactory = BlockFactory.getInstance(
         new NoopCircuitBreaker("noop"),
         BigArrays.NON_RECYCLING_INSTANCE
@@ -104,8 +112,8 @@ static void selfTest() {
         ValuesSourceReaderBenchmark benchmark = new ValuesSourceReaderBenchmark();
         benchmark.setupIndex();
         try {
-            for (String layout : ValuesSourceReaderBenchmark.class.getField("layout").getAnnotationsByType(Param.class)[0].value()) {
-                for (String name : ValuesSourceReaderBenchmark.class.getField("name").getAnnotationsByType(Param.class)[0].value()) {
+            for (String layout : ValuesSourceReaderBenchmark.SUPPORTED_LAYOUTS) {
+                for (String name : ValuesSourceReaderBenchmark.SUPPORTED_NAMES) {
                     benchmark.layout = layout;
                     benchmark.name = name;
                     try {
@@ -119,7 +127,7 @@ static void selfTest() {
             } finally {
                 benchmark.teardownIndex();
             }
-        } catch (IOException | NoSuchFieldException e) {
+        } catch (IOException e) {
            throw new AssertionError(e);
        }
    }
@@ -321,10 +329,10 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
      * each page has a single document rather than {@code BLOCK_SIZE} docs.</li>
      * </ul>
      */
-    @Param({ "in_order", "shuffled", "shuffled_singles" })
+    @Param({ "in_order", "shuffled" })
     public String layout;
 
-    @Param({ "long", "int", "double", "keyword", "stored_keyword", "3_stored_keywords" })
+    @Param({ "long", "keyword", "stored_keyword" })
     public String name;
 
     private Directory directory;
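
The old selfTest() discovered its layout/name combinations by reading the @Param annotation values reflectively, which is why it had to catch NoSuchFieldException; the new SUPPORTED_LAYOUTS and SUPPORTED_NAMES constants keep the full combination list even though the nightly @Param lists above are trimmed. A minimal, self-contained sketch of the reflective pattern that was removed; Param here is a hypothetical stand-in for JMH's org.openjdk.jmh.annotations.Param, not the real annotation:

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

public class ParamReflectionSketch {
    // Hypothetical stand-in for JMH's @Param, declared with runtime retention so it
    // can be read reflectively.
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    @interface Param {
        String[] value();
    }

    @Param({ "in_order", "shuffled" })
    public String layout;

    public static void main(String[] args) throws NoSuchFieldException {
        // The old selfTest() did essentially this, which forced it to handle
        // NoSuchFieldException; iterating explicit constants removes that failure mode.
        String[] values = ParamReflectionSketch.class.getField("layout")
            .getAnnotationsByType(Param.class)[0]
            .value();
        for (String layout : values) {
            System.out.println(layout);
        }
    }
}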
Lines changed: 1 addition & 1 deletion

@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-package org.elasticsearch.benchmark.compute.operator;
+package org.elasticsearch.benchmark._nightly.esql;
 
 import org.elasticsearch.test.ESTestCase;
 
docs/changelog/130427.yaml

Lines changed: 15 additions & 3 deletions

@@ -1,5 +1,17 @@
 pr: 130427
-summary: Disallow brackets in unquoted index pattersn
+summary: Disallow brackets in unquoted index patterns
 area: ES|QL
-type: bug
-issues: []
+type: breaking
+issues:
+ - 130378
+breaking:
+  title: Unquoted index patterns do not allow `(` and `)` characters
+  area: ES|QL
+  details: >-
+    Previously, ES|QL accepted unquoted index patterns containing brackets, such as `FROM index(1) | ENRICH policy(2)`.
+
+    This query syntax is no longer valid because it could conflict with subquery syntax, where brackets are used as delimiters.
+
+    Brackets are now only allowed in quoted index patterns. For example: `FROM "index(1)" | ENRICH "policy(2)"`.
+  impact: "This affects existing queries containing brackets in index or policy names, i.e. in FROM, ENRICH, and LOOKUP JOIN commands."
+  notable: false

docs/changelog/130914.yaml

Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
+pr: 130914
+summary: Fix LIMIT NPE with null value
+area: ES|QL
+type: bug
+issues:
+ - 130908

rest-api-spec/src/main/resources/rest-api-spec/api/search_mvt.json

Lines changed: 9 additions & 0 deletions

@@ -73,6 +73,15 @@
       "description":"Determines the geometry type for features in the aggs layer.",
       "default":"grid"
     },
+    "grid_agg":{
+      "type":"enum",
+      "options":[
+        "geotile",
+        "geohex"
+      ],
+      "description":"Aggregation used to create a grid for `field`.",
+      "default":"geotile"
+    },
     "size":{
       "type":"int",
       "description":"Maximum number of features to return in the hits layer. Accepts 0-10000.",

x-pack/plugin/build.gradle

Lines changed: 1 addition & 0 deletions

@@ -136,6 +136,7 @@ tasks.named("yamlRestCompatTestTransform").configure({ task ->
   task.skipTest("esql/191_lookup_join_on_datastreams/data streams not supported in LOOKUP JOIN", "Added support for aliases in JOINs")
   task.skipTest("esql/190_lookup_join/non-lookup index", "Error message changed")
   task.skipTest("esql/192_lookup_join_on_aliases/alias-pattern-multiple", "Error message changed")
+  task.skipTest("esql/10_basic/Test wrong LIMIT parameter", "Error message changed")
 })
 
 tasks.named('yamlRestCompatTest').configure {

x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java

Lines changed: 1 addition & 1 deletion

@@ -58,7 +58,7 @@ public Factory(
             taskConcurrency,
             limit,
             false,
-            ScoreMode.COMPLETE_NO_SCORES
+            shardContext -> ScoreMode.COMPLETE_NO_SCORES
         );
         this.shardRefCounters = contexts;
     }

x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMaxFactory.java

Lines changed: 1 addition & 1 deletion

@@ -129,7 +129,7 @@ public LuceneMaxFactory(
             taskConcurrency,
             limit,
             false,
-            ScoreMode.COMPLETE_NO_SCORES
+            shardContext -> ScoreMode.COMPLETE_NO_SCORES
         );
         this.contexts = contexts;
         this.fieldName = fieldName;

x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinFactory.java

Lines changed: 1 addition & 1 deletion

@@ -130,7 +130,7 @@ public LuceneMinFactory(
             taskConcurrency,
             limit,
             false,
-            ScoreMode.COMPLETE_NO_SCORES
+            shardContext -> ScoreMode.COMPLETE_NO_SCORES
         );
         this.shardRefCounters = contexts;
         this.fieldName = fieldName;

x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java

Lines changed: 9 additions & 2 deletions

@@ -112,11 +112,18 @@ protected Factory(
         int taskConcurrency,
         int limit,
         boolean needsScore,
-        ScoreMode scoreMode
+        Function<ShardContext, ScoreMode> scoreModeFunction
     ) {
         this.limit = limit;
         this.dataPartitioning = dataPartitioning;
-        this.sliceQueue = LuceneSliceQueue.create(contexts, queryFunction, dataPartitioning, autoStrategy, taskConcurrency, scoreMode);
+        this.sliceQueue = LuceneSliceQueue.create(
+            contexts,
+            queryFunction,
+            dataPartitioning,
+            autoStrategy,
+            taskConcurrency,
+            scoreModeFunction
+        );
         this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency);
         this.needsScore = needsScore;
     }
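
The signature change above swaps a fixed ScoreMode constant for a Function<ShardContext, ScoreMode>, so the score mode can be chosen per shard context; the count/min/max factories in the earlier diffs simply pass a constant-returning lambda. A minimal sketch of that shape, using hypothetical stand-in types rather than the real Elasticsearch/Lucene classes:

import java.util.function.Function;

public class ScoreModeFunctionSketch {
    // Hypothetical stand-ins for org.elasticsearch.compute.lucene.ShardContext
    // and org.apache.lucene.search.ScoreMode, only for illustration.
    record ShardContext(String index) {}
    enum ScoreMode { COMPLETE, COMPLETE_NO_SCORES }

    private final Function<ShardContext, ScoreMode> scoreModeFunction;

    ScoreModeFunctionSketch(Function<ShardContext, ScoreMode> scoreModeFunction) {
        this.scoreModeFunction = scoreModeFunction;
    }

    ScoreMode scoreModeFor(ShardContext context) {
        // Each shard context can now resolve its own score mode instead of
        // every shard sharing a single constant.
        return scoreModeFunction.apply(context);
    }

    public static void main(String[] args) {
        // Operators that never need scores pass a constant-returning lambda,
        // mirroring `shardContext -> ScoreMode.COMPLETE_NO_SCORES` in the diffs.
        ScoreModeFunctionSketch factory =
            new ScoreModeFunctionSketch(shardContext -> ScoreMode.COMPLETE_NO_SCORES);
        System.out.println(factory.scoreModeFor(new ShardContext("idx")));
    }
}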
