Skip to content

Commit 7adee7a

Browse files
committed
Merge remote-tracking branch 'elasticsearch/main' into fork_grammar_update
2 parents 2758b53 + 0193dad commit 7adee7a

File tree

84 files changed

+3933
-1388
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

84 files changed

+3933
-1388
lines changed

benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java

Lines changed: 31 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,7 @@ public class AggregatorBenchmark {
7373
static final int BLOCK_LENGTH = 8 * 1024;
7474
private static final int OP_COUNT = 1024;
7575
private static final int GROUPS = 5;
76+
private static final int TOP_N_LIMIT = 3;
7677

7778
private static final BlockFactory blockFactory = BlockFactory.getInstance(
7879
new NoopCircuitBreaker("noop"),
@@ -90,6 +91,7 @@ public class AggregatorBenchmark {
9091
private static final String TWO_ORDINALS = "two_" + ORDINALS;
9192
private static final String LONGS_AND_BYTES_REFS = LONGS + "_and_" + BYTES_REFS;
9293
private static final String TWO_LONGS_AND_BYTES_REFS = "two_" + LONGS + "_and_" + BYTES_REFS;
94+
private static final String TOP_N_LONGS = "top_n_" + LONGS;
9395

9496
private static final String VECTOR_DOUBLES = "vector_doubles";
9597
private static final String HALF_NULL_DOUBLES = "half_null_doubles";
@@ -147,7 +149,8 @@ static void selfTest() {
147149
TWO_BYTES_REFS,
148150
TWO_ORDINALS,
149151
LONGS_AND_BYTES_REFS,
150-
TWO_LONGS_AND_BYTES_REFS }
152+
TWO_LONGS_AND_BYTES_REFS,
153+
TOP_N_LONGS }
151154
)
152155
public String grouping;
153156

@@ -161,8 +164,7 @@ static void selfTest() {
161164
public String filter;
162165

163166
private static Operator operator(DriverContext driverContext, String grouping, String op, String dataType, String filter) {
164-
165-
if (grouping.equals("none")) {
167+
if (grouping.equals(NONE)) {
166168
return new AggregationOperator(
167169
List.of(supplier(op, dataType, filter).aggregatorFactory(AggregatorMode.SINGLE, List.of(0)).apply(driverContext)),
168170
driverContext
@@ -188,6 +190,9 @@ private static Operator operator(DriverContext driverContext, String grouping, S
188190
new BlockHash.GroupSpec(1, ElementType.LONG),
189191
new BlockHash.GroupSpec(2, ElementType.BYTES_REF)
190192
);
193+
case TOP_N_LONGS -> List.of(
194+
new BlockHash.GroupSpec(0, ElementType.LONG, false, new BlockHash.TopNDef(0, true, true, TOP_N_LIMIT))
195+
);
191196
default -> throw new IllegalArgumentException("unsupported grouping [" + grouping + "]");
192197
};
193198
return new HashAggregationOperator(
@@ -271,10 +276,14 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
271276
case BOOLEANS -> 2;
272277
default -> GROUPS;
273278
};
279+
int availableGroups = switch (grouping) {
280+
case TOP_N_LONGS -> TOP_N_LIMIT;
281+
default -> groups;
282+
};
274283
switch (op) {
275284
case AVG -> {
276285
DoubleBlock dValues = (DoubleBlock) values;
277-
for (int g = 0; g < groups; g++) {
286+
for (int g = 0; g < availableGroups; g++) {
278287
long group = g;
279288
long sum = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum();
280289
long count = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).count();
@@ -286,7 +295,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
286295
}
287296
case COUNT -> {
288297
LongBlock lValues = (LongBlock) values;
289-
for (int g = 0; g < groups; g++) {
298+
for (int g = 0; g < availableGroups; g++) {
290299
long group = g;
291300
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).count() * opCount;
292301
if (lValues.getLong(g) != expected) {
@@ -296,7 +305,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
296305
}
297306
case COUNT_DISTINCT -> {
298307
LongBlock lValues = (LongBlock) values;
299-
for (int g = 0; g < groups; g++) {
308+
for (int g = 0; g < availableGroups; g++) {
300309
long group = g;
301310
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).distinct().count();
302311
long count = lValues.getLong(g);
@@ -310,15 +319,15 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
310319
switch (dataType) {
311320
case LONGS -> {
312321
LongBlock lValues = (LongBlock) values;
313-
for (int g = 0; g < groups; g++) {
322+
for (int g = 0; g < availableGroups; g++) {
314323
if (lValues.getLong(g) != (long) g) {
315324
throw new AssertionError(prefix + "expected [" + g + "] but was [" + lValues.getLong(g) + "]");
316325
}
317326
}
318327
}
319328
case DOUBLES -> {
320329
DoubleBlock dValues = (DoubleBlock) values;
321-
for (int g = 0; g < groups; g++) {
330+
for (int g = 0; g < availableGroups; g++) {
322331
if (dValues.getDouble(g) != (long) g) {
323332
throw new AssertionError(prefix + "expected [" + g + "] but was [" + dValues.getDouble(g) + "]");
324333
}
@@ -331,7 +340,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
331340
switch (dataType) {
332341
case LONGS -> {
333342
LongBlock lValues = (LongBlock) values;
334-
for (int g = 0; g < groups; g++) {
343+
for (int g = 0; g < availableGroups; g++) {
335344
long group = g;
336345
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).max().getAsLong();
337346
if (lValues.getLong(g) != expected) {
@@ -341,7 +350,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
341350
}
342351
case DOUBLES -> {
343352
DoubleBlock dValues = (DoubleBlock) values;
344-
for (int g = 0; g < groups; g++) {
353+
for (int g = 0; g < availableGroups; g++) {
345354
long group = g;
346355
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).max().getAsLong();
347356
if (dValues.getDouble(g) != expected) {
@@ -356,7 +365,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
356365
switch (dataType) {
357366
case LONGS -> {
358367
LongBlock lValues = (LongBlock) values;
359-
for (int g = 0; g < groups; g++) {
368+
for (int g = 0; g < availableGroups; g++) {
360369
long group = g;
361370
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum() * opCount;
362371
if (lValues.getLong(g) != expected) {
@@ -366,7 +375,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
366375
}
367376
case DOUBLES -> {
368377
DoubleBlock dValues = (DoubleBlock) values;
369-
for (int g = 0; g < groups; g++) {
378+
for (int g = 0; g < availableGroups; g++) {
370379
long group = g;
371380
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum() * opCount;
372381
if (dValues.getDouble(g) != expected) {
@@ -391,6 +400,14 @@ private static void checkGroupingBlock(String prefix, String grouping, Block blo
391400
}
392401
}
393402
}
403+
case TOP_N_LONGS -> {
404+
LongBlock groups = (LongBlock) block;
405+
for (int g = 0; g < TOP_N_LIMIT; g++) {
406+
if (groups.getLong(g) != (long) g) {
407+
throw new AssertionError(prefix + "bad group expected [" + g + "] but was [" + groups.getLong(g) + "]");
408+
}
409+
}
410+
}
394411
case INTS -> {
395412
IntBlock groups = (IntBlock) block;
396413
for (int g = 0; g < GROUPS; g++) {
@@ -495,7 +512,7 @@ private static void checkUngrouped(String prefix, String op, String dataType, Pa
495512

496513
private static Page page(BlockFactory blockFactory, String grouping, String blockType) {
497514
Block dataBlock = dataBlock(blockFactory, blockType);
498-
if (grouping.equals("none")) {
515+
if (grouping.equals(NONE)) {
499516
return new Page(dataBlock);
500517
}
501518
List<Block> blocks = groupingBlocks(grouping, blockType);
@@ -564,7 +581,7 @@ private static Block groupingBlock(String grouping, String blockType) {
564581
default -> throw new UnsupportedOperationException("bad grouping [" + grouping + "]");
565582
};
566583
return switch (grouping) {
567-
case LONGS -> {
584+
case TOP_N_LONGS, LONGS -> {
568585
var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
569586
for (int i = 0; i < BLOCK_LENGTH; i++) {
570587
for (int v = 0; v < valuesPerGroup; v++) {

build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/Utils.java

Lines changed: 26 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,9 +24,11 @@
2424
import java.util.HexFormat;
2525
import java.util.Locale;
2626
import java.util.function.Function;
27+
import java.util.jar.Attributes;
2728
import java.util.jar.JarEntry;
2829
import java.util.jar.JarFile;
2930
import java.util.jar.JarOutputStream;
31+
import java.util.jar.Manifest;
3032
import java.util.stream.Collectors;
3133

3234
import static org.objectweb.asm.ClassWriter.COMPUTE_FRAMES;
@@ -60,6 +62,10 @@ public String toString() {
6062
}
6163
}
6264

65+
public static void patchJar(File inputJar, File outputJar, Collection<PatcherInfo> patchers) {
66+
patchJar(inputJar, outputJar, patchers, false);
67+
}
68+
6369
/**
6470
* Patches the classes in the input JAR file, using the collection of patchers. Each patcher specifies a target class (its jar entry
6571
* name) and the SHA256 digest on the class bytes.
@@ -69,8 +75,11 @@ public String toString() {
6975
* @param inputFile the JAR file to patch
7076
* @param outputFile the output (patched) JAR file
7177
* @param patchers list of patcher info (classes to patch (jar entry name + optional SHA256 digest) and ASM visitor to transform them)
78+
* @param unsignJar whether to remove class signatures from the JAR Manifest; set this to true when patching a signed JAR,
79+
* otherwise the patched classes will fail to load at runtime due to mismatched signatures.
80+
* @see <a href="https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html">Understanding Signing and Verification</a>
7281
*/
73-
public static void patchJar(File inputFile, File outputFile, Collection<PatcherInfo> patchers) {
82+
public static void patchJar(File inputFile, File outputFile, Collection<PatcherInfo> patchers, boolean unsignJar) {
7483
var classPatchers = patchers.stream().collect(Collectors.toMap(PatcherInfo::jarEntryName, Function.identity()));
7584
var mismatchedClasses = new ArrayList<MismatchInfo>();
7685
try (JarFile jarFile = new JarFile(inputFile); JarOutputStream jos = new JarOutputStream(new FileOutputStream(outputFile))) {
@@ -101,9 +110,23 @@ public static void patchJar(File inputFile, File outputFile, Collection<PatcherI
101110
);
102111
}
103112
} else {
104-
// Read the entry's data and write it to the new JAR
105113
try (InputStream is = jarFile.getInputStream(entry)) {
106-
is.transferTo(jos);
114+
if (unsignJar && entryName.equals("META-INF/MANIFEST.MF")) {
115+
var manifest = new Manifest(is);
116+
for (var manifestEntry : manifest.getEntries().entrySet()) {
117+
var nonSignatureAttributes = new Attributes();
118+
for (var attribute : manifestEntry.getValue().entrySet()) {
119+
if (attribute.getKey().toString().endsWith("Digest") == false) {
120+
nonSignatureAttributes.put(attribute.getKey(), attribute.getValue());
121+
}
122+
}
123+
manifestEntry.setValue(nonSignatureAttributes);
124+
}
125+
manifest.write(jos);
126+
} else if (unsignJar == false || entryName.matches("META-INF/.*\\.SF") == false) {
127+
// Read the entry's data and write it to the new JAR
128+
is.transferTo(jos);
129+
}
107130
}
108131
}
109132
jos.closeEntry();

build-tools-internal/src/main/resources/changelog-schema.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,7 @@
8686
"Rollup",
8787
"SQL",
8888
"Search",
89+
"Searchable Snapshots",
8990
"Security",
9091
"Snapshot/Restore",
9192
"Stats",

docs/changelog/127148.yaml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
pr: 127148
2+
summary: Skip unused STATS groups by adding a Top N `BlockHash` implementation
3+
area: ES|QL
4+
type: enhancement
5+
issues: []

docs/changelog/128613.yaml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
pr: 128613
2+
summary: Improve support for bytecode patching signed jars
3+
area: Infra/Core
4+
type: enhancement
5+
issues: []

docs/changelog/128910.yaml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
pr: 128910
2+
summary: Fix `FieldAttribute` name usage in `InferNonNullAggConstraint`
3+
area: ES|QL
4+
type: bug
5+
issues: []

docs/reference/query-languages/esql/_snippets/commands/layout/rename.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,16 @@ The `RENAME` processing command renames one or more columns.
88
RENAME old_name1 AS new_name1[, ..., old_nameN AS new_nameN]
99
```
1010

11+
The following syntax is also supported {applies_to}`stack: ga 9.1`:
12+
13+
```esql
14+
RENAME new_name1 = old_name1[, ..., new_nameN = old_nameN]
15+
```
16+
17+
::::{tip}
18+
Both syntax options can be used interchangeably, but we recommend sticking to one for consistency and readability.
19+
::::
20+
1121
**Parameters**
1222

1323
`old_nameX`

docs/reference/query-languages/esql/_snippets/lists/processing-commands.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
* [`CHANGE_POINT`](../../commands/processing-commands.md#esql-change_point)
1+
* [preview] [`CHANGE_POINT`](../../commands/processing-commands.md#esql-change_point)
22
* [`DISSECT`](../../commands/processing-commands.md#esql-dissect)
33
* [`DROP`](../../commands/processing-commands.md#esql-drop)
44
* [`ENRICH`](../../commands/processing-commands.md#esql-enrich)

muted-tests.yml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -517,6 +517,12 @@ tests:
517517
- class: org.elasticsearch.xpack.esql.qa.single_node.GenerativeForkIT
518518
method: test {lookup-join.EnrichLookupStatsBug SYNC}
519519
issue: https://github.com/elastic/elasticsearch/issues/129229
520+
- class: org.elasticsearch.xpack.esql.qa.single_node.GenerativeForkIT
521+
method: test {lookup-join.MultipleBatches*
522+
issue: https://github.com/elastic/elasticsearch/issues/129210
523+
- class: org.elasticsearch.compute.data.sort.LongTopNSetTests
524+
method: testCrankyBreaker
525+
issue: https://github.com/elastic/elasticsearch/issues/129257
520526

521527
# Examples:
522528
#
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
---
2+
setup:
3+
- requires:
4+
cluster_features: ["mapper.search_load_per_shard"]
5+
reason: Shard search load stats were introduced in 9.1
6+
---
7+
"Search load is tracked at shard level":
8+
- do:
9+
indices.create:
10+
index: index
11+
body:
12+
mappings:
13+
properties:
14+
name:
15+
type: text
16+
description:
17+
type: text
18+
price:
19+
type: double
20+
21+
- do:
22+
indices.stats:
23+
index: "index"
24+
level: shards
25+
metric: [ search ]
26+
27+
- match: { _all.total.search.recent_search_load: 0.0 }
28+
- match: { indices.index.total.search.recent_search_load: 0.0 }
29+
- match: { indices.index.shards.0.0.search.recent_search_load: 0.0 }
30+
31+
- do:
32+
index:
33+
index: index
34+
body: { "name": "specialty coffee", "description": "arabica coffee beans", "price": 100 }
35+
36+
- do:
37+
search:
38+
index: index
39+
body:
40+
query:
41+
match: { name: "specialty coffee" }
42+
size: 1
43+
44+
- do:
45+
indices.stats:
46+
index: "index"
47+
level: shards
48+
metric: [ search ]
49+
50+
- gte: { _all.total.search.recent_search_load: 0.0 }
51+
- gte: { indices.index.total.search.recent_search_load: 0.0 }
52+
- gte: { indices.index.shards.0.0.search.recent_search_load: 0.0 }

0 commit comments

Comments
 (0)