
Commit a903309

Merge branch 'main' into s3-failIfAlreadyExists
2 parents: 4c479f4 + 7519de6


1,060 files changed (+34526 additions, -8024 deletions)

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+steps:
+  - label: ":pipeline: Generate steps"
+    command: bash .buildkite/scripts/generate-pr-performance-benchmark.sh | buildkite-agent pipeline upload

.buildkite/pull-requests.json

Lines changed: 14 additions & 0 deletions
@@ -15,6 +15,20 @@
       "trigger_comment_regex": "(run\\W+elasticsearch-ci.+)|(^\\s*((buildkite|@elastic(search)?machine)\\s*)?test\\s+this(\\s+please)?)",
       "cancel_intermediate_builds": true,
       "cancel_intermediate_builds_on_comment": false
+    },
+    {
+      "enabled": true,
+      "pipeline_slug": "elasticsearch-pull-request-performance-benchmark",
+      "allow_org_users": true,
+      "allowed_repo_permissions": [
+        "admin",
+        "write"
+      ],
+      "set_commit_status": false,
+      "build_on_commit": false,
+      "build_on_comment": true,
+      "target_branch": "main",
+      "trigger_comment_regex": "^(buildkite|@elastic(search)?machine) benchmark this with (?<benchmark>\\S+)( please)?$"
     }
   ]
}
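The new job's `trigger_comment_regex` captures the benchmark name in a named group, which the Buildkite PR bot presumably exposes to the generator script as `GITHUB_PR_COMMENT_VAR_BENCHMARK`. A minimal, standalone Java sketch of how such a PR comment would match (hypothetical class and example comment, not part of this commit; the real matching happens in the PR bot):

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hypothetical illustration of the new trigger regex.
public class BenchmarkTriggerRegexExample {
    public static void main(String[] args) {
        // Same pattern as the new "trigger_comment_regex" entry.
        Pattern trigger = Pattern.compile(
            "^(buildkite|@elastic(search)?machine) benchmark this with (?<benchmark>\\S+)( please)?$"
        );

        Matcher m = trigger.matcher("buildkite benchmark this with nyc_taxis please");
        if (m.matches()) {
            // Prints: benchmark = nyc_taxis
            System.out.println("benchmark = " + m.group("benchmark"));
        }
    }
}
```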
Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+set -euo pipefail
+
+env_id_baseline=$(python3 -c 'import uuid; print(uuid.uuid4())')
+env_id_contender=$(python3 -c 'import uuid; print(uuid.uuid4())')
+merge_base=$(git merge-base "${GITHUB_PR_TARGET_BRANCH}" HEAD)
+
+buildkite-agent meta-data set pr_comment:custom-body:body \
+  "This build attempted two ${GITHUB_PR_COMMENT_VAR_BENCHMARK} benchmarks to evaluate performance impact of this PR."
+buildkite-agent meta-data set pr_comment:custom-baseline:head \
+  "* Baseline: ${merge_base} (env ID ${env_id_baseline})"
+buildkite-agent meta-data set pr_comment:custom-contender:head \
+  "* Contender: ${GITHUB_PR_TRIGGERED_SHA} (env ID ${env_id_contender})"
+
+cat << _EOF_
+steps:
+  - label: Trigger baseline benchmark
+    trigger: elasticsearch-performance-esbench-pr
+    build:
+      message: Baseline benchmark for PR${GITHUB_PR_NUMBER}
+      branch: master
+      env:
+        CONFIGURATION_NAME: ${GITHUB_PR_COMMENT_VAR_BENCHMARK}
+        ENV_ID: ${env_id_baseline}
+        REVISION: ${merge_base}
+  - label: Trigger contender benchmark
+    trigger: elasticsearch-performance-esbench-pr
+    build:
+      message: Contender benchmark for PR${GITHUB_PR_NUMBER}
+      branch: master
+      env:
+        CONFIGURATION_NAME: ${GITHUB_PR_COMMENT_VAR_BENCHMARK}
+        ENV_ID: ${env_id_contender}
+        ES_REPO_URL: https://github.com/${GITHUB_PR_OWNER}/${GITHUB_PR_REPO}.git
+        REVISION: ${GITHUB_PR_TRIGGERED_SHA}
+  - wait: ~
+  - label: Modify PR comment
+    command: buildkite-agent meta-data set pr_comment:custom-comparison:head "* [Benchmark results](<https://esbench-metrics.kb.us-east-2.aws.elastic-cloud.com:9243/app/dashboards#/view/d9079962-5866-49ef-b9f5-145f2141cd31?_a=(query:(language:kuery,query:'user-tags.env-id:${env_id_baseline} or user-tags.env-id:${env_id_contender}'))>)"
+_EOF_

.gitignore

Lines changed: 4 additions & 1 deletion
@@ -1,4 +1,7 @@
 
+# claude
+.claude
+
 # intellij files
 .idea/
 *.iml
@@ -69,7 +72,7 @@ testfixtures_shared/
 # Generated
 checkstyle_ide.xml
 x-pack/plugin/esql/src/main/generated-src/generated/
-server/src/main/resources/transport/defined/manifest.txt
+server/src/main/resources/transport/definitions/manifest.txt
 
 # JEnv
 .java-version

BUILDING.md

Lines changed: 22 additions & 3 deletions
@@ -92,7 +92,7 @@ uses the changed dependencies. In most cases, `precommit` or `check` are good ca
 We prefer sha256 checksums as md5 and sha1 are not considered safe anymore these days. The generated entry
 will have the `origin` attribute been set to `Generated by Gradle`.
 
-> [!Tip]
+> [!Tip]
 > A manual confirmation of the Gradle generated checksums is currently not mandatory.
 > If you want to add a level of verification you can manually confirm the checksum (e.g. by looking it up on the website of the library)
 > Please replace the content of the `origin` attribute by `official site` in that case.
@@ -186,6 +186,25 @@ dependencies {
 
 To test an unreleased development version of a third party dependency you have several options.
 
+### How do I test against Java early access (EA) versions?
+
+Currently only OpenJDK EA builds by Oracle are supported.
+To test against an early access Java version you can pass the major
+Java version appended with `-ea` as a system property (e.g. `-Druntime.java=26-ea`) to the Gradle build:
+
+```
+./gradlew clean test -Druntime.java=26-ea
+```
+
+This will run the tests using the JDK 26 EA version and pick the latest available build of the matching JDK EA version we expose
+in our custom JDK catalogue at `https://storage.googleapis.com/elasticsearch-jdk-archive/jdks/openjdk/latest.json`.
+
+To run against a specific build number of the EA build you can pass a second system property (e.g. `-Druntime.java.build=6`):
+
+```
+./gradlew clean test -Druntime.java=26-ea -Druntime.java.build=6
+```
+
 #### How to use a Maven based third party dependency via `mavenlocal`?
 
 1. Clone the third party repository locally
@@ -229,7 +248,7 @@ In addition to snapshot builds JitPack supports building Pull Requests. Simply u
 3. Run the Gradle build as needed. Keep in mind the initial resolution might take a bit longer as this needs to be built
 by JitPack in the background before we can resolve the adhoc built dependency.
 
-> [!Note]
+> [!Note]
 > You should only use that approach locally or on a developer branch for production dependencies as we do
 not want to ship unreleased libraries into our releases.
 
@@ -261,7 +280,7 @@ allprojects {
 ```
 4. Run the Gradle build as needed with `--write-verification-metadata` to ensure the Gradle dependency verification does not fail on your custom dependency.
 
-> [!Note]
+> [!Note]
 > As Gradle prefers to use modules whose descriptor has been created from real meta-data rather than being generated,
 flat directory repositories cannot be used to override artifacts with real meta-data from other repositories declared in the build.
 > For example, if Gradle finds only `jmxri-1.2.1.jar` in a flat directory repository, but `jmxri-1.2.1.pom` in another repository

benchmarks/build.gradle

Lines changed: 1 addition & 0 deletions
@@ -47,6 +47,7 @@ dependencies {
   api(project(':x-pack:plugin:core'))
   api(project(':x-pack:plugin:esql'))
   api(project(':x-pack:plugin:esql:compute'))
+  api(project(':x-pack:plugin:mapper-exponential-histogram'))
   implementation project(path: ':libs:native')
   implementation project(path: ':libs:simdvec')
   implementation project(path: ':libs:exponential-histogram')

benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/QueryPlanningBenchmark.java

Lines changed: 2 additions & 1 deletion
@@ -92,7 +92,8 @@ public void setup() {
         var fields = 10_000;
         var mapping = LinkedHashMap.<String, EsField>newLinkedHashMap(fields);
         for (int i = 0; i < fields; i++) {
-            mapping.put("field" + i, new EsField("field-" + i, TEXT, emptyMap(), true));
+            // We're creating a standard index, so none of these fields should be marked as dimensions.
+            mapping.put("field" + i, new EsField("field-" + i, TEXT, emptyMap(), true, EsField.TimeSeriesFieldType.NONE));
         }
 
         var esIndex = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD));

benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmark.java

Lines changed: 19 additions & 4 deletions
@@ -142,11 +142,26 @@ static void selfTest() {
     private static List<ValuesSourceReaderOperator.FieldInfo> fields(String name) {
         return switch (name) {
             case "3_stored_keywords" -> List.of(
-                new ValuesSourceReaderOperator.FieldInfo("keyword_1", ElementType.BYTES_REF, shardIdx -> blockLoader("stored_keyword_1")),
-                new ValuesSourceReaderOperator.FieldInfo("keyword_2", ElementType.BYTES_REF, shardIdx -> blockLoader("stored_keyword_2")),
-                new ValuesSourceReaderOperator.FieldInfo("keyword_3", ElementType.BYTES_REF, shardIdx -> blockLoader("stored_keyword_3"))
+                new ValuesSourceReaderOperator.FieldInfo(
+                    "keyword_1",
+                    ElementType.BYTES_REF,
+                    false,
+                    shardIdx -> blockLoader("stored_keyword_1")
+                ),
+                new ValuesSourceReaderOperator.FieldInfo(
+                    "keyword_2",
+                    ElementType.BYTES_REF,
+                    false,
+                    shardIdx -> blockLoader("stored_keyword_2")
+                ),
+                new ValuesSourceReaderOperator.FieldInfo(
+                    "keyword_3",
+                    ElementType.BYTES_REF,
+                    false,
+                    shardIdx -> blockLoader("stored_keyword_3")
+                )
             );
-            default -> List.of(new ValuesSourceReaderOperator.FieldInfo(name, elementType(name), shardIdx -> blockLoader(name)));
+            default -> List.of(new ValuesSourceReaderOperator.FieldInfo(name, elementType(name), false, shardIdx -> blockLoader(name)));
         };
     }
 
Lines changed: 142 additions & 0 deletions
@@ -0,0 +1,142 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.benchmark.common.network;
+
+import org.elasticsearch.common.network.InetAddresses;
+import org.elasticsearch.xcontent.Text;
+import org.elasticsearch.xcontent.XContentString;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+@Warmup(iterations = 2)
+@Measurement(iterations = 3)
+@BenchmarkMode(Mode.Throughput)
+@OutputTimeUnit(TimeUnit.SECONDS)
+@State(Scope.Benchmark)
+@Fork(1)
+public class IpAddressesBenchmarks {
+
+    @Param("1000")
+    private int size;
+    private String[] ipV6Addresses;
+    private String[] ipV4Addresses;
+    private XContentString[] ipV6AddressesBytes;
+    private XContentString[] ipV4AddressesBytes;
+
+    @Setup
+    public void setup() throws UnknownHostException {
+        Random random = new Random();
+        ipV6Addresses = new String[size];
+        ipV4Addresses = new String[size];
+        ipV6AddressesBytes = new XContentString[size];
+        ipV4AddressesBytes = new XContentString[size];
+        byte[] ipv6Bytes = new byte[16];
+        byte[] ipv4Bytes = new byte[4];
+        for (int i = 0; i < size; i++) {
+            random.nextBytes(ipv6Bytes);
+            random.nextBytes(ipv4Bytes);
+            String ipv6String = InetAddresses.toAddrString(InetAddress.getByAddress(ipv6Bytes));
+            String ipv4String = InetAddresses.toAddrString(InetAddress.getByAddress(ipv4Bytes));
+            ipV6Addresses[i] = ipv6String;
+            ipV4Addresses[i] = ipv4String;
+            ipV6AddressesBytes[i] = new Text(ipv6String);
+            ipV4AddressesBytes[i] = new Text(ipv4String);
+        }
+    }
+
+    @Benchmark
+    public boolean isInetAddressIpv6() {
+        boolean b = true;
+        for (int i = 0; i < size; i++) {
+            b ^= InetAddresses.isInetAddress(ipV6Addresses[i]);
+        }
+        return b;
+    }
+
+    @Benchmark
+    public boolean isInetAddressIpv4() {
+        boolean b = true;
+        for (int i = 0; i < size; i++) {
+            b ^= InetAddresses.isInetAddress(ipV4Addresses[i]);
+        }
+        return b;
+    }
+
+    @Benchmark
+    public void getIpOrHostIpv6(Blackhole blackhole) {
+        for (int i = 0; i < size; i++) {
+            blackhole.consume(InetAddresses.getIpOrHost(ipV6Addresses[i]));
+        }
+    }
+
+    @Benchmark
+    public void getIpOrHostIpv4(Blackhole blackhole) {
+        for (int i = 0; i < size; i++) {
+            blackhole.consume(InetAddresses.forString(ipV4Addresses[i]));
+        }
+    }
+
+    @Benchmark
+    public void forStringIpv6String(Blackhole blackhole) {
+        for (int i = 0; i < size; i++) {
+            blackhole.consume(InetAddresses.forString(ipV6Addresses[i]));
+        }
+    }
+
+    @Benchmark
+    public void forStringIpv4String(Blackhole blackhole) {
+        for (int i = 0; i < size; i++) {
+            blackhole.consume(InetAddresses.forString(ipV4Addresses[i]));
+        }
+    }
+
+    @Benchmark
+    public void forStringIpv6Bytes(Blackhole blackhole) {
+        for (int i = 0; i < size; i++) {
+            blackhole.consume(InetAddresses.forString(ipV6AddressesBytes[i].bytes()));
+        }
+    }
+
+    @Benchmark
+    public void forStringIpv4Bytes(Blackhole blackhole) {
+        for (int i = 0; i < size; i++) {
+            blackhole.consume(InetAddresses.forString(ipV4AddressesBytes[i].bytes()));
+        }
+    }
+
+    @Benchmark
+    public void encodeAsIpv6WithIpv6(Blackhole blackhole) {
+        for (int i = 0; i < size; i++) {
+            blackhole.consume(InetAddresses.encodeAsIpv6(ipV6AddressesBytes[i]));
+        }
+    }
+
+    @Benchmark
+    public void encodeAsIpv6WithIpv4(Blackhole blackhole) {
+        for (int i = 0; i < size; i++) {
+            blackhole.consume(InetAddresses.encodeAsIpv6(ipV4AddressesBytes[i]));
+        }
+    }
+}
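For completeness, a minimal sketch of invoking this new benchmark programmatically through JMH's standard Runner API; the launcher class below is hypothetical and not part of this commit, and the `size` override merely shortens a local run by narrowing the `@Param("1000")` default:

```java
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

// Hypothetical local launcher for IpAddressesBenchmarks.
public class RunIpAddressesBenchmarks {
    public static void main(String[] args) throws RunnerException {
        Options opts = new OptionsBuilder()
            .include("IpAddressesBenchmarks") // regex over benchmark class/method names
            .param("size", "100")             // override the @Param default for a quicker run
            .build();
        new Runner(opts).run();
    }
}
```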

benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java

Lines changed: 21 additions & 5 deletions
@@ -212,7 +212,7 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) {
                 FieldAttribute timestamp = new FieldAttribute(
                     Source.EMPTY,
                     "timestamp",
-                    new EsField("timestamp", DataType.DATETIME, Map.of(), true)
+                    new EsField("timestamp", DataType.DATETIME, Map.of(), true, EsField.TimeSeriesFieldType.NONE)
                 );
                 yield EvalMapper.toEvaluator(
                     FOLD_CONTEXT,
@@ -321,19 +321,35 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) {
     }
 
     private static FieldAttribute longField() {
-        return new FieldAttribute(Source.EMPTY, "long", new EsField("long", DataType.LONG, Map.of(), true));
+        return new FieldAttribute(
+            Source.EMPTY,
+            "long",
+            new EsField("long", DataType.LONG, Map.of(), true, EsField.TimeSeriesFieldType.NONE)
+        );
     }
 
     private static FieldAttribute doubleField() {
-        return new FieldAttribute(Source.EMPTY, "double", new EsField("double", DataType.DOUBLE, Map.of(), true));
+        return new FieldAttribute(
+            Source.EMPTY,
+            "double",
+            new EsField("double", DataType.DOUBLE, Map.of(), true, EsField.TimeSeriesFieldType.NONE)
+        );
     }
 
     private static FieldAttribute intField() {
-        return new FieldAttribute(Source.EMPTY, "int", new EsField("int", DataType.INTEGER, Map.of(), true));
+        return new FieldAttribute(
+            Source.EMPTY,
+            "int",
+            new EsField("int", DataType.INTEGER, Map.of(), true, EsField.TimeSeriesFieldType.NONE)
+        );
     }
 
     private static FieldAttribute keywordField() {
-        return new FieldAttribute(Source.EMPTY, "keyword", new EsField("keyword", DataType.KEYWORD, Map.of(), true));
+        return new FieldAttribute(
+            Source.EMPTY,
+            "keyword",
+            new EsField("keyword", DataType.KEYWORD, Map.of(), true, EsField.TimeSeriesFieldType.NONE)
+        );
     }
 
     private static Configuration configuration() {
