Commit dd9e6a4

Merge branch 'main' into prevent-field-caps-from-failing-due-to-can-match-failure
2 parents 6a09d8e + 3066619 commit dd9e6a4

349 files changed: +11117 additions, -3018 deletions

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
steps:
  - command: .buildkite/scripts/trigger-if-java-ea-new-build.sh
    env:
      RECENT_TIME_WINDOW: "24" # time window in hours to consider a build as new
    agents:
      image: "docker.elastic.co/ci-agent-images/eck-region/buildkite-agent:1.5"
      memory: "4G"
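
Note: the RECENT_TIME_WINDOW value set here is read by the trigger script added below, which falls back to 24 when the variable is unset. A minimal sketch of exercising the script locally with a wider window (assuming it is run from the repository root and buildkite-agent is on the PATH):

# Hypothetical local invocation using a 48-hour window instead of the pipeline default
RECENT_TIME_WINDOW=48 .buildkite/scripts/trigger-if-java-ea-new-build.sh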
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
steps: []
Lines changed: 95 additions & 0 deletions
@@ -0,0 +1,95 @@
#!/bin/bash
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the "Elastic License
# 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
# Public License v 1"; you may not use this file except in compliance with, at
# your election, the "Elastic License 2.0", the "GNU Affero General Public
# License v3.0 only", or the "Server Side Public License, v 1".
#

# Allow overriding the time window (in hours) to check for new builds, defaults to 24
RECENT_TIME_WINDOW=${RECENT_TIME_WINDOW:-24}

# Extract the current JDK major version from bundled_jdk in version.properties
CURRENT_JDK=$(grep "^bundled_jdk =" build-tools-internal/version.properties | cut -d'=' -f2 | tr -d ' ' | cut -d'.' -f1)
TARGET_JDK=$((CURRENT_JDK + 1))

echo "Current JDK major version: $CURRENT_JDK"
echo "Target JDK major version: $TARGET_JDK"

# Query the Elasticsearch JDK archive for available JDKs
JDK_ARCHIVE_URL="https://builds.es-jdk-archive.com/jdks/openjdk/recent.json"
echo "Querying JDK archive: $JDK_ARCHIVE_URL"

# Fetch JDK info and filter for the target major version
JDK_DATA=$(curl -s "$JDK_ARCHIVE_URL")

if [[ -z "$JDK_DATA" ]]; then
  echo "Failed to fetch JDK data from archive"
  exit 1
fi

# Find the latest build for the target JDK version
LATEST_BUILD=$(echo "$JDK_DATA" | jq -r --arg target "$TARGET_JDK" '
  .majors[$target].builds |
  sort_by(.archived_at) |
  last'
)

if [[ "$LATEST_BUILD" == "null" || -z "$LATEST_BUILD" ]]; then
  echo "No builds found for JDK $TARGET_JDK"
  exit 1
fi

# Extract the timestamp and JDK identifier
TIMESTAMP=$(echo "$LATEST_BUILD" | jq -r '.archived_at')
JDK_IDENTIFIER=$(echo "$LATEST_BUILD" | jq -r '.id')

echo "Latest JDK ${TARGET_JDK} build from ES archive:"
echo "  Timestamp: $TIMESTAMP"
echo "  JDK Identifier: $JDK_IDENTIFIER"

# Set variables for use in the pipeline trigger
jdkbuild="$JDK_IDENTIFIER"
jdk_timestamp="$TIMESTAMP"

# Check whether the timestamp is within the last RECENT_TIME_WINDOW hours
CURRENT_TIME=$(date +%s)
BUILD_TIME=$(date -d "$TIMESTAMP" +%s 2>/dev/null || date -j -f "%Y-%m-%dT%H:%M:%S" "${TIMESTAMP%Z}" +%s 2>/dev/null || echo "0")

if [[ "$BUILD_TIME" == "0" ]]; then
  echo "Failed to parse timestamp: $TIMESTAMP"
  SHOULD_TRIGGER="false"
else
  TIME_DIFF=$((CURRENT_TIME - BUILD_TIME))
  TIME_WINDOW=$((RECENT_TIME_WINDOW * 60 * 60))

  if [[ $TIME_DIFF -lt $TIME_WINDOW ]]; then
    echo "Build is recent (less than ${RECENT_TIME_WINDOW}h old)"
    SHOULD_TRIGGER="true"
  else
    echo "Build is older than ${RECENT_TIME_WINDOW} hours"
    SHOULD_TRIGGER="false"
  fi
fi

echo "SHOULD_TRIGGER: $SHOULD_TRIGGER"

if [[ "$SHOULD_TRIGGER" == "true" ]]; then
  EFFECTIVE_START_DATE=$(date -u -d "@$BUILD_TIME" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || date -u -r "$BUILD_TIME" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || echo "")
  echo "Triggering performance-esbench-jdk for new jdk build $JDK_IDENTIFIER"
  cat << EOF | buildkite-agent pipeline upload
steps:
  - trigger: elasticsearch-performance-esbench-jdk
    label: Triggering performance-esbench-jdk for new jdk build $JDK_IDENTIFIER
    async: true
    build:
      branch: "$BUILDKITE_BRANCH"
      env:
        EFFECTIVE_START_DATE: "$EFFECTIVE_START_DATE"
        EXECUTION_MODE: "start-run"
EOF
fi
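
For context, the jq selection in this script assumes recent.json groups builds under .majors keyed by major version, each holding a builds array of objects with id and archived_at fields. A small sketch under that assumption (the ids and timestamps below are made up) that reproduces the selection logic locally:

# Assumed (hypothetical) shape of recent.json; selects the most recently archived build
echo '{"majors":{"25":{"builds":[
  {"id":"openjdk-25+35","archived_at":"2025-09-09T03:00:00Z"},
  {"id":"openjdk-25+36","archived_at":"2025-09-10T03:21:00Z"}]}}}' |
  jq -r --arg target "25" '.majors[$target].builds | sort_by(.archived_at) | last'
# prints the openjdk-25+36 entry, the newest by archived_at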
Lines changed: 77 additions & 0 deletions
@@ -0,0 +1,77 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.benchmark.vector;

import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.index.codec.vectors.cluster.NeighborHood;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;

import java.io.IOException;
import java.util.Random;
import java.util.concurrent.TimeUnit;

@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.SECONDS)
@State(Scope.Benchmark)
// first iteration is complete garbage, so make sure we really warmup
@Warmup(iterations = 1, time = 1)
// real iterations. not useful to spend tons of time here, better to fork more
@Measurement(iterations = 3, time = 1)
// engage some noise reduction
@Fork(value = 1)
public class ComputeNeighboursBenchmark {

    static {
        LogConfigurator.configureESLogging(); // native access requires logging to be initialized
    }

    @Param({ "1000", "2000", "3000", "5000", "10000", "20000", "50000" })
    int numVectors;

    @Param({ "384", "782", "1024" })
    int dims;

    float[][] vectors;
    int clusterPerNeighbour = 128;

    @Setup
    public void setup() throws IOException {
        Random random = new Random(123);
        vectors = new float[numVectors][dims];
        for (float[] vector : vectors) {
            for (int i = 0; i < dims; i++) {
                vector[i] = random.nextFloat();
            }
        }
    }

    @Benchmark
    @Fork(jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" })
    public void bruteForce(Blackhole bh) {
        bh.consume(NeighborHood.computeNeighborhoodsBruteForce(vectors, clusterPerNeighbour));
    }

    @Benchmark
    @Fork(jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" })
    public void graph(Blackhole bh) throws IOException {
        bh.consume(NeighborHood.computeNeighborhoodsGraph(vectors, clusterPerNeighbour));
    }
}
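
If you want to try the new benchmark locally, Elasticsearch's JMH benchmarks are normally launched through the benchmarks Gradle project; the exact task and flags may differ, but the usual pattern looks roughly like this (the parameter values are only an example):

# Sketch of a local run; the class name is matched as a JMH regex, -p narrows the @Param values
./gradlew -p benchmarks run --args 'ComputeNeighboursBenchmark -p numVectors=1000 -p dims=384'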

build-tools-internal/version.properties

Lines changed: 3 additions & 1 deletion
@@ -14,7 +14,7 @@ log4j = 2.19.0
  slf4j = 2.0.6
  ecsLogging = 1.2.0
  jna = 5.12.1
- netty = 4.1.118.Final
+ netty = 4.1.126.Final
  commons_lang3 = 3.9
  google_oauth_client = 1.34.1
  awsv2sdk = 2.31.78
@@ -29,6 +29,8 @@ opensaml = 4.3.0
  # client dependencies
  httpclient = 4.5.14
  httpcore = 4.4.16
+ httpclient5 = 5.5
+ httpcore5 = 5.3.5
  httpasyncclient = 4.1.5
  commonslogging = 1.2
  commonscodec = 1.15

catalog-info.yaml

Lines changed: 39 additions & 1 deletion
@@ -138,7 +138,45 @@ spec:
            cronline: "0 4 * * * America/New_York"
            message: "Run java EA tests 1x per day"
  ---
-
+ # yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
+ apiVersion: backstage.io/v1alpha1
+ kind: Resource
+ metadata:
+   name: buildkite-pipeline-elasticsearch-java-ea-check-new-build
+   description: Check for new jdk ea build and trigger downstream jobs
+   links:
+     - title: Pipeline
+       url: https://buildkite.com/elastic/elasticsearch-java-ea-check-new-build
+ spec:
+   type: buildkite-pipeline
+   system: buildkite
+   owner: group:elasticsearch-team
+   implementation:
+     apiVersion: buildkite.elastic.dev/v1
+     kind: Pipeline
+     metadata:
+       description: ":java: Check for new pre release jdk build and trigger downstream jobs"
+       name: elasticsearch / java-ea / check-new-build
+     spec:
+       repository: elastic/elasticsearch
+       pipeline_file: .buildkite/pipelines/java-ea-check-new-build.yml
+       branch_configuration: main
+       teams:
+         elasticsearch-team: {}
+         ml-core: {}
+         everyone:
+           access_level: BUILD_AND_READ
+       provider_settings:
+         build_branches: false
+         build_pull_requests: false
+         publish_commit_status: false
+         trigger_mode: none
+       schedules:
+         Periodically on main:
+           branch: main
+           cronline: "0 6 * * * UTC"
+           message: "Check for new java pre release build 1x per day"
+ ---
  # yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
  apiVersion: backstage.io/v1alpha1
  kind: Resource

docs/changelog/132003.yaml

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
pr: 132003
summary: Add `copy_from` option to the Append processor
area: Ingest Node
type: enhancement
issues: []

docs/changelog/133546.yaml

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
pr: 133546
summary: "Support geohash, geotile and geohex grid types in ST_INTERSECTS and ST_DISJOINT"
area: "ES|QL"
type: enhancement
issues: []

docs/changelog/133599.yaml

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
pr: 133599
summary: Support Gemini thinking budget in inference API
area: Machine Learning
type: enhancement
issues: []

docs/changelog/133954.yaml

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
pr: 133954
summary: "ILM: Force merge on zero-replica cloned index before snapshotting for searchable snapshots"
area: ILM+SLM
type: enhancement
issues:
  - 75478
