Skip to content

Commit 3d461f0

Browse files
Merge remote-tracking branch 'origin/main' into pkar/resolve-index-force-reconn
2 parents 7e747f0 + 3e5eed8 commit 3d461f0

File tree

284 files changed

+12538
-3766
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

284 files changed

+12538
-3766
lines changed

.buildkite/pipelines/periodic-java-ea.template.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
env:
22
JAVA_EA_VERSION: "${JAVA_EA_VERSION:-26-pre}"
3+
EXTRA_GRADLE_ARGS: "--continue"
34

45
steps:
56
- group: bwc

.buildkite/pipelines/periodic-java-ea.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# This file is auto-generated. See .buildkite/pipelines/periodic-java-ea.template.yml
22
env:
33
JAVA_EA_VERSION: "${JAVA_EA_VERSION:-26-pre}"
4+
EXTRA_GRADLE_ARGS: "--continue"
45

56
steps:
67
- group: bwc
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
notify:
2+
- slack:
3+
channels:
4+
- "#es-delivery-alerts"
5+
if: (build.branch == 'main' || build.branch =~ /^[0-9]+\.[0-9x]+\$/) && (build.state == 'passed' || build.state == 'failed')
6+
- slack:
7+
channels:
8+
- "#es-delivery-alerts"
9+
message: |
10+
🚦 Pipeline waiting for approval 🚦
11+
Repo: `${REPO}`
12+
13+
Ready to fetch DRA artifacts - please unblock when ready.
14+
New version: `${NEW_VERSION}`
15+
Branch: `${BRANCH}`
16+
Workflow: `${WORKFLOW}`
17+
${BUILDKITE_BUILD_URL}
18+
if: build.state == "blocked"
19+
20+
steps:
21+
# TODO: replace this block step by real version bump logic
22+
- block: "Ready to fetch DRA artifacts?"
23+
prompt: |
24+
Unblock when your team is ready to proceed.
25+
26+
Trigger parameters:
27+
- NEW_VERSION: ${NEW_VERSION}
28+
- BRANCH: ${BRANCH}
29+
- WORKFLOW: ${WORKFLOW}
30+
key: block-get-dra-artifacts
31+
blocked_state: running
32+
33+
- label: "Fetch DRA Artifacts"
34+
key: fetch-dra-artifacts
35+
depends_on: block-get-dra-artifacts
36+
agents:
37+
image: docker.elastic.co/release-eng/wolfi-build-essential-release-eng:latest
38+
cpu: 250m
39+
memory: 512Mi
40+
ephemeralStorage: 1Gi
41+
command:
42+
- echo "Starting DRA artifacts retrieval..."
43+
timeout_in_minutes: 240
44+
retry:
45+
automatic:
46+
- exit_status: "*"
47+
limit: 2
48+
manual:
49+
permit_on_passed: true
50+
51+
plugins:
52+
- elastic/json-watcher#v1.0.0:
53+
url: "https://artifacts-staging.elastic.co/elasticsearch/latest/${BRANCH}.json"
54+
field: ".version"
55+
expected_value: "${NEW_VERSION}"
56+
polling_interval: "30"
57+
- elastic/json-watcher#v1.0.0:
58+
url: "https://storage.googleapis.com/elastic-artifacts-snapshot/elasticsearch/latest/${BRANCH}.json"
59+
field: ".version"
60+
expected_value: "${NEW_VERSION}-SNAPSHOT"
61+
polling_interval: "30"

.buildkite/scripts/promql-compliance-bench.sh

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ readonly PIPELINE_FILE="${PIPELINE_FILE:-/tmp/${JOB_NAME}-pipeline.yml}"
1313
readonly REPO="${BENCH_REPO:-elastic/grafana-dashboards-analysis}"
1414
readonly REPO_DIR="${REPO_DIR:-/tmp/${JOB_NAME}-repo}"
1515
readonly INPUT_SUBDIR="${INPUT_SUBDIR:-results}"
16+
readonly INPUT_GLOB="${INPUT_GLOB:-*_raw_queries_simple.csv}"
1617

1718
readonly GRADLE_TASK="${GRADLE_TASK:-:x-pack:plugin:esql:analyzePromqlQueries}"
1819

@@ -69,9 +70,12 @@ find_input_files() {
6970

7071
[[ -d "${root}" ]] || die "input directory does not exist: ${root}"
7172

72-
mapfile -d '' -t INPUT_FILES < <(find "${root}" -type f -name '*.csv' -print0 | sort -z)
73+
# The analyzer expects one query per line in the form "<dashboardId>;<query>".
74+
# The dashboards-analysis repo exports that shape as *_raw_queries_simple.csv;
75+
# the other CSVs are summaries and spreadsheet-oriented reports.
76+
mapfile -d '' -t INPUT_FILES < <(find "${root}" -type f -name "${INPUT_GLOB}" -print0 | sort -z)
7377

74-
(( ${#INPUT_FILES[@]} > 0 )) || die "no CSV files found in ${root}"
78+
(( ${#INPUT_FILES[@]} > 0 )) || die "no CSV files matching ${INPUT_GLOB} found in ${root}"
7579

7680
log "found ${#INPUT_FILES[@]} input files"
7781
}

.ci/scripts/run-gradle.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,4 +49,4 @@ if [[ -n "${TESTS_SEED:-}" ]]; then
4949
echo "Using test seed: $TESTS_SEED"
5050
fi
5151

52-
$GRADLEW -S --max-workers=$MAX_WORKERS $TESTS_SEED_PARAM "$@"
52+
$GRADLEW -S --max-workers=$MAX_WORKERS $TESTS_SEED_PARAM ${EXTRA_GRADLE_ARGS:-} "$@"

benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/QueryPlanningBenchmark.java

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
import org.elasticsearch.xpack.esql.inference.InferenceSettings;
3131
import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext;
3232
import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
33+
import org.elasticsearch.xpack.esql.parser.EsqlConfig;
3334
import org.elasticsearch.xpack.esql.parser.EsqlParser;
3435
import org.elasticsearch.xpack.esql.parser.QueryParams;
3536
import org.elasticsearch.xpack.esql.plan.IndexPattern;
@@ -77,6 +78,7 @@ public class QueryPlanningBenchmark {
7778
private Analyzer manyFieldsAnalyzer;
7879
private LogicalPlanOptimizer defaultOptimizer;
7980
private Configuration config;
81+
private EsqlParser parser;
8082

8183
@Setup
8284
public void setup() {
@@ -111,6 +113,7 @@ public void setup() {
111113
var esIndex = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD), Map.of(), Map.of(), Set.of());
112114

113115
var functionRegistry = new EsqlFunctionRegistry();
116+
parser = new EsqlParser(new EsqlConfig(functionRegistry));
114117

115118
// Assume all nodes are on the current version for the benchmark.
116119
TransportVersion minimumVersion = TransportVersion.current();
@@ -141,6 +144,6 @@ private LogicalPlan plan(EsqlParser parser, Analyzer analyzer, LogicalPlanOptimi
141144

142145
@Benchmark
143146
public void manyFields(Blackhole blackhole) {
144-
blackhole.consume(plan(EsqlParser.INSTANCE, manyFieldsAnalyzer, defaultOptimizer, "FROM test | LIMIT 10"));
147+
blackhole.consume(plan(parser, manyFieldsAnalyzer, defaultOptimizer, "FROM test | LIMIT 10"));
145148
}
146149
}

benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ParallelParsingBenchmark.java

Lines changed: 6 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
import org.elasticsearch.xpack.esql.core.type.EsField;
2424
import org.elasticsearch.xpack.esql.datasources.CloseableIterator;
2525
import org.elasticsearch.xpack.esql.datasources.ParallelParsingCoordinator;
26+
import org.elasticsearch.xpack.esql.datasources.spi.FormatReadContext;
2627
import org.elasticsearch.xpack.esql.datasources.spi.SegmentableFormatReader;
2728
import org.elasticsearch.xpack.esql.datasources.spi.SourceMetadata;
2829
import org.elasticsearch.xpack.esql.datasources.spi.StorageObject;
@@ -120,8 +121,7 @@ public void parallelParse(Blackhole bh) throws Exception {
120121
List.of("line"),
121122
1000,
122123
parallelism,
123-
executor,
124-
SCHEMA
124+
executor
125125
)
126126
) {
127127
while (iter.hasNext()) {
@@ -161,30 +161,20 @@ public SourceMetadata metadata(StorageObject object) {
161161
}
162162

163163
@Override
164-
public CloseableIterator<Page> read(StorageObject object, List<String> projectedColumns, int batchSize) throws IOException {
165-
return readSplit(object, projectedColumns, batchSize, false, true, SCHEMA);
166-
}
167-
168-
@Override
169-
public CloseableIterator<Page> readSplit(
170-
StorageObject object,
171-
List<String> projectedColumns,
172-
int batchSize,
173-
boolean skipFirstLine,
174-
boolean lastSplit,
175-
List<Attribute> resolvedAttributes
176-
) throws IOException {
164+
public CloseableIterator<Page> read(StorageObject object, FormatReadContext context) throws IOException {
177165
final byte[] data;
178166
try (InputStream stream = object.newStream()) {
179167
data = stream.readAllBytes();
180168
}
169+
final int batchSize = context.batchSize();
170+
final boolean skipFirstLine = context.firstSplit() == false;
181171

182172
return new CloseableIterator<>() {
183173
private int pos = 0;
184174
private Page nextPage = null;
185175

186176
{
187-
// Skip first line if needed
177+
// Skip first line if needed (when not first split)
188178
if (skipFirstLine) {
189179
while (pos < data.length && data[pos] != '\n') {
190180
pos++;

benchmarks/src/main/java/org/elasticsearch/benchmark/esql/CsvErrorPolicyBenchmark.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import org.elasticsearch.xpack.esql.datasource.csv.CsvFormatReader;
1616
import org.elasticsearch.xpack.esql.datasources.CloseableIterator;
1717
import org.elasticsearch.xpack.esql.datasources.spi.ErrorPolicy;
18+
import org.elasticsearch.xpack.esql.datasources.spi.FormatReadContext;
1819
import org.elasticsearch.xpack.esql.datasources.spi.StorageObject;
1920
import org.elasticsearch.xpack.esql.datasources.spi.StoragePath;
2021
import org.openjdk.jmh.annotations.Benchmark;
@@ -100,7 +101,7 @@ private int readAll(ErrorPolicy policy) throws IOException {
100101
CsvFormatReader reader = new CsvFormatReader(blockFactory);
101102
StorageObject obj = createStorageObject(csvData);
102103
int totalRows = 0;
103-
try (CloseableIterator<Page> iter = reader.read(obj, null, 1000, policy)) {
104+
try (CloseableIterator<Page> iter = reader.read(obj, FormatReadContext.builder().batchSize(1000).errorPolicy(policy).build())) {
104105
while (iter.hasNext()) {
105106
totalRows += iter.next().getPositionCount();
106107
}

catalog-info.yaml

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -417,3 +417,36 @@ spec:
417417
provider_settings:
418418
build_pull_requests: true
419419
trigger_mode: none
420+
---
421+
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
422+
apiVersion: backstage.io/v1alpha1
423+
kind: Resource
424+
metadata:
425+
name: buildkite-pipeline-elasticsearch-version-bump
426+
description: Buildkite Pipeline for elasticsearch version bump
427+
links:
428+
- title: Pipeline
429+
url: https://buildkite.com/elastic/elasticsearch-version-bump
430+
431+
spec:
432+
type: buildkite-pipeline
433+
owner: group:elasticsearch-team
434+
system: buildkite
435+
implementation:
436+
apiVersion: buildkite.elastic.dev/v1
437+
kind: Pipeline
438+
metadata:
439+
description: Buildkite Pipeline for elasticsearch version bump
440+
name: elasticsearch-version-bump
441+
spec:
442+
pipeline_file: ".buildkite/pipelines/version-bump-pipeline.yml"
443+
provider_settings:
444+
trigger_mode: none
445+
repository: elastic/elasticsearch
446+
teams:
447+
elasticsearch-team: {}
448+
ml-core: {}
449+
everyone:
450+
access_level: READ_ONLY
451+
release-eng:
452+
access_level: BUILD_AND_READ

docs/changelog/143381.yaml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
area: Downsampling
2+
issues:
3+
- 136178
4+
pr: 143381
5+
summary: Aggregate counter downsampling preserves resets
6+
type: enhancement

0 commit comments

Comments
 (0)