Skip to content

Commit 18f4503

Browse files
committed
Merge remote-tracking branch 'es/main' into ds_rollover_tsdb_to_logsdb
2 parents 078968b + 7ad2369 commit 18f4503

File tree

34 files changed

+540
-264
lines changed

34 files changed

+540
-264
lines changed

.buildkite/scripts/dra-workflow.sh

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,7 @@ echo --- Building release artifacts
7070
$VERSION_QUALIFIER_ARG \
7171
buildReleaseArtifacts \
7272
exportCompressedDockerImages \
73+
exportDockerContexts \
7374
:distribution:generateDependenciesReport
7475

7576
PATH="$PATH:${JAVA_HOME}/bin" # Required by the following script

benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,10 @@
1414
import org.elasticsearch.cluster.ClusterState;
1515
import org.elasticsearch.cluster.metadata.IndexMetadata;
1616
import org.elasticsearch.cluster.metadata.Metadata;
17+
import org.elasticsearch.cluster.metadata.ProjectId;
18+
import org.elasticsearch.cluster.metadata.ProjectMetadata;
1719
import org.elasticsearch.cluster.node.DiscoveryNodes;
20+
import org.elasticsearch.cluster.routing.GlobalRoutingTable;
1821
import org.elasticsearch.cluster.routing.RoutingTable;
1922
import org.elasticsearch.cluster.routing.ShardRouting;
2023
import org.elasticsearch.cluster.routing.allocation.AllocationService;
@@ -126,19 +129,20 @@ public void setUp() throws Exception {
126129
Settings.builder().put("cluster.routing.allocation.awareness.attributes", "tag").build()
127130
);
128131

129-
Metadata.Builder mb = Metadata.builder();
132+
final ProjectId projectId = ProjectId.DEFAULT;
133+
ProjectMetadata.Builder pmb = ProjectMetadata.builder(projectId);
130134
for (int i = 1; i <= numIndices; i++) {
131-
mb.put(
135+
pmb.put(
132136
IndexMetadata.builder("test_" + i)
133137
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
134138
.numberOfShards(numShards)
135139
.numberOfReplicas(numReplicas)
136140
);
137141
}
138-
Metadata metadata = mb.build();
142+
Metadata metadata = Metadata.builder().put(pmb).build();
139143
RoutingTable.Builder rb = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY);
140144
for (int i = 1; i <= numIndices; i++) {
141-
rb.addAsNew(metadata.getProject().index("test_" + i));
145+
rb.addAsNew(metadata.getProject(projectId).index("test_" + i));
142146
}
143147
RoutingTable routingTable = rb.build();
144148
DiscoveryNodes.Builder nb = DiscoveryNodes.builder();
@@ -151,7 +155,7 @@ public void setUp() throws Exception {
151155
}
152156
initialClusterState = ClusterState.builder(ClusterName.DEFAULT)
153157
.metadata(metadata)
154-
.routingTable(routingTable)
158+
.routingTable(GlobalRoutingTable.builder().put(projectId, routingTable).build())
155159
.nodes(nb)
156160
.nodeIdsToCompatibilityVersions(compatibilityVersions)
157161
.build();

distribution/docker/build.gradle

Lines changed: 22 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,7 @@ dependencies {
132132
fips "org.bouncycastle:bctls-fips:1.0.19"
133133
}
134134

135-
ext.expansions = { Architecture architecture, DockerBase base ->
135+
ext.expansions = { Architecture architecture, DockerBase base, String publicationContext = '' ->
136136
def (major, minor) = VersionProperties.elasticsearch.split("\\.")
137137

138138
// We tag our Docker images with various pieces of information, including a timestamp
@@ -152,6 +152,7 @@ ext.expansions = { Architecture architecture, DockerBase base ->
152152
'license' : base == DockerBase.IRON_BANK ? 'Elastic License 2.0' : 'Elastic-License-2.0',
153153
'package_manager' : base.packageManager,
154154
'docker_base' : base.name().toLowerCase(),
155+
'docker_context' : publicationContext,
155156
'version' : VersionProperties.elasticsearch,
156157
'major_minor_version': "${major}.${minor}",
157158
'retry' : ShellRetry
@@ -179,9 +180,9 @@ private static String taskName(String prefix, Architecture architecture, DockerB
179180
suffix
180181
}
181182

182-
ext.dockerBuildContext = { Architecture architecture, DockerBase base ->
183+
ext.dockerBuildContext = { Architecture architecture, DockerBase base, String publicationContext = '' ->
183184
copySpec {
184-
final Map<String, String> varExpansions = expansions(architecture, base)
185+
final Map<String, String> varExpansions = expansions(architecture, base, publicationContext)
185186
final Path projectDir = project.projectDir.toPath()
186187

187188
if (base == DockerBase.IRON_BANK) {
@@ -291,17 +292,22 @@ tasks.named("composeUp").configure {
291292
dependsOn tasks.named("preProcessFixture")
292293
}
293294

294-
void addBuildDockerContextTask(Architecture architecture, DockerBase base) {
295+
296+
def exportDockerImages = tasks.register("exportDockerImages")
297+
def exportCompressedDockerImages = tasks.register("exportCompressedDockerImages")
298+
def exportDockerContexts = tasks.register("exportDockerContexts")
299+
300+
void addBuildDockerContextTask(Architecture architecture, DockerBase base, String taskSuffix = 'DockerContext', String classifier = "docker-build-context") {
295301
String configDirectory = base == DockerBase.IRON_BANK ? 'scripts' : 'config'
296302
String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
297303

298304
final TaskProvider<Tar> buildDockerContextTask =
299-
tasks.register(taskName('build', architecture, base, 'DockerContext'), Tar) {
305+
tasks.register(taskName('build', architecture, base, taskSuffix), Tar) {
300306
archiveExtension = 'tar.gz'
301307
compression = Compression.GZIP
302-
archiveClassifier = "docker-build-context${arch}"
308+
archiveClassifier = "${classifier}${arch}"
303309
archiveBaseName = "elasticsearch${base.suffix}"
304-
with dockerBuildContext(architecture, base)
310+
with dockerBuildContext(architecture, base, classifier)
305311

306312
into(configDirectory) {
307313
from(configurations.log4jConfig) {
@@ -344,6 +350,10 @@ void addBuildDockerContextTask(Architecture architecture, DockerBase base) {
344350
onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) }
345351
}
346352

353+
exportDockerContexts.configure {
354+
dependsOn buildDockerContextTask
355+
}
356+
347357
if (base == DockerBase.IRON_BANK) {
348358
tasks.named("assemble").configure {
349359
dependsOn(buildDockerContextTask)
@@ -578,12 +588,14 @@ for (final Architecture architecture : Architecture.values()) {
578588
addTransformDockerContextTask(architecture, base)
579589
addBuildDockerImageTask(architecture, base)
580590
}
591+
if(base == DockerBase.DEFAULT) {
592+
// Add additional docker hub specific context which we use solely for publishing to docker hub.
593+
// At the moment it only differs in not adding labels that we need for openshift certification
594+
addBuildDockerContextTask(architecture, base, 'DockerHubContext', "docker-hub-build-context")
595+
}
581596
}
582597
}
583598

584-
def exportDockerImages = tasks.register("exportDockerImages")
585-
def exportCompressedDockerImages = tasks.register("exportCompressedDockerImages")
586-
587599
/*
588600
* The export subprojects write out the generated Docker images to disk, so
589601
* that they can be easily reloaded, for example into a VM for distribution testing

distribution/docker/src/docker/Dockerfile.default

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -139,13 +139,15 @@ LABEL org.label-schema.build-date="${build_date}" \\
139139
org.opencontainers.image.vendor="Elastic" \\
140140
org.opencontainers.image.version="${version}"
141141

142+
<% if (docker_context != 'docker-hub-build-context') { %>
142143
LABEL name="Elasticsearch" \\
143144
maintainer="[email protected]" \\
144145
vendor="Elastic" \\
145146
version="${version}" \\
146147
release="1" \\
147148
summary="Elasticsearch" \\
148149
description="You know, for search."
150+
<% } %>
149151

150152
RUN mkdir /licenses && ln LICENSE.txt /licenses/LICENSE
151153

docs/changelog/126637.yaml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
pr: 126637
2+
summary: Improve resiliency of `UpdateTimeSeriesRangeService`
3+
area: TSDB
4+
type: bug
5+
issues: []

docs/reference/query-languages/esql/functions-operators/grouping-functions.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,10 @@ The [`STATS`](/reference/query-languages/esql/commands/processing-commands.md#es
1616
:::{include} ../_snippets/functions/layout/bucket.md
1717
:::
1818

19+
:::{note}
20+
The `CATEGORIZE` function requires a [platinum license](https://www.elastic.co/subscriptions).
21+
:::
22+
1923
:::{include} ../_snippets/functions/layout/categorize.md
2024
:::
2125

docs/release-notes/breaking-changes.md

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,12 @@ Aggregations:
2020
Allocation:
2121
* Increase minimum threshold in shard balancer [#115831](https://github.com/elastic/elasticsearch/pull/115831)
2222
* Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting [#114207](https://github.com/elastic/elasticsearch/pull/114207)
23-
* Remove cluster state from `/_cluster/reroute` response [#114231](https://github.com/elastic/elasticsearch/pull/114231) (issue: {es-issue}88978[#88978])
23+
* Remove cluster state from `/_cluster/reroute` response [#114231](https://github.com/elastic/elasticsearch/pull/114231) (issue: https://github.com/elastic/elasticsearch/issues/88978[#88978])
2424

2525
Analysis:
2626
* Snowball stemmers have been upgraded [#114146](https://github.com/elastic/elasticsearch/pull/114146)
2727
* The 'german2' stemmer is now an alias for the 'german' snowball stemmer [#113614](https://github.com/elastic/elasticsearch/pull/113614)
28-
* The 'persian' analyzer has stemmer by default [#113482](https://github.com/elastic/elasticsearch/pull/113482) (issue: {es-issue}113050[#113050])
28+
* The 'persian' analyzer has stemmer by default [#113482](https://github.com/elastic/elasticsearch/pull/113482) (issue: https://github.com/elastic/elasticsearch/issues/113050[#113050])
2929
* The Korean dictionary for Nori has been updated [#114124](https://github.com/elastic/elasticsearch/pull/114124)
3030

3131
Authentication:
@@ -56,32 +56,32 @@ Indices APIs:
5656
Infra/Core:
5757
* Change Elasticsearch timeouts to 429 response instead of 5xx [#116026](https://github.com/elastic/elasticsearch/pull/116026)
5858
* Limit `ByteSizeUnit` to 2 decimals [#120142](https://github.com/elastic/elasticsearch/pull/120142)
59-
* Remove `client.type` setting [#118192](https://github.com/elastic/elasticsearch/pull/118192) (issue: {es-issue}104574[#104574])
59+
* Remove `client.type` setting [#118192](https://github.com/elastic/elasticsearch/pull/118192) (issue: https://github.com/elastic/elasticsearch/issues/104574[#104574])
6060
* Remove any references to org.elasticsearch.core.RestApiVersion#V_7 [#118103](https://github.com/elastic/elasticsearch/pull/118103)
6161

6262
Infra/Logging:
63-
* Change `deprecation.elasticsearch` keyword to `elasticsearch.deprecation` [#117933](https://github.com/elastic/elasticsearch/pull/117933) (issue: {es-issue}83251[#83251])
64-
* Rename deprecation index template [#125606](https://github.com/elastic/elasticsearch/pull/125606) (issue: {es-issue}125445[#125445])
63+
* Change `deprecation.elasticsearch` keyword to `elasticsearch.deprecation` [#117933](https://github.com/elastic/elasticsearch/pull/117933) (issue: https://github.com/elastic/elasticsearch/issues/83251[#83251])
64+
* Rename deprecation index template [#125606](https://github.com/elastic/elasticsearch/pull/125606) (issue: https://github.com/elastic/elasticsearch/issues/125445[#125445])
6565

6666
Infra/Metrics:
6767
* Deprecated tracing.apm.* settings got removed. [#119926](https://github.com/elastic/elasticsearch/pull/119926)
6868

6969
Infra/REST API:
70-
* Output a consistent format when generating error json [#90529](https://github.com/elastic/elasticsearch/pull/90529) (issue: {es-issue}89387[#89387])
70+
* Output a consistent format when generating error json [#90529](https://github.com/elastic/elasticsearch/pull/90529) (issue: https://github.com/elastic/elasticsearch/issues/89387[#89387])
7171

7272
Ingest Node:
7373
* Remove `ecs` option on `user_agent` processor [#116077](https://github.com/elastic/elasticsearch/pull/116077)
7474
* Remove ignored fallback option on GeoIP processor [#116112](https://github.com/elastic/elasticsearch/pull/116112)
7575

7676
Logs:
77-
* Conditionally enable logsdb by default for data streams matching with logs-*-* pattern. [#121049](https://github.com/elastic/elasticsearch/pull/121049) (issue: {es-issue}106489[#106489])
77+
* Conditionally enable logsdb by default for data streams matching with logs-*-* pattern. [#121049](https://github.com/elastic/elasticsearch/pull/121049) (issue: https://github.com/elastic/elasticsearch/issues/106489[#106489])
7878

7979
Machine Learning:
8080
* Disable machine learning on macOS x86_64 [#104125](https://github.com/elastic/elasticsearch/pull/104125)
8181

8282
Mapping:
8383
* Remove support for type, fields, `copy_to` and boost in metadata field definition [#118825](https://github.com/elastic/elasticsearch/pull/118825)
84-
* Turn `_source` meta fieldmapper's mode attribute into a no-op [#119072](https://github.com/elastic/elasticsearch/pull/119072) (issue: {es-issue}118596[#118596])
84+
* Turn `_source` meta fieldmapper's mode attribute into a no-op [#119072](https://github.com/elastic/elasticsearch/pull/119072) (issue: https://github.com/elastic/elasticsearch/issues/118596[#118596])
8585

8686
Search:
8787
* Adjust `random_score` default field to `_seq_no` field [#118671](https://github.com/elastic/elasticsearch/pull/118671)

modules/data-streams/src/main/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeService.java

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -127,14 +127,14 @@ private ProjectMetadata.Builder updateTimeSeriesTemporalRange(ProjectMetadata pr
127127

128128
// getWriteIndex() selects the latest added index:
129129
Index head = dataStream.getWriteIndex();
130-
IndexMetadata im = project.getIndexSafe(head);
131-
Instant currentEnd = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings());
132-
TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(im.getSettings());
133-
Instant newEnd = DataStream.getCanonicalTimestampBound(
134-
now.plus(lookAheadTime.getMillis(), ChronoUnit.MILLIS).plus(pollInterval.getMillis(), ChronoUnit.MILLIS)
135-
);
136-
if (newEnd.isAfter(currentEnd)) {
137-
try {
130+
try {
131+
IndexMetadata im = project.getIndexSafe(head);
132+
Instant currentEnd = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings());
133+
TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(im.getSettings());
134+
Instant newEnd = DataStream.getCanonicalTimestampBound(
135+
now.plus(lookAheadTime.getMillis(), ChronoUnit.MILLIS).plus(pollInterval.getMillis(), ChronoUnit.MILLIS)
136+
);
137+
if (newEnd.isAfter(currentEnd)) {
138138
Settings settings = Settings.builder()
139139
.put(IndexSettings.TIME_SERIES_END_TIME.getKey(), DEFAULT_DATE_TIME_FORMATTER.format(newEnd))
140140
.build();
@@ -151,17 +151,17 @@ private ProjectMetadata.Builder updateTimeSeriesTemporalRange(ProjectMetadata pr
151151
mBuilder.updateSettings(settings, head.getName());
152152
// Verify that all temporal ranges of each backing index is still valid:
153153
dataStream.validate(mBuilder::get);
154-
} catch (Exception e) {
155-
LOGGER.error(
156-
() -> format(
157-
"unable to update [%s] for data stream [%s] and backing index [%s]",
158-
IndexSettings.TIME_SERIES_END_TIME.getKey(),
159-
dataStream.getName(),
160-
head.getName()
161-
),
162-
e
163-
);
164154
}
155+
} catch (Exception e) {
156+
LOGGER.error(
157+
() -> format(
158+
"unable to update [%s] for data stream [%s] and backing index [%s]",
159+
IndexSettings.TIME_SERIES_END_TIME.getKey(),
160+
dataStream.getName(),
161+
head.getName()
162+
),
163+
e
164+
);
165165
}
166166
}
167167
return mBuilder;

0 commit comments

Comments
 (0)