
Commit 317f7d8

Merge branch 'main' into enhancement/esql-match-disjunction-restrictions

2 parents 4968e9e + 312c21a

File tree

32 files changed, +683 -151 lines changed

build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt

Lines changed: 1 addition & 3 deletions

@@ -155,10 +155,8 @@ org.elasticsearch.cluster.ClusterState#compatibilityVersions()
 
 @defaultMessage ClusterFeatures#nodeFeatures is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster.
 org.elasticsearch.cluster.ClusterFeatures#nodeFeatures()
-@defaultMessage ClusterFeatures#allNodeFeatures is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster.
-org.elasticsearch.cluster.ClusterFeatures#allNodeFeatures()
 @defaultMessage ClusterFeatures#clusterHasFeature is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster.
-org.elasticsearch.cluster.ClusterFeatures#clusterHasFeature(org.elasticsearch.features.NodeFeature)
+org.elasticsearch.cluster.ClusterFeatures#clusterHasFeature(org.elasticsearch.cluster.node.DiscoveryNodes, org.elasticsearch.features.NodeFeature)
 
 @defaultMessage Do not construct this records outside the source files they are declared in
 org.elasticsearch.cluster.SnapshotsInProgress$ShardSnapshotStatus#<init>(java.lang.String, org.elasticsearch.cluster.SnapshotsInProgress$ShardState, org.elasticsearch.repositories.ShardGeneration, java.lang.String, org.elasticsearch.repositories.ShardSnapshotResult)
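
Note: the change above narrows the forbidden ClusterFeatures#clusterHasFeature signature to the new DiscoveryNodes-taking overload, while the messages keep steering callers to FeatureService. A minimal sketch of the sanctioned call path, assuming the FeatureService#clusterHasFeature(ClusterState, NodeFeature) signature the messages imply; the FeatureChecks class and the "example.feature" id are hypothetical:

    import org.elasticsearch.cluster.ClusterState;
    import org.elasticsearch.features.FeatureService;
    import org.elasticsearch.features.NodeFeature;

    final class FeatureChecks {
        // Hypothetical feature flag, for illustration only.
        static final NodeFeature SOME_FEATURE = new NodeFeature("example.feature");

        // The sanctioned path per the @defaultMessage entries above: go through
        // FeatureService rather than reading ClusterFeatures directly.
        static boolean isSupported(FeatureService featureService, ClusterState state) {
            return featureService.clusterHasFeature(state, SOME_FEATURE);
        }
    }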

docs/Versions.asciidoc

Lines changed: 1 addition & 0 deletions

@@ -9,6 +9,7 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[]
 
 :docker-repo: docker.elastic.co/elasticsearch/elasticsearch
 :docker-image: {docker-repo}:{version}
+:docker-wolfi-image: {docker-repo}-wolfi:{version}
 :kib-docker-repo: docker.elastic.co/kibana/kibana
 :kib-docker-image: {kib-docker-repo}:{version}
 :plugin_url: https://artifacts.elastic.co/downloads/elasticsearch-plugins

docs/changelog/118143.yaml

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+pr: 118143
+summary: Infrastructure for assuming cluster features in the next major version
+area: "Infra/Core"
+type: feature
+issues: []

docs/reference/setup/install/docker.asciidoc

Lines changed: 6 additions & 0 deletions

@@ -55,6 +55,12 @@ docker pull {docker-image}
 // REVIEWED[DEC.10.24]
 --
 
+Alternatively, you can use the Wolfi-based image. Using Wolfi-based images requires Docker version 20.10.10 or later.
+[source,sh,subs="attributes"]
+----
+docker pull {docker-wolfi-image}
+----
+
 . Optional: Install
 https://docs.sigstore.dev/cosign/system_config/installation/[Cosign] for your
 environment. Then use Cosign to verify the {es} image's signature.

modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java

Lines changed: 0 additions & 14 deletions

@@ -43,7 +43,6 @@
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.index.IndexVersion;
@@ -80,28 +79,19 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
-import java.util.function.BiConsumer;
 import java.util.function.Supplier;
 
 import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
 public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> {
-    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ParseField.class);
-    static final String DOCUMENT_TYPE_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [percolate] queries. "
-        + "The [document_type] should no longer be specified.";
-    static final String TYPE_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [percolate] queries. "
-        + "The [type] of the indexed document should no longer be specified.";
-
     public static final String NAME = "percolate";
 
     static final ParseField DOCUMENT_FIELD = new ParseField("document");
     static final ParseField DOCUMENTS_FIELD = new ParseField("documents");
     private static final ParseField NAME_FIELD = new ParseField("name");
     private static final ParseField QUERY_FIELD = new ParseField("field");
-    private static final ParseField DOCUMENT_TYPE_FIELD = new ParseField("document_type");
-    private static final ParseField INDEXED_DOCUMENT_FIELD_TYPE = new ParseField("type");
     private static final ParseField INDEXED_DOCUMENT_FIELD_INDEX = new ParseField("index");
     private static final ParseField INDEXED_DOCUMENT_FIELD_ID = new ParseField("id");
     private static final ParseField INDEXED_DOCUMENT_FIELD_ROUTING = new ParseField("routing");
@@ -368,10 +358,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOException
         );
     }
 
-    private static BiConsumer<PercolateQueryBuilder, String> deprecateAndIgnoreType(String key, String message) {
-        return (target, type) -> deprecationLogger.compatibleCritical(key, message);
-    }
-
     private static BytesReference parseDocument(XContentParser parser) throws IOException {
         try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
             builder.copyCurrentStructure(parser);

muted-tests.yml

Lines changed: 3 additions & 3 deletions

@@ -156,9 +156,6 @@ tests:
   issue: https://github.com/elastic/elasticsearch/issues/117473
 - class: org.elasticsearch.repositories.s3.RepositoryS3EcsClientYamlTestSuiteIT
   issue: https://github.com/elastic/elasticsearch/issues/117525
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set}
-  issue: https://github.com/elastic/elasticsearch/issues/116777
 - class: "org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT"
   method: "test {scoring.*}"
   issue: https://github.com/elastic/elasticsearch/issues/117641
@@ -307,6 +304,9 @@ tests:
   issue: https://github.com/elastic/elasticsearch/issues/118806
 - class: org.elasticsearch.xpack.esql.session.IndexResolverFieldNamesTests
   issue: https://github.com/elastic/elasticsearch/issues/118814
+- class: org.elasticsearch.index.engine.RecoverySourcePruneMergePolicyTests
+  method: testPruneSome
+  issue: https://github.com/elastic/elasticsearch/issues/118728
 
 # Examples:
 #

rest-api-spec/build.gradle

Lines changed: 1 addition & 0 deletions

@@ -69,4 +69,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task ->
   task.skipTest("search/520_fetch_fields/fetch _seq_no via fields", "error code is changed from 5xx to 400 in 9.0")
   task.skipTest("search.vectors/41_knn_search_bbq_hnsw/Test knn search", "Scoring has changed in latest versions")
   task.skipTest("search.vectors/42_knn_search_bbq_flat/Test knn search", "Scoring has changed in latest versions")
+  task.skipTest("synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set", "Can't work until auto-expand replicas is 0-1 for synonyms index")
 })

rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml

Lines changed: 27 additions & 6 deletions

@@ -1,8 +1,8 @@
----
-"Reload analyzers for specific synonym set":
+setup:
   - requires:
       cluster_features: ["gte_v8.10.0"]
       reason: Reloading analyzers for specific synonym set is introduced in 8.10.0
+
   # Create synonyms_set1
   - do:
       synonyms.put_synonym:
@@ -100,21 +100,25 @@
       - '{"index": {"_index": "my_index2", "_id": "2"}}'
       - '{"my_field": "goodbye"}'
 
-  # An update of synonyms_set1 must trigger auto-reloading of analyzers only for synonyms_set1
+---
+"Reload analyzers for specific synonym set":
+  # These specific tests can't succeed in BwC, as synonyms auto-expand replicas are 0-all. Replicas can't be associated with
+  # upgraded nodes, and thus we are not able to guarantee that the shards are not failed.
+  # This test is skipped for BwC until the synonyms index has auto-expand replicas set to 0-1.
+
   - do:
       synonyms.put_synonym:
         id: synonyms_set1
         body:
           synonyms_set:
             - synonyms: "hello, salute"
             - synonyms: "ciao => goodbye"
+
   - match: { result: "updated" }
   - gt: { reload_analyzers_details._shards.total: 0 }
   - gt: { reload_analyzers_details._shards.successful: 0 }
   - match: { reload_analyzers_details._shards.failed: 0 }
-  - length: { reload_analyzers_details.reload_details: 1 } # reload details contain only a single index
-  - match: { reload_analyzers_details.reload_details.0.index: "my_index1" }
-  - match: { reload_analyzers_details.reload_details.0.reloaded_analyzers.0: "my_analyzer1" }
+
 
   # Confirm that the index analyzers are reloaded for my_index1
   - do:
@@ -127,6 +131,23 @@
           query: salute
   - match: { hits.total.value: 1 }
 
+---
+"Check analyzer reloaded and non failed shards for bwc tests":
+
+  - do:
+      synonyms.put_synonym:
+        id: synonyms_set1
+        body:
+          synonyms_set:
+            - synonyms: "hello, salute"
+            - synonyms: "ciao => goodbye"
+  - match: { result: "updated" }
+  - gt: { reload_analyzers_details._shards.total: 0 }
+  - gt: { reload_analyzers_details._shards.successful: 0 }
+  - length: { reload_analyzers_details.reload_details: 1 } # reload details contain only a single index
+  - match: { reload_analyzers_details.reload_details.0.index: "my_index1" }
+  - match: { reload_analyzers_details.reload_details.0.reloaded_analyzers.0: "my_analyzer1" }
+
   # Confirm that the index analyzers are still the same for my_index2
   - do:
       search:

server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java

Lines changed: 45 additions & 11 deletions

@@ -9,11 +9,12 @@
 
 package org.elasticsearch.cluster;
 
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
-import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.features.NodeFeature;
 import org.elasticsearch.xcontent.ToXContent;
 
@@ -79,28 +80,61 @@ public Map<String, Set<String>> nodeFeatures() {
         return nodeFeatures;
     }
 
-    /**
-     * The features in all nodes in the cluster.
-     * <p>
-     * NOTE: This should not be used directly.
-     * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead.
-     */
-    public Set<String> allNodeFeatures() {
+    private Set<String> allNodeFeatures() {
         if (allNodeFeatures == null) {
             allNodeFeatures = Set.copyOf(calculateAllNodeFeatures(nodeFeatures.values()));
         }
         return allNodeFeatures;
     }
 
+    /**
+     * Returns {@code true} if {@code node} can have assumed features.
+     * @see org.elasticsearch.env.BuildVersion#canRemoveAssumedFeatures
+     */
+    public static boolean featuresCanBeAssumedForNode(DiscoveryNode node) {
+        return node.getBuildVersion().canRemoveAssumedFeatures();
+    }
+
+    /**
+     * Returns {@code true} if one or more nodes in {@code nodes} can have assumed features.
+     * @see org.elasticsearch.env.BuildVersion#canRemoveAssumedFeatures
+     */
+    public static boolean featuresCanBeAssumedForNodes(DiscoveryNodes nodes) {
+        return nodes.getAllNodes().stream().anyMatch(n -> n.getBuildVersion().canRemoveAssumedFeatures());
+    }
+
     /**
      * {@code true} if {@code feature} is present on all nodes in the cluster.
      * <p>
      * NOTE: This should not be used directly.
      * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead.
     */
-    @SuppressForbidden(reason = "directly reading cluster features")
-    public boolean clusterHasFeature(NodeFeature feature) {
-        return allNodeFeatures().contains(feature.id());
+    public boolean clusterHasFeature(DiscoveryNodes nodes, NodeFeature feature) {
+        assert nodes.getNodes().keySet().equals(nodeFeatures.keySet())
+            : "Cluster features nodes " + nodeFeatures.keySet() + " is different to discovery nodes " + nodes.getNodes().keySet();
+
+        // basic case
+        boolean allNodesHaveFeature = allNodeFeatures().contains(feature.id());
+        if (allNodesHaveFeature) {
+            return true;
+        }
+
+        // if the feature is assumed, check the versions more closely
+        // it's actually ok if the feature is assumed, and all nodes missing the feature can assume it
+        // TODO: do we need some kind of transient cache of this calculation?
+        if (feature.assumedAfterNextCompatibilityBoundary()) {
+            for (var nf : nodeFeatures.entrySet()) {
+                if (nf.getValue().contains(feature.id()) == false
+                    && featuresCanBeAssumedForNode(nodes.getNodes().get(nf.getKey())) == false) {
+                    return false;
+                }
+            }
+
+            // all nodes missing the feature can assume it - so that's alright then
+            return true;
        }
 
+        return false;
     }
 
     /**

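Note: to make the assumed-features logic above concrete, here is a hedged sketch of the rolling-upgrade case the new loop handles. The scenario, the "old.feature" id, and the two-argument NodeFeature constructor (mirroring the assumedAfterNextCompatibilityBoundary() accessor called in the diff) are illustrative assumptions, not taken verbatim from this commit:

    import org.elasticsearch.cluster.ClusterState;
    import org.elasticsearch.features.NodeFeature;

    final class AssumedFeatureExample {
        // Illustrative feature marked as assumed after the next compatibility boundary.
        static final NodeFeature OLD_FEATURE = new NodeFeature("old.feature", true);

        // During a major-version rolling upgrade, a next-major node may stop publishing
        // OLD_FEATURE. Because the feature is assumed, clusterHasFeature(nodes, feature)
        // excuses each node missing the feature via featuresCanBeAssumedForNode(...) and
        // can still report the feature as present cluster-wide instead of returning false.
        static boolean stillSupported(ClusterState state) {
            return state.clusterFeatures().clusterHasFeature(state.nodes(), OLD_FEATURE);
        }
    }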