
Commit e701697

Remove historical features infrastructure (#117043)
v9 can only talk to 8.18, and historical features go no higher than 8.12, so we can remove all historical features and the infrastructure around them.
1 parent 5f3b380 commit e701697


19 files changed: +62, -337 lines


build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy

Lines changed: 1 addition & 1 deletion
@@ -119,7 +119,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
             noticeFile.set(file("NOTICE"))
         """
         when:
-        def result = gradleRunner("assemble", "-x", "generateHistoricalFeaturesMetadata").build()
+        def result = gradleRunner("assemble", "-x", "generateClusterFeaturesMetadata").build()
         then:
         result.task(":assemble").outcome == TaskOutcome.SUCCESS
         file("build/distributions/hello-world.jar").exists()

build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy

Lines changed: 1 addition & 1 deletion
@@ -303,7 +303,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
         """

         when:
-        def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateHistoricalFeaturesMetadata').build()
+        def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateClusterFeaturesMetadata').build()

         then:
         result.task(":generatePom").outcome == TaskOutcome.SUCCESS

build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java

Lines changed: 2 additions & 2 deletions
@@ -14,7 +14,7 @@
 import org.elasticsearch.gradle.internal.conventions.util.Util;
 import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
 import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin;
-import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
+import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
 import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
 import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
 import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
@@ -38,7 +38,7 @@ public void apply(Project project) {
         project.getPluginManager().apply(PluginBuildPlugin.class);
         project.getPluginManager().apply(JarHellPrecommitPlugin.class);
         project.getPluginManager().apply(ElasticsearchJavaPlugin.class);
-        project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
+        project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
         boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).isCi();
         // Clear default dependencies added by public PluginBuildPlugin as we add our
         // own project dependencies for internal builds

build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java

Lines changed: 2 additions & 2 deletions
@@ -12,7 +12,7 @@
 import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.elasticsearch.gradle.internal.precommit.InternalPrecommitTasks;
 import org.elasticsearch.gradle.internal.snyk.SnykDependencyMonitoringGradlePlugin;
-import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
+import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
 import org.gradle.api.InvalidUserDataException;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
@@ -63,7 +63,7 @@ public void apply(final Project project) {
         project.getPluginManager().apply(ElasticsearchJavadocPlugin.class);
         project.getPluginManager().apply(DependenciesInfoPlugin.class);
         project.getPluginManager().apply(SnykDependencyMonitoringGradlePlugin.class);
-        project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
+        project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
         InternalPrecommitTasks.create(project, true);
         configureLicenseAndNotice(project);
     }

build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java → ClusterFeaturesMetadataPlugin.java

Lines changed: 6 additions & 6 deletions
@@ -21,10 +21,10 @@
 import java.util.Map;

 /**
- * Extracts historical feature metadata into a machine-readable format for use in backward compatibility testing.
+ * Extracts cluster feature metadata into a machine-readable format for use in backward compatibility testing.
  */
-public class HistoricalFeaturesMetadataPlugin implements Plugin<Project> {
-    public static final String HISTORICAL_FEATURES_JSON = "historical-features.json";
+public class ClusterFeaturesMetadataPlugin implements Plugin<Project> {
+    public static final String CLUSTER_FEATURES_JSON = "cluster-features.json";
     public static final String FEATURES_METADATA_TYPE = "features-metadata-json";
     public static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadata";

@@ -40,13 +40,13 @@ public void apply(Project project) {
         SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
         SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);

-        TaskProvider<HistoricalFeaturesMetadataTask> generateTask = project.getTasks()
-            .register("generateHistoricalFeaturesMetadata", HistoricalFeaturesMetadataTask.class, task -> {
+        TaskProvider<ClusterFeaturesMetadataTask> generateTask = project.getTasks()
+            .register("generateClusterFeaturesMetadata", ClusterFeaturesMetadataTask.class, task -> {
                 task.setClasspath(
                     featureMetadataExtractorConfig.plus(mainSourceSet.getRuntimeClasspath())
                         .plus(project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME))
                 );
-                task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(HISTORICAL_FEATURES_JSON));
+                task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(CLUSTER_FEATURES_JSON));
             });

         Configuration featuresMetadataArtifactConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {

build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java → ClusterFeaturesMetadataTask.java

Lines changed: 6 additions & 6 deletions
@@ -26,7 +26,7 @@
 import javax.inject.Inject;

 @CacheableTask
-public abstract class HistoricalFeaturesMetadataTask extends DefaultTask {
+public abstract class ClusterFeaturesMetadataTask extends DefaultTask {
     private FileCollection classpath;

     @OutputFile
@@ -46,30 +46,30 @@ public void setClasspath(FileCollection classpath) {

     @TaskAction
     public void execute() {
-        getWorkerExecutor().noIsolation().submit(HistoricalFeaturesMetadataWorkAction.class, params -> {
+        getWorkerExecutor().noIsolation().submit(ClusterFeaturesMetadataWorkAction.class, params -> {
             params.getClasspath().setFrom(getClasspath());
             params.getOutputFile().set(getOutputFile());
         });
     }

-    public interface HistoricalFeaturesWorkParameters extends WorkParameters {
+    public interface ClusterFeaturesWorkParameters extends WorkParameters {
         ConfigurableFileCollection getClasspath();

         RegularFileProperty getOutputFile();
     }

-    public abstract static class HistoricalFeaturesMetadataWorkAction implements WorkAction<HistoricalFeaturesWorkParameters> {
+    public abstract static class ClusterFeaturesMetadataWorkAction implements WorkAction<ClusterFeaturesWorkParameters> {
         private final ExecOperations execOperations;

         @Inject
-        public HistoricalFeaturesMetadataWorkAction(ExecOperations execOperations) {
+        public ClusterFeaturesMetadataWorkAction(ExecOperations execOperations) {
             this.execOperations = execOperations;
         }

         @Override
         public void execute() {
             LoggedExec.javaexec(execOperations, spec -> {
-                spec.getMainClass().set("org.elasticsearch.extractor.features.HistoricalFeaturesMetadataExtractor");
+                spec.getMainClass().set("org.elasticsearch.extractor.features.ClusterFeaturesMetadataExtractor");
                 spec.classpath(getParameters().getClasspath());
                 spec.args(getParameters().getOutputFile().get().getAsFile().getAbsolutePath());
             });

build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java

Lines changed: 4 additions & 7 deletions
@@ -20,8 +20,8 @@
 import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes;
 import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin;
 import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin;
+import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
 import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener;
-import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
 import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin;
 import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
 import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
@@ -116,12 +116,12 @@ public void apply(Project project) {
         extractedPluginsConfiguration.extendsFrom(pluginsConfiguration);
         configureArtifactTransforms(project);

-        // Create configuration for aggregating historical feature metadata
+        // Create configuration for aggregating feature metadata
         FileCollection featureMetadataConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
             c.setCanBeConsumed(false);
             c.setCanBeResolved(true);
             c.attributes(
-                a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
+                a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
             );
             c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":server"))));
             c.withDependencies(dependencies -> {
@@ -136,10 +136,7 @@ public void apply(Project project) {
             c.setCanBeConsumed(false);
             c.setCanBeResolved(true);
             c.attributes(
-                a -> a.attribute(
-                    ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE,
-                    HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE
-                )
+                a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
             );
             c.defaultDependencies(
                 d -> d.add(project.getDependencies().project(Map.of("path", ":distribution", "configuration", "featuresMetadata")))

distribution/build.gradle

Lines changed: 2 additions & 2 deletions
@@ -14,7 +14,7 @@ import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.internal.ConcatFilesTask
 import org.elasticsearch.gradle.internal.DependenciesInfoPlugin
 import org.elasticsearch.gradle.internal.NoticeTask
-import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin
+import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin

 import java.nio.file.Files
 import java.nio.file.Path
@@ -33,7 +33,7 @@ configurations {
   }
   featuresMetadata {
     attributes {
-      attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
+      attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
     }
   }
 }

server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java

Lines changed: 1 addition & 1 deletion
@@ -95,7 +95,7 @@ public Set<String> allNodeFeatures() {
     /**
      * {@code true} if {@code feature} is present on all nodes in the cluster.
      * <p>
-     * NOTE: This should not be used directly, as it does not read historical features.
+     * NOTE: This should not be used directly.
      * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead.
      */
     @SuppressForbidden(reason = "directly reading cluster features")
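
The updated javadoc above still routes callers through FeatureService#clusterHasFeature. As a rough, hedged illustration of that call path (the helper class and feature id below are invented for this example, and the method shape is assumed from the javadoc reference rather than taken from this commit):

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.features.FeatureService;
import org.elasticsearch.features.NodeFeature;

// Illustrative sketch only: the class name and feature id are hypothetical.
final class FeatureCheckExample {
    private static final NodeFeature SOME_FEATURE = new NodeFeature("example.some_feature");

    static boolean isSupported(FeatureService featureService, ClusterState state) {
        // Go through FeatureService instead of reading ClusterFeatures directly;
        // with historical features gone, this only consults features published by the nodes.
        return featureService.clusterHasFeature(state, SOME_FEATURE);
    }
}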

server/src/main/java/org/elasticsearch/features/FeatureData.java

Lines changed: 4 additions & 65 deletions
@@ -9,25 +9,19 @@

 package org.elasticsearch.features;

-import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;

-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.NavigableMap;
 import java.util.Set;
-import java.util.TreeMap;
-
-import static org.elasticsearch.features.FeatureService.CLUSTER_FEATURES_ADDED_VERSION;

 /**
- * Reads and consolidate features exposed by a list {@link FeatureSpecification}, grouping them into historical features and node
- * features for the consumption of {@link FeatureService}
+ * Reads and consolidate features exposed by a list {@link FeatureSpecification},
+ * grouping them together for the consumption of {@link FeatureService}
  */
 public class FeatureData {

@@ -40,19 +34,14 @@ public class FeatureData {
         }
     }

-    private final NavigableMap<Version, Set<String>> historicalFeatures;
     private final Map<String, NodeFeature> nodeFeatures;

-    private FeatureData(NavigableMap<Version, Set<String>> historicalFeatures, Map<String, NodeFeature> nodeFeatures) {
-        this.historicalFeatures = historicalFeatures;
+    private FeatureData(Map<String, NodeFeature> nodeFeatures) {
         this.nodeFeatures = nodeFeatures;
     }

     public static FeatureData createFromSpecifications(List<? extends FeatureSpecification> specs) {
         Map<String, FeatureSpecification> allFeatures = new HashMap<>();
-
-        // Initialize historicalFeatures with empty version to guarantee there's a floor entry for every version
-        NavigableMap<Version, Set<String>> historicalFeatures = new TreeMap<>(Map.of(Version.V_EMPTY, Set.of()));
         Map<String, NodeFeature> nodeFeatures = new HashMap<>();
         for (FeatureSpecification spec : specs) {
             Set<NodeFeature> specFeatures = spec.getFeatures();
@@ -61,39 +50,6 @@ public static FeatureData createFromSpecifications(List<? extends FeatureSpecifi
                 specFeatures.addAll(spec.getTestFeatures());
             }

-            for (var hfe : spec.getHistoricalFeatures().entrySet()) {
-                FeatureSpecification existing = allFeatures.putIfAbsent(hfe.getKey().id(), spec);
-                // the same SPI class can be loaded multiple times if it's in the base classloader
-                if (existing != null && existing.getClass() != spec.getClass()) {
-                    throw new IllegalArgumentException(
-                        Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", hfe.getKey().id(), existing, spec)
-                    );
-                }
-
-                if (hfe.getValue().after(CLUSTER_FEATURES_ADDED_VERSION)) {
-                    throw new IllegalArgumentException(
-                        Strings.format(
-                            "Historical feature [%s] declared by [%s] for version [%s] is not a historical version",
-                            hfe.getKey().id(),
-                            spec,
-                            hfe.getValue()
-                        )
-                    );
-                }
-
-                if (specFeatures.contains(hfe.getKey())) {
-                    throw new IllegalArgumentException(
-                        Strings.format(
-                            "Feature [%s] cannot be declared as both a regular and historical feature by [%s]",
-                            hfe.getKey().id(),
-                            spec
-                        )
-                    );
-                }
-
-                historicalFeatures.computeIfAbsent(hfe.getValue(), k -> new HashSet<>()).add(hfe.getKey().id());
-            }
-
             for (NodeFeature f : specFeatures) {
                 FeatureSpecification existing = allFeatures.putIfAbsent(f.id(), spec);
                 if (existing != null && existing.getClass() != spec.getClass()) {
@@ -106,24 +62,7 @@ public static FeatureData createFromSpecifications(List<? extends FeatureSpecifi
             }
         }

-        return new FeatureData(consolidateHistoricalFeatures(historicalFeatures), Map.copyOf(nodeFeatures));
-    }
-
-    private static NavigableMap<Version, Set<String>> consolidateHistoricalFeatures(
-        NavigableMap<Version, Set<String>> declaredHistoricalFeatures
-    ) {
-        // update each version by adding in all features from previous versions
-        Set<String> featureAggregator = new HashSet<>();
-        for (Map.Entry<Version, Set<String>> versions : declaredHistoricalFeatures.entrySet()) {
-            featureAggregator.addAll(versions.getValue());
-            versions.setValue(Set.copyOf(featureAggregator));
-        }
-
-        return Collections.unmodifiableNavigableMap(declaredHistoricalFeatures);
-    }
-
-    public NavigableMap<Version, Set<String>> getHistoricalFeatures() {
-        return historicalFeatures;
+        return new FeatureData(Map.copyOf(nodeFeatures));
     }

     public Map<String, NodeFeature> getNodeFeatures() {
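
With the historical-feature path removed, FeatureData only consolidates the regular node features that each FeatureSpecification exposes via getFeatures() (and, in tests, getTestFeatures()). A minimal sketch of what a provider looks like after this change; the class and feature id are hypothetical, and only the getFeatures() shape is implied by the code above:

import java.util.Set;

import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;

// Illustrative sketch only: there is no getHistoricalFeatures() to implement any more;
// a specification simply declares the node features it supports.
public class ExamplePluginFeatures implements FeatureSpecification {
    private static final NodeFeature EXAMPLE_FEATURE = new NodeFeature("example_plugin.capability");

    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(EXAMPLE_FEATURE);
    }
}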
