diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index d50e927634df5..e16d34d597759 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1384,7 +1384,7 @@ public void testSearchAllResolvesDataStreams() throws Exception { public void testGetDataStream() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, maximumNumberOfReplicas() + 2).build(); - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.builder().dataRetention(randomPositiveTimeValue()).build(); putComposableIndexTemplate("template_for_foo", null, List.of("metrics-foo*"), settings, null, null, lifecycle, false); int numDocsFoo = randomIntBetween(2, 16); indexDocs("metrics-foo", numDocsFoo); @@ -1400,7 +1400,7 @@ public void testGetDataStream() throws Exception { assertThat(metricsFooDataStream.getDataStreamStatus(), is(ClusterHealthStatus.YELLOW)); assertThat(metricsFooDataStream.getIndexTemplate(), is("template_for_foo")); assertThat(metricsFooDataStream.getIlmPolicy(), is(nullValue())); - assertThat(dataStream.getLifecycle(), is(lifecycle)); + assertThat(dataStream.getLifecycle(), is(lifecycle.toDataStreamLifecycle())); assertThat(metricsFooDataStream.templatePreferIlmValue(), is(true)); GetDataStreamAction.Response.IndexProperties indexProperties = metricsFooDataStream.getIndexSettingsValues() .get(dataStream.getWriteIndex()); @@ -2450,7 +2450,7 @@ static void putComposableIndexTemplate( @Nullable Settings settings, @Nullable Map metadata, @Nullable Map aliases, - @Nullable DataStreamLifecycle lifecycle, + @Nullable DataStreamLifecycle.Template lifecycle, boolean withFailureStore ) throws IOException { TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request(id); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudDataStreamLifecycleIT.java index 8bb8405412295..d6edd671131e9 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudDataStreamLifecycleIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudDataStreamLifecycleIT.java @@ -25,7 +25,7 @@ import java.util.List; import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleFixtures.putComposableIndexTemplate; -import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleFixtures.randomLifecycle; +import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleFixtures.randomLifecycleTemplate; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -39,7 +39,7 @@ protected Collection> nodePlugins() { } public void testGetLifecycle() throws Exception { - DataStreamLifecycle lifecycle = randomLifecycle(); + DataStreamLifecycle.Template lifecycle = randomLifecycleTemplate(); putComposableIndexTemplate("id1", null, 
List.of("with-lifecycle*"), null, null, lifecycle); putComposableIndexTemplate("id2", null, List.of("without-lifecycle*"), null, null, null); { @@ -82,9 +82,9 @@ public void testGetLifecycle() throws Exception { ).get(); assertThat(response.getDataStreamLifecycles().size(), equalTo(3)); assertThat(response.getDataStreamLifecycles().get(0).dataStreamName(), equalTo("with-lifecycle-1")); - assertThat(response.getDataStreamLifecycles().get(0).lifecycle(), equalTo(lifecycle)); + assertThat(response.getDataStreamLifecycles().get(0).lifecycle(), equalTo(lifecycle.toDataStreamLifecycle())); assertThat(response.getDataStreamLifecycles().get(1).dataStreamName(), equalTo("with-lifecycle-2")); - assertThat(response.getDataStreamLifecycles().get(1).lifecycle(), equalTo(lifecycle)); + assertThat(response.getDataStreamLifecycles().get(1).lifecycle(), equalTo(lifecycle.toDataStreamLifecycle())); assertThat(response.getDataStreamLifecycles().get(2).dataStreamName(), equalTo("without-lifecycle")); assertThat(response.getDataStreamLifecycles().get(2).lifecycle(), is(nullValue())); assertThat(response.getRolloverConfiguration(), nullValue()); @@ -102,9 +102,9 @@ public void testGetLifecycle() throws Exception { ).get(); assertThat(response.getDataStreamLifecycles().size(), equalTo(2)); assertThat(response.getDataStreamLifecycles().get(0).dataStreamName(), equalTo("with-lifecycle-1")); - assertThat(response.getDataStreamLifecycles().get(0).lifecycle(), equalTo(lifecycle)); + assertThat(response.getDataStreamLifecycles().get(0).lifecycle(), equalTo(lifecycle.toDataStreamLifecycle())); assertThat(response.getDataStreamLifecycles().get(1).dataStreamName(), equalTo("with-lifecycle-2")); - assertThat(response.getDataStreamLifecycles().get(1).lifecycle(), is(lifecycle)); + assertThat(response.getDataStreamLifecycles().get(1).lifecycle(), is(lifecycle.toDataStreamLifecycle())); assertThat(response.getRolloverConfiguration(), nullValue()); } @@ -120,7 +120,7 @@ public void testGetLifecycle() throws Exception { ).get(); assertThat(response.getDataStreamLifecycles().size(), equalTo(2)); assertThat(response.getDataStreamLifecycles().get(0).dataStreamName(), equalTo("with-lifecycle-1")); - assertThat(response.getDataStreamLifecycles().get(0).lifecycle(), equalTo(lifecycle)); + assertThat(response.getDataStreamLifecycles().get(0).lifecycle(), equalTo(lifecycle.toDataStreamLifecycle())); assertThat(response.getRolloverConfiguration(), nullValue()); } @@ -135,9 +135,9 @@ public void testGetLifecycle() throws Exception { ).get(); assertThat(responseWithRollover.getDataStreamLifecycles().size(), equalTo(3)); assertThat(responseWithRollover.getDataStreamLifecycles().get(0).dataStreamName(), equalTo("with-lifecycle-1")); - assertThat(responseWithRollover.getDataStreamLifecycles().get(0).lifecycle(), equalTo(lifecycle)); + assertThat(responseWithRollover.getDataStreamLifecycles().get(0).lifecycle(), equalTo(lifecycle.toDataStreamLifecycle())); assertThat(responseWithRollover.getDataStreamLifecycles().get(1).dataStreamName(), equalTo("with-lifecycle-2")); - assertThat(responseWithRollover.getDataStreamLifecycles().get(1).lifecycle(), equalTo(lifecycle)); + assertThat(responseWithRollover.getDataStreamLifecycles().get(1).lifecycle(), equalTo(lifecycle.toDataStreamLifecycle())); assertThat(responseWithRollover.getDataStreamLifecycles().get(2).dataStreamName(), equalTo("without-lifecycle")); assertThat(responseWithRollover.getDataStreamLifecycles().get(2).lifecycle(), is(nullValue())); 
assertThat(responseWithRollover.getRolloverConfiguration(), notNullValue()); @@ -192,8 +192,8 @@ public void testPutLifecycle() throws Exception { ).get(); assertThat(response.getDataStreamLifecycles().size(), equalTo(1)); assertThat(response.getDataStreamLifecycles().get(0).dataStreamName(), equalTo("my-data-stream")); - assertThat(response.getDataStreamLifecycles().get(0).lifecycle().getDataStreamRetention(), equalTo(dataRetention)); - assertThat(response.getDataStreamLifecycles().get(0).lifecycle().isEnabled(), equalTo(true)); + assertThat(response.getDataStreamLifecycles().get(0).lifecycle().dataRetention(), equalTo(dataRetention)); + assertThat(response.getDataStreamLifecycles().get(0).lifecycle().enabled(), equalTo(true)); } // Disable the lifecycle @@ -220,13 +220,13 @@ public void testPutLifecycle() throws Exception { ).get(); assertThat(response.getDataStreamLifecycles().size(), equalTo(1)); assertThat(response.getDataStreamLifecycles().get(0).dataStreamName(), equalTo("my-data-stream")); - assertThat(response.getDataStreamLifecycles().get(0).lifecycle().getDataStreamRetention(), equalTo(dataRetention)); - assertThat(response.getDataStreamLifecycles().get(0).lifecycle().isEnabled(), equalTo(false)); + assertThat(response.getDataStreamLifecycles().get(0).lifecycle().dataRetention(), equalTo(dataRetention)); + assertThat(response.getDataStreamLifecycles().get(0).lifecycle().enabled(), equalTo(false)); } } public void testDeleteLifecycle() throws Exception { - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.builder().dataRetention(randomPositiveTimeValue()).build(); putComposableIndexTemplate("id1", null, List.of("with-lifecycle*"), null, null, lifecycle); putComposableIndexTemplate("id2", null, List.of("without-lifecycle*"), null, null, null); { diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java index f6f72af5010a6..b6c5b554a6451 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java @@ -207,7 +207,7 @@ public Collection getSystemDataStreamDescriptors() { Template.builder() .settings(Settings.EMPTY) .mappings(mappings) - .lifecycle(DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build()) + .lifecycle(DataStreamLifecycle.Template.builder().dataRetention(randomPositiveTimeValue()).build()) ) .dataStreamTemplate(new DataStreamTemplate()) .build(), diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index a39f117a10a52..c987f0a5570c5 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -146,7 +146,7 @@ public void cleanup() { public void testRolloverLifecycle() throws 
Exception { // empty lifecycle contains the default rollover - DataStreamLifecycle lifecycle = new DataStreamLifecycle(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.DEFAULT; putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle, false); String dataStreamName = "metrics-foo"; @@ -178,7 +178,7 @@ public void testRolloverLifecycle() throws Exception { } public void testRolloverAndRetention() throws Exception { - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder().dataRetention(0).build(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.builder().dataRetention(TimeValue.ZERO).build(); putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle, false); @@ -321,7 +321,7 @@ public void testOriginationDate() throws Exception { * days ago, and one with an origination date 1 day ago. After data stream lifecycle runs, we expect the one with the old * origination date to have been deleted, and the one with the newer origination date to remain. */ - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder().dataRetention(TimeValue.timeValueDays(7)).build(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.builder().dataRetention(TimeValue.timeValueDays(7)).build(); putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle, false); @@ -393,7 +393,7 @@ public void testOriginationDate() throws Exception { } public void testUpdatingLifecycleAppliesToAllBackingIndices() throws Exception { - DataStreamLifecycle lifecycle = new DataStreamLifecycle(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.DEFAULT; putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle, false); @@ -437,7 +437,7 @@ public void testAutomaticForceMerge() throws Exception { * because all necessary merging has already happened automatically. So in order to detect whether forcemerge has been called, we * use a SendRequestBehavior in the MockTransportService to detect it. */ - DataStreamLifecycle lifecycle = new DataStreamLifecycle(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.DEFAULT; disableDataStreamLifecycle(); String dataStreamName = "metrics-foo"; putComposableIndexTemplate( @@ -539,7 +539,7 @@ private static void disableDataStreamLifecycle() { public void testErrorRecordingOnRollover() throws Exception { // empty lifecycle contains the default rollover - DataStreamLifecycle lifecycle = new DataStreamLifecycle(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.DEFAULT; /* * We set index.auto_expand_replicas to 0-1 so that if we get a single-node cluster it is not yellow. The cluster being yellow * could result in data stream lifecycle's automatic forcemerge failing, which would result in an unexpected error in the error @@ -697,7 +697,7 @@ public void testErrorRecordingOnRollover() throws Exception { public void testErrorRecordingOnRetention() throws Exception { // starting with a lifecycle without retention so we can rollover the data stream and manipulate the second generation index such // that its retention execution fails - DataStreamLifecycle lifecycle = new DataStreamLifecycle(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.DEFAULT; /* * We set index.auto_expand_replicas to 0-1 so that if we get a single-node cluster it is not yellow. 
The cluster being yellow @@ -871,7 +871,7 @@ public void testErrorRecordingOnRetention() throws Exception { } public void testDataLifecycleServiceConfiguresTheMergePolicy() throws Exception { - DataStreamLifecycle lifecycle = new DataStreamLifecycle(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.DEFAULT; putComposableIndexTemplate( "id1", @@ -972,7 +972,7 @@ public void testDataLifecycleServiceConfiguresTheMergePolicy() throws Exception public void testReenableDataStreamLifecycle() throws Exception { // start with a lifecycle that's not enabled - DataStreamLifecycle lifecycle = new DataStreamLifecycle(null, null, false); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.builder().enabled(false).build(); putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle, false); String dataStreamName = "metrics-foo"; @@ -1031,15 +1031,13 @@ public void testReenableDataStreamLifecycle() throws Exception { public void testLifecycleAppliedToFailureStore() throws Exception { // We configure a lifecycle with downsampling to ensure it doesn't fail - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder() - .dataRetention(20_000) + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.builder() + .dataRetention(TimeValue.timeValueSeconds(20)) .downsampling( - new DataStreamLifecycle.Downsampling( - List.of( - new DataStreamLifecycle.Downsampling.Round( - TimeValue.timeValueMillis(10), - new DownsampleConfig(new DateHistogramInterval("10m")) - ) + List.of( + new DataStreamLifecycle.DownsamplingRound( + TimeValue.timeValueMillis(10), + new DownsampleConfig(new DateHistogramInterval("10m")) ) ) ) @@ -1205,7 +1203,7 @@ static void putComposableIndexTemplate( List patterns, @Nullable Settings settings, @Nullable Map metadata, - @Nullable DataStreamLifecycle lifecycle, + @Nullable DataStreamLifecycle.Template lifecycle, boolean withFailureStore ) throws IOException { TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request(id); @@ -1268,7 +1266,7 @@ public Collection getSystemDataStreamDescriptors() { Template.builder() .settings(Settings.EMPTY) .lifecycle( - DataStreamLifecycle.newBuilder() + DataStreamLifecycle.Template.builder() .dataRetention(TimeValue.timeValueDays(SYSTEM_DATA_STREAM_RETENTION_DAYS)) ) ) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java index 3c100d9cfe615..62b68b2cc69bb 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java @@ -86,7 +86,7 @@ public void cleanup() { public void testExplainLifecycle() throws Exception { // empty lifecycle contains the default rollover - DataStreamLifecycle lifecycle = new DataStreamLifecycle(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.DEFAULT; putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle); String dataStreamName = "metrics-foo"; @@ -134,7 +134,7 @@ public void testExplainLifecycle() throws Exception { assertThat(explainIndex.isManagedByLifecycle(), is(true)); assertThat(explainIndex.getIndexCreationDate(), 
notNullValue()); assertThat(explainIndex.getLifecycle(), notNullValue()); - assertThat(explainIndex.getLifecycle().getDataStreamRetention(), nullValue()); + assertThat(explainIndex.getLifecycle().dataRetention(), nullValue()); if (internalCluster().numDataNodes() > 1) { // If the number of nodes is 1 then the cluster will be yellow so forcemerge will report an error if it has run assertThat(explainIndex.getError(), nullValue()); @@ -193,7 +193,7 @@ public void testExplainLifecycle() throws Exception { assertThat(explainIndex.isManagedByLifecycle(), is(true)); assertThat(explainIndex.getIndexCreationDate(), notNullValue()); assertThat(explainIndex.getLifecycle(), notNullValue()); - assertThat(explainIndex.getLifecycle().getDataStreamRetention(), nullValue()); + assertThat(explainIndex.getLifecycle().dataRetention(), nullValue()); if (explainIndex.getIndex().equals(DataStream.getDefaultBackingIndexName(dataStreamName, 1))) { // first generation index was rolled over @@ -263,7 +263,7 @@ public void testSystemExplainLifecycle() throws Exception { assertThat(explainIndex.getIndexCreationDate(), notNullValue()); assertThat(explainIndex.getLifecycle(), notNullValue()); assertThat( - explainIndex.getLifecycle().getDataStreamRetention(), + explainIndex.getLifecycle().dataRetention(), equalTo(TimeValue.timeValueDays(SYSTEM_DATA_STREAM_RETENTION_DAYS)) ); } @@ -274,7 +274,7 @@ public void testSystemExplainLifecycle() throws Exception { public void testExplainLifecycleForIndicesWithErrors() throws Exception { // empty lifecycle contains the default rollover - DataStreamLifecycle lifecycle = new DataStreamLifecycle(); + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.DEFAULT; putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle); @@ -329,7 +329,7 @@ public void testExplainLifecycleForIndicesWithErrors() throws Exception { assertThat(explainIndex.isManagedByLifecycle(), is(true)); assertThat(explainIndex.getIndexCreationDate(), notNullValue()); assertThat(explainIndex.getLifecycle(), notNullValue()); - assertThat(explainIndex.getLifecycle().getDataStreamRetention(), nullValue()); + assertThat(explainIndex.getLifecycle().dataRetention(), nullValue()); assertThat(explainIndex.getRolloverDate(), nullValue()); assertThat(explainIndex.getTimeSinceRollover(System::currentTimeMillis), nullValue()); // index has not been rolled over yet @@ -374,7 +374,7 @@ public void testExplainDataStreamLifecycleForUnmanagedIndices() throws Exception List.of("metrics-foo*"), null, null, - DataStreamLifecycle.newBuilder().enabled(false).build() + DataStreamLifecycle.Template.builder().enabled(false).build() ); CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request( TEST_REQUEST_TIMEOUT, @@ -439,7 +439,7 @@ static void putComposableIndexTemplate( List patterns, @Nullable Settings settings, @Nullable Map metadata, - @Nullable DataStreamLifecycle lifecycle + @Nullable DataStreamLifecycle.Template lifecycle ) throws IOException { TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request(id); request.indexTemplate( diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index 914dd4e97b5c7..a41ed15f79d35 100644 --- 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -479,7 +479,7 @@ Set maybeExecuteDownsampling(ClusterState state, DataStream dataStream, L for (Index index : targetIndices) { IndexMetadata backingIndexMeta = metadata.getProject().index(index); assert backingIndexMeta != null : "the data stream backing indices must exist"; - List<DataStreamLifecycle.Downsampling.Round> downsamplingRounds = dataStream.getDownsamplingRoundsFor( + List<DataStreamLifecycle.DownsamplingRound> downsamplingRounds = dataStream.getDownsamplingRoundsFor( index, metadata.getProject()::index, nowSupplier @@ -517,18 +517,18 @@ Set maybeExecuteDownsampling(ClusterState state, DataStream dataStream, L private Set waitForInProgressOrTriggerDownsampling( DataStream dataStream, IndexMetadata backingIndex, - List<DataStreamLifecycle.Downsampling.Round> downsamplingRounds, + List<DataStreamLifecycle.DownsamplingRound> downsamplingRounds, Metadata metadata ) { assert dataStream.getIndices().contains(backingIndex.getIndex()) : "the provided backing index must be part of data stream:" + dataStream.getName(); assert downsamplingRounds.isEmpty() == false : "the index should be managed and have matching downsampling rounds"; Set affectedIndices = new HashSet<>(); - DataStreamLifecycle.Downsampling.Round lastRound = downsamplingRounds.get(downsamplingRounds.size() - 1); + DataStreamLifecycle.DownsamplingRound lastRound = downsamplingRounds.get(downsamplingRounds.size() - 1); Index index = backingIndex.getIndex(); String indexName = index.getName(); - for (DataStreamLifecycle.Downsampling.Round round : downsamplingRounds) { + for (DataStreamLifecycle.DownsamplingRound round : downsamplingRounds) { // the downsample index name for each round is deterministic String downsampleIndexName = DownsampleConfig.generateDownsampleIndexName( DOWNSAMPLED_INDEX_PREFIX, @@ -566,7 +566,7 @@ private Set waitForInProgressOrTriggerDownsampling( /** * Issues a request downsample the source index to the downsample index for the specified round. */ - private void downsampleIndexOnce(DataStreamLifecycle.Downsampling.Round round, String sourceIndex, String downsampleIndexName) { + private void downsampleIndexOnce(DataStreamLifecycle.DownsamplingRound round, String sourceIndex, String downsampleIndexName) { DownsampleAction.Request request = new DownsampleAction.Request( TimeValue.THIRTY_SECONDS /* TODO should this be longer/configurable? 
*/, sourceIndex, @@ -599,8 +599,8 @@ private void downsampleIndexOnce(DataStreamLifecycle.Downsampling.Round round, S private Set evaluateDownsampleStatus( DataStream dataStream, IndexMetadata.DownsampleTaskStatus downsampleStatus, - DataStreamLifecycle.Downsampling.Round currentRound, - DataStreamLifecycle.Downsampling.Round lastRound, + DataStreamLifecycle.DownsamplingRound currentRound, + DataStreamLifecycle.DownsamplingRound lastRound, Index backingIndex, Index downsampleIndex ) { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleStatsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleStatsAction.java index 3e416a102fe1f..52ffdba26e0f1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleStatsAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleStatsAction.java @@ -76,7 +76,7 @@ GetDataStreamLifecycleStatsAction.Response collectStats(ClusterState state) { Set indicesInErrorStore = lifecycleService.getErrorStore().getAllIndices(); List dataStreamStats = new ArrayList<>(); for (DataStream dataStream : state.metadata().getProject().dataStreams().values()) { - if (dataStream.getLifecycle() != null && dataStream.getLifecycle().isEnabled()) { + if (dataStream.getLifecycle() != null && dataStream.getLifecycle().enabled()) { int total = 0; int inError = 0; for (Index index : dataStream.getIndices()) { diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataIndexTemplateServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataIndexTemplateServiceTests.java index b5f9fbbabfeef..23eb8e87b6622 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataIndexTemplateServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataIndexTemplateServiceTests.java @@ -17,13 +17,13 @@ import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.ProjectMetadata; +import org.elasticsearch.cluster.metadata.ResettableValue; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettingProviders; @@ -139,53 +139,53 @@ public void testRequireRoutingPath() throws Exception { public void testLifecycleComposition() { // No lifecycles result to null { - List lifecycles = List.of(); + List lifecycles = List.of(); assertThat(composeDataLifecycles(lifecycles), nullValue()); } // One lifecycle results to this lifecycle as the final { - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder() + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.builder() .dataRetention(randomRetention()) .downsampling(randomDownsampling()) .build(); - List lifecycles = List.of(lifecycle); - DataStreamLifecycle result = 
composeDataLifecycles(lifecycles); + List lifecycles = List.of(lifecycle); + DataStreamLifecycle result = composeDataLifecycles(lifecycles).toDataStreamLifecycle(); // Defaults to true - assertThat(result.isEnabled(), equalTo(true)); - assertThat(result.getDataStreamRetention(), equalTo(lifecycle.getDataStreamRetention())); - assertThat(result.getDownsamplingRounds(), equalTo(lifecycle.getDownsamplingRounds())); + assertThat(result.enabled(), equalTo(true)); + assertThat(result.dataRetention(), equalTo(lifecycle.dataRetention().get())); + assertThat(result.downsampling(), equalTo(lifecycle.downsampling().get())); } // If the last lifecycle is missing a property (apart from enabled) we keep the latest from the previous ones // Enabled is always true unless it's explicitly set to false { - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder() + DataStreamLifecycle.Template lifecycle = DataStreamLifecycle.Template.builder() .enabled(false) - .dataRetention(randomNonEmptyRetention()) - .downsampling(randomNonEmptyDownsampling()) + .dataRetention(randomPositiveTimeValue()) + .downsampling(randomRounds()) .build(); - List lifecycles = List.of(lifecycle, new DataStreamLifecycle()); - DataStreamLifecycle result = composeDataLifecycles(lifecycles); - assertThat(result.isEnabled(), equalTo(true)); - assertThat(result.getDataStreamRetention(), equalTo(lifecycle.getDataStreamRetention())); - assertThat(result.getDownsamplingRounds(), equalTo(lifecycle.getDownsamplingRounds())); + List lifecycles = List.of(lifecycle, DataStreamLifecycle.Template.DEFAULT); + DataStreamLifecycle result = composeDataLifecycles(lifecycles).toDataStreamLifecycle(); + assertThat(result.enabled(), equalTo(true)); + assertThat(result.dataRetention(), equalTo(lifecycle.dataRetention().get())); + assertThat(result.downsampling(), equalTo(lifecycle.downsampling().get())); } // If both lifecycle have all properties, then the latest one overwrites all the others { - DataStreamLifecycle lifecycle1 = DataStreamLifecycle.newBuilder() + DataStreamLifecycle.Template lifecycle1 = DataStreamLifecycle.Template.builder() .enabled(false) - .dataRetention(randomNonEmptyRetention()) - .downsampling(randomNonEmptyDownsampling()) + .dataRetention(randomPositiveTimeValue()) + .downsampling(randomRounds()) .build(); - DataStreamLifecycle lifecycle2 = DataStreamLifecycle.newBuilder() + DataStreamLifecycle.Template lifecycle2 = DataStreamLifecycle.Template.builder() .enabled(true) - .dataRetention(randomNonEmptyRetention()) - .downsampling(randomNonEmptyDownsampling()) + .dataRetention(randomPositiveTimeValue()) + .downsampling(randomRounds()) .build(); - List lifecycles = List.of(lifecycle1, lifecycle2); - DataStreamLifecycle result = composeDataLifecycles(lifecycles); - assertThat(result.isEnabled(), equalTo(lifecycle2.isEnabled())); - assertThat(result.getDataStreamRetention(), equalTo(lifecycle2.getDataStreamRetention())); - assertThat(result.getDownsamplingRounds(), equalTo(lifecycle2.getDownsamplingRounds())); + List lifecycles = List.of(lifecycle1, lifecycle2); + DataStreamLifecycle result = composeDataLifecycles(lifecycles).toDataStreamLifecycle(); + assertThat(result.enabled(), equalTo(lifecycle2.enabled())); + assertThat(result.dataRetention(), equalTo(lifecycle2.dataRetention().get())); + assertThat(result.downsampling(), equalTo(lifecycle2.downsampling().get())); } } @@ -230,49 +230,44 @@ public static ShardLimitValidator createTestShardLimitService(int maxShardsPerNo return new ShardLimitValidator(limitOnlySettings, 
clusterService); } - @Nullable - private static DataStreamLifecycle.Retention randomRetention() { - return switch (randomInt(2)) { - case 0 -> null; - case 1 -> DataStreamLifecycle.Retention.NULL; - default -> randomNonEmptyRetention(); - }; - } - - private static DataStreamLifecycle.Retention randomNonEmptyRetention() { - return new DataStreamLifecycle.Retention(TimeValue.timeValueMillis(randomMillisUpToYear9999())); - } - - @Nullable - private static DataStreamLifecycle.Downsampling randomDownsampling() { - return switch (randomInt(2)) { - case 0 -> null; - case 1 -> DataStreamLifecycle.Downsampling.NULL; - default -> randomNonEmptyDownsampling(); - }; - } - - private static DataStreamLifecycle.Downsampling randomNonEmptyDownsampling() { + private static List randomRounds() { var count = randomIntBetween(0, 9); - List rounds = new ArrayList<>(); - var previous = new DataStreamLifecycle.Downsampling.Round( + List rounds = new ArrayList<>(); + var previous = new DataStreamLifecycle.DownsamplingRound( TimeValue.timeValueDays(randomIntBetween(1, 365)), new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h")) ); rounds.add(previous); for (int i = 0; i < count; i++) { - DataStreamLifecycle.Downsampling.Round round = nextRound(previous); + DataStreamLifecycle.DownsamplingRound round = nextRound(previous); rounds.add(round); previous = round; } - return new DataStreamLifecycle.Downsampling(rounds); + return rounds; } - private static DataStreamLifecycle.Downsampling.Round nextRound(DataStreamLifecycle.Downsampling.Round previous) { + private static DataStreamLifecycle.DownsamplingRound nextRound(DataStreamLifecycle.DownsamplingRound previous) { var after = TimeValue.timeValueDays(previous.after().days() + randomIntBetween(1, 10)); var fixedInterval = new DownsampleConfig( new DateHistogramInterval((previous.config().getFixedInterval().estimateMillis() * randomIntBetween(2, 5)) + "ms") ); - return new DataStreamLifecycle.Downsampling.Round(after, fixedInterval); + return new DataStreamLifecycle.DownsamplingRound(after, fixedInterval); + } + + private static ResettableValue randomRetention() { + return switch (randomIntBetween(0, 2)) { + case 0 -> ResettableValue.undefined(); + case 1 -> ResettableValue.reset(); + case 2 -> ResettableValue.create(TimeValue.timeValueDays(randomIntBetween(1, 100))); + default -> throw new IllegalStateException("Unknown randomisation path"); + }; + } + + private static ResettableValue> randomDownsampling() { + return switch (randomIntBetween(0, 1)) { + case 0 -> ResettableValue.reset(); + case 1 -> ResettableValue.create(randomRounds()); + default -> throw new IllegalStateException("Unknown randomisation path"); + }; } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 09d43989bff14..6741cc808ebed 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -158,7 +158,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti .setGeneration(3) .setAllowCustomRouting(true) .setIndexMode(IndexMode.STANDARD) - .setLifecycle(new DataStreamLifecycle(null, null, false)) + .setLifecycle(new DataStreamLifecycle(false, null, null)) 
.setDataStreamOptions(DataStreamOptions.FAILURE_STORE_ENABLED) .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()) .build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java index c0b03bc973b23..cdcb4dbcad930 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ProjectMetadata; +import org.elasticsearch.cluster.metadata.ResettableValue; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -26,18 +27,18 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.function.Supplier; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; import static org.elasticsearch.test.ESIntegTestCase.client; import static org.elasticsearch.test.ESTestCase.frequently; -import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomIntBetween; -import static org.elasticsearch.test.ESTestCase.randomMillisUpToYear9999; import static org.junit.Assert.assertTrue; /** @@ -150,7 +151,7 @@ static void putComposableIndexTemplate( List patterns, @Nullable Settings settings, @Nullable Map metadata, - @Nullable DataStreamLifecycle lifecycle + @Nullable DataStreamLifecycle.Template lifecycle ) throws IOException { TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request(id); request.indexTemplate( @@ -169,51 +170,44 @@ static void putComposableIndexTemplate( assertTrue(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet().isAcknowledged()); } - static DataStreamLifecycle randomLifecycle() { - return DataStreamLifecycle.newBuilder() - .dataRetention(randomRetention()) - .downsampling(randomDownsampling()) + static DataStreamLifecycle.Template randomLifecycleTemplate() { + return DataStreamLifecycle.Template.builder() + .dataRetention(randomResettable(ESTestCase::randomTimeValue)) + .downsampling(randomResettable(DataStreamLifecycleFixtures::randomDownsamplingRounds)) .enabled(frequently()) .build(); } - @Nullable - private static DataStreamLifecycle.Retention randomRetention() { - return switch (randomInt(2)) { - case 0 -> null; - case 1 -> DataStreamLifecycle.Retention.NULL; - default -> new DataStreamLifecycle.Retention(TimeValue.timeValueMillis(randomMillisUpToYear9999())); + private static ResettableValue randomResettable(Supplier supplier) { + return switch (randomIntBetween(0, 2)) { + case 0 -> ResettableValue.undefined(); + case 1 -> ResettableValue.reset(); + case 2 -> ResettableValue.create(supplier.get()); + default -> throw new IllegalStateException("Unknown randomisation path"); }; } - @Nullable - 
private static DataStreamLifecycle.Downsampling randomDownsampling() { - return switch (randomInt(2)) { - case 0 -> null; - case 1 -> DataStreamLifecycle.Downsampling.NULL; - default -> { - var count = randomIntBetween(0, 9); - List rounds = new ArrayList<>(); - var previous = new DataStreamLifecycle.Downsampling.Round( - TimeValue.timeValueDays(randomIntBetween(1, 365)), - new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h")) - ); - rounds.add(previous); - for (int i = 0; i < count; i++) { - DataStreamLifecycle.Downsampling.Round round = nextRound(previous); - rounds.add(round); - previous = round; - } - yield new DataStreamLifecycle.Downsampling(rounds); - } - }; + private static List randomDownsamplingRounds() { + var count = randomIntBetween(0, 9); + List rounds = new ArrayList<>(); + var previous = new DataStreamLifecycle.DownsamplingRound( + TimeValue.timeValueDays(randomIntBetween(1, 365)), + new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h")) + ); + rounds.add(previous); + for (int i = 0; i < count; i++) { + DataStreamLifecycle.DownsamplingRound round = nextRound(previous); + rounds.add(round); + previous = round; + } + return rounds; } - private static DataStreamLifecycle.Downsampling.Round nextRound(DataStreamLifecycle.Downsampling.Round previous) { + private static DataStreamLifecycle.DownsamplingRound nextRound(DataStreamLifecycle.DownsamplingRound previous) { var after = TimeValue.timeValueDays(previous.after().days() + randomIntBetween(1, 10)); var fixedInterval = new DownsampleConfig( new DateHistogramInterval((previous.config().getFixedInterval().estimateMillis() * randomIntBetween(2, 5)) + "ms") ); - return new DataStreamLifecycle.Downsampling.Round(after, fixedInterval); + return new DataStreamLifecycle.DownsamplingRound(after, fixedInterval); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index 8173e8e93887d..cfab412b45afc 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -39,8 +39,7 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionSettings; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling.Round; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle.DownsamplingRound; import org.elasticsearch.cluster.metadata.DataStreamOptions; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -206,7 +205,7 @@ public void testOperationsExecutedOnce() { numBackingIndices, 2, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(0).build(), + DataStreamLifecycle.builder().dataRetention(0).build(), now ); builder.put(dataStream); @@ -269,7 +268,7 @@ public void testRetentionNotExecutedDueToAge() { numBackingIndices, numFailureIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.timeValueDays(700)).build(), + 
DataStreamLifecycle.builder().dataRetention(TimeValue.timeValueDays(700)).build(), now ); builder.put(dataStream); @@ -302,7 +301,7 @@ public void testRetentionNotExecutedForTSIndicesWithinTimeBounds() { dataStream.copy() .setName(dataStreamName) .setGeneration(dataStream.getGeneration() + 1) - .setLifecycle(DataStreamLifecycle.newBuilder().dataRetention(0L).build()) + .setLifecycle(DataStreamLifecycle.builder().dataRetention(0L).build()) .build() ); clusterState = ClusterState.builder(clusterState).metadata(builder).build(); @@ -339,7 +338,7 @@ public void testMergePolicyNotExecutedForTSIndicesWithinTimeBounds() { dataStream.copy() .setName(dataStreamName) .setGeneration(dataStream.getGeneration() + 1) - .setLifecycle(DataStreamLifecycle.newBuilder().build()) + .setLifecycle(DataStreamLifecycle.builder().build()) .build() ); clusterState = ClusterState.builder(clusterState).metadata(builder).build(); @@ -374,7 +373,7 @@ public void testRetentionSkippedWhilstDownsamplingInProgress() { dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.timeValueMillis(0)).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.timeValueMillis(0)).build(), now ); builder.put(dataStream); @@ -454,7 +453,7 @@ public void testIlmManagedIndicesAreSkipped() { Settings.builder() .put(IndexMetadata.LIFECYCLE_NAME, "ILM_policy") .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(0).build(), + DataStreamLifecycle.builder().dataRetention(0).build(), now ); builder.put(dataStream); @@ -543,7 +542,7 @@ public void testErrorStoreIsClearedOnBackingIndexBecomingUnmanaged() { dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.timeValueDays(700)).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.timeValueDays(700)).build(), now ); // all backing indices are in the error store @@ -581,7 +580,7 @@ public void testBackingIndicesFromMultipleDataStreamsInErrorStore() { ilmManagedDataStreamName, 3, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.timeValueDays(700)).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.timeValueDays(700)).build(), now ); // all backing indices are in the error store @@ -594,7 +593,7 @@ public void testBackingIndicesFromMultipleDataStreamsInErrorStore() { dataStreamWithBackingIndicesInErrorState, 5, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.timeValueDays(700)).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.timeValueDays(700)).build(), now ); // put all backing indices in the error store @@ -645,7 +644,7 @@ public void testForceMerge() throws Exception { numBackingIndices, settings(IndexVersion.current()).put(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING.getKey(), ONE_HUNDRED_MB) .put(MergePolicyConfig.INDEX_MERGE_POLICY_MERGE_FACTOR_SETTING.getKey(), TARGET_MERGE_FACTOR_VALUE), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); builder.put(dataStream); @@ -771,7 +770,7 @@ public void testForceMergeRetries() throws Exception { numBackingIndices, settings(IndexVersion.current()).put(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING.getKey(), ONE_HUNDRED_MB) 
.put(MergePolicyConfig.INDEX_MERGE_POLICY_MERGE_FACTOR_SETTING.getKey(), TARGET_MERGE_FACTOR_VALUE), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); builder.put(dataStream); @@ -950,7 +949,7 @@ public void testForceMergeDedup() throws Exception { dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); builder.put(dataStream); @@ -1142,7 +1141,7 @@ public void testMergePolicySettingsAreConfiguredBeforeForcemerge() throws Except dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); builder.put(dataStream); @@ -1224,11 +1223,9 @@ public void testDownsampling() throws Exception { .put(MergePolicyConfig.INDEX_MERGE_POLICY_MERGE_FACTOR_SETTING.getKey(), TARGET_MERGE_FACTOR_VALUE) .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) .put("index.routing_path", "@timestamp"), - DataStreamLifecycle.newBuilder() + DataStreamLifecycle.builder() .downsampling( - new Downsampling( - List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m")))) - ) + List.of(new DownsamplingRound(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m")))) ) .dataRetention(TimeValue.MAX_VALUE) .build(), @@ -1362,11 +1359,9 @@ public void testDownsamplingWhenTargetIndexNameClashYieldsException() throws Exc .put(MergePolicyConfig.INDEX_MERGE_POLICY_MERGE_FACTOR_SETTING.getKey(), TARGET_MERGE_FACTOR_VALUE) .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) .put("index.routing_path", "@timestamp"), - DataStreamLifecycle.newBuilder() + DataStreamLifecycle.builder() .downsampling( - new Downsampling( - List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m")))) - ) + List.of(new DownsamplingRound(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m")))) ) .dataRetention(TimeValue.MAX_VALUE) .build(), @@ -1550,7 +1545,7 @@ public void testFailureStoreIsManagedEvenWhenDisabled() { numBackingIndices, 2, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(0).build(), + DataStreamLifecycle.builder().dataRetention(0).build(), now ).copy().setDataStreamOptions(DataStreamOptions.FAILURE_STORE_DISABLED).build(); // failure store is managed even when disabled builder.put(dataStream); @@ -1613,11 +1608,9 @@ private ClusterState downsampleSetup(String dataStreamName, IndexMetadata.Downsa 2, settings(IndexVersion.current()).put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) .put("index.routing_path", "@timestamp"), - DataStreamLifecycle.newBuilder() + DataStreamLifecycle.builder() .downsampling( - new Downsampling( - List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m")))) - ) + List.of(new DownsamplingRound(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m")))) ) .dataRetention(TimeValue.timeValueMillis(1)) .build(), diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleStatsActionTests.java 
b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleStatsActionTests.java index 7e030e9b82b13..3be1be12487b9 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleStatsActionTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleStatsActionTests.java @@ -94,7 +94,7 @@ public void testMixedDataStreams() { "dsl-managed-index", numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.timeValueDays(10)).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.timeValueDays(10)).build(), Clock.systemUTC().millis() ); indicesInError.add(dslDataStream.getIndices().get(randomInt(numBackingIndices - 1)).getName()); @@ -130,7 +130,7 @@ public void testMixedDataStreams() { IndexMetadata indexMetadata = indexMetaBuilder.build(); builder.put(indexMetadata, false); backingIndices.add(indexMetadata.getIndex()); - builder.put(newInstance(dataStreamName, backingIndices, 3, null, false, DataStreamLifecycle.newBuilder().build())); + builder.put(newInstance(dataStreamName, backingIndices, 3, null, false, DataStreamLifecycle.builder().build())); } ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(builder).build(); when(errorStore.getAllIndices()).thenReturn(indicesInError); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/downsampling/DeleteSourceAndAddDownsampleToDSTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/downsampling/DeleteSourceAndAddDownsampleToDSTests.java index e942288d2508c..d860840dfe2c5 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/downsampling/DeleteSourceAndAddDownsampleToDSTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/downsampling/DeleteSourceAndAddDownsampleToDSTests.java @@ -52,7 +52,7 @@ public void testDownsampleIndexMissingIsNoOp() { dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); builder.put(dataStream); @@ -80,7 +80,7 @@ public void testDownsampleIsAddedToDSEvenIfSourceDeleted() { dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); String firstGenIndex = DataStream.getDefaultBackingIndexName(dataStreamName, 1); @@ -125,7 +125,7 @@ public void testSourceIndexIsWriteIndexThrowsException() { dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); builder.put(dataStream); @@ -159,7 +159,7 @@ public void testSourceIsDeleteAndDownsampleOriginationDateIsConfigured() { dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); String firstGenIndex = DataStream.getDefaultBackingIndexName(dataStreamName, 1); @@ -221,7 +221,7 @@ public void 
testSourceWithoutLifecycleMetaAndDestWithOriginationDateAlreadyConfi dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); String firstGenIndex = DataStream.getDefaultBackingIndexName(dataStreamName, 1); @@ -272,7 +272,7 @@ public void testSourceIndexIsDeleteEvenIfNotPartOfDSAnymore() { dataStreamName, numBackingIndices, settings(IndexVersion.current()), - DataStreamLifecycle.newBuilder().dataRetention(TimeValue.MAX_VALUE).build(), + DataStreamLifecycle.builder().dataRetention(TimeValue.MAX_VALUE).build(), now ); String firstGenIndex = DataStream.getDefaultBackingIndexName(dataStreamName, 1); diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 6912efa47ccd9..72f9780509a2e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -188,6 +188,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_FAILURE_FROM_REMOTE = def(9_030_00_0); public static final TransportVersion INDEX_RESHARDING_METADATA = def(9_031_0_00); public static final TransportVersion INFERENCE_MODEL_REGISTRY_METADATA = def(9_032_0_00); + public static final TransportVersion INTRODUCE_LIFECYCLE_TEMPLATE = def(9_033_0_00); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 55efdd6baf513..2299975949498 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -344,9 +344,9 @@ public static Template resolveTemplate( ); Settings settings = Settings.builder().put(additionalSettings.build()).put(templateSettings).build(); - DataStreamLifecycle lifecycle = resolveLifecycle(simulatedProject, matchingTemplate); + DataStreamLifecycle.Template lifecycle = resolveLifecycle(simulatedProject, matchingTemplate); if (template.getDataStreamTemplate() != null && lifecycle == null && isDslOnlyMode) { - lifecycle = DataStreamLifecycle.DEFAULT; + lifecycle = DataStreamLifecycle.Template.DEFAULT; } return new Template( settings, diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 0fdf30fa4ca9b..c13f93acaf290 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -471,7 +471,7 @@ private void addAutoShardingEvent(XContentBuilder builder, Params params, DataSt */ public ManagedBy getNextGenerationManagedBy() { // both ILM and DSL are configured so let's check the prefer_ilm setting to see which system takes precedence - if (ilmPolicyName != null && dataStream.getLifecycle() != null && dataStream.getLifecycle().isEnabled()) { + if (ilmPolicyName != null && dataStream.getLifecycle() != null && dataStream.getLifecycle().enabled()) { return templatePreferIlmValue ? 
ManagedBy.ILM : ManagedBy.LIFECYCLE; } @@ -479,7 +479,7 @@ public ManagedBy getNextGenerationManagedBy() { return ManagedBy.ILM; } - if (dataStream.getLifecycle() != null && dataStream.getLifecycle().isEnabled()) { + if (dataStream.getLifecycle() != null && dataStream.getLifecycle().enabled()) { return ManagedBy.LIFECYCLE; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java index c2b7de8d5df8b..1dbfb97851aa6 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java @@ -28,11 +28,11 @@ import java.io.IOException; import java.util.Arrays; +import java.util.List; import java.util.Objects; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_RETENTION_FIELD; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DOWNSAMPLING_FIELD; -import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.ENABLED_FIELD; /** @@ -47,30 +47,34 @@ private PutDataStreamLifecycleAction() {/* no instances */} public static final class Request extends AcknowledgedRequest implements IndicesRequest.Replaceable, ToXContentObject { public interface Factory { - Request create(@Nullable TimeValue dataRetention, @Nullable Boolean enabled, @Nullable Downsampling downsampling); + Request create( + @Nullable TimeValue dataRetention, + @Nullable Boolean enabled, + @Nullable List downsampling + ); } + @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "put_data_stream_lifecycle_request", false, - (args, factory) -> factory.create((TimeValue) args[0], (Boolean) args[1], (Downsampling) args[2]) + (args, factory) -> factory.create((TimeValue) args[0], (Boolean) args[1], (List) args[2]) ); static { PARSER.declareField( ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), DATA_RETENTION_FIELD.getPreferredName()), + (p, c) -> TimeValue.parseTimeValue(p.text(), DATA_RETENTION_FIELD.getPreferredName()), DATA_RETENTION_FIELD, - ObjectParser.ValueType.STRING_OR_NULL + ObjectParser.ValueType.STRING ); PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ENABLED_FIELD); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_NULL) { - return Downsampling.NULL; - } else { - return new Downsampling(AbstractObjectParser.parseArray(p, null, Downsampling.Round::fromXContent)); - } - }, DOWNSAMPLING_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_NULL); + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> AbstractObjectParser.parseArray(p, null, DataStreamLifecycle.DownsamplingRound::fromXContent), + DOWNSAMPLING_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY + ); } public static Request parseRequest(XContentParser parser, Factory factory) { @@ -141,11 +145,11 @@ public Request( String[] names, @Nullable TimeValue dataRetention, @Nullable Boolean enabled, - @Nullable Downsampling downsampling + @Nullable List downsampling ) { super(masterNodeTimeout, ackTimeout); this.names = names; - this.lifecycle = DataStreamLifecycle.newBuilder() + 
this.lifecycle = DataStreamLifecycle.builder() .dataRetention(dataRetention) .enabled(enabled == null || enabled) .downsampling(downsampling) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index f3f2f6723a1a3..75296fa2a352f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.SimpleDiffable; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling.Round; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle.DownsamplingRound; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -946,7 +946,7 @@ public List getIndicesPastRetention( DataStreamGlobalRetention globalRetention ) { if (lifecycle == null - || lifecycle.isEnabled() == false + || lifecycle.enabled() == false || lifecycle.getEffectiveDataRetention(globalRetention, isInternal()) == null) { return List.of(); } @@ -967,13 +967,13 @@ public List getIndicesPastRetention( * * An empty list is returned for indices that are not time series. */ - public List getDownsamplingRoundsFor( + public List getDownsamplingRoundsFor( Index index, Function indexMetadataSupplier, LongSupplier nowSupplier ) { assert backingIndices.indices.contains(index) : "the provided index must be a backing index for this datastream"; - if (lifecycle == null || lifecycle.getDownsamplingRounds() == null) { + if (lifecycle == null || lifecycle.downsampling() == null) { return List.of(); } @@ -986,8 +986,8 @@ public List getDownsamplingRoundsFor( if (indexGenerationTime != null) { long nowMillis = nowSupplier.getAsLong(); long indexGenerationTimeMillis = indexGenerationTime.millis(); - List orderedRoundsForIndex = new ArrayList<>(lifecycle.getDownsamplingRounds().size()); - for (Round round : lifecycle.getDownsamplingRounds()) { + List orderedRoundsForIndex = new ArrayList<>(lifecycle.downsampling().size()); + for (DownsamplingRound round : lifecycle.downsampling()) { if (nowMillis >= indexGenerationTimeMillis + round.after().getMillis()) { orderedRoundsForIndex.add(round); } @@ -1076,11 +1076,11 @@ public boolean isIndexManagedByDataStreamLifecycle(Index index, Function PARSER = new ConstructingObjectParser<>( "lifecycle", false, - (args, unused) -> new DataStreamLifecycle((Retention) args[0], (Downsampling) args[1], (Boolean) args[2]) + (args, unused) -> new DataStreamLifecycle((Boolean) args[0], (TimeValue) args[1], (List) args[2]) ); static { + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ENABLED_FIELD); + // For the retention and the downsampling, there was a bug that would allow an explicit null value to be + // stored in the data stream when the lifecycle was not composed with another one. We need to be able to read + // from a previous cluster state so we allow here explicit null values also to be parsed. 
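For context on the leniency described in the comment above, here is a hedged sketch of the kind of lifecycle an older cluster state may still contain and how it is read back; the JSON value, the try-with-resources wrapper, and the direct use of PARSER are illustrative assumptions rather than code from this change (imports such as org.elasticsearch.xcontent.json.JsonXContent and org.elasticsearch.xcontent.XContentParserConfiguration are omitted):

    // A lifecycle stored by an old cluster with explicit nulls; the lenient parser treats them as "not configured".
    String storedLifecycle = "{\"enabled\": true, \"data_retention\": null, \"downsampling\": null}";
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, storedLifecycle)) {
        DataStreamLifecycle lifecycle = DataStreamLifecycle.PARSER.parse(parser, null);
        assert lifecycle.enabled();
        assert lifecycle.dataRetention() == null; // the explicit null is read back as an unset retention
        assert lifecycle.downsampling() == null;  // same for downsampling
    }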
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { String value = p.textOrNull(); if (value == null) { - return Retention.NULL; + return null; } else { - return new Retention(TimeValue.parseTimeValue(value, DATA_RETENTION_FIELD.getPreferredName())); + return TimeValue.parseTimeValue(value, DATA_RETENTION_FIELD.getPreferredName()); } }, DATA_RETENTION_FIELD, ObjectParser.ValueType.STRING_OR_NULL); PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.VALUE_NULL) { - return Downsampling.NULL; + return null; } else { - return new Downsampling(AbstractObjectParser.parseArray(p, c, Downsampling.Round::fromXContent)); + return AbstractObjectParser.parseArray(p, c, DownsamplingRound::fromXContent); } }, DOWNSAMPLING_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_NULL); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ENABLED_FIELD); } + private final boolean enabled; @Nullable - private final Retention dataRetention; + private final TimeValue dataRetention; @Nullable - private final Downsampling downsampling; - private final boolean enabled; + private final List downsampling; public DataStreamLifecycle() { this(null, null, null); } - public DataStreamLifecycle(@Nullable Retention dataRetention, @Nullable Downsampling downsampling, @Nullable Boolean enabled) { + public DataStreamLifecycle( + @Nullable Boolean enabled, + @Nullable TimeValue dataRetention, + @Nullable List downsampling + ) { this.enabled = enabled == null || enabled; this.dataRetention = dataRetention; + DownsamplingRound.validateRounds(downsampling); this.downsampling = downsampling; } /** - * Returns true, if this data stream lifecycle configuration is enabled and false otherwise + * Returns true, if this data stream lifecycle configuration is enabled, false otherwise */ - public boolean isEnabled() { + public boolean enabled() { return enabled; } @@ -173,22 +181,21 @@ public Tuple getEffectiveDataRetentionWithSource( boolean isInternalDataStream ) { // If lifecycle is disabled there is no effective retention - if (enabled == false) { + if (enabled() == false) { return INFINITE_RETENTION; } - var dataStreamRetention = getDataStreamRetention(); if (globalRetention == null || isInternalDataStream) { - return Tuple.tuple(dataStreamRetention, RetentionSource.DATA_STREAM_CONFIGURATION); + return Tuple.tuple(dataRetention(), RetentionSource.DATA_STREAM_CONFIGURATION); } - if (dataStreamRetention == null) { + if (dataRetention() == null) { return globalRetention.defaultRetention() != null ? Tuple.tuple(globalRetention.defaultRetention(), RetentionSource.DEFAULT_GLOBAL_RETENTION) : Tuple.tuple(globalRetention.maxRetention(), RetentionSource.MAX_GLOBAL_RETENTION); } - if (globalRetention.maxRetention() != null && globalRetention.maxRetention().getMillis() < dataStreamRetention.getMillis()) { + if (globalRetention.maxRetention() != null && globalRetention.maxRetention().getMillis() < dataRetention().getMillis()) { return Tuple.tuple(globalRetention.maxRetention(), RetentionSource.MAX_GLOBAL_RETENTION); } else { - return Tuple.tuple(dataStreamRetention, RetentionSource.DATA_STREAM_CONFIGURATION); + return Tuple.tuple(dataRetention(), RetentionSource.DATA_STREAM_CONFIGURATION); } } @@ -198,8 +205,8 @@ public Tuple getEffectiveDataRetentionWithSource( * @return the time period or null, null represents that data should never be deleted. 
*/ @Nullable - public TimeValue getDataStreamRetention() { - return dataRetention == null ? null : dataRetention.value; + public TimeValue dataRetention() { + return dataRetention; } /** @@ -228,10 +235,10 @@ public void addWarningHeaderIfDataRetentionNotEffective( + "] will be applied." ); case MAX_GLOBAL_RETENTION -> { - String retentionProvidedPart = getDataStreamRetention() == null + String retentionProvidedPart = dataRetention() == null ? "Not providing a retention is not allowed for this project." : "The retention provided [" - + (getDataStreamRetention() == null ? "infinite" : getDataStreamRetention().getStringRep()) + + (dataRetention() == null ? "infinite" : dataRetention().getStringRep()) + "] is exceeding the max allowed data retention of this project [" + effectiveRetentionStringRep + "]."; @@ -244,35 +251,12 @@ public void addWarningHeaderIfDataRetentionNotEffective( } } - /** - * The configuration as provided by the user about the least amount of time data should be kept by elasticsearch. - * This method differentiates between a missing retention and a nullified retention and this is useful for template - * composition. - * @return one of the following: - * - `null`, represents that the user did not provide data retention, this represents the user has no opinion about retention - * - `Retention{value = null}`, represents that the user explicitly wants to have infinite retention - * - `Retention{value = "10d"}`, represents that the user has requested the data to be kept at least 10d. - */ - @Nullable - Retention getDataRetention() { - return dataRetention; - } - /** * The configured downsampling rounds with the `after` and the `fixed_interval` per round. If downsampling is * not configured then it returns null. */ @Nullable - public List getDownsamplingRounds() { - return downsampling == null ? null : downsampling.rounds(); - } - - /** - * Returns the configured wrapper object as it was defined in the template. This should be used only during - * template composition. 
- */ - @Nullable - Downsampling getDownsampling() { + public List downsampling() { return downsampling; } @@ -289,28 +273,45 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(dataRetention, downsampling, enabled); + return Objects.hash(enabled, dataRetention, downsampling); } @Override public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { - out.writeOptionalWriteable(dataRetention); + if (out.getTransportVersion().onOrAfter(TransportVersions.INTRODUCE_LIFECYCLE_TEMPLATE)) { + out.writeOptionalTimeValue(dataRetention); + } else { + writeLegacyOptionalValue(dataRetention, out, StreamOutput::writeTimeValue); + } + } if (out.getTransportVersion().onOrAfter(ADDED_ENABLED_FLAG_VERSION)) { - out.writeOptionalWriteable(downsampling); - out.writeBoolean(enabled); + if (out.getTransportVersion().onOrAfter(TransportVersions.INTRODUCE_LIFECYCLE_TEMPLATE)) { + out.writeOptionalCollection(downsampling); + } else { + writeLegacyOptionalValue(downsampling, out, StreamOutput::writeCollection); + } + out.writeBoolean(enabled()); } } public DataStreamLifecycle(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { - dataRetention = in.readOptionalWriteable(Retention::read); + if (in.getTransportVersion().onOrAfter(TransportVersions.INTRODUCE_LIFECYCLE_TEMPLATE)) { + dataRetention = in.readOptionalTimeValue(); + } else { + dataRetention = readLegacyOptionalValue(in, StreamInput::readTimeValue); + } } else { dataRetention = null; } if (in.getTransportVersion().onOrAfter(ADDED_ENABLED_FLAG_VERSION)) { - downsampling = in.readOptionalWriteable(Downsampling::read); + if (in.getTransportVersion().onOrAfter(TransportVersions.INTRODUCE_LIFECYCLE_TEMPLATE)) { + downsampling = in.readOptionalCollectionAsList(DownsamplingRound::read); + } else { + downsampling = readLegacyOptionalValue(in, is -> is.readCollectionAsList(DownsamplingRound::read)); + } enabled = in.readBoolean(); } else { downsampling = null; @@ -318,6 +319,43 @@ public DataStreamLifecycle(StreamInput in) throws IOException { } } + /** + * Previous versions were serialising the value in a way that also captured an explicit null. Meaning, first they serialise + * a boolean flag signaling if this value is defined, and then another boolean flag if the value is non-null. We do not need explicit + * null values anymore, so we treat them the same as not defined, but for bwc reasons we still need to write all the flags. + * @param value value to be serialised that used to be explicitly nullable. + * @param writer the writer of the value; it should NOT be an optional writer + * @throws IOException + */ + private static void writeLegacyOptionalValue(T value, StreamOutput out, Writer writer) throws IOException { + boolean isDefined = value != null; + out.writeBoolean(isDefined); + if (isDefined) { + // There are no explicit null values anymore, so it's always true + out.writeBoolean(true); + writer.write(out, value); + } + } + + /** + * Previous versions were de-serialising the value in a way that also captured an explicit null. Meaning, first they de-serialise + * a boolean flag signaling if this value is defined, and then another boolean flag if the value is non-null. We do not need explicit + * null values anymore, so we treat them the same as not defined, but for bwc reasons we still need to read all the flags.
+ * @param reader the reader of the value, it should NOT be an optional reader + * @throws IOException + */ + private static T readLegacyOptionalValue(StreamInput in, Reader reader) throws IOException { + T value = null; + boolean isDefined = in.readBoolean(); + if (isDefined) { + boolean isNotNull = in.readBoolean(); + if (isNotNull) { + value = reader.read(in); + } + } + return value; + } + public static Diff readDiffFrom(StreamInput in) throws IOException { return SimpleDiffable.readDiffFrom(DataStreamLifecycle::new, in); } @@ -349,11 +387,7 @@ public XContentBuilder toXContent( builder.startObject(); builder.field(ENABLED_FIELD.getPreferredName(), enabled); if (dataRetention != null) { - if (dataRetention.value() == null) { - builder.nullField(DATA_RETENTION_FIELD.getPreferredName()); - } else { - builder.field(DATA_RETENTION_FIELD.getPreferredName(), dataRetention.value().getStringRep()); - } + builder.field(DATA_RETENTION_FIELD.getPreferredName(), dataRetention.getStringRep()); } Tuple effectiveDataRetentionWithSource = getEffectiveDataRetentionWithSource( globalRetention, @@ -367,8 +401,7 @@ public XContentBuilder toXContent( } if (downsampling != null) { - builder.field(DOWNSAMPLING_FIELD.getPreferredName()); - downsampling.toXContent(builder, params); + builder.field(DOWNSAMPLING_FIELD.getPreferredName(), downsampling); } if (rolloverConfiguration != null) { builder.field(ROLLOVER_FIELD.getPreferredName()); @@ -397,218 +430,395 @@ public static ToXContent.Params addEffectiveRetentionParams(ToXContent.Params pa return new DelegatingMapParams(INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); } - public static Builder newBuilder(DataStreamLifecycle lifecycle) { - return new Builder().dataRetention(lifecycle.getDataRetention()) - .downsampling(lifecycle.getDownsampling()) - .enabled(lifecycle.isEnabled()); + public static Builder builder(DataStreamLifecycle lifecycle) { + return new Builder(lifecycle); } - public static Builder newBuilder() { - return new Builder(); + public static Builder builder() { + return new Builder(null); } /** * This builder helps during the composition of the data stream lifecycle templates. */ public static class Builder { + private boolean enabled = true; @Nullable - private Retention dataRetention = null; + private TimeValue dataRetention = null; @Nullable - private Downsampling downsampling = null; - private boolean enabled = true; + private List downsampling = null; - public Builder enabled(boolean value) { - enabled = value; - return this; + private Builder(@Nullable DataStreamLifecycle lifecycle) { + if (lifecycle != null) { + enabled = lifecycle.enabled; + dataRetention = lifecycle.dataRetention; + downsampling = lifecycle.downsampling; + } } - public Builder dataRetention(@Nullable Retention value) { - dataRetention = value; + public Builder enabled(boolean value) { + enabled = value; return this; } public Builder dataRetention(@Nullable TimeValue value) { - dataRetention = value == null ? 
null : new Retention(value); + dataRetention = value; return this; } public Builder dataRetention(long value) { - dataRetention = new Retention(TimeValue.timeValueMillis(value)); + dataRetention = TimeValue.timeValueMillis(value); return this; } - public Builder downsampling(@Nullable Downsampling value) { - downsampling = value; + public Builder downsampling(@Nullable List rounds) { + downsampling = rounds; return this; } public DataStreamLifecycle build() { - return new DataStreamLifecycle(dataRetention, downsampling, enabled); + return new DataStreamLifecycle(enabled, dataRetention, downsampling); } } /** - * Retention is the least amount of time that the data will be kept by elasticsearch. Public for testing. - * @param value is a time period or null. Null represents an explicitly set infinite retention period + * This enum represents all configuration sources that can influence the retention of a data stream. */ - public record Retention(@Nullable TimeValue value) implements Writeable { - - // For testing - public static final Retention NULL = new Retention(null); - - public static Retention read(StreamInput in) throws IOException { - return new Retention(in.readOptionalTimeValue()); - } + public enum RetentionSource { + DATA_STREAM_CONFIGURATION, + DEFAULT_GLOBAL_RETENTION, + MAX_GLOBAL_RETENTION; - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalTimeValue(value); + public String displayName() { + return this.toString().toLowerCase(Locale.ROOT); } } /** - * Downsampling holds the configuration about when should elasticsearch downsample a backing index. - * @param rounds is a list of downsampling configuration which instructs when a backing index should be downsampled (`after`) and at - * which interval (`fixed_interval`). Null represents an explicit no downsampling during template composition. + * A round represents the configuration for when and how elasticsearch will downsample a backing index. + * @param after is a TimeValue configuring how old (based on generation age) should a backing index be before downsampling + * @param config contains the interval that the backing index is going to be downsampled. */ - public record Downsampling(@Nullable List rounds) implements Writeable, ToXContentFragment { + public record DownsamplingRound(TimeValue after, DownsampleConfig config) implements Writeable, ToXContentObject { + public static final ParseField AFTER_FIELD = new ParseField("after"); + public static final ParseField FIXED_INTERVAL_FIELD = new ParseField("fixed_interval"); public static final long FIVE_MINUTES_MILLIS = TimeValue.timeValueMinutes(5).getMillis(); - /** - * A round represents the configuration for when and how elasticsearch will downsample a backing index. - * @param after is a TimeValue configuring how old (based on generation age) should a backing index be before downsampling - * @param config contains the interval that the backing index is going to be downsampled. 
- */ - public record Round(TimeValue after, DownsampleConfig config) implements Writeable, ToXContentObject { - - public static final ParseField AFTER_FIELD = new ParseField("after"); - public static final ParseField FIXED_INTERVAL_FIELD = new ParseField("fixed_interval"); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "downsampling_round", + false, + (args, unused) -> new DownsamplingRound((TimeValue) args[0], new DownsampleConfig((DateHistogramInterval) args[1])) + ); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "downsampling_round", - false, - (args, unused) -> new Round((TimeValue) args[0], new DownsampleConfig((DateHistogramInterval) args[1])) + static { + PARSER.declareString( + ConstructingObjectParser.optionalConstructorArg(), + value -> TimeValue.parseTimeValue(value, AFTER_FIELD.getPreferredName()), + AFTER_FIELD + ); + PARSER.declareField( + constructorArg(), + p -> new DateHistogramInterval(p.text()), + new ParseField(FIXED_INTERVAL_FIELD.getPreferredName()), + ObjectParser.ValueType.STRING ); + } - static { - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - value -> TimeValue.parseTimeValue(value, AFTER_FIELD.getPreferredName()), - AFTER_FIELD + static void validateRounds(List rounds) { + if (rounds == null) { + return; + } + if (rounds.isEmpty()) { + throw new IllegalArgumentException("Downsampling configuration should have at least one round configured."); + } + if (rounds.size() > 10) { + throw new IllegalArgumentException( + "Downsampling configuration supports maximum 10 configured rounds. Found: " + rounds.size() ); - PARSER.declareField( - constructorArg(), - p -> new DateHistogramInterval(p.text()), - new ParseField(FIXED_INTERVAL_FIELD.getPreferredName()), - ObjectParser.ValueType.STRING + } + DownsamplingRound previous = null; + for (DownsamplingRound round : rounds) { + if (previous == null) { + previous = round; + } else { + if (round.after.compareTo(previous.after) < 0) { + throw new IllegalArgumentException( + "A downsampling round must have a later 'after' value than the proceeding, " + + round.after.getStringRep() + + " is not after " + + previous.after.getStringRep() + + "." 
+ ); + } + DownsampleConfig.validateSourceAndTargetIntervals(previous.config(), round.config()); + } + } + } + + public static DownsamplingRound read(StreamInput in) throws IOException { + return new DownsamplingRound(in.readTimeValue(), new DownsampleConfig(in)); + } + + public DownsamplingRound { + if (config.getFixedInterval().estimateMillis() < FIVE_MINUTES_MILLIS) { + throw new IllegalArgumentException( + "A downsampling round must have a fixed interval of at least five minutes but found: " + config.getFixedInterval() ); } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeTimeValue(after); + out.writeWriteable(config); + } - public static Round read(StreamInput in) throws IOException { - return new Round(in.readTimeValue(), new DownsampleConfig(in)); + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(AFTER_FIELD.getPreferredName(), after.getStringRep()); + config.toXContentFragment(builder); + builder.endObject(); + return builder; + } + + public static DownsamplingRound fromXContent(XContentParser parser, Void context) throws IOException { + return PARSER.parse(parser, context); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + } + + /** + * Represents the template configuration of a lifecycle. It supports explicitly resettable values + * to allow value reset during template composition. + */ + public record Template( + boolean enabled, + ResettableValue dataRetention, + ResettableValue> downsampling + ) implements ToXContentObject, Writeable { + + public Template { + if (downsampling.isDefined() && downsampling.get() != null) { + DownsamplingRound.validateRounds(downsampling.get()); } + } + + public static final DataStreamLifecycle.Template DEFAULT = new DataStreamLifecycle.Template( + true, + ResettableValue.undefined(), + ResettableValue.undefined() + ); - public Round { - if (config.getFixedInterval().estimateMillis() < FIVE_MINUTES_MILLIS) { - throw new IllegalArgumentException( - "A downsampling round must have a fixed interval of at least five minutes but found: " + config.getFixedInterval() - ); + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "lifecycle_template", + false, + (args, unused) -> new DataStreamLifecycle.Template( + args[0] == null || (boolean) args[0], + args[1] == null ? ResettableValue.undefined() : (ResettableValue) args[1], + args[2] == null ? ResettableValue.undefined() : (ResettableValue>) args[2] + ) + ); + + static { + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ENABLED_FIELD); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { + String value = p.textOrNull(); + return value == null + ? 
ResettableValue.reset() + : ResettableValue.create(TimeValue.parseTimeValue(value, DATA_RETENTION_FIELD.getPreferredName())); + }, DATA_RETENTION_FIELD, ObjectParser.ValueType.STRING_OR_NULL); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return ResettableValue.reset(); + } else { + return ResettableValue.create(AbstractObjectParser.parseArray(p, c, DownsamplingRound::fromXContent)); } - } + }, DOWNSAMPLING_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_NULL); + } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeTimeValue(after); - out.writeWriteable(config); + @Override + public void writeTo(StreamOutput out) throws IOException { + // The order of the fields is like this for bwc reasons + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.INTRODUCE_LIFECYCLE_TEMPLATE)) { + ResettableValue.write(out, dataRetention, StreamOutput::writeTimeValue); + } else { + writeLegacyValue(out, dataRetention, StreamOutput::writeTimeValue); + } } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(AFTER_FIELD.getPreferredName(), after.getStringRep()); - config.toXContentFragment(builder); - builder.endObject(); - return builder; + if (out.getTransportVersion().onOrAfter(ADDED_ENABLED_FLAG_VERSION)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.INTRODUCE_LIFECYCLE_TEMPLATE)) { + ResettableValue.write(out, downsampling, StreamOutput::writeCollection); + } else { + writeLegacyValue(out, downsampling, StreamOutput::writeCollection); + } + out.writeBoolean(enabled); } + } - public static Round fromXContent(XContentParser parser, Void context) throws IOException { - return PARSER.parse(parser, context); + /** + * Before the introduction of the ResettableValues we used to serialise the explicit nulls differently. Legacy codes defines the + * two boolean flags as "isDefined" and "hasValue" while the ResettableValue, "isDefined" and "shouldReset". This inverts the + * semantics of the second flag and that's why we use this method. + */ + private static void writeLegacyValue(StreamOutput out, ResettableValue value, Writeable.Writer writer) + throws IOException { + out.writeBoolean(value.isDefined()); + if (value.isDefined()) { + out.writeBoolean(value.shouldReset() == false); + if (value.shouldReset() == false) { + writer.write(out, value.get()); + } } + } - @Override - public String toString() { - return Strings.toString(this, true, true); + /** + * Before the introduction of the ResettableValues we used to serialise the explicit nulls differently. Legacy codes defines the + * two boolean flags as "isDefined" and "hasValue" while the ResettableValue, "isDefined" and "shouldReset". This inverts the + * semantics of the second flag and that's why we use this method. 
+ */ + static ResettableValue readLegacyValues(StreamInput in, Writeable.Reader reader) throws IOException { + boolean isDefined = in.readBoolean(); + if (isDefined == false) { + return ResettableValue.undefined(); + } + boolean hasNonNullValue = in.readBoolean(); + if (hasNonNullValue == false) { + return ResettableValue.reset(); } + T value = reader.read(in); + return ResettableValue.create(value); } - // For testing - public static final Downsampling NULL = new Downsampling(null); - - public Downsampling { - if (rounds != null) { - if (rounds.isEmpty()) { - throw new IllegalArgumentException("Downsampling configuration should have at least one round configured."); + public static Template read(StreamInput in) throws IOException { + boolean enabled = true; + ResettableValue dataRetention = ResettableValue.undefined(); + ResettableValue> downsampling = ResettableValue.undefined(); + + // The order of the fields is like this for bwc reasons + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.INTRODUCE_LIFECYCLE_TEMPLATE)) { + dataRetention = ResettableValue.read(in, StreamInput::readTimeValue); + } else { + dataRetention = readLegacyValues(in, StreamInput::readTimeValue); } - if (rounds.size() > 10) { - throw new IllegalArgumentException( - "Downsampling configuration supports maximum 10 configured rounds. Found: " + rounds.size() - ); - } - Round previous = null; - for (Round round : rounds) { - if (previous == null) { - previous = round; - } else { - if (round.after.compareTo(previous.after) < 0) { - throw new IllegalArgumentException( - "A downsampling round must have a later 'after' value than the proceeding, " - + round.after.getStringRep() - + " is not after " - + previous.after.getStringRep() - + "." - ); - } - DownsampleConfig.validateSourceAndTargetIntervals(previous.config(), round.config()); - } + } + if (in.getTransportVersion().onOrAfter(ADDED_ENABLED_FLAG_VERSION)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.INTRODUCE_LIFECYCLE_TEMPLATE)) { + downsampling = ResettableValue.read(in, i -> i.readCollectionAsList(DownsamplingRound::read)); + } else { + downsampling = readLegacyValues(in, i -> i.readCollectionAsList(DownsamplingRound::read)); } + enabled = in.readBoolean(); } + return new Template(enabled, dataRetention, downsampling); } - public static Downsampling read(StreamInput in) throws IOException { - return new Downsampling(in.readOptionalCollectionAsList(Round::read)); + public static Template fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); } + /** + * Converts the template to XContent, depending on the {@param params} set by {@link ResettableValue#hideResetValues(Params)} + * it may or may not display any explicit nulls when the value is to be reset. 
+ */ @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalCollection(rounds, StreamOutput::writeWriteable); + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return toXContent(builder, params, null, null, false); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (rounds == null) { - builder.nullValue(); - } else { - builder.startArray(); - for (Round round : rounds) { - round.toXContent(builder, params); - } - builder.endArray(); + /** + * Converts the template to XContent, depending on the {@param params} set by {@link ResettableValue#hideResetValues(Params)} + * it may or may not display any explicit nulls when the value is to be reset. + */ + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention, + boolean isInternalDataStream + ) throws IOException { + builder.startObject(); + builder.field(ENABLED_FIELD.getPreferredName(), enabled); + dataRetention.toXContent(builder, params, DATA_RETENTION_FIELD.getPreferredName(), TimeValue::getStringRep); + downsampling.toXContent(builder, params, DOWNSAMPLING_FIELD.getPreferredName()); + if (rolloverConfiguration != null) { + builder.field(ROLLOVER_FIELD.getPreferredName()); + rolloverConfiguration.evaluateAndConvertToXContent( + builder, + params, + toDataStreamLifecycle().getEffectiveDataRetention(globalRetention, isInternalDataStream) + ); } + builder.endObject(); return builder; } - } - /** - * This enum represents all configuration sources that can influence the retention of a data stream. - */ - public enum RetentionSource { - DATA_STREAM_CONFIGURATION, - DEFAULT_GLOBAL_RETENTION, - MAX_GLOBAL_RETENTION; + public static Builder builder(DataStreamLifecycle.Template template) { + return new Builder(template); + } - public String displayName() { - return this.toString().toLowerCase(Locale.ROOT); + public static Builder builder() { + return new Builder(null); + } + + public static class Builder { + private boolean enabled = true; + private ResettableValue dataRetention = ResettableValue.undefined(); + private ResettableValue> downsampling = ResettableValue.undefined(); + + private Builder(Template template) { + if (template != null) { + enabled = template.enabled(); + dataRetention = template.dataRetention(); + downsampling = template.downsampling(); + } + } + + public Builder enabled(boolean enabled) { + this.enabled = enabled; + return this; + } + + public Builder dataRetention(ResettableValue dataRetention) { + this.dataRetention = dataRetention; + return this; + } + + public Builder dataRetention(@Nullable TimeValue dataRetention) { + this.dataRetention = ResettableValue.create(dataRetention); + return this; + } + + public Builder downsampling(ResettableValue> downsampling) { + this.downsampling = downsampling; + return this; + } + + public Builder downsampling(@Nullable List downsampling) { + this.downsampling = ResettableValue.create(downsampling); + return this; + } + + public Template build() { + return new Template(enabled, dataRetention, downsampling); + } + } + + public DataStreamLifecycle toDataStreamLifecycle() { + return new DataStreamLifecycle(enabled, dataRetention.get(), downsampling.get()); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); } } } diff --git 
a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 629b4b4ac0f29..c00ee039eeafa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -325,9 +325,10 @@ static ClusterState createDataStream( dsBackingIndices.add(writeIndex.getIndex()); boolean hidden = isSystem || template.getDataStreamTemplate().isHidden(); final IndexMode indexMode = newProject.retrieveIndexModeFromTemplate(template); - final DataStreamLifecycle lifecycle = isSystem + final DataStreamLifecycle.Template lifecycleTemplate = isSystem ? MetadataIndexTemplateService.resolveLifecycle(template, systemDataStreamDescriptor.getComponentTemplates()) : MetadataIndexTemplateService.resolveLifecycle(template, newProject.componentTemplates()); + final DataStreamLifecycle lifecycle = lifecycleTemplate == null ? null : lifecycleTemplate.toDataStreamLifecycle(); List failureIndices = failureStoreIndex == null ? List.of() : List.of(failureStoreIndex.getIndex()); DataStream newDataStream = new DataStream( dataStreamName, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index c6a24a3711b51..2260c23edb659 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -379,7 +379,10 @@ public ProjectMetadata addComponentTemplate( if (finalComponentTemplate.template().lifecycle() != null) { // We do not know if this lifecycle will belong to an internal data stream, so we fall back to a non internal. - finalComponentTemplate.template().lifecycle().addWarningHeaderIfDataRetentionNotEffective(globalRetentionSettings.get(), false); + finalComponentTemplate.template() + .lifecycle() + .toDataStreamLifecycle() + .addWarningHeaderIfDataRetentionNotEffective(globalRetentionSettings.get(), false); } logger.info("{} component template [{}]", existing == null ? "adding" : "updating", name); @@ -816,7 +819,7 @@ static void validateLifecycle( ComposableIndexTemplate template, @Nullable DataStreamGlobalRetention globalRetention ) { - DataStreamLifecycle lifecycle = resolveLifecycle(template, project.componentTemplates()); + DataStreamLifecycle.Template lifecycle = resolveLifecycle(template, project.componentTemplates()); if (lifecycle != null) { if (template.getDataStreamTemplate() == null) { throw new IllegalArgumentException( @@ -829,7 +832,7 @@ static void validateLifecycle( // We cannot know for sure if the template will apply to internal data streams, so we use a simpler heuristic: // If all the index patterns start with a dot, we consider that all the connected data streams are internal. 
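To make the retention-warning path in this hunk concrete, here is a hedged sketch; the concrete values and the DataStreamGlobalRetention constructor shape are assumptions for illustration only, and imports are omitted:

    // A template asking for 90d retention in a project whose max global retention is 30d (hypothetical values).
    DataStreamLifecycle.Template lifecycleTemplate = DataStreamLifecycle.Template.builder()
        .dataRetention(TimeValue.timeValueDays(90))
        .build();
    DataStreamGlobalRetention globalRetention = new DataStreamGlobalRetention(null, TimeValue.timeValueDays(30));

    // validateLifecycle() works on the concrete lifecycle, hence the toDataStreamLifecycle() conversion in this file.
    TimeValue effective = lifecycleTemplate.toDataStreamLifecycle().getEffectiveDataRetention(globalRetention, false);
    // effective resolves to 30d here, so addWarningHeaderIfDataRetentionNotEffective(...) would emit a warning header
    // explaining that the requested 90d exceeds the max allowed retention of the project.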
boolean isInternalDataStream = template.indexPatterns().stream().allMatch(indexPattern -> indexPattern.charAt(0) == '.'); - lifecycle.addWarningHeaderIfDataRetentionNotEffective(globalRetention, isInternalDataStream); + lifecycle.toDataStreamLifecycle().addWarningHeaderIfDataRetentionNotEffective(globalRetention, isInternalDataStream); } } } @@ -1620,7 +1623,7 @@ static List> resolveAliases( * Resolve the given v2 template into a {@link DataStreamLifecycle} object */ @Nullable - public static DataStreamLifecycle resolveLifecycle(ProjectMetadata metadata, final String templateName) { + public static DataStreamLifecycle.Template resolveLifecycle(ProjectMetadata metadata, final String templateName) { final ComposableIndexTemplate template = metadata.templatesV2().get(templateName); assert template != null : "attempted to resolve lifecycle for a template [" + templateName + "] that did not exist in the cluster state"; @@ -1634,19 +1637,19 @@ public static DataStreamLifecycle resolveLifecycle(ProjectMetadata metadata, fin * Resolve the provided v2 template and component templates into a {@link DataStreamLifecycle} object */ @Nullable - public static DataStreamLifecycle resolveLifecycle( + public static DataStreamLifecycle.Template resolveLifecycle( ComposableIndexTemplate template, Map componentTemplates ) { Objects.requireNonNull(template, "attempted to resolve lifecycle for a null template"); Objects.requireNonNull(componentTemplates, "attempted to resolve lifecycle with null component templates"); - List lifecycles = new ArrayList<>(); + List lifecycles = new ArrayList<>(); for (String componentTemplateName : template.composedOf()) { if (componentTemplates.containsKey(componentTemplateName) == false) { continue; } - DataStreamLifecycle lifecycle = componentTemplates.get(componentTemplateName).template().lifecycle(); + DataStreamLifecycle.Template lifecycle = componentTemplates.get(componentTemplateName).template().lifecycle(); if (lifecycle != null) { lifecycles.add(lifecycle); } @@ -1697,18 +1700,18 @@ public static DataStreamLifecycle resolveLifecycle( * @return the final lifecycle */ @Nullable - public static DataStreamLifecycle composeDataLifecycles(List lifecycles) { - DataStreamLifecycle.Builder builder = null; - for (DataStreamLifecycle current : lifecycles) { + public static DataStreamLifecycle.Template composeDataLifecycles(List lifecycles) { + DataStreamLifecycle.Template.Builder builder = null; + for (DataStreamLifecycle.Template current : lifecycles) { if (builder == null) { - builder = DataStreamLifecycle.newBuilder(current); + builder = DataStreamLifecycle.Template.builder(current); } else { - builder.enabled(current.isEnabled()); - if (current.getDataRetention() != null) { - builder.dataRetention(current.getDataRetention()); + builder.enabled(current.enabled()); + if (current.dataRetention().isDefined()) { + builder.dataRetention(current.dataRetention()); } - if (current.getDownsampling() != null) { - builder.downsampling(current.getDownsampling()); + if (current.downsampling().isDefined()) { + builder.downsampling(current.downsampling()); } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java index 42f50bd2e7917..85d8a81aa126d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java @@ -1079,7 +1079,7 @@ public boolean 
isIndexManagedByILM(IndexMetadata indexMetadata) { } DataStream parentDataStream = indexAbstraction.getParentDataStream(); - if (parentDataStream != null && parentDataStream.getLifecycle() != null && parentDataStream.getLifecycle().isEnabled()) { + if (parentDataStream != null && parentDataStream.getLifecycle() != null && parentDataStream.getLifecycle().enabled()) { // index has both ILM and data stream lifecycle configured so let's check which is preferred return PREFER_ILM_SETTING.get(indexMetadata.getSettings()); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ResettableValue.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ResettableValue.java index 4f38d2b8386a6..e6ab7d1722e60 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ResettableValue.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ResettableValue.java @@ -118,7 +118,6 @@ static void write(StreamOutput out, ResettableValue value, Writeable.Writ * * @throws IOException */ - @Nullable static ResettableValue read(StreamInput in, Writeable.Reader reader) throws IOException { boolean isDefined = in.readBoolean(); if (isDefined == false) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java index 7d354768ca987..656bd442a3eb5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java @@ -57,7 +57,7 @@ public class Template implements SimpleDiffable
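Taken together, a hedged end-to-end sketch of the API introduced in this change; the variable names and values are illustrative, it assumes ResettableValue.reset() is accessible from the call site and that composeDataLifecycles returns the built template as shown above, and imports are omitted:

    // Two component templates contribute lifecycle fragments; the later one explicitly resets downsampling (a JSON null).
    DataStreamLifecycle.Template base = DataStreamLifecycle.Template.builder()
        .dataRetention(TimeValue.timeValueDays(30))
        .downsampling(List.of(new DataStreamLifecycle.DownsamplingRound(
            TimeValue.timeValueDays(7), new DownsampleConfig(new DateHistogramInterval("1h")))))
        .build();
    DataStreamLifecycle.Template override = DataStreamLifecycle.Template.builder()
        .downsampling(ResettableValue.reset())
        .build();

    DataStreamLifecycle.Template composed = MetadataIndexTemplateService.composeDataLifecycles(List.of(base, override));
    // The data stream itself stores the concrete lifecycle, without the resettable wrappers.
    DataStreamLifecycle lifecycle = composed.toDataStreamLifecycle();
    assert TimeValue.timeValueDays(30).equals(lifecycle.dataRetention()); // retention survives composition
    assert lifecycle.downsampling() == null;                              // the reset removed the rounds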