Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.datastreams.GetDataStreamAction;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.DeletePipelineTransportAction;
Expand Down Expand Up @@ -81,6 +82,7 @@
import org.elasticsearch.cluster.InternalClusterInfoService;
import org.elasticsearch.cluster.coordination.ElasticsearchNodeCommand;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
Expand Down Expand Up @@ -846,6 +848,22 @@ private static Settings.Builder getExcludeSettings(int num, Settings.Builder bui
return builder;
}

/**
 * Resolves the data stream with the given name via {@link GetDataStreamAction} and returns
 * the names of its backing indices, in the order the data stream reports them.
 * Asserts that exactly one data stream matched and that its name equals {@code dataStreamName}.
 */
public List<String> getDataStreamBackingIndexNames(String dataStreamName) {
    var getDataStreamRequest = new GetDataStreamAction.Request(TEST_REQUEST_TIMEOUT, new String[] { dataStreamName });
    GetDataStreamAction.Response getDataStreamResponse = safeGet(
        client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest)
    );
    // exactly one data stream should match the (non-wildcard) name we asked for
    assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1));
    DataStream resolvedDataStream = getDataStreamResponse.getDataStreams().getFirst().getDataStream();
    assertThat(resolvedDataStream.getName(), equalTo(dataStreamName));
    return resolvedDataStream.getIndices().stream().map(index -> index.getName()).toList();
}

/**
* Waits until all nodes have no pending tasks.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2024,6 +2024,20 @@ protected static boolean aliasExists(String index, String alias) throws IOExcept
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
}

/**
 * Returns the backing index names of the given data stream, resolved through the
 * {@code GET /_data_stream/<name>} REST endpoint. Asserts that exactly one data stream
 * matched and that its reported name equals {@code dataStreamName}.
 */
@SuppressWarnings("unchecked")
protected static List<String> getDataStreamBackingIndexNames(String dataStreamName) throws IOException {
    Map<String, Object> getDataStreamResponse = getAsMap(client(), "/_data_stream/" + dataStreamName);
    List<?> matchingDataStreams = (List<?>) getDataStreamResponse.get("data_streams");
    // a non-wildcard lookup must resolve to exactly one data stream
    assertThat(matchingDataStreams.size(), equalTo(1));
    Map<?, ?> resolvedDataStream = (Map<?, ?>) matchingDataStreams.getFirst();
    assertThat(resolvedDataStream.get("name"), equalTo(dataStreamName));
    List<?> backingIndices = (List<?>) resolvedDataStream.get("indices");
    return backingIndices.stream()
        .map(backingIndex -> ((Map<String, String>) backingIndex).get("index_name"))
        .toList();
}

@SuppressWarnings("unchecked")
protected static Map<String, Object> getAlias(final String index, final String alias) throws IOException {
String endpoint = "/_alias";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
Expand All @@ -30,6 +29,7 @@
import org.junit.Before;

import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
Expand All @@ -41,6 +41,7 @@
import static org.elasticsearch.xpack.TimeSeriesRestDriver.indexDocument;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.rolloverMaxOneDocCondition;
import static org.hamcrest.CoreMatchers.containsStringIgnoringCase;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;

Expand Down Expand Up @@ -107,7 +108,9 @@ public void testSearchableSnapshotActionErrorsOnInvalidLicense() throws Exceptio
// rolling over the data stream so we can apply the searchable snapshot policy to a backing index that's not the write index
rolloverMaxOneDocCondition(client(), dataStream);

String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 1L);
List<String> backingIndices = getDataStreamBackingIndexNames(dataStream);
assertThat(backingIndices.size(), equalTo(2));
String backingIndexName = backingIndices.getFirst();
// the searchable_snapshot action should start failing (and retrying) due to invalid license
assertBusy(() -> {
Map<String, Object> explainIndex = explainIndex(client(), backingIndexName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
import static org.elasticsearch.xpack.TimeSeriesRestDriver.createNewSingletonPolicy;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.createSnapshotRepo;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.explainIndex;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.getBackingIndices;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.getOnlyIndexSettings;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.getStepKeyForIndex;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.getTemplate;
Expand Down Expand Up @@ -81,12 +80,12 @@ public void testRolloverAction() throws Exception {
indexDocument(client(), dataStream, true);

assertBusy(() -> {
final var backingIndices = getBackingIndices(client(), dataStream);
final var backingIndices = getDataStreamBackingIndexNames(dataStream);
assertEquals(2, backingIndices.size());
assertTrue(Boolean.parseBoolean((String) getIndexSettingsAsMap(backingIndices.getLast()).get("index.hidden")));
});
assertBusy(() -> {
final var backingIndices = getBackingIndices(client(), dataStream);
final var backingIndices = getDataStreamBackingIndexNames(dataStream);
assertEquals(PhaseCompleteStep.finalStep("hot").getKey(), getStepKeyForIndex(client(), backingIndices.getFirst()));
});
}
Expand All @@ -98,7 +97,7 @@ public void testRolloverIsSkippedOnManualDataStreamRollover() throws Exception {

indexDocument(client(), dataStream, true);

String firstGenerationIndex = getBackingIndices(client(), dataStream).getFirst();
String firstGenerationIndex = getDataStreamBackingIndexNames(dataStream).getFirst();
assertBusy(
() -> assertThat(getStepKeyForIndex(client(), firstGenerationIndex).name(), equalTo(WaitForRolloverReadyStep.NAME)),
30,
Expand All @@ -107,7 +106,7 @@ public void testRolloverIsSkippedOnManualDataStreamRollover() throws Exception {

rolloverMaxOneDocCondition(client(), dataStream);
assertBusy(() -> {
final var backingIndices = getBackingIndices(client(), dataStream);
final var backingIndices = getDataStreamBackingIndexNames(dataStream);
assertEquals(2, backingIndices.size());
}, 30, TimeUnit.SECONDS);

Expand All @@ -125,7 +124,7 @@ public void testShrinkActionInPolicyWithoutHotPhase() throws Exception {
createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName));
indexDocument(client(), dataStream, true);

String backingIndexName = getBackingIndices(client(), dataStream).getFirst();
String backingIndexName = getDataStreamBackingIndexNames(dataStream).getFirst();
assertBusy(
() -> assertThat(
"original index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore",
Expand All @@ -140,7 +139,7 @@ public void testShrinkActionInPolicyWithoutHotPhase() throws Exception {
rolloverMaxOneDocCondition(client(), dataStream);
// Wait for rollover to happen
assertBusy(
() -> assertEquals("the rollover action created the rollover index", 2, getBackingIndices(client(), dataStream).size()),
() -> assertEquals("the rollover action created the rollover index", 2, getDataStreamBackingIndexNames(dataStream).size()),
30,
TimeUnit.SECONDS
);
Expand All @@ -159,7 +158,7 @@ public void testSearchableSnapshotAction() throws Exception {
createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName));
indexDocument(client(), dataStream, true);

String backingIndexName = getBackingIndices(client(), dataStream).getFirst();
String backingIndexName = getDataStreamBackingIndexNames(dataStream).getFirst();
String restoredIndexName = SearchableSnapshotAction.FULL_RESTORED_INDEX_PREFIX + backingIndexName;

assertBusy(
Expand Down Expand Up @@ -190,7 +189,7 @@ public void testReadOnlyAction() throws Exception {
createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName));
indexDocument(client(), dataStream, true);

String backingIndexName = getBackingIndices(client(), dataStream).getFirst();
String backingIndexName = getDataStreamBackingIndexNames(dataStream).getFirst();
assertBusy(
() -> assertThat(
"index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore",
Expand Down Expand Up @@ -225,7 +224,7 @@ public void testFreezeAction() throws Exception {
indexDocument(client(), dataStream, true);

// The freeze action is a noop action with only noop steps and should pass through to complete the phase asap.
String backingIndexName = getBackingIndices(client(), dataStream).getFirst();
String backingIndexName = getDataStreamBackingIndexNames(dataStream).getFirst();
assertBusy(() -> {
try {
assertThat(explainIndex(client(), backingIndexName).get("step"), is(PhaseCompleteStep.NAME));
Expand All @@ -247,7 +246,7 @@ public void checkForceMergeAction(String codec) throws Exception {
createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName));
indexDocument(client(), dataStream, true);

String backingIndexName = getBackingIndices(client(), dataStream).getFirst();
String backingIndexName = getDataStreamBackingIndexNames(dataStream).getFirst();
assertBusy(
() -> assertThat(
"index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore",
Expand Down Expand Up @@ -321,7 +320,7 @@ public void testDataStreamWithMultipleIndicesAndWriteIndexInDeletePhase() throws
client().performRequest(new Request("POST", dataStream + "/_rollover"));
indexDocument(client(), dataStream, true);

String secondGenerationIndex = getBackingIndices(client(), dataStream).get(1);
String secondGenerationIndex = getDataStreamBackingIndexNames(dataStream).get(1);
assertBusy(() -> {
Request explainRequest = new Request("GET", "/_data_stream/" + dataStream);
Response response = client().performRequest(explainRequest);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.WarningFailureException;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
Expand Down Expand Up @@ -1231,7 +1230,7 @@ private void assertHistoryIsPresent(
}

// Finally, check that the history index is in a good state
String historyIndexName = DataStream.getDefaultBackingIndexName("ilm-history-7", 1);
String historyIndexName = getDataStreamBackingIndexNames("ilm-history-7").getFirst();
Response explainHistoryIndex = client().performRequest(new Request("GET", historyIndexName + "/_lifecycle/explain"));
Map<String, Object> responseMap;
try (InputStream is = explainHistoryIndex.getEntity().getContent()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus;
import org.elasticsearch.common.Strings;
Expand Down Expand Up @@ -49,7 +48,6 @@
import static org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.createNewSingletonPolicy;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.explainIndex;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.getBackingIndices;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.getOnlyIndexSettings;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.getStepKeyForIndex;
import static org.elasticsearch.xpack.TimeSeriesRestDriver.index;
Expand Down Expand Up @@ -318,7 +316,7 @@ public void testTsdbDataStreams() throws Exception {

index(client(), dataStream, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5));

String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 1);
String backingIndexName = getDataStreamBackingIndexNames(dataStream).getFirst();
assertBusy(
() -> assertThat(
"index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore",
Expand Down Expand Up @@ -365,7 +363,7 @@ public void testILMWaitsForTimeSeriesEndTimeToLapse() throws Exception {
String now = DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(Instant.now());
index(client(), dataStream, true, null, "@timestamp", now, "volume", 11.0, "metricset", randomAlphaOfLength(5));

String backingIndexName = getBackingIndices(client(), dataStream).get(0);
String backingIndexName = getDataStreamBackingIndexNames(dataStream).getFirst();
assertBusy(
() -> assertThat(
"index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore",
Expand Down Expand Up @@ -459,7 +457,7 @@ public void testDownsampleTwice() throws Exception {

index(client(), dataStream, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5));

String firstBackingIndex = DataStream.getDefaultBackingIndexName(dataStream, 1);
String firstBackingIndex = getDataStreamBackingIndexNames(dataStream).getFirst();
logger.info("--> firstBackingIndex: {}", firstBackingIndex);
assertBusy(
() -> assertThat(
Expand Down Expand Up @@ -540,7 +538,7 @@ public void testDownsampleTwiceSameInterval() throws Exception {

index(client(), dataStream, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5));

String firstBackingIndex = getBackingIndices(client(), dataStream).get(0);
String firstBackingIndex = getDataStreamBackingIndexNames(dataStream).getFirst();
logger.info("--> firstBackingIndex: {}", firstBackingIndex);
assertBusy(
() -> assertThat(
Expand Down
Loading