6 changes: 6 additions & 0 deletions docs/changelog/124352.yaml
@@ -0,0 +1,6 @@
pr: 124352
summary: Add `force_merge_max_num_segments` option to downsample API and downsample
  ILM action
area: Downsampling
type: enhancement
issues: []
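
Judging by the option name and the default of 1 introduced in `DownsampleConfig` below, the new setting controls how many segments the downsampled index is force merged to once downsampling completes; leaving it unset keeps the existing behaviour of merging down to a single segment. A minimal sketch of the new two-argument constructor, assuming direct construction (the class name, interval, and segment count are illustrative, not part of the PR):

```java
import org.elasticsearch.action.downsample.DownsampleConfig;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

class DownsampleConfigSketch {
    // Ask the downsample operation to force merge the target index to at most 4 segments.
    static DownsampleConfig explicitSegments() {
        return new DownsampleConfig(new DateHistogramInterval("1h"), 4);
    }

    // Passing null keeps the previous behaviour: merge down to a single segment.
    static DownsampleConfig defaultSegments() {
        return new DownsampleConfig(new DateHistogramInterval("1h"), null);
    }
}
```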
@@ -1038,7 +1038,7 @@ public void testLifecycleAppliedToFailureStore() throws Exception {
List.of(
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueMillis(10),
new DownsampleConfig(new DateHistogramInterval("10m"))
new DownsampleConfig(new DateHistogramInterval("10m"), null)
)
)
)
@@ -257,7 +257,7 @@ private static DataStreamLifecycle.Downsampling randomNonEmptyDownsampling() {
List<DataStreamLifecycle.Downsampling.Round> rounds = new ArrayList<>();
var previous = new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(randomIntBetween(1, 365)),
new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h"))
new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h"), null)
);
rounds.add(previous);
for (int i = 0; i < count; i++) {
@@ -271,7 +271,8 @@ private static DataStreamLifecycle.Downsampling randomNonEmptyDownsampling() {
private static DataStreamLifecycle.Downsampling.Round nextRound(DataStreamLifecycle.Downsampling.Round previous) {
var after = TimeValue.timeValueDays(previous.after().days() + randomIntBetween(1, 10));
var fixedInterval = new DownsampleConfig(
new DateHistogramInterval((previous.config().getFixedInterval().estimateMillis() * randomIntBetween(2, 5)) + "ms")
new DateHistogramInterval((previous.config().getFixedInterval().estimateMillis() * randomIntBetween(2, 5)) + "ms"),
null
);
return new DataStreamLifecycle.Downsampling.Round(after, fixedInterval);
}
@@ -154,7 +154,7 @@ private static DataStreamLifecycle.Downsampling randomDownsampling() {
List<DataStreamLifecycle.Downsampling.Round> rounds = new ArrayList<>();
var previous = new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(randomIntBetween(1, 365)),
new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h"))
new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h"), null)
);
rounds.add(previous);
for (int i = 0; i < count; i++) {
@@ -170,7 +170,8 @@ private static DataStreamLifecycle.Downsampling randomDownsampling() {
private static DataStreamLifecycle.Downsampling.Round nextRound(DataStreamLifecycle.Downsampling.Round previous) {
var after = TimeValue.timeValueDays(previous.after().days() + randomIntBetween(1, 10));
var fixedInterval = new DownsampleConfig(
new DateHistogramInterval((previous.config().getFixedInterval().estimateMillis() * randomIntBetween(2, 5)) + "ms")
new DateHistogramInterval((previous.config().getFixedInterval().estimateMillis() * randomIntBetween(2, 5)) + "ms"),
null
);
return new DataStreamLifecycle.Downsampling.Round(after, fixedInterval);
}
@@ -1227,7 +1227,7 @@ public void testDownsampling() throws Exception {
DataStreamLifecycle.newBuilder()
.downsampling(
new Downsampling(
List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m"))))
List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m"), null)))
)
)
.dataRetention(TimeValue.MAX_VALUE)
@@ -1365,7 +1365,7 @@ public void testDownsamplingWhenTargetIndexNameClashYieldsException() throws Exc
DataStreamLifecycle.newBuilder()
.downsampling(
new Downsampling(
List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m"))))
List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m"), null)))
)
)
.dataRetention(TimeValue.MAX_VALUE)
@@ -1616,7 +1616,7 @@ private ClusterState downsampleSetup(String dataStreamName, IndexMetadata.Downsa
DataStreamLifecycle.newBuilder()
.downsampling(
new Downsampling(
List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m"))))
List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m"), null)))
)
)
.dataRetention(TimeValue.timeValueMillis(1))
@@ -184,6 +184,7 @@ static TransportVersion def(int id) {
public static final TransportVersion INCLUDE_INDEX_MODE_IN_GET_DATA_STREAM = def(9_023_0_00);
public static final TransportVersion MAX_OPERATION_SIZE_REJECTIONS_ADDED = def(9_024_0_00);
public static final TransportVersion RETRY_ILM_ASYNC_ACTION_REQUIRE_ERROR = def(9_025_0_00);
public static final TransportVersion DOWNSAMPLE_FORCE_MERGE_MAX_NUM_SEGMENTS_PARAMETER = def(9_026_0_00);

/*
* STOP! READ THIS FIRST! No, really,
@@ -9,6 +9,7 @@

package org.elasticsearch.action.downsample;

import org.elasticsearch.TransportVersions;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.Strings;
@@ -31,6 +32,7 @@
import java.util.Objects;

import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
* This class holds the configuration details of a DownsampleAction that downsamples time series
@@ -56,20 +58,24 @@ public class DownsampleConfig implements NamedWriteable, ToXContentObject {

private static final String NAME = "downsample/action/config";
public static final String FIXED_INTERVAL = "fixed_interval";
public static final String FORCE_MERGE_MAX_NUM_SEGMENTS = "force_merge_max_num_segments";
public static final String TIME_ZONE = "time_zone";
public static final String DEFAULT_TIMEZONE = ZoneId.of("UTC").getId();
private static final int DEFAULT_MAX_NUM_SEGMENTS = 1;

private static final String timestampField = DataStreamTimestampFieldMapper.DEFAULT_PATH;
private final DateHistogramInterval fixedInterval;
private final int forceMergeMaxNumSegments;
private final String timeZone = DEFAULT_TIMEZONE;
private final String intervalType = FIXED_INTERVAL;

private static final ConstructingObjectParser<DownsampleConfig, Void> PARSER;
static {
PARSER = new ConstructingObjectParser<>(NAME, a -> {
DateHistogramInterval fixedInterval = (DateHistogramInterval) a[0];
Integer forceMergeMaxNumSegments = (Integer) a[1];
if (fixedInterval != null) {
return new DownsampleConfig(fixedInterval);
return new DownsampleConfig(fixedInterval, forceMergeMaxNumSegments);
} else {
throw new IllegalArgumentException("Parameter [" + FIXED_INTERVAL + "] is required.");
}
@@ -81,24 +87,40 @@ public class DownsampleConfig implements NamedWriteable, ToXContentObject {
new ParseField(FIXED_INTERVAL),
ObjectParser.ValueType.STRING
);
PARSER.declareField(
optionalConstructorArg(),
p -> p.intValue(),
new ParseField(FORCE_MERGE_MAX_NUM_SEGMENTS),
ObjectParser.ValueType.INT
);
}

/**
* Create a new {@link DownsampleConfig} using the given configuration parameters.
* @param fixedInterval the fixed interval to use for computing the date histogram for the rolled up documents (required).
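* @param forceMergeMaxNumSegments the maximum number of segments to force merge the downsampled index to (optional; when null the default of 1 is used).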
*/
public DownsampleConfig(final DateHistogramInterval fixedInterval) {
public DownsampleConfig(final DateHistogramInterval fixedInterval, Integer forceMergeMaxNumSegments) {
if (fixedInterval == null) {
throw new IllegalArgumentException("Parameter [" + FIXED_INTERVAL + "] is required.");
}
this.fixedInterval = fixedInterval;

// validate interval
createRounding(this.fixedInterval.toString(), this.timeZone);

if (forceMergeMaxNumSegments == null) {
forceMergeMaxNumSegments = 1;
}
this.forceMergeMaxNumSegments = forceMergeMaxNumSegments;
}

public DownsampleConfig(final StreamInput in) throws IOException {
fixedInterval = new DateHistogramInterval(in);
if (in.getTransportVersion().onOrAfter(TransportVersions.DOWNSAMPLE_FORCE_MERGE_MAX_NUM_SEGMENTS_PARAMETER)) {
forceMergeMaxNumSegments = in.readInt();
} else {
forceMergeMaxNumSegments = DEFAULT_MAX_NUM_SEGMENTS;
}
}

/**
@@ -135,6 +157,9 @@ public static void validateSourceAndTargetIntervals(DownsampleConfig source, Dow
@Override
public void writeTo(final StreamOutput out) throws IOException {
fixedInterval.writeTo(out);
if (out.getTransportVersion().onOrAfter(TransportVersions.DOWNSAMPLE_FORCE_MERGE_MAX_NUM_SEGMENTS_PARAMETER)) {
out.writeInt(forceMergeMaxNumSegments);
}
}

/**
@@ -180,6 +205,10 @@ public Rounding.Prepared createRounding() {
return createRounding(fixedInterval.toString(), timeZone);
}

public int getForceMergeMaxNumSegments() {
return forceMergeMaxNumSegments;
}

@Override
public String getWriteableName() {
return NAME;
@@ -195,7 +224,11 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa
}

public XContentBuilder toXContentFragment(final XContentBuilder builder) throws IOException {
return builder.field(FIXED_INTERVAL, fixedInterval.toString());
builder.field(FIXED_INTERVAL, fixedInterval.toString());
if (forceMergeMaxNumSegments != DEFAULT_MAX_NUM_SEGMENTS) {
builder.field(FORCE_MERGE_MAX_NUM_SEGMENTS, forceMergeMaxNumSegments);
}
return builder;
}

public static DownsampleConfig fromXContent(final XContentParser parser) throws IOException {
@@ -212,13 +245,14 @@ public boolean equals(final Object other) {
}
final DownsampleConfig that = (DownsampleConfig) other;
return Objects.equals(fixedInterval, that.fixedInterval)
&& Objects.equals(forceMergeMaxNumSegments, that.forceMergeMaxNumSegments)
&& Objects.equals(intervalType, that.intervalType)
&& ZoneId.of(timeZone, ZoneId.SHORT_IDS).getRules().equals(ZoneId.of(that.timeZone, ZoneId.SHORT_IDS).getRules());
}

@Override
public int hashCode() {
return Objects.hash(fixedInterval, intervalType, ZoneId.of(timeZone));
return Objects.hash(fixedInterval, forceMergeMaxNumSegments, intervalType, ZoneId.of(timeZone));
}

@Override
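
A short sketch (not part of the PR) restating the behaviour added above: the new getter returns the configured value, `null` falls back to the default of 1, `toXContentFragment` only emits the field when it differs from that default, and on the wire the field is only written to nodes on or after the new transport version. The helper class and values below are illustrative:

```java
import org.elasticsearch.action.downsample.DownsampleConfig;
import org.elasticsearch.common.Strings;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

class DownsampleConfigXContentSketch {
    static void run() {
        // Serializes as {"fixed_interval":"1h","force_merge_max_num_segments":4}.
        DownsampleConfig explicit = new DownsampleConfig(new DateHistogramInterval("1h"), 4);
        String explicitJson = Strings.toString(explicit);

        // Serializes as {"fixed_interval":"1h"}: null falls back to the default of 1,
        // and values equal to the default are omitted from the XContent output.
        DownsampleConfig defaulted = new DownsampleConfig(new DateHistogramInterval("1h"), null);
        String defaultedJson = Strings.toString(defaulted);
    }
}
```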
@@ -488,7 +488,7 @@ public record Round(TimeValue after, DownsampleConfig config) implements Writeab
private static final ConstructingObjectParser<Round, Void> PARSER = new ConstructingObjectParser<>(
"downsampling_round",
false,
(args, unused) -> new Round((TimeValue) args[0], new DownsampleConfig((DateHistogramInterval) args[1]))
(args, unused) -> new Round((TimeValue) args[0], new DownsampleConfig((DateHistogramInterval) args[1], null))
);

static {
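
Worth noting: the `Round` parser above still passes `null` for the new setting, so rounds configured through a data stream lifecycle keep the default of one segment; an explicit value can still be supplied when building a round programmatically. A minimal sketch under that assumption (the delay, interval, and segment count are illustrative):

```java
import java.util.List;

import org.elasticsearch.action.downsample.DownsampleConfig;
import org.elasticsearch.cluster.metadata.DataStreamLifecycle;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

class LifecycleDownsamplingSketch {
    static DataStreamLifecycle.Downsampling oneRound() {
        // After 30 days, downsample to 1h buckets and force merge the result to 4 segments.
        return new DataStreamLifecycle.Downsampling(
            List.of(
                new DataStreamLifecycle.Downsampling.Round(
                    TimeValue.timeValueDays(30),
                    new DownsampleConfig(new DateHistogramInterval("1h"), 4)
                )
            )
        );
    }
}
```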
@@ -182,11 +182,11 @@ public void testInvalidDownsamplingConfiguration() {
List.of(
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(10),
new DownsampleConfig(new DateHistogramInterval("2h"))
new DownsampleConfig(new DateHistogramInterval("2h"), null)
),
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(3),
new DownsampleConfig(new DateHistogramInterval("2h"))
new DownsampleConfig(new DateHistogramInterval("2h"), null)
)
)
)
@@ -203,11 +203,11 @@ public void testInvalidDownsamplingConfiguration() {
List.of(
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(10),
new DownsampleConfig(new DateHistogramInterval("2h"))
new DownsampleConfig(new DateHistogramInterval("2h"), null)
),
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(30),
new DownsampleConfig(new DateHistogramInterval("2h"))
new DownsampleConfig(new DateHistogramInterval("2h"), null)
)
)
)
@@ -221,11 +221,11 @@ public void testInvalidDownsamplingConfiguration() {
List.of(
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(10),
new DownsampleConfig(new DateHistogramInterval("2h"))
new DownsampleConfig(new DateHistogramInterval("2h"), null)
),
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(30),
new DownsampleConfig(new DateHistogramInterval("3h"))
new DownsampleConfig(new DateHistogramInterval("3h"), null)
)
)
)
@@ -248,7 +248,7 @@ public void testInvalidDownsamplingConfiguration() {
.map(
i -> new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(i),
new DownsampleConfig(new DateHistogramInterval(i + "h"))
new DownsampleConfig(new DateHistogramInterval(i + "h"), randomBoolean() ? null : randomIntBetween(-1, 128))
)
)
.toList()
@@ -264,7 +264,7 @@ public void testInvalidDownsamplingConfiguration() {
List.of(
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(10),
new DownsampleConfig(new DateHistogramInterval("2m"))
new DownsampleConfig(new DateHistogramInterval("2m"), null)
)
)
)
@@ -420,9 +420,10 @@ static DataStreamLifecycle.Downsampling randomDownsampling() {
default -> {
var count = randomIntBetween(0, 9);
List<DataStreamLifecycle.Downsampling.Round> rounds = new ArrayList<>();
Integer forceMergeMaxNumSegments = randomBoolean() ? null : randomIntBetween(-1, 128);
var previous = new DataStreamLifecycle.Downsampling.Round(
randomTimeValue(1, 365, TimeUnit.DAYS),
new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h"))
new DownsampleConfig(new DateHistogramInterval(randomIntBetween(1, 24) + "h"), forceMergeMaxNumSegments)
);
rounds.add(previous);
for (int i = 0; i < count; i++) {
@@ -437,8 +438,10 @@ static DataStreamLifecycle.Downsampling randomDownsampling() {

private static DataStreamLifecycle.Downsampling.Round nextRound(DataStreamLifecycle.Downsampling.Round previous) {
var after = TimeValue.timeValueDays(previous.after().days() + randomIntBetween(1, 10));
Integer forceMergeMaxNumSegments = randomBoolean() ? null : randomIntBetween(-1, 128);
var fixedInterval = new DownsampleConfig(
new DateHistogramInterval((previous.config().getFixedInterval().estimateMillis() * randomIntBetween(2, 5)) + "ms")
new DateHistogramInterval((previous.config().getFixedInterval().estimateMillis() * randomIntBetween(2, 5)) + "ms"),
forceMergeMaxNumSegments
);
return new DataStreamLifecycle.Downsampling.Round(after, fixedInterval);
}
@@ -1647,15 +1647,15 @@ public void testGetDownsampleRounds() {
List.of(
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueMillis(2000),
new DownsampleConfig(new DateHistogramInterval("10m"))
new DownsampleConfig(new DateHistogramInterval("10m"), null)
),
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueMillis(3200),
new DownsampleConfig(new DateHistogramInterval("100m"))
new DownsampleConfig(new DateHistogramInterval("100m"), null)
),
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueMillis(3500),
new DownsampleConfig(new DateHistogramInterval("1000m"))
new DownsampleConfig(new DateHistogramInterval("1000m"), null)
)
)
)
@@ -1708,15 +1708,15 @@ public void testGetDownsampleRounds() {
List.of(
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueMillis(2000),
new DownsampleConfig(new DateHistogramInterval("10m"))
new DownsampleConfig(new DateHistogramInterval("10m"), null)
),
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueMillis(3200),
new DownsampleConfig(new DateHistogramInterval("100m"))
new DownsampleConfig(new DateHistogramInterval("100m"), null)
),
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueMillis(3500),
new DownsampleConfig(new DateHistogramInterval("1000m"))
new DownsampleConfig(new DateHistogramInterval("1000m"), null)
)
)
)
@@ -1501,7 +1501,7 @@ public void testResolveLifecycle() throws Exception {
List.of(
new DataStreamLifecycle.Downsampling.Round(
TimeValue.timeValueDays(30),
new DownsampleConfig(new DateHistogramInterval("3h"))
new DownsampleConfig(new DateHistogramInterval("3h"), null)
)
)
)