@@ -14,12 +14,10 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;

@@ -192,108 +190,6 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par
return builder.endObject();
}

public static SnapshotStats fromXContent(XContentParser parser) throws IOException {
// Parse this old-school style instead of using the ObjectParser since there's an impedance mismatch between how the
// object has historically been written as JSON and how it is structured in Java.
XContentParser.Token token = parser.currentToken();
if (token == null) {
token = parser.nextToken();
}
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
long startTime = 0;
long time = 0;
int incrementalFileCount = 0;
int totalFileCount = 0;
int processedFileCount = 0;
long incrementalSize = 0;
long totalSize = 0;
long processedSize = 0;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String currentName = parser.currentName();
token = parser.nextToken();
if (currentName.equals(Fields.INCREMENTAL)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String innerName = parser.currentName();
token = parser.nextToken();
if (innerName.equals(Fields.FILE_COUNT)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
incrementalFileCount = parser.intValue();
} else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
incrementalSize = parser.longValue();
} else {
// Unknown sub field, skip
if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
}
}
} else if (currentName.equals(Fields.PROCESSED)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String innerName = parser.currentName();
token = parser.nextToken();
if (innerName.equals(Fields.FILE_COUNT)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
processedFileCount = parser.intValue();
} else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
processedSize = parser.longValue();
} else {
// Unknown sub field, skip
if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
}
}
} else if (currentName.equals(Fields.TOTAL)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String innerName = parser.currentName();
token = parser.nextToken();
if (innerName.equals(Fields.FILE_COUNT)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
totalFileCount = parser.intValue();
} else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
totalSize = parser.longValue();
} else {
// Unknown sub field, skip
if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
}
}
} else if (currentName.equals(Fields.START_TIME_IN_MILLIS)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
startTime = parser.longValue();
} else if (currentName.equals(Fields.TIME_IN_MILLIS)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
time = parser.longValue();
} else {
// Unknown field, skip
if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
}
}
return new SnapshotStats(
startTime,
time,
incrementalFileCount,
totalFileCount,
processedFileCount,
incrementalSize,
totalSize,
processedSize
);
}

/**
* Add stats instance to the total
* @param stats Stats instance to add
@@ -95,7 +95,11 @@ protected boolean supportsUnknownFields() {
innerParser.declareString(constructorArg(), new ParseField(SnapshotIndexShardStatus.Fields.STAGE));
innerParser.declareString(optionalConstructorArg(), new ParseField(SnapshotIndexShardStatus.Fields.NODE));
innerParser.declareString(optionalConstructorArg(), new ParseField(SnapshotIndexShardStatus.Fields.REASON));
innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS));
innerParser.declareObject(
constructorArg(),
(p, c) -> SnapshotStatsTests.fromXContent(p),
new ParseField(SnapshotStats.Fields.STATS)
);
PARSER = (p, indexId, shardName) -> {
// Combine the index name in the context with the shard name passed in for the named object parser
// into a ShardId to pass as context for the inner parser.
@@ -57,7 +57,11 @@ public class SnapshotIndexStatusTests extends AbstractXContentTestCase<SnapshotI
(p, c) -> SnapshotShardsStatsTests.PARSER.apply(p, null),
new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS)
);
innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS));
innerParser.declareObject(
constructorArg(),
(p, c) -> SnapshotStatsTests.fromXContent(p),
new ParseField(SnapshotStats.Fields.STATS)
);
innerParser.declareNamedObjects(
constructorArg(),
SnapshotIndexShardStatusTests.PARSER,
@@ -9,6 +9,7 @@

package org.elasticsearch.action.admin.cluster.snapshots.status;

import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.elasticsearch.xcontent.XContentParser;

@@ -39,13 +40,141 @@ protected SnapshotStats createTestInstance() {
);
}

public void testXContentSerializationWhenProcessedFileCountEqualsIncrementalFileCount() throws IOException {
final var instance = createTestInstance();
final var incrementalSameAsProcessed = new SnapshotStats(
instance.getStartTime(),
instance.getTime(),
instance.getIncrementalFileCount(),
instance.getTotalFileCount(),
instance.getIncrementalFileCount(), // processedFileCount
instance.getIncrementalSize(),
instance.getTotalSize(),
instance.getIncrementalSize() // processedSize
);
// toXContent() omits the "processed" sub-object in this case, so make sure the processed values are set as expected in fromXContent().
testFromXContent(() -> incrementalSameAsProcessed);
}

public void testXContentSerializationForEmptyStats() throws IOException {
testFromXContent(SnapshotStats::new);
}

@Override
protected SnapshotStats doParseInstance(XContentParser parser) throws IOException {
return SnapshotStats.fromXContent(parser);
return fromXContent(parser);
}

@Override
protected boolean supportsUnknownFields() {
return true;
}

static SnapshotStats fromXContent(XContentParser parser) throws IOException {
// Parse this old-school style instead of using the ObjectParser since there's an impedance mismatch between how the
// object has historically been written as JSON and how it is structured in Java.
XContentParser.Token token = parser.currentToken();
if (token == null) {
token = parser.nextToken();
}
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
long startTime = 0;
long time = 0;
int incrementalFileCount = 0;
int totalFileCount = 0;
int processedFileCount = Integer.MIN_VALUE;
long incrementalSize = 0;
long totalSize = 0;
long processedSize = Long.MIN_VALUE;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String currentName = parser.currentName();
token = parser.nextToken();
if (currentName.equals(SnapshotStats.Fields.INCREMENTAL)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String innerName = parser.currentName();
token = parser.nextToken();
if (innerName.equals(SnapshotStats.Fields.FILE_COUNT)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
incrementalFileCount = parser.intValue();
} else if (innerName.equals(SnapshotStats.Fields.SIZE_IN_BYTES)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
incrementalSize = parser.longValue();
} else {
// Unknown sub field, skip
if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
}
}
} else if (currentName.equals(SnapshotStats.Fields.PROCESSED)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String innerName = parser.currentName();
token = parser.nextToken();
if (innerName.equals(SnapshotStats.Fields.FILE_COUNT)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
processedFileCount = parser.intValue();
} else if (innerName.equals(SnapshotStats.Fields.SIZE_IN_BYTES)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
processedSize = parser.longValue();
} else {
// Unknown sub field, skip
if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
}
}
} else if (currentName.equals(SnapshotStats.Fields.TOTAL)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String innerName = parser.currentName();
token = parser.nextToken();
if (innerName.equals(SnapshotStats.Fields.FILE_COUNT)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
totalFileCount = parser.intValue();
} else if (innerName.equals(SnapshotStats.Fields.SIZE_IN_BYTES)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
totalSize = parser.longValue();
} else {
// Unknown sub field, skip
if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
}
}
} else if (currentName.equals(SnapshotStats.Fields.START_TIME_IN_MILLIS)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
startTime = parser.longValue();
} else if (currentName.equals(SnapshotStats.Fields.TIME_IN_MILLIS)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
time = parser.longValue();
} else {
// Unknown field, skip
if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
}
}
// Handle the case where the "processed" sub-object is omitted in toXContent() when processedFileCount == incrementalFileCount.
if (processedFileCount == Integer.MIN_VALUE) {
assert processedSize == Long.MIN_VALUE;
processedFileCount = incrementalFileCount;
processedSize = incrementalSize;
}
return new SnapshotStats(
startTime,
time,
incrementalFileCount,
totalFileCount,
processedFileCount,
incrementalSize,
totalSize,
processedSize
);
}
}
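For context, here is a minimal sketch of the round trip the relocated parser supports. It is not part of the change: the test method name is hypothetical, it assumes it sits inside SnapshotStatsTests (which, via ESTestCase, provides createParser()), the constructor argument order follows the call above, the JSON keys follow the Fields constants used in the parser, and the values are illustrative.

// Hypothetical test method, not part of this PR; imports elided (XContentFactory, JsonXContent, Strings, ToXContent).
public void testProcessedSubObjectOmittedRoundTrip() throws IOException {
    // processedFileCount/processedSize equal the incremental values, so toXContent() omits "processed".
    SnapshotStats stats = new SnapshotStats(
        1_700_000_000_000L, // startTime
        250L,               // time
        3,                  // incrementalFileCount
        10,                 // totalFileCount
        3,                  // processedFileCount
        1024L,              // incrementalSize
        4096L,              // totalSize
        1024L               // processedSize
    );
    XContentBuilder builder = XContentFactory.jsonBuilder();
    stats.toXContent(builder, ToXContent.EMPTY_PARAMS);
    // Serialized form (roughly): {"incremental":{"file_count":3,"size_in_bytes":1024},
    //   "total":{"file_count":10,"size_in_bytes":4096},
    //   "start_time_in_millis":1700000000000,"time_in_millis":250}
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
        assertEquals(stats, fromXContent(parser)); // fromXContent() falls back to the incremental values for "processed"
    }
}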
@@ -77,7 +77,7 @@ public class SnapshotStatusTests extends AbstractChunkedSerializingTestCase<Snap
PARSER.declareBoolean(optionalConstructorArg(), new ParseField(SnapshotStatus.INCLUDE_GLOBAL_STATE));
PARSER.declareField(
constructorArg(),
SnapshotStats::fromXContent,
SnapshotStatsTests::fromXContent,
new ParseField(SnapshotStats.Fields.STATS),
ObjectParser.ValueType.OBJECT
);
@@ -284,9 +284,16 @@ public static <T extends ToXContent> void testFromXContent(
* both for equality and asserts equality on the two queries.
*/
public final void testFromXContent() throws IOException {
testFromXContent(this::createTestInstance);
}

/**
* Generic test that creates a new instance using the given supplier and verifies XContent round trip serialization.
*/
public final void testFromXContent(Supplier<T> testInstanceSupplier) throws IOException {
testFromXContent(
NUMBER_OF_TEST_RUNS,
this::createTestInstance,
testInstanceSupplier,
supportsUnknownFields(),
getShuffleFieldsExceptions(),
getRandomFieldsExcludeFilter(),
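As a usage note, the new testFromXContent(Supplier<T>) overload lets any AbstractXContentTestCase subclass round-trip a hand-picked instance, the way SnapshotStatsTests does above. A minimal, hypothetical sketch follows; ExampleStats and ExampleStatsTests are made-up names, not part of the change.

// Hypothetical subclass; ExampleStats stands in for any ToXContent type with equals() and a fromXContent().
public class ExampleStatsTests extends AbstractXContentTestCase<ExampleStats> {
    @Override
    protected ExampleStats createTestInstance() {
        return new ExampleStats(randomNonNegativeLong());
    }

    @Override
    protected ExampleStats doParseInstance(XContentParser parser) throws IOException {
        return ExampleStats.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }

    public void testXContentRoundTripForZeroValue() throws IOException {
        // Exercise a fixed edge-case instance instead of the random one from createTestInstance().
        testFromXContent(() -> new ExampleStats(0));
    }
}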