Skip to content

Commit 556304e

Browse files
authored
Merge branch 'main' into add-RollingUpgradeDeprecatedSettingsIT
2 parents 3c9a53f + 33a2bc9 commit 556304e

File tree

10 files changed

+629
-57
lines changed

10 files changed

+629
-57
lines changed

docs/changelog/122199.yaml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
pr: 122199
2+
summary: Fix issues that prevent using search-only snapshots for indices that use index sorting. This includes LogsDB and time series indices.
3+
area: Logs
4+
type: bug
5+
issues: []

docs/changelog/122496.yaml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
pr: 122496
2+
summary: Deduplicate `IngestStats` and `IngestStats.Stats` identity records when deserializing
3+
area: Ingest Node
4+
type: bug
5+
issues: []

server/src/main/java/org/elasticsearch/common/lucene/Lucene.java

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@
2020
import org.apache.lucene.index.ConcurrentMergeScheduler;
2121
import org.apache.lucene.index.CorruptIndexException;
2222
import org.apache.lucene.index.DirectoryReader;
23+
import org.apache.lucene.index.FieldInfo;
24+
import org.apache.lucene.index.FieldInfos;
2325
import org.apache.lucene.index.FilterCodecReader;
2426
import org.apache.lucene.index.FilterDirectoryReader;
2527
import org.apache.lucene.index.FilterLeafReader;
@@ -190,14 +192,26 @@ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Direc
190192
throw new IllegalStateException("no commit found in the directory");
191193
}
192194
}
195+
// Need to figure out what the parent field is, so that validation in IndexWriter doesn't fail
196+
// if no parent field is configured, but FieldInfo says there is a parent field.
197+
String parentField = null;
193198
final IndexCommit cp = getIndexCommit(si, directory);
199+
try (var reader = DirectoryReader.open(cp)) {
200+
var topLevelFieldInfos = FieldInfos.getMergedFieldInfos(reader);
201+
for (FieldInfo fieldInfo : topLevelFieldInfos) {
202+
if (fieldInfo.isParentField()) {
203+
parentField = fieldInfo.getName();
204+
}
205+
}
206+
}
194207
try (
195208
IndexWriter writer = new IndexWriter(
196209
directory,
197210
indexWriterConfigWithNoMerging(Lucene.STANDARD_ANALYZER).setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
198211
.setIndexCommit(cp)
199212
.setCommitOnClose(false)
200213
.setOpenMode(IndexWriterConfig.OpenMode.APPEND)
214+
.setParentField(parentField)
201215
)
202216
) {
203217
// do nothing and close this will kick off IndexFileDeleter which will remove all pending files

server/src/main/java/org/elasticsearch/index/IndexService.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -232,7 +232,8 @@ public IndexService(
232232
mapperMetrics
233233
);
234234
this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService);
235-
if (indexSettings.getIndexSortConfig().hasIndexSort()) {
235+
boolean sourceOnly = Boolean.parseBoolean(indexSettings.getSettings().get("index.source_only"));
236+
if (indexSettings.getIndexSortConfig().hasIndexSort() && sourceOnly == false) {
236237
// we delay the actual creation of the sort order for this index because the mapping has not been merged yet.
237238
// The sort order is validated right after the merge of the mapping later in the process.
238239
this.indexSortSupplier = () -> indexSettings.getIndexSortConfig()

server/src/main/java/org/elasticsearch/ingest/IngestStats.java

Lines changed: 21 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -57,22 +57,25 @@ public record IngestStats(Stats totalStats, List<PipelineStat> pipelineStats, Ma
5757
* Read from a stream.
5858
*/
5959
public static IngestStats read(StreamInput in) throws IOException {
60-
var stats = new Stats(in);
60+
var stats = readStats(in);
6161
var size = in.readVInt();
62+
if (stats == Stats.IDENTITY && size == 0) {
63+
return IDENTITY;
64+
}
6265
var pipelineStats = new ArrayList<PipelineStat>(size);
6366
var processorStats = Maps.<String, List<ProcessorStat>>newMapWithExpectedSize(size);
6467

6568
for (var i = 0; i < size; i++) {
6669
var pipelineId = in.readString();
67-
var pipelineStat = new Stats(in);
70+
var pipelineStat = readStats(in);
6871
var byteStat = in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? new ByteStats(in) : new ByteStats(0, 0);
6972
pipelineStats.add(new PipelineStat(pipelineId, pipelineStat, byteStat));
7073
int processorsSize = in.readVInt();
7174
var processorStatsPerPipeline = new ArrayList<ProcessorStat>(processorsSize);
7275
for (var j = 0; j < processorsSize; j++) {
7376
var processorName = in.readString();
7477
var processorType = in.readString();
75-
var processorStat = new Stats(in);
78+
var processorStat = readStats(in);
7679
processorStatsPerPipeline.add(new ProcessorStat(processorName, processorType, processorStat));
7780
}
7881
processorStats.put(pipelineId, Collections.unmodifiableList(processorStatsPerPipeline));
@@ -167,20 +170,28 @@ static Map<String, List<ProcessorStat>> merge(Map<String, List<ProcessorStat>> f
167170
return totalsPerPipelineProcessor;
168171
}
169172

173+
/**
174+
* Read {@link Stats} from a stream.
175+
*/
176+
private static Stats readStats(StreamInput in) throws IOException {
177+
long ingestCount = in.readVLong();
178+
long ingestTimeInMillis = in.readVLong();
179+
long ingestCurrent = in.readVLong();
180+
long ingestFailedCount = in.readVLong();
181+
if (ingestCount == 0 && ingestTimeInMillis == 0 && ingestCurrent == 0 && ingestFailedCount == 0) {
182+
return Stats.IDENTITY;
183+
} else {
184+
return new Stats(ingestCount, ingestTimeInMillis, ingestCurrent, ingestFailedCount);
185+
}
186+
}
187+
170188
public record Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount)
171189
implements
172190
Writeable,
173191
ToXContentFragment {
174192

175193
public static final Stats IDENTITY = new Stats(0, 0, 0, 0);
176194

177-
/**
178-
* Read from a stream.
179-
*/
180-
public Stats(StreamInput in) throws IOException {
181-
this(in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong());
182-
}
183-
184195
@Override
185196
public void writeTo(StreamOutput out) throws IOException {
186197
out.writeVLong(ingestCount);

server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
import java.util.Map;
2020

2121
import static org.hamcrest.Matchers.containsInAnyOrder;
22+
import static org.hamcrest.Matchers.sameInstance;
2223

2324
public class IngestStatsTests extends ESTestCase {
2425

@@ -31,6 +32,11 @@ public void testSerialization() throws IOException {
3132
assertIngestStats(ingestStats, serializedStats);
3233
}
3334

35+
public void testIdentitySerialization() throws IOException {
36+
IngestStats serializedStats = serialize(IngestStats.IDENTITY);
37+
assertThat(serializedStats, sameInstance(IngestStats.IDENTITY));
38+
}
39+
3440
public void testStatsMerge() {
3541
var first = randomStats();
3642
var second = randomStats();

x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
import org.apache.lucene.codecs.Codec;
1010
import org.apache.lucene.index.CheckIndex;
1111
import org.apache.lucene.index.DirectoryReader;
12+
import org.apache.lucene.index.DocValuesSkipIndexType;
1213
import org.apache.lucene.index.DocValuesType;
1314
import org.apache.lucene.index.FieldInfo;
1415
import org.apache.lucene.index.FieldInfos;
@@ -252,7 +253,7 @@ private SegmentCommitInfo syncSegment(
252253
false,
253254
IndexOptions.NONE,
254255
DocValuesType.NONE,
255-
fieldInfo.docValuesSkipIndexType(),
256+
DocValuesSkipIndexType.NONE,
256257
-1,
257258
fieldInfo.attributes(),
258259
0,

x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java

Lines changed: 59 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
import org.elasticsearch.common.TriFunction;
1414
import org.elasticsearch.common.time.DateFormatter;
1515
import org.elasticsearch.common.time.LegacyFormatNames;
16+
import org.elasticsearch.core.Strings;
1617
import org.elasticsearch.index.IndexModule;
1718
import org.elasticsearch.index.IndexSettings;
1819
import org.elasticsearch.index.IndexVersion;
@@ -95,25 +96,39 @@ private DeprecationIssue oldIndicesCheck(
9596
IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion();
9697
// We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks
9798
if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, false) && isNotDataStreamIndex(indexMetadata, clusterState)) {
98-
return new DeprecationIssue(
99-
DeprecationIssue.Level.CRITICAL,
100-
"Old index with a compatibility version < 9.0",
101-
"https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html",
102-
"This index has version: " + currentCompatibilityVersion.toReleaseVersion(),
103-
false,
104-
meta(indexMetadata, indexToTransformIds)
105-
);
99+
var transforms = transformIdsForIndex(indexMetadata, indexToTransformIds);
100+
if (transforms.isEmpty() == false) {
101+
return new DeprecationIssue(
102+
DeprecationIssue.Level.CRITICAL,
103+
"One or more Transforms write to this index with a compatibility version < 9.0",
104+
"https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html"
105+
+ "#breaking_90_transform_destination_index",
106+
Strings.format(
107+
"This index was created in version [%s] and requires action before upgrading to 9.0. The following transforms are "
108+
+ "configured to write to this index: [%s]. Refer to the migration guide to learn more about how to prepare "
109+
+ "transforms destination indices for your upgrade.",
110+
currentCompatibilityVersion.toReleaseVersion(),
111+
String.join(", ", transforms)
112+
),
113+
false,
114+
Map.of("reindex_required", true, "transform_ids", transforms)
115+
);
116+
} else {
117+
return new DeprecationIssue(
118+
DeprecationIssue.Level.CRITICAL,
119+
"Old index with a compatibility version < 9.0",
120+
"https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html",
121+
"This index has version: " + currentCompatibilityVersion.toReleaseVersion(),
122+
false,
123+
Map.of("reindex_required", true)
124+
);
125+
}
106126
}
107127
return null;
108128
}
109129

110-
private Map<String, Object> meta(IndexMetadata indexMetadata, Map<String, List<String>> indexToTransformIds) {
111-
var transforms = indexToTransformIds.getOrDefault(indexMetadata.getIndex().getName(), List.of());
112-
if (transforms.isEmpty()) {
113-
return Map.of("reindex_required", true);
114-
} else {
115-
return Map.of("reindex_required", true, "transform_ids", transforms);
116-
}
130+
private List<String> transformIdsForIndex(IndexMetadata indexMetadata, Map<String, List<String>> indexToTransformIds) {
131+
return indexToTransformIds.getOrDefault(indexMetadata.getIndex().getName(), List.of());
117132
}
118133

119134
private DeprecationIssue ignoredOldIndicesCheck(
@@ -124,16 +139,35 @@ private DeprecationIssue ignoredOldIndicesCheck(
124139
IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion();
125140
// We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks
126141
if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, true) && isNotDataStreamIndex(indexMetadata, clusterState)) {
127-
return new DeprecationIssue(
128-
DeprecationIssue.Level.WARNING,
129-
"Old index with a compatibility version < 9.0 Has Been Ignored",
130-
"https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html",
131-
"This read-only index has version: "
132-
+ currentCompatibilityVersion.toReleaseVersion()
133-
+ " and will be supported as read-only in 9.0",
134-
false,
135-
meta(indexMetadata, indexToTransformIds)
136-
);
142+
var transforms = transformIdsForIndex(indexMetadata, indexToTransformIds);
143+
if (transforms.isEmpty() == false) {
144+
return new DeprecationIssue(
145+
DeprecationIssue.Level.WARNING,
146+
"One or more Transforms write to this old index with a compatibility version < 9.0",
147+
"https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html"
148+
+ "#breaking_90_transform_destination_index",
149+
Strings.format(
150+
"This index was created in version [%s] and will be supported as a read-only index in 9.0. The following "
151+
+ "transforms are no longer able to write to this index: [%s]. Refer to the migration guide to learn more "
152+
+ "about how to handle your transforms destination indices.",
153+
currentCompatibilityVersion.toReleaseVersion(),
154+
String.join(", ", transforms)
155+
),
156+
false,
157+
Map.of("reindex_required", true, "transform_ids", transforms)
158+
);
159+
} else {
160+
return new DeprecationIssue(
161+
DeprecationIssue.Level.WARNING,
162+
"Old index with a compatibility version < 9.0 has been ignored",
163+
"https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html",
164+
"This read-only index has version: "
165+
+ currentCompatibilityVersion.toReleaseVersion()
166+
+ " and will be supported as read-only in 9.0",
167+
false,
168+
Map.of("reindex_required", true)
169+
);
170+
}
137171
}
138172
return null;
139173
}

0 commit comments

Comments
 (0)