Skip to content

Commit 3583c3e

Browse files
authored
Merge branch 'main' into fix_136365_prune_columns_when_fork
2 parents d44ee90 + 594a373 commit 3583c3e

File tree

6 files changed

+228
-26
lines changed

6 files changed

+228
-26
lines changed

server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@ public class MapperFeatures implements FeatureSpecification {
6464
public static final NodeFeature GENERIC_VECTOR_FORMAT = new NodeFeature("mapper.vectors.generic_vector_format");
6565
public static final NodeFeature FIX_DENSE_VECTOR_WRONG_FIELDS = new NodeFeature("mapper.fix_dense_vector_wrong_fields");
6666
static final NodeFeature BBQ_DISK_STATS_SUPPORT = new NodeFeature("mapper.bbq_disk_stats_support");
67+
static final NodeFeature STORED_FIELDS_SPEC_MERGE_BUG = new NodeFeature("mapper.stored_fields_spec_merge_bug");
6768

6869
@Override
6970
public Set<NodeFeature> getTestFeatures() {
@@ -108,7 +109,8 @@ public Set<NodeFeature> getTestFeatures() {
108109
EXCLUDE_VECTORS_DOCVALUE_BUGFIX,
109110
BASE64_DENSE_VECTORS,
110111
FIX_DENSE_VECTOR_WRONG_FIELDS,
111-
BBQ_DISK_STATS_SUPPORT
112+
BBQ_DISK_STATS_SUPPORT,
113+
STORED_FIELDS_SPEC_MERGE_BUG
112114
);
113115
if (ES93GenericFlatVectorsFormat.GENERIC_VECTOR_FORMAT.isEnabled()) {
114116
features = new HashSet<>(features);

server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsSpec.java

Lines changed: 28 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -77,17 +77,7 @@ public StoredFieldsSpec merge(StoredFieldsSpec other) {
7777
mergedFields = new HashSet<>(this.requiredStoredFields);
7878
mergedFields.addAll(other.requiredStoredFields);
7979
}
80-
Set<String> mergedSourcePaths;
81-
if (this.sourcePaths.isEmpty() == false && other.sourcePaths.isEmpty() == false) {
82-
mergedSourcePaths = new HashSet<>(this.sourcePaths);
83-
mergedSourcePaths.addAll(other.sourcePaths);
84-
} else if (this.sourcePaths.isEmpty() == false) {
85-
mergedSourcePaths = this.sourcePaths;
86-
} else if (other.sourcePaths.isEmpty() == false) {
87-
mergedSourcePaths = other.sourcePaths;
88-
} else {
89-
mergedSourcePaths = Set.of();
90-
}
80+
Set<String> mergedSourcePaths = mergeSourcePaths(other);
9181
IgnoredSourceFormat mergedFormat;
9282
if (this.ignoredSourceFormat == IgnoredSourceFormat.NO_IGNORED_SOURCE) {
9383
mergedFormat = other.ignoredSourceFormat;
@@ -114,6 +104,33 @@ public StoredFieldsSpec merge(StoredFieldsSpec other) {
114104
);
115105
}
116106

107+
/**
108+
* Returns the unique source paths that should be loaded from source. Other source paths may be filtered out.
109+
* If an empty set is returned, then all source paths need to be loaded.
110+
*/
111+
private Set<String> mergeSourcePaths(StoredFieldsSpec other) {
112+
Set<String> mergedSourcePaths;
113+
if (this.sourcePaths.isEmpty() == false && other.sourcePaths.isEmpty() == false) {
114+
mergedSourcePaths = new HashSet<>(this.sourcePaths);
115+
mergedSourcePaths.addAll(other.sourcePaths);
116+
} else if (this.sourcePaths.isEmpty() == false) {
117+
if (other.requiresSource) {
118+
mergedSourcePaths = Set.of();
119+
} else {
120+
mergedSourcePaths = this.sourcePaths;
121+
}
122+
} else if (other.sourcePaths.isEmpty() == false) {
123+
if (this.requiresSource) {
124+
mergedSourcePaths = Set.of();
125+
} else {
126+
mergedSourcePaths = other.sourcePaths;
127+
}
128+
} else {
129+
mergedSourcePaths = Set.of();
130+
}
131+
return mergedSourcePaths;
132+
}
133+
117134
public Set<String> requiredStoredFields() {
118135
if (sourcePaths.isEmpty() || ignoredSourceFormat == IgnoredSourceFormat.NO_IGNORED_SOURCE) {
119136
return requiredStoredFields;

server/src/test/java/org/elasticsearch/search/fetch/StoredFieldsSpecTests.java

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ public void testMergeSourcePaths() {
120120

121121
spec = spec.merge(
122122
new StoredFieldsSpec(
123-
true,
123+
false, // if set to true, then the new spec will require the complete source and so source paths would then be empty
124124
false,
125125
Set.of("other_field"),
126126
IgnoredSourceFieldMapper.IgnoredSourceFormat.NO_IGNORED_SOURCE,
@@ -135,6 +135,24 @@ public void testMergeSourcePaths() {
135135
assertThat(spec.sourcePaths(), sameInstance(pref));
136136
}
137137

138+
public void testMergeSourcePathsRequireCompleteSource() {
139+
var ignoredSourceFormat = IgnoredSourceFieldMapper.IgnoredSourceFormat.NO_IGNORED_SOURCE;
140+
StoredFieldsSpec spec = new StoredFieldsSpec(true, false, Set.of(), ignoredSourceFormat, Set.of("field1", "field2"));
141+
assertThat(spec.ignoredSourceFormat(), equalTo(ignoredSourceFormat));
142+
assertThat(spec.requiresSource(), equalTo(true));
143+
assertThat(spec.requiresMetadata(), equalTo(false));
144+
assertThat(spec.requiredStoredFields(), empty());
145+
assertThat(spec.sourcePaths(), containsInAnyOrder("field1", "field2"));
146+
147+
// Clears source paths, because this spec requires complete source (since no source paths are defined)
148+
spec = spec.merge(new StoredFieldsSpec(true, false, Set.of(), ignoredSourceFormat, Set.of()));
149+
assertThat(spec.ignoredSourceFormat(), equalTo(ignoredSourceFormat));
150+
assertThat(spec.requiresSource(), equalTo(true));
151+
assertThat(spec.requiresMetadata(), equalTo(false));
152+
assertThat(spec.requiredStoredFields(), empty());
153+
assertThat(spec.sourcePaths(), empty());
154+
}
155+
138156
private static SearchContext searchContext(SearchSourceBuilder sourceBuilder) {
139157
SearchContext sc = mock(SearchContext.class);
140158
when(sc.fetchSourceContext()).thenReturn(sourceBuilder.fetchSource());

test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java

Lines changed: 52 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@
4545
import java.util.Set;
4646
import java.util.concurrent.ConcurrentHashMap;
4747
import java.util.concurrent.ConcurrentMap;
48+
import java.util.concurrent.atomic.AtomicReference;
4849
import java.util.regex.Matcher;
4950
import java.util.regex.Pattern;
5051

@@ -248,6 +249,9 @@ public void handle(final HttpExchange exchange) throws IOException {
248249
if (isProtectOverwrite(exchange)) {
249250
throw new AssertionError("If-None-Match: * header is not supported here");
250251
}
252+
if (getRequiredExistingETag(exchange) != null) {
253+
throw new AssertionError("If-Match: * header is not supported here");
254+
}
251255

252256
var sourceBlob = blobs.get(copySource);
253257
if (sourceBlob == null) {
@@ -406,8 +410,9 @@ public void handle(final HttpExchange exchange) throws IOException {
406410
* Update the blob contents if and only if the preconditions in the request are satisfied.
407411
*
408412
* @return {@link RestStatus#OK} if the blob contents were updated, or else a different status code to indicate the error: possibly
409-
* {@link RestStatus#CONFLICT} or {@link RestStatus#PRECONDITION_FAILED} if the object exists and the precondition requires it
410-
* not to.
413+
* {@link RestStatus#CONFLICT} in any case, but if not then either {@link RestStatus#PRECONDITION_FAILED} if the object exists
414+
* but doesn't match the specified precondition, or {@link RestStatus#NOT_FOUND} if the object doesn't exist but is required to
415+
* do so by the precondition.
411416
*
412417
* @see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/conditional-writes.html#conditional-error-response">AWS docs</a>
413418
*/
@@ -418,6 +423,25 @@ private RestStatus updateBlobContents(HttpExchange exchange, String path, BytesR
418423
: ESTestCase.randomFrom(RestStatus.PRECONDITION_FAILED, RestStatus.CONFLICT);
419424
}
420425

426+
final var requireExistingETag = getRequiredExistingETag(exchange);
427+
if (requireExistingETag != null) {
428+
final var responseCode = new AtomicReference<>(RestStatus.OK);
429+
blobs.compute(path, (ignoredPath, existingContents) -> {
430+
if (existingContents != null && requireExistingETag.equals(getEtagFromContents(existingContents))) {
431+
return newContents;
432+
}
433+
434+
responseCode.set(
435+
ESTestCase.randomFrom(
436+
existingContents == null ? RestStatus.NOT_FOUND : RestStatus.PRECONDITION_FAILED,
437+
RestStatus.CONFLICT
438+
)
439+
);
440+
return existingContents;
441+
});
442+
return responseCode.get();
443+
}
444+
421445
blobs.put(path, newContents);
422446
return RestStatus.OK;
423447
}
@@ -598,6 +622,9 @@ private static boolean isProtectOverwrite(final HttpExchange exchange) {
598622
return false;
599623
}
600624

625+
if (exchange.getRequestHeaders().get("If-Match") != null) {
626+
throw new AssertionError("Handling both If-None-Match and If-Match headers is not supported");
627+
}
601628
if (ifNoneMatch.size() != 1) {
602629
throw new AssertionError("multiple If-None-Match headers found: " + ifNoneMatch);
603630
}
@@ -609,6 +636,29 @@ private static boolean isProtectOverwrite(final HttpExchange exchange) {
609636
throw new AssertionError("invalid If-None-Match header: " + ifNoneMatch);
610637
}
611638

639+
@Nullable // if no If-Match header found
640+
private static String getRequiredExistingETag(final HttpExchange exchange) {
641+
final var ifMatch = exchange.getRequestHeaders().get("If-Match");
642+
643+
if (ifMatch == null) {
644+
return null;
645+
}
646+
647+
if (exchange.getRequestHeaders().get("If-None-Match") != null) {
648+
throw new AssertionError("Handling both If-None-Match and If-Match headers is not supported");
649+
}
650+
651+
final var iterator = ifMatch.iterator();
652+
if (iterator.hasNext()) {
653+
final var result = iterator.next();
654+
if (iterator.hasNext() == false) {
655+
return result;
656+
}
657+
}
658+
659+
throw new AssertionError("multiple If-Match headers found: " + ifMatch);
660+
}
661+
612662
MultipartUpload putUpload(String path) {
613663
final var upload = new MultipartUpload(UUIDs.randomBase64UUID(), path);
614664
synchronized (uploads) {

test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java

Lines changed: 56 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -412,13 +412,29 @@ public void testExtractPartEtags() {
412412
}
413413

414414
public void testPreventObjectOverwrite() {
415+
ensureExactlyOneSuccess(new S3HttpHandler("bucket", "path"), null);
416+
}
417+
418+
public void testConditionalOverwrite() {
415419
final var handler = new S3HttpHandler("bucket", "path");
416420

417-
var tasks = List.of(
418-
createPutObjectTask(handler),
419-
createPutObjectTask(handler),
420-
createMultipartUploadTask(handler),
421-
createMultipartUploadTask(handler)
421+
final var originalBody = new BytesArray(randomAlphaOfLength(50).getBytes(StandardCharsets.UTF_8));
422+
final var originalETag = S3HttpHandler.getEtagFromContents(originalBody);
423+
assertEquals(RestStatus.OK, handleRequest(handler, "PUT", "/bucket/path/blob", originalBody).status());
424+
assertEquals(
425+
new TestHttpResponse(RestStatus.OK, originalBody, addETag(originalETag, TestHttpExchange.EMPTY_HEADERS)),
426+
handleRequest(handler, "GET", "/bucket/path/blob")
427+
);
428+
429+
ensureExactlyOneSuccess(handler, originalETag);
430+
}
431+
432+
private static void ensureExactlyOneSuccess(S3HttpHandler handler, String originalETag) {
433+
final var tasks = List.of(
434+
createPutObjectTask(handler, originalETag),
435+
createPutObjectTask(handler, originalETag),
436+
createMultipartUploadTask(handler, originalETag),
437+
createMultipartUploadTask(handler, originalETag)
422438
);
423439

424440
runInParallel(tasks.size(), i -> tasks.get(i).consumer.run());
@@ -445,13 +461,38 @@ public void testPreventObjectOverwrite() {
445461
);
446462
}
447463

448-
private static TestWriteTask createPutObjectTask(S3HttpHandler handler) {
464+
public void testPutObjectIfMatchWithBlobNotFound() {
465+
final var handler = new S3HttpHandler("bucket", "path");
466+
while (true) {
467+
final var task = createPutObjectTask(handler, randomIdentifier());
468+
task.consumer.run();
469+
if (task.status == RestStatus.NOT_FOUND) {
470+
break;
471+
}
472+
assertEquals(RestStatus.CONFLICT, task.status); // chosen randomly so eventually we will escape the loop
473+
}
474+
}
475+
476+
public void testCompleteMultipartUploadIfMatchWithBlobNotFound() {
477+
final var handler = new S3HttpHandler("bucket", "path");
478+
while (true) {
479+
final var task = createMultipartUploadTask(handler, randomIdentifier());
480+
task.consumer.run();
481+
if (task.status == RestStatus.NOT_FOUND) {
482+
break;
483+
}
484+
assertEquals(RestStatus.CONFLICT, task.status); // chosen randomly so eventually we will escape the loop
485+
}
486+
}
487+
488+
private static TestWriteTask createPutObjectTask(S3HttpHandler handler, @Nullable String originalETag) {
449489
return new TestWriteTask(
450-
(task) -> task.status = handleRequest(handler, "PUT", "/bucket/path/blob", task.body, ifNoneMatchHeader()).status()
490+
(task) -> task.status = handleRequest(handler, "PUT", "/bucket/path/blob", task.body, conditionalWriteHeader(originalETag))
491+
.status()
451492
);
452493
}
453494

454-
private static TestWriteTask createMultipartUploadTask(S3HttpHandler handler) {
495+
private static TestWriteTask createMultipartUploadTask(S3HttpHandler handler, @Nullable String originalETag) {
455496
final var multipartUploadTask = new TestWriteTask(
456497
(task) -> task.status = handleRequest(
457498
handler,
@@ -465,7 +506,7 @@ private static TestWriteTask createMultipartUploadTask(S3HttpHandler handler) {
465506
<PartNumber>1</PartNumber>
466507
</Part>
467508
</CompleteMultipartUpload>""", task.etag)),
468-
ifNoneMatchHeader()
509+
conditionalWriteHeader(originalETag)
469510
).status()
470511
);
471512

@@ -594,9 +635,13 @@ private static Headers contentRangeHeader(long start, long end, long length) {
594635
return headers;
595636
}
596637

597-
private static Headers ifNoneMatchHeader() {
638+
private static Headers conditionalWriteHeader(@Nullable String originalEtag) {
598639
var headers = new Headers();
599-
headers.put("If-None-Match", List.of("*"));
640+
if (originalEtag == null) {
641+
headers.put("If-None-Match", List.of("*"));
642+
} else {
643+
headers.put("If-Match", List.of(originalEtag));
644+
}
600645
return headers;
601646
}
602647

x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -210,3 +210,73 @@ setup:
210210
"keyword" : "ok",
211211
"case" : "ok"
212212
}}
213+
214+
---
215+
"source meta field and dynamic false":
216+
- requires:
217+
cluster_features: [ "mapper.stored_fields_spec_merge_bug" ]
218+
reason: "Test can only run on clusters with the bug fix"
219+
- do:
220+
indices.create:
221+
index: my-index2
222+
body:
223+
mappings:
224+
dynamic: false
225+
properties:
226+
foo:
227+
type: text
228+
229+
- do:
230+
bulk:
231+
index: my-index2
232+
refresh: true
233+
body:
234+
- { "index": { } }
235+
- { "baz": "dasd" }
236+
- do:
237+
esql.query:
238+
body:
239+
query: 'FROM my-index2 METADATA _source | LIMIT 10'
240+
241+
- match: {columns.0.name: "foo"}
242+
- match: {columns.0.type: "text"}
243+
- match: {columns.1.name: "_source"}
244+
- match: {columns.1.type: "_source"}
245+
246+
- match: {values.0.0: null}
247+
- match: {values.0.1: { "baz": "dasd" }}
248+
249+
---
250+
"source meta field and keep":
251+
- requires:
252+
cluster_features: [ "mapper.stored_fields_spec_merge_bug" ]
253+
reason: "Test can only run on clusters with the bug fix"
254+
- do:
255+
indices.create:
256+
index: my-index2
257+
body:
258+
mappings:
259+
properties:
260+
foo:
261+
type: text
262+
263+
- do:
264+
bulk:
265+
index: my-index2
266+
refresh: true
267+
body:
268+
- { "index": { } }
269+
- { "baz": "dasd" }
270+
- do:
271+
esql.query:
272+
body:
273+
query: 'FROM my-index2 METADATA _source | KEEP foo, _source | LIMIT 10'
274+
275+
- match: {columns.0.name: "foo"}
276+
- match: {columns.0.type: "text"}
277+
- match: {columns.1.name: "_source"}
278+
- match: {columns.1.type: "_source"}
279+
280+
- match: {values.0.0: null}
281+
- match: {values.0.1: { "baz": "dasd" }}
282+

0 commit comments

Comments
 (0)