From 9b4dae56a99b13b364e66577d594f091c7395e31 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Tue, 5 Jul 2022 18:02:00 -0500 Subject: [PATCH 01/36] WIP ingest update metadata --- .../attachment/AttachmentProcessorTests.java | 4 +- .../ingest/common/DateProcessor.java | 2 +- .../ingest/common/DotExpanderProcessor.java | 2 +- .../ingest/common/JsonProcessor.java | 2 +- .../ingest/common/RemoveProcessor.java | 2 +- .../ingest/common/ScriptProcessor.java | 2 +- .../ingest/common/CsvProcessorTests.java | 4 +- .../common/DateIndexNameProcessorTests.java | 12 +- .../common/DotExpanderProcessorTests.java | 6 +- .../ingest/common/ForEachProcessorTests.java | 4 +- .../ingest/common/JsonProcessorTests.java | 4 +- .../ingest/common/RenameProcessorTests.java | 12 +- .../common/ScriptProcessorFactoryTests.java | 2 +- .../ingest/common/ScriptProcessorTests.java | 10 +- .../ingest/common/UriPartsProcessorTests.java | 4 +- .../ingest/geoip/GeoIpDownloaderIT.java | 10 +- ...gDatabasesWhilePerformingGeoLookupsIT.java | 4 +- .../geoip/GeoIpProcessorFactoryTests.java | 16 +- .../ingest/geoip/GeoIpProcessorTests.java | 42 +- .../useragent/UserAgentProcessorTests.java | 14 +- .../org.elasticsearch.script.update.txt | 15 + .../rest-api-spec/test/painless/15_update.yml | 44 ++ .../test/painless/25_script_upsert.yml | 94 +++ ...AsyncBulkByScrollActionScriptTestCase.java | 4 +- .../action/ingest/AsyncIngestProcessorIT.java | 4 +- .../elasticsearch/index/FinalPipelineIT.java | 8 +- .../ingest/WriteableIngestDocument.java | 2 +- .../action/update/UpdateHelper.java | 114 +--- .../ingest/ConditionalProcessor.java | 2 +- .../elasticsearch/ingest/IngestDocument.java | 48 +- .../elasticsearch/ingest/IngestService.java | 2 +- .../ingest/IngestSourceAndMetadata.java | 477 +------------- .../org/elasticsearch/script/Metadata.java | 24 +- .../script/SourceAndMetadataMap.java | 583 ++++++++++++++++++ .../elasticsearch/script/UpdateMetadata.java | 154 +++++ .../elasticsearch/script/UpdateScript.java 
| 15 +- .../elasticsearch/script/UpsertMetadata.java | 15 + .../ingest/SimulateExecutionServiceTests.java | 8 +- .../ingest/ConditionalProcessorTests.java | 8 +- .../ingest/IngestDocumentTests.java | 120 ++-- .../ingest/IngestServiceTests.java | 4 +- .../ingest/IngestSourceAndMetadataTests.java | 7 +- .../ingest/PipelineProcessorTests.java | 2 +- .../ingest/TrackingResultProcessorTests.java | 4 +- .../ingest/IngestDocumentMatcher.java | 2 +- .../ingest/RandomDocumentPicks.java | 4 +- .../ingest/TestIngestDocument.java | 10 +- .../script/MockScriptEngine.java | 2 +- .../results/InferenceResultsTestCase.java | 2 +- .../xpack/enrich/MatchProcessorTests.java | 8 +- .../inference/ingest/InferenceProcessor.java | 2 +- 51 files changed, 1197 insertions(+), 749 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java create mode 100644 server/src/main/java/org/elasticsearch/script/UpdateMetadata.java create mode 100644 server/src/main/java/org/elasticsearch/script/UpsertMetadata.java diff --git a/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 1fead50a600e7..553a3937e40a4 100644 --- a/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -326,7 +326,7 @@ public void testParseAsBytesArray() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map attachmentData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map attachmentData = (Map) ingestDocument.getIngestContext().get("target_field"); assertThat(attachmentData.keySet(), containsInAnyOrder("language", "content", "content_type", "content_length")); 
assertThat(attachmentData.get("language"), is("en")); @@ -443,7 +443,7 @@ private Map parseDocument( attachmentProcessor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map attachmentData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map attachmentData = (Map) ingestDocument.getIngestContext().get("target_field"); return attachmentData; } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java index 031ed9cf86bf7..2fa344456dd71 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java @@ -101,7 +101,7 @@ public IngestDocument execute(IngestDocument ingestDocument) { Exception lastException = null; for (Function, Function> dateParser : dateParsers) { try { - dateTime = dateParser.apply(ingestDocument.getSourceAndMetadata()).apply(value); + dateTime = dateParser.apply(ingestDocument.getIngestContext()).apply(value); break; } catch (Exception e) { // try the next parser and keep track of the exceptions diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java index 5174ea614c0ee..586e97801a0bb 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java @@ -45,7 +45,7 @@ public IngestDocument execute(IngestDocument ingestDocument) throws Exception { map = ingestDocument.getFieldValue(this.path, Map.class); } else { pathToExpand = field; - map = ingestDocument.getSourceAndMetadata(); + map = ingestDocument.getIngestContext(); } if (this.field.equals("*")) { diff --git 
a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java index 83cd59bc1b4be..8bcce51b9d92c 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java @@ -145,7 +145,7 @@ public static void recursiveMerge(Map target, Map IngestDocument.Metadata.isMetadata(documentField) == false) .filter(documentField -> shouldKeep(documentField, fieldsToKeep, document) == false) diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index 2586b9aed919d..2c76c9b054be8 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -75,7 +75,7 @@ public IngestDocument execute(IngestDocument document) { if (factory == null) { factory = scriptService.compile(script, IngestScript.CONTEXT); } - factory.newInstance(script.getParams(), document.getMetadata(), document.getSourceAndMetadata()).execute(); + factory.newInstance(script.getParams(), document.getContextMetadata(), document.getIngestContext()).execute(); return document; } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/CsvProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/CsvProcessorTests.java index e111d5f18d210..7e9d868d1d037 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/CsvProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/CsvProcessorTests.java @@ -310,13 +310,13 @@ public void testEmptyHeaders() throws Exception { assumeTrue("single run only", 
quote.isEmpty()); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "abc,abc"); - HashMap metadata = new HashMap<>(ingestDocument.getSourceAndMetadata()); + HashMap metadata = new HashMap<>(ingestDocument.getIngestContext()); CsvProcessor processor = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, new String[0], false, ',', '"', false, null); processor.execute(ingestDocument); - assertEquals(metadata, ingestDocument.getSourceAndMetadata()); + assertEquals(metadata, ingestDocument.getIngestContext()); } private IngestDocument processDocument(String[] headers, String csv) { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java index a79580d743a0b..48e64f3984345 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java @@ -43,7 +43,7 @@ public void testJavaPattern() throws Exception { Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z") ); processor.execute(document); - assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); + assertThat(document.getIngestContext().get("_index"), equalTo("")); } public void testTAI64N() throws Exception { @@ -65,7 +65,7 @@ public void testTAI64N() throws Exception { Collections.singletonMap("_field", (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024") ); dateProcessor.execute(document); - assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); + assertThat(document.getIngestContext().get("_index"), equalTo("")); } public void testUnixMs() throws Exception { @@ -80,11 +80,11 @@ public void testUnixMs() throws Exception { ); IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Collections.singletonMap("_field", "1000500")); dateProcessor.execute(document); - assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); + assertThat(document.getIngestContext().get("_index"), equalTo("")); document = new IngestDocument("_index", "_id", 1, null, null, Collections.singletonMap("_field", 1000500L)); dateProcessor.execute(document); - assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); + assertThat(document.getIngestContext().get("_index"), equalTo("")); } public void testUnix() throws Exception { @@ -99,7 +99,7 @@ public void testUnix() throws Exception { ); IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Collections.singletonMap("_field", "1000.5")); dateProcessor.execute(document); - assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); + assertThat(document.getIngestContext().get("_index"), equalTo("")); } public void testTemplatedFields() throws Exception { @@ -122,7 +122,7 @@ public void testTemplatedFields() throws Exception { dateProcessor.execute(document); assertThat( - document.getSourceAndMetadata().get("_index"), + document.getIngestContext().get("_index"), equalTo( "<" + indexNamePrefix diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java index 1714717d0e6d3..c7b7211ee8a91 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java +++ 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java @@ -151,11 +151,11 @@ public void testEscapeFields_doNothingIfFieldNotInSourceDoc() throws Exception { // hasField returns false since it requires the expanded form, which is not expanded since we did not ask for it to be assertFalse(document.hasField("foo.bar")); // nothing has changed - assertEquals(document.getSourceAndMetadata().get("foo.bar"), "baz1"); + assertEquals(document.getIngestContext().get("foo.bar"), "baz1"); // abc.def is not found anywhere assertFalse(document.hasField("abc.def")); - assertFalse(document.getSourceAndMetadata().containsKey("abc")); - assertFalse(document.getSourceAndMetadata().containsKey("abc.def")); + assertFalse(document.getIngestContext().containsKey("abc")); + assertFalse(document.getIngestContext().containsKey("abc.def")); // asking to expand a (literal) field that does not exist, but the nested field does exist source = new HashMap<>(); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 0d96e4c6680b7..6b914b7815215 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -107,8 +107,8 @@ public void testMetadataAvailable() { IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Collections.singletonMap("values", values)); TestProcessor innerProcessor = new TestProcessor(id -> { - id.setFieldValue("_ingest._value.index", id.getSourceAndMetadata().get("_index")); - id.setFieldValue("_ingest._value.id", id.getSourceAndMetadata().get("_id")); + id.setFieldValue("_ingest._value.index", id.getIngestContext().get("_index")); + id.setFieldValue("_ingest._value.id", 
id.getIngestContext().get("_id")); }); ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", innerProcessor, false); execProcessor(processor, ingestDocument, (result, e) -> {}); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index a9596fb0083ab..96c1eb2c9a830 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -157,7 +157,7 @@ public void testAddToRoot() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); jsonProcessor.execute(ingestDocument); - Map sourceAndMetadata = ingestDocument.getSourceAndMetadata(); + Map sourceAndMetadata = ingestDocument.getIngestContext(); assertEquals(1, sourceAndMetadata.get("a")); assertEquals(2, sourceAndMetadata.get("b")); assertEquals("see", sourceAndMetadata.get("c")); @@ -175,7 +175,7 @@ public void testDuplicateKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); lenientJsonProcessor.execute(ingestDocument); - Map sourceAndMetadata = ingestDocument.getSourceAndMetadata(); + Map sourceAndMetadata = ingestDocument.getIngestContext(); assertEquals(2, sourceAndMetadata.get("a")); assertEquals("see", sourceAndMetadata.get("c")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index 4cab0b999c248..47461df98fc9b 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -56,14 +56,14 @@ 
public void testRenameArrayElement() throws Exception { Processor processor = createRenameProcessor("list.0", "item", false); processor.execute(ingestDocument); - Object actualObject = ingestDocument.getSourceAndMetadata().get("list"); + Object actualObject = ingestDocument.getIngestContext().get("list"); assertThat(actualObject, instanceOf(List.class)); @SuppressWarnings("unchecked") List actualList = (List) actualObject; assertThat(actualList.size(), equalTo(2)); assertThat(actualList.get(0), equalTo("item2")); assertThat(actualList.get(1), equalTo("item3")); - actualObject = ingestDocument.getSourceAndMetadata().get("item"); + actualObject = ingestDocument.getIngestContext().get("item"); assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); @@ -151,8 +151,8 @@ public void testRenameAtomicOperationSetFails() throws Exception { fail("processor execute should have failed"); } catch (UnsupportedOperationException e) { // the set failed, the old field has not been removed - assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); + assertThat(ingestDocument.getIngestContext().containsKey("list"), equalTo(true)); + assertThat(ingestDocument.getIngestContext().containsKey("new_field"), equalTo(false)); } } @@ -171,8 +171,8 @@ public void testRenameAtomicOperationRemoveFails() throws Exception { fail("processor execute should have failed"); } catch (UnsupportedOperationException e) { // the set failed, the old field has not been removed - assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); + assertThat(ingestDocument.getIngestContext().containsKey("list"), equalTo(true)); + assertThat(ingestDocument.getIngestContext().containsKey("new_field"), equalTo(false)); } } diff --git 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java index 6fb39fa0fb803..99c97b5e67fd0 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java @@ -160,7 +160,7 @@ public void testInlineIsCompiled() throws Exception { assertNotNull(processor.getPrecompiledIngestScriptFactory()); IngestDocument doc = TestIngestDocument.emptyIngestDocument(); Map ctx = TestIngestDocument.emptyIngestDocument().getIngestSourceAndMetadata(); - processor.getPrecompiledIngestScriptFactory().newInstance(null, doc.getMetadata(), ctx).execute(); + processor.getPrecompiledIngestScriptFactory().newInstance(null, doc.getContextMetadata(), ctx).execute(); assertThat(ctx.get("foo"), equalTo("bar")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java index 972ca029b7b03..975841a3f7f53 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java @@ -78,11 +78,11 @@ private IngestDocument randomDocument() { } private void assertIngestDocument(IngestDocument ingestDocument) { - assertThat(ingestDocument.getSourceAndMetadata(), hasKey("bytes_in")); - assertThat(ingestDocument.getSourceAndMetadata(), hasKey("bytes_out")); - assertThat(ingestDocument.getSourceAndMetadata(), hasKey("bytes_total")); + assertThat(ingestDocument.getIngestContext(), hasKey("bytes_in")); + assertThat(ingestDocument.getIngestContext(), hasKey("bytes_out")); + assertThat(ingestDocument.getIngestContext(), 
hasKey("bytes_total")); int bytesTotal = ingestDocument.getFieldValue("bytes_in", Integer.class) + ingestDocument.getFieldValue("bytes_out", Integer.class); - assertThat(ingestDocument.getSourceAndMetadata().get("bytes_total"), is(bytesTotal)); - assertThat(ingestDocument.getSourceAndMetadata().get("_dynamic_templates"), equalTo(Map.of("foo", "bar"))); + assertThat(ingestDocument.getIngestContext().get("bytes_total"), is(bytesTotal)); + assertThat(ingestDocument.getIngestContext().get("_dynamic_templates"), equalTo(Map.of("foo", "bar"))); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java index 88181446b9e11..6054484e32af9 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java @@ -192,7 +192,7 @@ public void testRemoveIfSuccessfulDoesNotRemoveTargetField() throws Exception { Map expectedSourceAndMetadata = new HashMap<>(); expectedSourceAndMetadata.put(field, Map.of("scheme", "http", "domain", "www.google.com", "path", "")); for (Map.Entry entry : expectedSourceAndMetadata.entrySet()) { - assertThat(output.getSourceAndMetadata(), hasEntry(entry.getKey(), entry.getValue())); + assertThat(output.getIngestContext(), hasEntry(entry.getKey(), entry.getValue())); } } @@ -237,7 +237,7 @@ private void testUriParsing(boolean keepOriginal, boolean removeIfSuccessful, St expectedSourceAndMetadata.put("url", values); for (Map.Entry entry : expectedSourceAndMetadata.entrySet()) { - assertThat(output.getSourceAndMetadata(), hasEntry(entry.getKey(), entry.getValue())); + assertThat(output.getIngestContext(), hasEntry(entry.getKey(), entry.getValue())); } } diff --git 
a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 71169e504f8ba..3a717eb0361b3 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -298,9 +298,9 @@ public void testUseGeoIpProcessorWithDownloadedDBs() throws Exception { assertThat(result.getIngestDocument(), notNullValue()); IngestDocument doc = result.getIngestDocument(); - assertThat(doc.getSourceAndMetadata(), hasKey("ip-city")); - assertThat(doc.getSourceAndMetadata(), hasKey("ip-asn")); - assertThat(doc.getSourceAndMetadata(), hasKey("ip-country")); + assertThat(doc.getIngestContext(), hasKey("ip-city")); + assertThat(doc.getIngestContext(), hasKey("ip-asn")); + assertThat(doc.getIngestContext(), hasKey("ip-country")); assertThat(doc.getFieldValue("ip-city.city_name", String.class), equalTo("Tumba")); assertThat(doc.getFieldValue("ip-asn.organization_name", String.class), equalTo("Bredband2 AB")); @@ -362,7 +362,7 @@ public void testStartWithNoDatabases() throws Exception { SimulateDocumentBaseResult result = simulatePipeline(); assertThat(result.getFailure(), nullValue()); assertThat(result.getIngestDocument(), notNullValue()); - Map source = result.getIngestDocument().getSourceAndMetadata(); + Map source = result.getIngestDocument().getIngestContext(); assertThat( source, hasEntry( @@ -388,7 +388,7 @@ private void verifyUpdatedDatabase() throws Exception { assertThat(result.getFailure(), nullValue()); assertThat(result.getIngestDocument(), notNullValue()); - Map source = result.getIngestDocument().getSourceAndMetadata(); + Map source = result.getIngestDocument().getIngestContext(); assertThat(source, not(hasKey("tags"))); assertThat(source, hasKey("ip-city")); 
assertThat(source, hasKey("ip-asn")); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java index fffd1343a3392..e021f22198d5f 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java @@ -102,7 +102,7 @@ public void test() throws Exception { Map.of("_field", "89.160.20.128") ); processor1.execute(document1); - assertThat(document1.getSourceAndMetadata().get("geoip"), notNullValue()); + assertThat(document1.getIngestContext().get("geoip"), notNullValue()); IngestDocument document2 = new IngestDocument( "index", "id", @@ -112,7 +112,7 @@ public void test() throws Exception { Map.of("_field", "89.160.20.128") ); processor2.execute(document2); - assertThat(document2.getSourceAndMetadata().get("geoip"), notNullValue()); + assertThat(document2.getIngestContext().get("geoip"), notNullValue()); numberOfIngestRuns.incrementAndGet(); } catch (Exception | AssertionError e) { logger.error("error in ingest thread after run [" + numberOfIngestRuns.get() + "]", e); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 0f97f212c42c0..9a421e36bddf6 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -423,7 +423,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); + Map geoData = (Map) ingestDocument.getIngestContext().get("geoip"); assertThat(geoData.get("city_name"), equalTo("Tumba")); } { @@ -431,7 +431,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); databaseNodeService.updateDatabase("GeoLite2-City.mmdb", "md5", geoipTmpDir.resolve("GeoLite2-City-Test.mmdb")); processor.execute(ingestDocument); - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); + Map geoData = (Map) ingestDocument.getIngestContext().get("geoip"); assertThat(geoData.get("city_name"), equalTo("Linköping")); } { @@ -440,7 +440,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { databaseNodeService.removeStaleEntries(List.of("GeoLite2-City.mmdb")); configDatabases.updateDatabase(geoIpConfigDir.resolve("GeoLite2-City.mmdb"), false); processor.execute(ingestDocument); - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); + Map geoData = (Map) ingestDocument.getIngestContext().get("geoip"); assertThat(geoData, nullValue()); } { @@ -448,7 +448,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { databaseNodeService.updateDatabase("GeoLite2-City-Test.mmdb", "md5", geoipTmpDir.resolve("GeoLite2-City-Test.mmdb")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); + assertThat(ingestDocument.getIngestContext(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); } } @@ -472,9 +472,9 @@ public void testDatabaseNotReadyYet() throws Exception { config ); 
processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get("geoip"), nullValue()); + assertThat(ingestDocument.getIngestContext().get("geoip"), nullValue()); assertThat( - ingestDocument.getSourceAndMetadata().get("tags"), + ingestDocument.getIngestContext().get("tags"), equalTo(List.of("_geoip_database_unavailable_GeoLite2-City.mmdb")) ); } @@ -493,8 +493,8 @@ public void testDatabaseNotReadyYet() throws Exception { GeoIpProcessor processor = (GeoIpProcessor) factory.create(null, null, null, config); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get("tags"), nullValue()); - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); + assertThat(ingestDocument.getIngestContext().get("tags"), nullValue()); + Map geoData = (Map) ingestDocument.getIngestContext().get("geoip"); assertThat(geoData.get("city_name"), equalTo("Linköping")); } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index bbefc307e800f..69f4a8d6ecdb8 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -54,9 +54,9 @@ public void testCity() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("8.8.8.8")); + assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo("8.8.8.8")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); assertThat(geoData.size(), equalTo(6)); assertThat(geoData.get("ip"), 
equalTo("8.8.8.8")); assertThat(geoData.get("country_iso_code"), equalTo("US")); @@ -171,9 +171,9 @@ public void testCity_withIpV6() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(address)); + assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo(address)); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); assertThat(geoData.size(), equalTo(9)); assertThat(geoData.get("ip"), equalTo(address)); assertThat(geoData.get("country_iso_code"), equalTo("US")); @@ -208,9 +208,9 @@ public void testCityWithMissingLocation() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("80.231.5.0")); + assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo("80.231.5.0")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); assertThat(geoData.size(), equalTo(1)); assertThat(geoData.get("ip"), equalTo("80.231.5.0")); } @@ -234,9 +234,9 @@ public void testCountry() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("82.170.213.79")); + assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo("82.170.213.79")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map geoData = 
(Map) ingestDocument.getIngestContext().get("target_field"); assertThat(geoData.size(), equalTo(4)); assertThat(geoData.get("ip"), equalTo("82.170.213.79")); assertThat(geoData.get("country_iso_code"), equalTo("NL")); @@ -263,9 +263,9 @@ public void testCountryWithMissingLocation() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("80.231.5.0")); + assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo("80.231.5.0")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); assertThat(geoData.size(), equalTo(1)); assertThat(geoData.get("ip"), equalTo("80.231.5.0")); } @@ -290,9 +290,9 @@ public void testAsn() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); + assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo(ip)); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); assertThat(geoData.size(), equalTo(4)); assertThat(geoData.get("ip"), equalTo(ip)); assertThat(geoData.get("asn"), equalTo(1136L)); @@ -318,7 +318,7 @@ public void testAddressIsNotInTheDatabase() throws Exception { document.put("source_field", "127.0.0.1"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); + 
assertThat(ingestDocument.getIngestContext().containsKey("target_field"), is(false)); } /** @@ -365,7 +365,7 @@ public void testListAllValid() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - List> geoData = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); + List> geoData = (List>) ingestDocument.getIngestContext().get("target_field"); Map location = new HashMap<>(); location.put("lat", 37.751d); @@ -395,7 +395,7 @@ public void testListPartiallyValid() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - List> geoData = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); + List> geoData = (List>) ingestDocument.getIngestContext().get("target_field"); Map location = new HashMap<>(); location.put("lat", 37.751d); @@ -447,7 +447,7 @@ public void testListFirstOnly() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); Map location = new HashMap<>(); location.put("lat", 37.751d); @@ -474,7 +474,7 @@ public void testListFirstOnlyNoMatches() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); + assertThat(ingestDocument.getIngestContext().containsKey("target_field"), is(false)); } public void testInvalidDatabase() throws Exception { @@ -496,8 +496,8 @@ public void testInvalidDatabase() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); - assertThat(ingestDocument.getSourceAndMetadata(), hasEntry("tags", 
List.of("_geoip_expired_database"))); + assertThat(ingestDocument.getIngestContext().containsKey("target_field"), is(false)); + assertThat(ingestDocument.getIngestContext(), hasEntry("tags", List.of("_geoip_expired_database"))); } public void testNoDatabase() throws Exception { @@ -519,8 +519,8 @@ public void testNoDatabase() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); - assertThat(ingestDocument.getSourceAndMetadata(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City"))); + assertThat(ingestDocument.getIngestContext().containsKey("target_field"), is(false)); + assertThat(ingestDocument.getIngestContext(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City"))); } public void testNoDatabase_ignoreMissing() throws Exception { diff --git a/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java b/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java index 4da6ac77b50f8..b0f7a169bb4ec 100644 --- a/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java +++ b/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java @@ -135,7 +135,7 @@ public void testCommonBrowser() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getSourceAndMetadata(); + Map data = ingestDocument.getIngestContext(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -164,7 +164,7 @@ public void testWindowsOS() throws Exception { 
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getSourceAndMetadata(); + Map data = ingestDocument.getIngestContext(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -194,7 +194,7 @@ public void testUncommonDevice() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getSourceAndMetadata(); + Map data = ingestDocument.getIngestContext(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -221,7 +221,7 @@ public void testSpider() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getSourceAndMetadata(); + Map data = ingestDocument.getIngestContext(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -248,7 +248,7 @@ public void testTablet() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getSourceAndMetadata(); + Map data = ingestDocument.getIngestContext(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -276,7 +276,7 @@ public void testUnknown() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getSourceAndMetadata(); + Map data = ingestDocument.getIngestContext(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -314,7 +314,7 @@ public void testExtractDeviceTypeDisabled() { false ); userAgentProcessor.execute(ingestDocument); - Map data = 
ingestDocument.getSourceAndMetadata(); + Map data = ingestDocument.getIngestContext(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.update.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.update.txt index 1aacbb4d8cf40..bec70394a6d87 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.update.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.update.txt @@ -18,3 +18,18 @@ class org.elasticsearch.painless.api.Json { String dump(def) String dump(def, boolean) } + +class org.elasticsearch.script.Metadata { + String getIndex() + String getId() + String getRouting() + long getVersion() + String getOp() + void setOp(String) + ZonedDateTime getTimestamp() + String getType() +} + +class org.elasticsearch.script.UpdateScript { + Metadata metadata() +} diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml index a23a27a2e6578..7a761d34ee593 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml @@ -123,3 +123,47 @@ - match: { error.root_cause.0.type: "illegal_argument_exception" } - match: { error.type: "illegal_argument_exception" } - match: { error.reason: "Iterable object is self-referencing itself" } + +--- +"Script Update Metadata": + - skip: + version: " - 8.3.99" + reason: "update metadata introduced in 8.4.0" + + - do: + update: + index: test_1 + id: "2" + body: + script: + source: "ctx._source.bar = meta().id + '-extra'" + lang: "painless" + upsert: {} + scripted_upsert: true + + - do: 
+ get: + index: test_1 + id: "2" + + - match: { _source.bar: 2-extra } + - match: { found: true } + + - do: + update: + index: test_1 + id: "2" + body: + script: + source: "meta().op = Op.DELETE" + lang: "painless" + upsert: {} + scripted_upsert: true + + - do: + catch: missing + get: + index: test_1 + id: "2" + + - match: { found: false } diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml index 559a54d28a19e..4145f6d7a7964 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml @@ -93,3 +93,97 @@ id: "4" - match: { _source.within_one_minute: true } + +--- +"Script Upsert Metadata": + - skip: + version: " - 8.3.99" + reason: "update metadata introduced in 8.4.0" + + - do: + catch: /type unavailable for insert/ + update: + index: test_1 + id: "1" + body: + script: + source: "ctx._source.foo = meta().type" + lang: "painless" + upsert: {} + scripted_upsert: true + + - do: + update: + index: test_1 + id: "2" + body: + script: + source: "ctx._source.foo = meta().index + '_1'; ctx._source.bar = 'nothing'" + lang: "painless" + upsert: {} + scripted_upsert: true + + - do: + get: + index: test_1 + id: "2" + + - match: { _source.foo: test_1_1 } + - match: { _source.bar: nothing } + + - do: + update: + index: test_1 + id: "3" + body: + script: + source: "meta().op = Op.NOOP; ctx._source.bar = 'skipped?'" + lang: "painless" + upsert: {} + scripted_upsert: true + + - do: + catch: missing + get: + index: test_1 + id: "3" + + - match: { found: false } + + - do: + update: + index: test_1 + id: "3" + body: + script: + source: "meta().op = Op.CREATE; ctx._source.bar = 'skipped?'" + lang: "painless" + upsert: {} + scripted_upsert: true + + - do: + get: + index: test_1 + 
id: "3" + + - match: { found: true } + - match: { _source.bar: "skipped?" } + + # update + - do: + update: + index: test_1 + id: "2" + body: + script: + source: "ctx._source.bar = meta().type + '-extra'" + lang: "painless" + upsert: {} + scripted_upsert: true + + - do: + get: + index: test_1 + id: "2" + + - match: { _source.bar: _doc-extra } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java index 3a6a7fcb6c42c..8914647574f0c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java @@ -50,10 +50,10 @@ protected T applyScript(Consumer> IndexRequest index = new IndexRequest("index").id("1").source(singletonMap("foo", "bar")); ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "id", 0); when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn( - (params, ctx) -> new UpdateScript(Collections.emptyMap(), ctx) { + (params, ctx, md) -> new UpdateScript(Collections.emptyMap(), ctx, md) { @Override public void execute() { - scriptBody.accept(getCtx()); + scriptBody.accept(ctx); } } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java index 2cd64faf9e101..1bb5f6816b1ed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java @@ -108,7 +108,7 @@ public Map getProcessors(Processor.Parameters paramet @Override public void execute(IngestDocument ingestDocument, BiConsumer handler) { 
threadPool.generic().execute(() -> { - String id = (String) ingestDocument.getSourceAndMetadata().get("_id"); + String id = (String) ingestDocument.getIngestContext().get("_id"); if (usually()) { try { Thread.sleep(10); @@ -134,7 +134,7 @@ public boolean isAsync() { }, "test", (processorFactories, tag, description, config) -> new AbstractProcessor(tag, description) { @Override public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - String id = (String) ingestDocument.getSourceAndMetadata().get("_id"); + String id = (String) ingestDocument.getIngestContext().get("_id"); ingestDocument.setFieldValue("bar", "baz-" + id); return ingestDocument; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 9c0298ec6dacb..2f807c2c7d567 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -437,14 +437,14 @@ public void execute(IngestDocument ingestDocument, BiConsumer { if (exists != null) { - if (ingestDocument.getSourceAndMetadata().containsKey(exists) == false) { + if (ingestDocument.getIngestContext().containsKey(exists) == false) { handler.accept( null, new IllegalStateException( "expected document to contain [" + exists + "] but was [" - + ingestDocument.getSourceAndMetadata() + + ingestDocument.getIngestContext() ) ); } @@ -464,9 +464,9 @@ public IngestDocument execute(final IngestDocument ingestDocument) throws Except } else { // this asserts that this pipeline is the final pipeline executed if (exists != null) { - if (ingestDocument.getSourceAndMetadata().containsKey(exists) == false) { + if (ingestDocument.getIngestContext().containsKey(exists) == false) { throw new AssertionError( - "expected document to contain [" + exists + "] but was [" + ingestDocument.getSourceAndMetadata() + 
"expected document to contain [" + exists + "] but was [" + ingestDocument.getIngestContext() ); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index e9e2882763e33..96c2b428bfed9 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -96,7 +96,7 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeGenericMap(ingestDocument.getSourceAndMetadata()); + out.writeGenericMap(ingestDocument.getIngestContext()); out.writeGenericMap(ingestDocument.getIngestMetadata()); } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 06b850815681e..f25bc89c54b37 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; @@ -87,25 +86,10 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult * Execute a scripted upsert, where there is an existing upsert document and a script to be executed. The script is executed and a new * Tuple of operation and updated {@code _source} is returned. 
*/ - Tuple> executeScriptedUpsert(Map upsertDoc, Script script, LongSupplier nowInMillis) { - Map ctx = Maps.newMapWithExpectedSize(3); - // Tell the script that this is a create and not an update - ctx.put(ContextFields.OP, UpdateOpType.CREATE.toString()); - ctx.put(ContextFields.SOURCE, upsertDoc); - ctx.put(ContextFields.NOW, nowInMillis.getAsLong()); - ctx = executeScript(script, ctx); - - UpdateOpType operation = UpdateOpType.lenientFromString((String) ctx.get(ContextFields.OP), logger, script.getIdOrCode()); - @SuppressWarnings("unchecked") - Map newSource = (Map) ctx.get(ContextFields.SOURCE); - - if (operation != UpdateOpType.CREATE && operation != UpdateOpType.NONE) { - // Only valid options for an upsert script are "create" (the default) or "none", meaning abort upsert - logger.warn("Invalid upsert operation [{}] for script [{}], doing nothing...", operation, script.getIdOrCode()); - operation = UpdateOpType.NONE; - } - - return new Tuple<>(operation, newSource); + Tuple> executeScriptedUpsert(Script script, UpdateScript.Metadata metadata) { + // Tell the script that this is a create and not an update (insert from upsert) + UpdateScript.Metadata md = executeScript(script, metadata); + return new Tuple<>(lenientGetOp(md, logger, script.getIdOrCode()), md.getSource()); } /** @@ -120,14 +104,13 @@ Result prepareUpsert(ShardId shardId, UpdateRequest request, final GetResult get if (request.scriptedUpsert() && request.script() != null) { // Run the script to perform the create logic IndexRequest upsert = request.upsertRequest(); - Tuple> upsertResult = executeScriptedUpsert( - upsert.sourceAsMap(), + Tuple> upsertResult = executeScriptedUpsert( request.script, - nowInMillis + UpdateScript.insert(getResult.getIndex(), getResult.getId(), Op.CREATE, nowInMillis.getAsLong(), upsert.sourceAsMap()) ); switch (upsertResult.v1()) { case CREATE -> indexRequest = Requests.indexRequest(request.index()).source(upsertResult.v2()); - case NONE -> { + case NOOP -> { 
UpdateResponse update = new UpdateResponse( shardId, getResult.getId(), @@ -237,26 +220,24 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes final String routing = calculateRouting(getResult, currentRequest); final Tuple> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); final XContentType updateSourceContentType = sourceAndContent.v1(); - final Map sourceAsMap = sourceAndContent.v2(); - - Map ctx = Maps.newMapWithExpectedSize(16); - ctx.put(ContextFields.OP, UpdateOpType.INDEX.toString()); // The default operation is "index" - ctx.put(ContextFields.INDEX, getResult.getIndex()); - ctx.put(ContextFields.TYPE, MapperService.SINGLE_MAPPING_NAME); - ctx.put(ContextFields.ID, getResult.getId()); - ctx.put(ContextFields.VERSION, getResult.getVersion()); - ctx.put(ContextFields.ROUTING, routing); - ctx.put(ContextFields.SOURCE, sourceAsMap); - ctx.put(ContextFields.NOW, nowInMillis.getAsLong()); - - ctx = executeScript(request.script, ctx); - UpdateOpType operation = UpdateOpType.lenientFromString((String) ctx.get(ContextFields.OP), logger, request.script.getIdOrCode()); - - @SuppressWarnings("unchecked") - final Map updatedSourceAsMap = (Map) ctx.get(ContextFields.SOURCE); + UpdateScript.Metadata md = executeScript( + request.script, + UpdateScript.update( + getResult.getIndex(), + getResult.getId(), + getResult.getVersion(), + routing, + Op.INDEX, // The default operation is "index" + nowInMillis.getAsLong(), + MapperService.SINGLE_MAPPING_NAME, + sourceAndContent.v2() + ) + ); + Op op = lenientGetOp(md, logger, request.script.getIdOrCode()); + final Map updatedSourceAsMap = md.getSource(); - switch (operation) { + switch (op) { case INDEX -> { final IndexRequest indexRequest = Requests.indexRequest(request.index()) .id(request.id()) @@ -307,17 +288,17 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes } } - private Map executeScript(Script script, Map ctx) { + private 
Metadata executeScript(Script script, Map ctx) { try { if (scriptService != null) { UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT); - UpdateScript executableScript = factory.newInstance(script.getParams(), ctx); + UpdateScript executableScript = factory.newInstance(script.getParams(), metadata); executableScript.execute(); } } catch (Exception e) { throw new IllegalArgumentException("failed to execute script", e); } - return ctx; + return metadata; } /** @@ -405,42 +386,13 @@ public XContentType updateSourceContentType() { } } - /** - * After executing the script, this is the type of operation that will be used for subsequent actions. This corresponds to the "ctx.op" - * variable inside of scripts. - */ - enum UpdateOpType { - CREATE("create"), - INDEX("index"), - DELETE("delete"), - NONE("none"); - - private final String name; - - UpdateOpType(String name) { - this.name = name; - } - - public static UpdateOpType lenientFromString(String operation, Logger logger, String scriptId) { - switch (operation) { - case "create": - return UpdateOpType.CREATE; - case "index": - return UpdateOpType.INDEX; - case "delete": - return UpdateOpType.DELETE; - case "none": - return UpdateOpType.NONE; - default: - // TODO: can we remove this leniency yet?? - logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", operation, scriptId); - return UpdateOpType.NONE; - } - } - - @Override - public String toString() { - return name; + protected Op lenientGetOp(UpdateScript.Metadata md, Logger logger, String scriptId) { + try { + return md.getOp(); + } catch (IllegalArgumentException err) { + // TODO: can we remove this leniency yet?? 
(this comment from 1907c466, April 2017 -@stu) + logger.warn("[{}] for script [{}], doing nothing...", err.getMessage(), scriptId); + return Op.NOOP; } } diff --git a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java index 528bb402a59e8..82b4458e59e2f 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java @@ -142,7 +142,7 @@ boolean evaluate(IngestDocument ingestDocument) { IngestConditionalScript.Factory factory = scriptService.compile(condition, IngestConditionalScript.CONTEXT); script = factory.newInstance(condition.getParams()); } - return script.execute(new UnmodifiableIngestData(new DynamicMap(ingestDocument.getSourceAndMetadata(), FUNCTIONS))); + return script.execute(new UnmodifiableIngestData(new DynamicMap(ingestDocument.getIngestContext(), FUNCTIONS))); } public Processor getInnerProcessor() { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index e6e8f428efb3a..513919a88d137 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -49,7 +49,7 @@ public final class IngestDocument { static final String TIMESTAMP = "timestamp"; - private final IngestSourceAndMetadata sourceAndMetadata; + private final IngestSourceAndMetadata ingestContext; private final Map ingestMetadata; // Contains all pipelines that have been executed for this document @@ -58,7 +58,7 @@ public final class IngestDocument { private boolean doNoSelfReferencesCheck = false; public IngestDocument(String index, String id, long version, String routing, VersionType versionType, Map source) { - this.sourceAndMetadata = new IngestSourceAndMetadata( + this.ingestContext = new IngestSourceAndMetadata( 
index, id, version, @@ -68,7 +68,7 @@ public IngestDocument(String index, String id, long version, String routing, Ver source ); this.ingestMetadata = new HashMap<>(); - this.ingestMetadata.put(TIMESTAMP, sourceAndMetadata.getTimestamp()); + this.ingestMetadata.put(TIMESTAMP, ingestContext.getTimestamp()); } /** @@ -77,8 +77,8 @@ public IngestDocument(String index, String id, long version, String routing, Ver public IngestDocument(IngestDocument other) { this( new IngestSourceAndMetadata( - deepCopyMap(other.sourceAndMetadata.getSource()), - deepCopyMap(other.sourceAndMetadata.getMetadata()), + deepCopyMap(other.ingestContext.getSource()), + deepCopyMap(other.ingestContext.getMetadata()), other.getIngestSourceAndMetadata().timestamp, other.getIngestSourceAndMetadata().validators ), @@ -89,9 +89,9 @@ public IngestDocument(IngestDocument other) { /** * Constructor to create an IngestDocument from its constituent maps. The maps are shallow copied. */ - public IngestDocument(Map sourceAndMetadata, Map ingestMetadata) { - Tuple, Map> sm = IngestSourceAndMetadata.splitSourceAndMetadata(sourceAndMetadata); - this.sourceAndMetadata = new IngestSourceAndMetadata( + public IngestDocument(Map ingestContext, Map ingestMetadata) { + Tuple, Map> sm = IngestSourceAndMetadata.splitSourceAndMetadata(ingestContext); + this.ingestContext = new IngestSourceAndMetadata( sm.v1(), sm.v2(), IngestSourceAndMetadata.getTimestamp(ingestMetadata), @@ -100,7 +100,7 @@ public IngestDocument(Map sourceAndMetadata, Map this.ingestMetadata = new HashMap<>(ingestMetadata); this.ingestMetadata.computeIfPresent(TIMESTAMP, (k, v) -> { if (v instanceof String) { - return this.sourceAndMetadata.getTimestamp(); + return this.ingestContext.getTimestamp(); } return v; }); @@ -109,8 +109,8 @@ public IngestDocument(Map sourceAndMetadata, Map /** * Constructor to create an IngestDocument from its constituent maps */ - IngestDocument(IngestSourceAndMetadata sourceAndMetadata, Map ingestMetadata) { - 
this.sourceAndMetadata = sourceAndMetadata; + IngestDocument(IngestSourceAndMetadata ingestContext, Map ingestMetadata) { + this.ingestContext = ingestContext; this.ingestMetadata = ingestMetadata; } @@ -714,8 +714,8 @@ public String renderTemplate(TemplateScript.Factory template) { private Map createTemplateModel() { return new LazyMap<>(() -> { - Map model = new HashMap<>(sourceAndMetadata); - model.put(SourceFieldMapper.NAME, sourceAndMetadata); + Map model = new HashMap<>(ingestContext); + model.put(SourceFieldMapper.NAME, ingestContext); // If there is a field in the source with the name '_ingest' it gets overwritten here, // if access to that field is required then it get accessed via '_source._ingest' model.put(INGEST_KEY, ingestMetadata); @@ -726,36 +726,36 @@ private Map createTemplateModel() { /** * Get source and metadata map */ - public Map getSourceAndMetadata() { - return sourceAndMetadata; + public Map getIngestContext() { + return ingestContext; } /** * Get source and metadata map as {@link IngestSourceAndMetadata} */ public IngestSourceAndMetadata getIngestSourceAndMetadata() { - return sourceAndMetadata; + return ingestContext; } /** * Get all Metadata values in a Map */ public Map getMetadataMap() { - return sourceAndMetadata.getMetadata(); + return ingestContext.getMetadata(); } /** * Get the strongly typed metadata */ - public org.elasticsearch.script.Metadata getMetadata() { - return sourceAndMetadata; + public org.elasticsearch.script.Metadata getContextMetadata() { + return ingestContext; } /** * Get all source values in a Map */ public Map getSource() { - return sourceAndMetadata.getSource(); + return ingestContext.getSource(); } /** @@ -899,17 +899,17 @@ public boolean equals(Object obj) { } IngestDocument other = (IngestDocument) obj; - return Objects.equals(sourceAndMetadata, other.sourceAndMetadata) && Objects.equals(ingestMetadata, other.ingestMetadata); + return Objects.equals(ingestContext, other.ingestContext) && 
Objects.equals(ingestMetadata, other.ingestMetadata); } @Override public int hashCode() { - return Objects.hash(sourceAndMetadata, ingestMetadata); + return Objects.hash(ingestContext, ingestMetadata); } @Override public String toString() { - return "IngestDocument{" + " sourceAndMetadata=" + sourceAndMetadata + ", ingestMetadata=" + ingestMetadata + '}'; + return "IngestDocument{" + " sourceAndMetadata=" + ingestContext + ", ingestMetadata=" + ingestMetadata + '}'; } public enum Metadata { @@ -956,7 +956,7 @@ private FieldPath(String path) { initialContext = ingestMetadata; newPath = path.substring(INGEST_KEY_PREFIX.length(), path.length()); } else { - initialContext = sourceAndMetadata; + initialContext = ingestContext; if (path.startsWith(SOURCE_PREFIX)) { newPath = path.substring(SOURCE_PREFIX.length(), path.length()); } else { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 67f0abae7b23d..4edf4bf62d0ec 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -947,7 +947,7 @@ private void innerExecute( private void postIngest(IngestDocument ingestDocument, IndexRequest indexRequest) { // cache timestamp from ingest source map - Object rawTimestamp = ingestDocument.getSourceAndMetadata().get(TimestampField.FIXED_TIMESTAMP_FIELD); + Object rawTimestamp = ingestDocument.getIngestContext().get(TimestampField.FIXED_TIMESTAMP_FIELD); if (rawTimestamp != null && indexRequest.getRawTimestamp() == null) { indexRequest.setRawTimestamp(rawTimestamp); } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java index 68d779be37812..598392f561d46 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java +++ 
b/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.VersionType; import org.elasticsearch.script.Metadata; +import org.elasticsearch.script.SourceAndMetadataMap; import java.time.ZonedDateTime; import java.util.AbstractCollection; @@ -41,38 +42,33 @@ * * The map is expected to be used by processors, server code should the typed getter and setters where possible. */ -class IngestSourceAndMetadata extends AbstractMap implements Metadata { +class IngestSourceAndMetadata extends SourceAndMetadataMap { protected final ZonedDateTime timestamp; /** * map of key to validating function. Should throw {@link IllegalArgumentException} on invalid value */ - static final Map> VALIDATORS = Map.of( + protected static final Map VALIDATORS = Map.of( IngestDocument.Metadata.INDEX.getFieldName(), - IngestSourceAndMetadata::stringValidator, + SourceAndMetadataMap::stringValidator, IngestDocument.Metadata.ID.getFieldName(), - IngestSourceAndMetadata::stringValidator, + SourceAndMetadataMap::stringValidator, IngestDocument.Metadata.ROUTING.getFieldName(), - IngestSourceAndMetadata::stringValidator, + SourceAndMetadataMap::stringValidator, IngestDocument.Metadata.VERSION.getFieldName(), - IngestSourceAndMetadata::versionValidator, + SourceAndMetadataMap::nonNullLongValidator, IngestDocument.Metadata.VERSION_TYPE.getFieldName(), - IngestSourceAndMetadata::versionTypeValidator, + SourceAndMetadataMap::versionTypeValidator, IngestDocument.Metadata.DYNAMIC_TEMPLATES.getFieldName(), - IngestSourceAndMetadata::mapValidator, + SourceAndMetadataMap::mapValidator, IngestDocument.Metadata.IF_SEQ_NO.getFieldName(), - IngestSourceAndMetadata::longValidator, + SourceAndMetadataMap::longValidator, IngestDocument.Metadata.IF_PRIMARY_TERM.getFieldName(), - IngestSourceAndMetadata::longValidator, + SourceAndMetadataMap::longValidator, IngestDocument.Metadata.TYPE.getFieldName(), - 
IngestSourceAndMetadata::stringValidator + SourceAndMetadataMap::stringValidator ); - protected final Map source; - protected final Map metadata; - protected final Map> validators; - private EntrySet entrySet; // cache to avoid recreation - /** * Create an IngestSourceAndMetadata with the given metadata, source and default validators */ @@ -97,17 +93,18 @@ class IngestSourceAndMetadata extends AbstractMap implements Met * @param validators validators to run on metadata map, if a key is in this map, the value is stored in metadata. * if null, use the default validators from {@link #VALIDATORS} */ - IngestSourceAndMetadata( + protected IngestSourceAndMetadata( Map source, Map metadata, ZonedDateTime timestamp, - Map> validators + Map validators ) { - this.source = source != null ? source : new HashMap<>(); - this.metadata = metadata != null ? metadata : new HashMap<>(); + super( + source != null ? source : new HashMap<>(), + metadata != null ? metadata : new HashMap<>(), + validators != null ? validators : VALIDATORS + ); this.timestamp = timestamp; - this.validators = validators != null ? 
validators : VALIDATORS; - validateMetadata(); } /** @@ -134,7 +131,7 @@ public static Tuple, Map> splitSourceAndMeta if (sourceAndMetadata instanceof IngestSourceAndMetadata ingestSourceAndMetadata) { return new Tuple<>(new HashMap<>(ingestSourceAndMetadata.source), new HashMap<>(ingestSourceAndMetadata.metadata)); } - Map metadata = Maps.newHashMapWithExpectedSize(IngestDocument.Metadata.values().length); + Map metadata = Maps.newHashMapWithExpectedSize(VALIDATORS.size()); Map source = new HashMap<>(sourceAndMetadata); for (String metadataName : VALIDATORS.keySet()) { if (sourceAndMetadata.containsKey(metadataName)) { @@ -161,442 +158,8 @@ public static ZonedDateTime getTimestamp(Map ingestMetadata) { return null; } - /** - * get the source map, if externally modified then the guarantees of this class are not enforced - */ - public Map getSource() { - return source; - } - - /** - * get the metadata map, if externally modified then the guarantees of this class are not enforced - */ - public Map getMetadata() { - return metadata; - } - - // These are available to scripts - public String getIndex() { - return getString(IngestDocument.Metadata.INDEX.getFieldName()); - } - - public void setIndex(String index) { - put(IngestDocument.Metadata.INDEX.getFieldName(), index); - } - - public String getId() { - return getString(IngestDocument.Metadata.ID.getFieldName()); - } - - public void setId(String id) { - put(IngestDocument.Metadata.ID.getFieldName(), id); - } - - public String getRouting() { - return getString(IngestDocument.Metadata.ROUTING.getFieldName()); - } - - public void setRouting(String routing) { - put(IngestDocument.Metadata.ROUTING.getFieldName(), routing); - } - - public String getVersionType() { - return getString(IngestDocument.Metadata.VERSION_TYPE.getFieldName()); - } - - public void setVersionType(String versionType) { - put(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), versionType); - } - - public long getVersion() { - Number version = 
getNumber(IngestDocument.Metadata.VERSION.getFieldName()); - assert version != null : IngestDocument.Metadata.VERSION.getFieldName() + " validation allowed null version"; - return version.longValue(); - } - - public void setVersion(long version) { - put(IngestDocument.Metadata.VERSION.getFieldName(), version); - } - // timestamp isn't backed by the map public ZonedDateTime getTimestamp() { return timestamp; } - - // These are not available to scripts - public Number getIfSeqNo() { - return getNumber(IngestDocument.Metadata.IF_SEQ_NO.getFieldName()); - } - - public Number getIfPrimaryTerm() { - return getNumber(IngestDocument.Metadata.IF_PRIMARY_TERM.getFieldName()); - } - - @SuppressWarnings("unchecked") - public Map getDynamicTemplates() { - return (Map) metadata.get(IngestDocument.Metadata.DYNAMIC_TEMPLATES.getFieldName()); - } - - /** - * Check that all metadata map contains only valid metadata and no extraneous keys and source map contains no metadata - */ - protected void validateMetadata() { - int numMetadata = 0; - for (Map.Entry> entry : validators.entrySet()) { - String key = entry.getKey(); - if (metadata.containsKey(key)) { - numMetadata++; - } - entry.getValue().accept(key, metadata.get(key)); - if (source.containsKey(key)) { - throw new IllegalArgumentException("Unexpected metadata key [" + key + "] in source with value [" + source.get(key) + "]"); - } - } - if (numMetadata < metadata.size()) { - Set keys = new HashSet<>(metadata.keySet()); - keys.removeAll(validators.keySet()); - throw new IllegalArgumentException( - "Unexpected metadata keys [" - + keys.stream().sorted().map(k -> k + ":" + metadata.get(k)).collect(Collectors.joining(", ")) - + "]" - ); - } - } - - /** - * Returns an entrySet that respects the validators of the map. - */ - @Override - public Set> entrySet() { - if (entrySet == null) { - entrySet = new EntrySet(source.entrySet(), metadata.entrySet()); - } - return entrySet; - } - - /** - * Associate a key with a value. 
If the key has a validator, it is applied before association. - * @throws IllegalArgumentException if value does not pass validation for the given key - */ - @Override - public Object put(String key, Object value) { - BiConsumer validator = validators.get(key); - if (validator != null) { - validator.accept(key, value); - return metadata.put(key, value); - } - return source.put(key, value); - } - - /** - * Remove the mapping of key. If the key has a validator, it is checked before key removal. - * @throws IllegalArgumentException if the validator does not allow the key to be removed - */ - @Override - public Object remove(Object key) { - // uses map directly to avoid AbstractMaps linear time implementation using entrySet() - if (key instanceof String strKey) { - BiConsumer validator = validators.get(key); - if (validator != null) { - validator.accept(strKey, null); - return metadata.remove(key); - } - } - return source.remove(key); - } - - /** - * Clear entire map. For each key in the map with a validator, that validator is checked as in {@link #remove(Object)}. 
- * @throws IllegalArgumentException if any validator does not allow the key to be removed, in this case the map is unmodified - */ - @Override - public void clear() { - // AbstractMap uses entrySet().clear(), it should be quicker to run through the validators, then call the wrapped maps clear - validators.forEach((k, v) -> { - if (metadata.containsKey(k)) { - v.accept(k, null); - } - }); - metadata.clear(); - source.clear(); - } - - @Override - public int size() { - // uses map directly to avoid creating an EntrySet via AbstractMaps implementation, which returns entrySet().size() - return source.size() + metadata.size(); - } - - @Override - public boolean containsValue(Object value) { - // uses map directly to avoid AbstractMaps linear time implementation using entrySet() - return metadata.containsValue(value) || source.containsValue(value); - } - - @Override - public boolean containsKey(Object key) { - // uses map directly to avoid AbstractMaps linear time implementation using entrySet() - return metadata.containsKey(key) || source.containsKey(key); - } - - @Override - public Object get(Object key) { - // uses map directly to avoid AbstractMaps linear time implementation using entrySet() - if (validators.get(key) != null) { - return metadata.get(key); - } - return source.get(key); - } - - /** - * Get the String version of the value associated with {@code key}, or null - */ - public String getString(Object key) { - return Objects.toString(get(key), null); - } - - /** - * Get the {@link Number} associated with key, or null - * @throws IllegalArgumentException if the value is not a {@link Number} - */ - public Number getNumber(Object key) { - Object value = get(key); - if (value == null) { - return null; - } - if (value instanceof Number number) { - return number; - } - throw new IllegalArgumentException( - "unexpected type for [" + key + "] with value [" + value + "], expected Number, got [" + value.getClass().getName() + "]" - ); - } - - /** - * Set of entries of 
the wrapped map that calls the appropriate validator before changing an entries value or removing an entry. - * - * Inherits {@link AbstractSet#removeAll(Collection)}, which calls the overridden {@link #remove(Object)} which performs validation. - * - * Inherits {@link AbstractCollection#retainAll(Collection)} and {@link AbstractCollection#clear()}, which both use - * {@link EntrySetIterator#remove()} for removal. - */ - class EntrySet extends AbstractSet> { - Set> sourceSet; - Set> metadataSet; - - EntrySet(Set> sourceSet, Set> metadataSet) { - this.sourceSet = sourceSet; - this.metadataSet = metadataSet; - } - - @Override - public Iterator> iterator() { - return new EntrySetIterator(sourceSet.iterator(), metadataSet.iterator()); - } - - @Override - public int size() { - return sourceSet.size() + metadataSet.size(); - } - - @Override - public boolean remove(Object o) { - if (metadataSet.contains(o)) { - if (o instanceof Map.Entry entry) { - if (entry.getKey()instanceof String key) { - BiConsumer validator = validators.get(key); - if (validator != null) { - validator.accept(key, null); - return metadataSet.remove(o); - } - } - } - } - return sourceSet.remove(o); - } - } - - /** - * Iterator over the wrapped map that returns a validating {@link Entry} on {@link #next()} and validates on {@link #remove()}. 
- * - * {@link #remove()} is called by remove in {@link AbstractMap#values()}, {@link AbstractMap#keySet()}, {@link AbstractMap#clear()} via - * {@link AbstractSet#clear()} - */ - class EntrySetIterator implements Iterator> { - final Iterator> sourceIter; - final Iterator> metadataIter; - - boolean sourceCur = true; - Entry cur; - - EntrySetIterator(Iterator> sourceIter, Iterator> metadataIter) { - this.sourceIter = sourceIter; - this.metadataIter = metadataIter; - } - - @Override - public boolean hasNext() { - return sourceIter.hasNext() || metadataIter.hasNext(); - } - - @Override - public Map.Entry next() { - sourceCur = sourceIter.hasNext(); - return cur = new Entry(sourceCur ? sourceIter.next() : metadataIter.next(), sourceCur); - } - - /** - * Remove current entry from the backing Map. Checks the Entry's key's validator, if one exists, before removal. - * @throws IllegalArgumentException if the validator does not allow the Entry to be removed - * @throws IllegalStateException if remove is called before {@link #next()} - */ - @Override - public void remove() { - if (cur == null) { - throw new IllegalStateException(); - } - if (sourceCur) { - sourceIter.remove(); - } else { - BiConsumer validator = validators.get(cur.getKey()); - if (validator != null) { - validator.accept(cur.getKey(), null); - } - metadataIter.remove(); - } - } - } - - /** - * Wrapped Map.Entry that calls the key's validator on {@link #setValue(Object)} - */ - class Entry implements Map.Entry { - final Map.Entry entry; - final boolean isSource; - - Entry(Map.Entry entry, boolean isSource) { - this.entry = entry; - this.isSource = isSource; - } - - @Override - public String getKey() { - return entry.getKey(); - } - - @Override - public Object getValue() { - return entry.getValue(); - } - - /** - * Associate the value with the Entry's key in the linked Map. 
If the Entry's key has a validator, it is applied before association - * @throws IllegalArgumentException if value does not pass validation for the Entry's key - */ - @Override - public Object setValue(Object value) { - if (isSource == false) { - BiConsumer validator = validators.get(entry.getKey()); - if (validator != null) { - validator.accept(entry.getKey(), value); - } - } - return entry.setValue(value); - } - } - - /** - * Allow a String or null - */ - protected static void stringValidator(String key, Object value) { - if (value == null || value instanceof String) { - return; - } - throw new IllegalArgumentException( - key + " must be null or a String but was [" + value + "] with type [" + value.getClass().getName() + "]" - ); - } - - /** - * Allow Numbers that can be represented as longs without loss of precision - */ - protected static void longValidator(String key, Object value) { - if (value == null) { - return; // Allow null version for now - } - if (value instanceof Number number) { - long version = number.longValue(); - // did we round? 
- if (number.doubleValue() == version) { - return; - } - } - throw new IllegalArgumentException( - key + " may only be set to an int or a long but was [" + value + "] with type [" + value.getClass().getName() + "]" - ); - } - - /** - * Version must be non-null and representable as a long without loss of precision - */ - protected static void versionValidator(String key, Object value) { - if (value == null) { - throw new IllegalArgumentException(key + " cannot be null"); - } - longValidator(key, value); - } - - /** - * Allow lower case Strings that map to VersionType values, or null - */ - protected static void versionTypeValidator(String key, Object value) { - if (value == null) { - return; - } - if (value instanceof String versionType) { - try { - VersionType.fromString(versionType); - return; - } catch (IllegalArgumentException ignored) {} - } - throw new IllegalArgumentException( - key - + " must be a null or one of [" - + Arrays.stream(VersionType.values()).map(vt -> VersionType.toString(vt)).collect(Collectors.joining(", ")) - + "] but was [" - + value - + "] with type [" - + value.getClass().getName() - + "]" - ); - } - - /** - * Allow maps - */ - protected static void mapValidator(String key, Object value) { - if (value == null || value instanceof Map) { - return; - } - throw new IllegalArgumentException( - key + " must be a null or a Map but was [" + value + "] with type [" + value.getClass().getName() + "]" - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof IngestSourceAndMetadata) == false) return false; - if (super.equals(o) == false) return false; - IngestSourceAndMetadata that = (IngestSourceAndMetadata) o; - return Objects.equals(timestamp, that.timestamp) - && source.equals(that.source) - && metadata.equals(that.metadata) - && validators.equals(that.validators); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, source, metadata, validators); - } } diff --git 
a/server/src/main/java/org/elasticsearch/script/Metadata.java b/server/src/main/java/org/elasticsearch/script/Metadata.java index e598a211c109a..ada69e394361f 100644 --- a/server/src/main/java/org/elasticsearch/script/Metadata.java +++ b/server/src/main/java/org/elasticsearch/script/Metadata.java @@ -42,16 +42,36 @@ public interface Metadata { void setVersion(long version); + default boolean hasVersion() { + throw new UnsupportedOperationException(); + } + /** * The version type of the document, {@link org.elasticsearch.index.VersionType} as a lower-case string. */ - String getVersionType(); + default String getVersionType() { + throw new UnsupportedOperationException(); + } /** * Set the version type of the document. * @param versionType {@link org.elasticsearch.index.VersionType} as a lower-case string */ - void setVersionType(String versionType); + default void setVersionType(String versionType) { + throw new UnsupportedOperationException(); + } + + default String getOp() { + throw new UnsupportedOperationException(); + } + + default void setOp(String op) { + throw new UnsupportedOperationException(); + } + + default String getType() { + throw new UnsupportedOperationException(); + } /** * Timestamp of this ingestion or update diff --git a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java new file mode 100644 index 0000000000000..f8af1f46cb746 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java @@ -0,0 +1,583 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.script; + +import org.elasticsearch.index.VersionType; +import org.elasticsearch.ingest.IngestDocument; + +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.AbstractCollection; +import java.util.AbstractMap; +import java.util.AbstractSet; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.stream.Collectors; + +/** + * Check if the key is metadata and, if so, that it can be set to value + * @param key the key to check + * @param value the value to check, if key is being deleted, value should be null. This means there + * is no difference between setting a key to null and deleting. + * @return {@code true} if the {@code key} is metadata and may be set to {@code value} + * {@code false} if the {@code key} is not metadata + * @throws IllegalArgumentException if {@code key} is metadata but may not be set to {@code value}.
+ */ + +/** + * + */ +public class SourceAndMetadataMap extends AbstractMap implements Metadata { + public static final String INDEX = "_index"; + public static final String ID = "_id"; + public static final String TYPE = "_type"; + public static final String ROUTING = "_routing"; + public static final String VERSION = "_version"; + public static final String VERSION_TYPE = "_version_type"; + + public static final String IF_SEQ_NO = "_if_seq_no"; + public static final String IF_PRIMARY_TERM = "_if_primary_term"; + public static final String DYNAMIC_TEMPLATES = "_dynamic_templates"; + + public String timestampKey; + public String opKey; + + protected final Map validators; + protected final Map source; + protected final Map metadata; + + protected SourceAndMetadataMap(Map source, Map metadata, Map validators) { + this.source = source; + this.metadata = metadata; + this.validators = validators; + validateMetadata(); + } + + protected SourceAndMetadataMap(Map source, Map metadata, Map validators, String timestampKey, String opKey) { + this.source = source; + this.metadata = metadata; + this.validators = validators; + this.timestampKey = timestampKey; + this.opKey = opKey; + validateMetadata(); + } + + protected AbstractSet> entrySet; // cache to avoid recreation + + /** + * get the source map, if externally modified then the guarantees of this class are not enforced + */ + public Map getSource() { + return source; + } + + /** + * get the metadata map, if externally modified then the guarantees of this class are not enforced + */ + public Map getMetadata() { + return metadata; + } + + /** + * Check that all metadata map contains only valid metadata and no extraneous keys and source map contains no metadata + */ + protected void validateMetadata() { + int numMetadata = 0; + for (Map.Entry entry : validators.entrySet()) { + String key = entry.getKey(); + if (metadata.containsKey(key)) { + numMetadata++; + } + entry.getValue().accept(MapOperation.INIT, key, 
metadata.get(key)); + if (source.containsKey(key)) { + throw new IllegalArgumentException("Unexpected metadata key [" + key + "] in source with value [" + source.get(key) + "]"); + } + } + if (numMetadata < metadata.size()) { + Set keys = new HashSet<>(metadata.keySet()); + keys.removeAll(validators.keySet()); + throw new IllegalArgumentException( + "Unexpected metadata keys [" + + keys.stream().sorted().map(k -> k + ":" + metadata.get(k)).collect(Collectors.joining(", ")) + + "]" + ); + } + } + + /** + * Returns an entrySet that respects the validators of the map. + */ + @Override + public Set> entrySet() { + if (entrySet == null) { + entrySet = new EntrySet(source.entrySet(), metadata.entrySet()); + } + return entrySet; + } + + /** + * Associate a key with a value. If the key has a validator, it is applied before association. + * @throws IllegalArgumentException if value does not pass validation for the given key + */ + @Override + public Object put(String key, Object value) { + Validator validator = validators.get(key); + if (validator != null) { + validator.accept(MapOperation.UPDATE, key, value); + return metadata.put(key, value); + } + return source.put(key, value); + } + + /** + * Remove the mapping of key. If the key has a validator, it is checked before key removal. + * @throws IllegalArgumentException if the validator does not allow the key to be removed + */ + @Override + public Object remove(Object key) { + // uses map directly to avoid AbstractMaps linear time implementation using entrySet() + if (key instanceof String strKey) { + Validator validator = validators.get(key); + if (validator != null) { + validator.accept(MapOperation.REMOVE, strKey, null); + return metadata.remove(key); + } + } + return source.remove(key); + } + + /** + * Clear entire map. For each key in the map with a validator, that validator is checked as in {@link #remove(Object)}. 
+ * @throws IllegalArgumentException if any validator does not allow the key to be removed, in this case the map is unmodified + */ + @Override + public void clear() { + // AbstractMap uses entrySet().clear(), it should be quicker to run through the validators, then call the wrapped maps clear + validators.forEach((k, v) -> { + if (metadata.containsKey(k)) { + v.accept(MapOperation.REMOVE, k, null); + } + }); + metadata.clear(); + source.clear(); + } + + @Override + public int size() { + // uses map directly to avoid creating an EntrySet via AbstractMaps implementation, which returns entrySet().size() + return source.size() + metadata.size(); + } + + @Override + public boolean containsValue(Object value) { + // uses map directly to avoid AbstractMaps linear time implementation using entrySet() + return metadata.containsValue(value) || source.containsValue(value); + } + + @Override + public boolean containsKey(Object key) { + // uses map directly to avoid AbstractMaps linear time implementation using entrySet() + return metadata.containsKey(key) || source.containsKey(key); + } + + @Override + public Object get(Object key) { + // uses map directly to avoid AbstractMaps linear time implementation using entrySet() + if (validators.get(key) != null) { + return metadata.get(key); + } + return source.get(key); + } + + /** + * Get the String version of the value associated with {@code key}, or null + */ + public String getString(Object key) { + return Objects.toString(get(key), null); + } + + /** + * Get the {@link Number} associated with key, or null + * @throws IllegalArgumentException if the value is not a {@link Number} + */ + public Number getNumber(Object key) { + Object value = get(key); + if (value == null) { + return null; + } + if (value instanceof Number number) { + return number; + } + throw new IllegalArgumentException( + "unexpected type for [" + key + "] with value [" + value + "], expected Number, got [" + value.getClass().getName() + "]" + ); + } + + /** + 
* Get the {@link ZonedDateTime} associated with key, or null. If the value stored is a {@link Number}, assumes that + * value represents milliseconds from epoch. + * @throws IllegalArgumentException if the value is neither a {@link Number} nor a {@link ZonedDateTime}. + */ + public ZonedDateTime getZonedDateTime(Object key) { + Object value = get(key); + if (value == null) { + return null; + } + if (value instanceof ZonedDateTime zdt) { + return zdt; + } else if (value instanceof Number number) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(number.longValue()), ZoneOffset.UTC); + } + throw new IllegalArgumentException( + "unexpected type for [" + key + "] with value [" + value + "], expected Number or ZonedDateTime, got [" + value.getClass().getName() + "]" + ); + } + + /** + * Puts the {@link ZonedDateTime} as a long representing milliseconds from epoch. + */ + public void putEpochMilli(String key, ZonedDateTime value) { + if (value == null) { + put(key, null); + return; + } + put(key, value.toInstant().toEpochMilli()); + } + + /** + * Set of entries of the wrapped map that calls the appropriate validator before changing an entries value or removing an entry. + * + * Inherits {@link AbstractSet#removeAll(Collection)}, which calls the overridden {@link #remove(Object)} which performs validation. + * + * Inherits {@link AbstractCollection#retainAll(Collection)} and {@link AbstractCollection#clear()}, which both use + * {@link EntrySetIterator#remove()} for removal. 
+ */ + class EntrySet extends AbstractSet> { + Set> sourceSet; + Set> metadataSet; + + EntrySet(Set> sourceSet, Set> metadataSet) { + this.sourceSet = sourceSet; + this.metadataSet = metadataSet; + } + + @Override + public Iterator> iterator() { + return new EntrySetIterator(sourceSet.iterator(), metadataSet.iterator()); + } + + @Override + public int size() { + return sourceSet.size() + metadataSet.size(); + } + + @Override + public boolean remove(Object o) { + if (metadataSet.contains(o)) { + if (o instanceof Map.Entry entry) { + if (entry.getKey()instanceof String key) { + Validator validator = validators.get(key); + if (validator != null) { + validator.accept(MapOperation.REMOVE, key, null); + return metadataSet.remove(o); + } + } + } + } + return sourceSet.remove(o); + } + } + + /** + * Iterator over the wrapped map that returns a validating {@link Entry} on {@link #next()} and validates on {@link #remove()}. + * + * {@link #remove()} is called by remove in {@link AbstractMap#values()}, {@link AbstractMap#keySet()}, {@link AbstractMap#clear()} via + * {@link AbstractSet#clear()} + */ + class EntrySetIterator implements Iterator> { + final Iterator> sourceIter; + final Iterator> metadataIter; + + boolean sourceCur = true; + Entry cur; + + EntrySetIterator(Iterator> sourceIter, Iterator> metadataIter) { + this.sourceIter = sourceIter; + this.metadataIter = metadataIter; + } + + @Override + public boolean hasNext() { + return sourceIter.hasNext() || metadataIter.hasNext(); + } + + @Override + public Map.Entry next() { + sourceCur = sourceIter.hasNext(); + return cur = new Entry(sourceCur ? sourceIter.next() : metadataIter.next(), sourceCur); + } + + /** + * Remove current entry from the backing Map. Checks the Entry's key's validator, if one exists, before removal. 
+ * @throws IllegalArgumentException if the validator does not allow the Entry to be removed + * @throws IllegalStateException if remove is called before {@link #next()} + */ + @Override + public void remove() { + if (cur == null) { + throw new IllegalStateException(); + } + if (sourceCur) { + sourceIter.remove(); + } else { + Validator validator = validators.get(cur.getKey()); + if (validator != null) { + validator.accept(MapOperation.REMOVE, cur.getKey(), null); + } + metadataIter.remove(); + } + } + } + + /** + * Wrapped Map.Entry that calls the key's validator on {@link #setValue(Object)} + */ + class Entry implements Map.Entry { + final Map.Entry entry; + final boolean isSource; + + Entry(Map.Entry entry, boolean isSource) { + this.entry = entry; + this.isSource = isSource; + } + + @Override + public String getKey() { + return entry.getKey(); + } + + @Override + public Object getValue() { + return entry.getValue(); + } + + /** + * Associate the value with the Entry's key in the linked Map. 
If the Entry's key has a validator, it is applied before association + * @throws IllegalArgumentException if value does not pass validation for the Entry's key + */ + @Override + public Object setValue(Object value) { + if (isSource == false) { + Validator validator = validators.get(entry.getKey()); + if (validator != null) { + validator.accept(MapOperation.UPDATE, entry.getKey(), value); + } + } + return entry.setValue(value); + } + } + + @Override + public String getIndex() { + return getString(INDEX); + } + + @Override + public void setIndex(String index) { + put(INDEX, index); + } + + @Override + public String getId() { + return getString(ID); + } + + @Override + public void setId(String id) { + put(ID, id); + } + + @Override + public String getRouting() { + return getString(ROUTING); + } + + @Override + public void setRouting(String routing) { + put(ROUTING, routing); + } + + @Override + public long getVersion() { + return getNumber(VERSION).longValue(); + } + + @Override + public void setVersion(long version) { + put(VERSION, version); + } + + @Override + public boolean hasVersion() { + if (containsKey(VERSION) == false) { + return false; + } + return get(VERSION) != null; + } + + @Override + public String getVersionType() { + return getString(VERSION_TYPE); + } + + @Override + public void setVersionType(String versionType) { + Metadata.super.setVersionType(versionType); + } + + @Override + public String getOp() { + if (opKey == null) { + throw new UnsupportedOperationException(); + } + return getString(opKey); + } + + @Override + public void setOp(String op) { + if (opKey == null) { + throw new UnsupportedOperationException(); + } + + put(opKey, op); + } + + @Override + public String getType() { + return getString(TYPE); + } + + @Override + public ZonedDateTime getTimestamp() { + if (timestampKey == null) { + throw new UnsupportedOperationException(); + } + + return getZonedDateTime(timestampKey); + } + + public Number getIfSeqNo() { + return
getNumber(IF_SEQ_NO); + } + + public Number getIfPrimaryTerm() { + return getNumber(IF_PRIMARY_TERM); + } + + @SuppressWarnings("unchecked") + public Map getDynamicTemplates() { + return (Map) metadata.get(DYNAMIC_TEMPLATES); + } + + /** + * Allow a String or null + */ + public static void stringValidator(MapOperation op, String key, Object value) { + if (op == MapOperation.REMOVE || value == null || value instanceof String) { + return; + } + throw new IllegalArgumentException( + key + " must be null or a String but was [" + value + "] with type [" + value.getClass().getName() + "]" + ); + } + + /** + * Allow Numbers that can be represented as longs without loss of precision + */ + public static void longValidator(MapOperation op, String key, Object value) { + if (op == MapOperation.REMOVE || value == null) { + return; + } + if (value instanceof Number number) { + long version = number.longValue(); + // did we round? + if (number.doubleValue() == version) { + return; + } + } + throw new IllegalArgumentException( + key + " may only be set to an int or a long but was [" + value + "] with type [" + value.getClass().getName() + "]" + ); + } + + /** + * Same as {@link #longValidator} but value cannot be null + */ + public static void nonNullLongValidator(MapOperation op, String key, Object value) { + if (op == MapOperation.REMOVE || value == null) { + throw new IllegalArgumentException(key + " cannot be null"); + } + longValidator(op, key, value); + } + + /** + * Allow lower case Strings that map to VersionType values, or null + */ + public static void versionTypeValidator(MapOperation op, String key, Object value) { + if (op == MapOperation.REMOVE || value == null) { + return; + } + if (value instanceof String versionType) { + try { + VersionType.fromString(versionType); + return; + } catch (IllegalArgumentException ignored) {} + } + throw new IllegalArgumentException( + key + + " must be a null or one of [" + + Arrays.stream(VersionType.values()).map(vt -> 
VersionType.toString(vt)).collect(Collectors.joining(", ")) + + "] but was [" + + value + + "] with type [" + + value.getClass().getName() + + "]" + ); + } + + /** + * Allow maps + */ + public static void mapValidator(MapOperation op, String key, Object value) { + if (op == MapOperation.REMOVE || value == null || value instanceof Map) { + return; + } + throw new IllegalArgumentException( + key + " must be a null or a Map but was [" + value + "] with type [" + value.getClass().getName() + "]" + ); + } + + public enum MapOperation { + INIT, + UPDATE, + REMOVE + } + + @FunctionalInterface + public interface Validator { + void accept(MapOperation op, String key, Object value); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/UpdateMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateMetadata.java new file mode 100644 index 0000000000000..88b5b27cd5634 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/UpdateMetadata.java @@ -0,0 +1,154 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.script; + +import org.elasticsearch.common.util.Maps; + +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +public class UpdateMetadata extends SourceAndMetadataMap { + protected static final String OP = "op"; + protected static final String TIMESTAMP = "_now"; + protected static final String SOURCE = "_source"; + + private static final String LEGACY_NOOP_STRING = "none"; + protected static final Set VALID_OPS = Set.of("noop", "index", "delete", LEGACY_NOOP_STRING); + + public static Map VALIDATORS = Map.of( + INDEX, + UpdateMetadata::setOnceStringValidator, + ID, + UpdateMetadata::setOnceStringValidator, + VERSION, + UpdateMetadata::setOnceLongValidator, + ROUTING, + UpdateMetadata::setOnceStringValidator, + TYPE, + UpdateMetadata::setOnceStringValidator, + OP, + opValidatorFromValidOps(VALID_OPS), + TIMESTAMP, + UpdateMetadata::setOnceLongValidator + ); + + public UpdateMetadata( + String index, + String id, + long version, + String routing, + String type, + String op, + long timestamp, + Map source + ) { + super(wrapSource(source), metadataMap(index, id, version, routing, type, op, timestamp), VALIDATORS, TIMESTAMP, OP); + } + + protected static Map wrapSource(Map source) { + Map wrapper = Maps.newHashMapWithExpectedSize(1); + wrapper.put(SOURCE, source); + return wrapper; + } + + protected static Map metadataMap( + String index, + String id, + long version, + String routing, + String type, + String op, + long timestamp + ) { + Map metadata = Maps.newHashMapWithExpectedSize(VALIDATORS.size()); + metadata.put(INDEX, index); + metadata.put(ID, id); + metadata.put(VERSION, version); + metadata.put(ROUTING, routing); + metadata.put(TYPE, type); + metadata.put(OP, op); + metadata.put(TIMESTAMP, timestamp); + return metadata; + } + + @Override + public boolean hasVersion() { + return metadata.get(VERSION) != null; + } + + @Override + public long getVersion() { + if 
(hasVersion() == false) { + return Long.MIN_VALUE; + } + return super.getVersion(); + } + + @Override + public String getOp() { + String op = super.getOp(); + if (LEGACY_NOOP_STRING.equals(op)) { + return "noop"; + } + return op; + } + + @Override + public void setOp(String op) { + if (LEGACY_NOOP_STRING.equals(op)) { + throw new IllegalArgumentException(LEGACY_NOOP_STRING + " is deprecated, use 'noop' instead"); + } + super.setOp(op); + } + + public static void setOnceStringValidator(MapOperation op, String key, Object value) { + if (op != MapOperation.INIT) { + throw new IllegalArgumentException("Cannot " + op.name().toLowerCase(Locale.ROOT) + " key [" + key + "]"); + } + stringValidator(op, key, value); + } + + public static void setOnceLongValidator(MapOperation op, String key, Object value) { + if (op != MapOperation.INIT) { + throw new IllegalArgumentException("Cannot " + op.name().toLowerCase(Locale.ROOT) + " key [" + key + "]"); + } + longValidator(op, key, value); + } + + public static Validator opValidatorFromValidOps(Set validOps) { + return new Validator() { + @Override + public void accept(MapOperation op, String key, Object value) { + if (op == MapOperation.REMOVE) { + throw new IllegalArgumentException("Cannot remove [" + key + "]"); + } + if (value instanceof String opStr) { + if (validOps.contains(opStr)) { + return; + } + throw new IllegalArgumentException( + key + " must be one of " + validOps.stream().sorted().collect(Collectors.joining(",")) + ", not [" + opStr + "]" + ); + } + throw new IllegalArgumentException( + key + + " must be String and one of " + + validOps.stream().sorted().collect(Collectors.joining(",")) + + " but was [" + + value + + "] with type [" + + value.getClass().getName() + + "]" + ); + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/script/UpdateScript.java b/server/src/main/java/org/elasticsearch/script/UpdateScript.java index 578e2fa7f29b1..ac7edd726d5de 100644 --- 
a/server/src/main/java/org/elasticsearch/script/UpdateScript.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateScript.java @@ -12,7 +12,7 @@ import java.util.Map; /** - * An update script. + * A script used in the update API */ public abstract class UpdateScript { @@ -24,12 +24,14 @@ public abstract class UpdateScript { /** The generic runtime parameters for the script. */ private final Map params; - /** The update context for the script. */ + private final Metadata metadata; + private final Map ctx; - public UpdateScript(Map params, Map ctx) { + public UpdateScript(Map params, Map ctx, Metadata metadata) { this.params = params; this.ctx = ctx; + this.metadata = metadata; } /** Return the parameters for this script. */ @@ -42,9 +44,14 @@ public Map getCtx() { return ctx; } + /** Return the update metadata for this script */ + public Metadata metadata() { + return metadata; + } + public abstract void execute(); public interface Factory { - UpdateScript newInstance(Map params, Map ctx); + UpdateScript newInstance(Map params, Map ctx, Metadata metadata); } } diff --git a/server/src/main/java/org/elasticsearch/script/UpsertMetadata.java b/server/src/main/java/org/elasticsearch/script/UpsertMetadata.java new file mode 100644 index 0000000000000..47871a85a6421 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/UpsertMetadata.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.script; + +/** + * Metadata for insert via upsert in the Update context + */ +public class UpsertMetadata /*extends UpdateMetadata*/ { +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index 958c59f67a838..7bf2d75ba3ef0 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -88,8 +88,8 @@ public void testExecuteVerboseItem() throws Exception { assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorTag(), equalTo("test-id")); assertVerboseResult(simulateDocumentVerboseResult.getProcessorResults().get(1), pipeline.getId(), ingestDocument); assertThat( - simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getSourceAndMetadata(), - not(sameInstance(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata())) + simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getIngestContext(), + not(sameInstance(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getIngestContext())) ); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); } @@ -381,7 +381,7 @@ public boolean isAsync() { result.getIngestDocument().getMetadataMap().get(IngestDocument.Metadata.ID.getFieldName()), equalTo(Integer.toString(id)) ); - assertThat(result.getIngestDocument().getSourceAndMetadata().get("processed"), is(true)); + assertThat(result.getIngestDocument().getIngestContext().get("processed"), is(true)); } } @@ -399,7 +399,7 @@ private static void assertVerboseResult( assertThat(simulateVerboseIngestDocument, not(sameInstance(expectedIngestDocument))); 
assertIngestDocument(simulateVerboseIngestDocument, expectedIngestDocument); - assertThat(simulateVerboseIngestDocument.getSourceAndMetadata(), not(sameInstance(expectedIngestDocument.getSourceAndMetadata()))); + assertThat(simulateVerboseIngestDocument.getIngestContext(), not(sameInstance(expectedIngestDocument.getIngestContext()))); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java index e505dfc2ce64a..27606163f477c 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java @@ -109,8 +109,8 @@ public boolean isAsync() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); ingestDocument.setFieldValue(conditionalField, falseValue); execProcessor(processor, ingestDocument, (result, e) -> {}); - assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(falseValue)); - assertThat(ingestDocument.getSourceAndMetadata(), not(hasKey("foo"))); + assertThat(ingestDocument.getIngestContext().get(conditionalField), is(falseValue)); + assertThat(ingestDocument.getIngestContext(), not(hasKey("foo"))); assertStats(processor, 0, 0, 0); assertEquals(scriptName, processor.getCondition()); @@ -124,8 +124,8 @@ public boolean isAsync() { ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); ingestDocument.setFieldValue(conditionalField, trueValue); execProcessor(processor, ingestDocument, (result, e) -> {}); - assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(trueValue)); - assertThat(ingestDocument.getSourceAndMetadata().get("foo"), is("bar")); + assertThat(ingestDocument.getIngestContext().get(conditionalField), is(trueValue)); + assertThat(ingestDocument.getIngestContext().get("foo"), is("bar")); assertStats(processor, 1, 0, 1); 
ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 3f757cb490984..02f0db13f3524 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -328,11 +328,11 @@ public void testHasFieldEmptyPathAfterStrippingOutPrefix() { public void testSimpleSetFieldValue() { ingestDocument.setFieldValue("new_field", "foo"); - assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), equalTo("foo")); + assertThat(ingestDocument.getIngestContext().get("new_field"), equalTo("foo")); ingestDocument.setFieldValue("_ttl", "ttl"); - assertThat(ingestDocument.getSourceAndMetadata().get("_ttl"), equalTo("ttl")); + assertThat(ingestDocument.getIngestContext().get("_ttl"), equalTo("ttl")); ingestDocument.setFieldValue("_source.another_field", "bar"); - assertThat(ingestDocument.getSourceAndMetadata().get("another_field"), equalTo("bar")); + assertThat(ingestDocument.getIngestContext().get("another_field"), equalTo("bar")); ingestDocument.setFieldValue("_ingest.new_field", "new_value"); assertThat(ingestDocument.getIngestMetadata().size(), equalTo(2)); assertThat(ingestDocument.getIngestMetadata().get("new_field"), equalTo("new_value")); @@ -342,15 +342,15 @@ public void testSimpleSetFieldValue() { public void testSetFieldValueNullValue() { ingestDocument.setFieldValue("new_field", null); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(true)); - assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), nullValue()); + assertThat(ingestDocument.getIngestContext().containsKey("new_field"), equalTo(true)); + assertThat(ingestDocument.getIngestContext().get("new_field"), nullValue()); } @SuppressWarnings("unchecked") public void 
testNestedSetFieldValue() { ingestDocument.setFieldValue("a.b.c.d", "foo"); - assertThat(ingestDocument.getSourceAndMetadata().get("a"), instanceOf(Map.class)); - Map a = (Map) ingestDocument.getSourceAndMetadata().get("a"); + assertThat(ingestDocument.getIngestContext().get("a"), instanceOf(Map.class)); + Map a = (Map) ingestDocument.getIngestContext().get("a"); assertThat(a.get("b"), instanceOf(Map.class)); Map b = (Map) a.get("b"); assertThat(b.get("c"), instanceOf(Map.class)); @@ -362,14 +362,14 @@ public void testNestedSetFieldValue() { public void testSetFieldValueOnExistingField() { ingestDocument.setFieldValue("foo", "newbar"); - assertThat(ingestDocument.getSourceAndMetadata().get("foo"), equalTo("newbar")); + assertThat(ingestDocument.getIngestContext().get("foo"), equalTo("newbar")); } @SuppressWarnings("unchecked") public void testSetFieldValueOnExistingParent() { ingestDocument.setFieldValue("fizz.new", "bar"); - assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class)); - Map innerMap = (Map) ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(ingestDocument.getIngestContext().get("fizz"), instanceOf(Map.class)); + Map innerMap = (Map) ingestDocument.getIngestContext().get("fizz"); assertThat(innerMap.get("new"), instanceOf(String.class)); String value = (String) innerMap.get("new"); assertThat(value, equalTo("bar")); @@ -407,12 +407,12 @@ public void testSetFieldValueNullName() { public void testSetSourceObject() { ingestDocument.setFieldValue("_source", "value"); - assertThat(ingestDocument.getSourceAndMetadata().get("_source"), equalTo("value")); + assertThat(ingestDocument.getIngestContext().get("_source"), equalTo("value")); } public void testSetIngestObject() { ingestDocument.setFieldValue("_ingest", "value"); - assertThat(ingestDocument.getSourceAndMetadata().get("_ingest"), equalTo("value")); + assertThat(ingestDocument.getIngestContext().get("_ingest"), equalTo("value")); } public void 
testSetIngestSourceObject() { @@ -439,14 +439,14 @@ public void testSetEmptyPathAfterStrippingOutPrefix() { public void testListSetFieldValueNoIndexProvided() { ingestDocument.setFieldValue("list", "value"); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + Object object = ingestDocument.getIngestContext().get("list"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value")); } public void testListAppendFieldValue() { ingestDocument.appendFieldValue("list", "new_value"); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + Object object = ingestDocument.getIngestContext().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -458,7 +458,7 @@ public void testListAppendFieldValue() { public void testListAppendFieldValueWithDuplicate() { ingestDocument.appendFieldValue("list2", "foo", false); - Object object = ingestDocument.getSourceAndMetadata().get("list2"); + Object object = ingestDocument.getIngestContext().get("list2"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -468,7 +468,7 @@ public void testListAppendFieldValueWithDuplicate() { public void testListAppendFieldValueWithoutDuplicate() { ingestDocument.appendFieldValue("list2", "foo2", false); - Object object = ingestDocument.getSourceAndMetadata().get("list2"); + Object object = ingestDocument.getIngestContext().get("list2"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -478,7 +478,7 @@ public void testListAppendFieldValueWithoutDuplicate() { public void testListAppendFieldValues() { ingestDocument.appendFieldValue("list", Arrays.asList("item1", "item2", "item3")); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + Object object = ingestDocument.getIngestContext().get("list"); assertThat(object, instanceOf(List.class)); 
@SuppressWarnings("unchecked") List list = (List) object; @@ -492,7 +492,7 @@ public void testListAppendFieldValues() { public void testListAppendFieldValuesWithoutDuplicates() { ingestDocument.appendFieldValue("list2", List.of("foo", "bar", "baz", "foo2"), false); - Object object = ingestDocument.getSourceAndMetadata().get("list2"); + Object object = ingestDocument.getIngestContext().get("list2"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -505,7 +505,7 @@ public void testListAppendFieldValuesWithoutDuplicates() { public void testAppendFieldValueToNonExistingList() { ingestDocument.appendFieldValue("non_existing_list", "new_value"); - Object object = ingestDocument.getSourceAndMetadata().get("non_existing_list"); + Object object = ingestDocument.getIngestContext().get("non_existing_list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -515,7 +515,7 @@ public void testAppendFieldValueToNonExistingList() { public void testAppendFieldValuesToNonExistingList() { ingestDocument.appendFieldValue("non_existing_list", Arrays.asList("item1", "item2", "item3")); - Object object = ingestDocument.getSourceAndMetadata().get("non_existing_list"); + Object object = ingestDocument.getIngestContext().get("non_existing_list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -527,7 +527,7 @@ public void testAppendFieldValuesToNonExistingList() { public void testAppendFieldValueConvertStringToList() { ingestDocument.appendFieldValue("fizz.buzz", "new_value"); - Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + Object object = ingestDocument.getIngestContext().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -542,7 +542,7 @@ public void testAppendFieldValueConvertStringToList() { public void 
testAppendFieldValuesConvertStringToList() { ingestDocument.appendFieldValue("fizz.buzz", Arrays.asList("item1", "item2", "item3")); - Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + Object object = ingestDocument.getIngestContext().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -559,7 +559,7 @@ public void testAppendFieldValuesConvertStringToList() { public void testAppendFieldValueConvertIntegerToList() { ingestDocument.appendFieldValue("int", 456); - Object object = ingestDocument.getSourceAndMetadata().get("int"); + Object object = ingestDocument.getIngestContext().get("int"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -570,7 +570,7 @@ public void testAppendFieldValueConvertIntegerToList() { public void testAppendFieldValuesConvertIntegerToList() { ingestDocument.appendFieldValue("int", Arrays.asList(456, 789)); - Object object = ingestDocument.getSourceAndMetadata().get("int"); + Object object = ingestDocument.getIngestContext().get("int"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -582,7 +582,7 @@ public void testAppendFieldValuesConvertIntegerToList() { public void testAppendFieldValueConvertMapToList() { ingestDocument.appendFieldValue("fizz", Collections.singletonMap("field", "value")); - Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + Object object = ingestDocument.getIngestContext().get("fizz"); assertThat(object, instanceOf(List.class)); List list = (List) object; assertThat(list.size(), equalTo(2)); @@ -595,7 +595,7 @@ public void testAppendFieldValueConvertMapToList() { public void testAppendFieldValueToNull() { ingestDocument.appendFieldValue("fizz.foo_null", "new_value"); - Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + Object object = ingestDocument.getIngestContext().get("fizz"); 
assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -609,7 +609,7 @@ public void testAppendFieldValueToNull() { public void testAppendFieldValueToListElement() { ingestDocument.appendFieldValue("fizz.list.0", "item2"); - Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + Object object = ingestDocument.getIngestContext().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -629,7 +629,7 @@ public void testAppendFieldValueToListElement() { public void testAppendFieldValuesToListElement() { ingestDocument.appendFieldValue("fizz.list.0", Arrays.asList("item2", "item3", "item4")); - Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + Object object = ingestDocument.getIngestContext().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -651,7 +651,7 @@ public void testAppendFieldValuesToListElement() { public void testAppendFieldValueConvertStringListElementToList() { ingestDocument.appendFieldValue("fizz.list.0.0", "new_value"); - Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + Object object = ingestDocument.getIngestContext().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -675,7 +675,7 @@ public void testAppendFieldValueConvertStringListElementToList() { public void testAppendFieldValuesConvertStringListElementToList() { ingestDocument.appendFieldValue("fizz.list.0.0", Arrays.asList("item2", "item3", "item4")); - Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + Object object = ingestDocument.getIngestContext().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -701,7 +701,7 @@ public void testAppendFieldValuesConvertStringListElementToList() { public void 
testAppendFieldValueListElementConvertMapToList() { ingestDocument.appendFieldValue("list.0", Collections.singletonMap("item2", "value2")); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + Object object = ingestDocument.getIngestContext().get("list"); assertThat(object, instanceOf(List.class)); List list = (List) object; assertThat(list.size(), equalTo(2)); @@ -715,7 +715,7 @@ public void testAppendFieldValueListElementConvertMapToList() { public void testAppendFieldValueToNullListElement() { ingestDocument.appendFieldValue("list.1", "new_value"); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + Object object = ingestDocument.getIngestContext().get("list"); assertThat(object, instanceOf(List.class)); List list = (List) object; assertThat(list.get(1), instanceOf(List.class)); @@ -727,7 +727,7 @@ public void testAppendFieldValueToNullListElement() { public void testAppendFieldValueToListOfMaps() { ingestDocument.appendFieldValue("list", Collections.singletonMap("item2", "value2")); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + Object object = ingestDocument.getIngestContext().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -739,7 +739,7 @@ public void testAppendFieldValueToListOfMaps() { public void testListSetFieldValueIndexProvided() { ingestDocument.setFieldValue("list.1", "value"); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + Object object = ingestDocument.getIngestContext().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -750,7 +750,7 @@ public void testListSetFieldValueIndexProvided() { public void testSetFieldValueListAsPartOfPath() { ingestDocument.setFieldValue("list.0.field", "new_value"); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + Object object = ingestDocument.getIngestContext().get("list"); 
assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -798,43 +798,43 @@ public void testSetFieldValueEmptyName() { public void testRemoveField() { ingestDocument.removeField("foo"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(10)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("foo"), equalTo(false)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(10)); + assertThat(ingestDocument.getIngestContext().containsKey("foo"), equalTo(false)); ingestDocument.removeField("_index"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(9)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("_index"), equalTo(false)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(9)); + assertThat(ingestDocument.getIngestContext().containsKey("_index"), equalTo(false)); ingestDocument.removeField("_source.fizz"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(false)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(8)); + assertThat(ingestDocument.getIngestContext().containsKey("fizz"), equalTo(false)); assertThat(ingestDocument.getIngestMetadata().size(), equalTo(1)); ingestDocument.removeField("_ingest.timestamp"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(8)); assertThat(ingestDocument.getIngestMetadata().size(), equalTo(0)); } public void testRemoveInnerField() { ingestDocument.removeField("fizz.buzz"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); - assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); + assertThat(ingestDocument.getIngestContext().get("fizz"), instanceOf(Map.class)); 
@SuppressWarnings("unchecked") - Map map = (Map) ingestDocument.getSourceAndMetadata().get("fizz"); + Map map = (Map) ingestDocument.getIngestContext().get("fizz"); assertThat(map.size(), equalTo(3)); assertThat(map.containsKey("buzz"), equalTo(false)); ingestDocument.removeField("fizz.foo_null"); assertThat(map.size(), equalTo(2)); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); + assertThat(ingestDocument.getIngestContext().containsKey("fizz"), equalTo(true)); ingestDocument.removeField("fizz.1"); assertThat(map.size(), equalTo(1)); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); + assertThat(ingestDocument.getIngestContext().containsKey("fizz"), equalTo(true)); ingestDocument.removeField("fizz.list"); assertThat(map.size(), equalTo(0)); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); + assertThat(ingestDocument.getIngestContext().containsKey("fizz"), equalTo(true)); } public void testRemoveNonExistingField() { @@ -869,8 +869,8 @@ public void testRemoveSourceObject() { public void testRemoveIngestObject() { ingestDocument.removeField("_ingest"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(10)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("_ingest"), equalTo(false)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(10)); + assertThat(ingestDocument.getIngestContext().containsKey("_ingest"), equalTo(false)); } public void testRemoveEmptyPathAfterStrippingOutPrefix() { @@ 
-891,9 +891,9 @@ public void testRemoveEmptyPathAfterStrippingOutPrefix() { public void testListRemoveField() { ingestDocument.removeField("list.0.field"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); - Object object = ingestDocument.getSourceAndMetadata().get("list"); + assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); + assertThat(ingestDocument.getIngestContext().containsKey("list"), equalTo(true)); + Object object = ingestDocument.getIngestContext().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -1037,7 +1037,7 @@ public void testIngestMetadataTimestamp() throws Exception { public void testCopyConstructor() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); IngestDocument copy = new IngestDocument(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata(), not(sameInstance(copy.getSourceAndMetadata()))); + assertThat(ingestDocument.getIngestContext(), not(sameInstance(copy.getIngestContext()))); assertIngestDocument(ingestDocument, copy); } @@ -1051,8 +1051,8 @@ public void testCopyConstructorWithZonedDateTime() { IngestDocument original = TestIngestDocument.withDefaultVersion(sourceAndMetadata); IngestDocument copy = new IngestDocument(original); - assertThat(copy.getSourceAndMetadata().get("beforeClockChange"), equalTo(original.getSourceAndMetadata().get("beforeClockChange"))); - assertThat(copy.getSourceAndMetadata().get("afterClockChange"), equalTo(original.getSourceAndMetadata().get("afterClockChange"))); + assertThat(copy.getIngestContext().get("beforeClockChange"), equalTo(original.getIngestContext().get("beforeClockChange"))); + assertThat(copy.getIngestContext().get("afterClockChange"), equalTo(original.getIngestContext().get("afterClockChange"))); } public void testSetInvalidSourceField() throws Exception { 
@@ -1076,7 +1076,7 @@ public void testSetInvalidSourceField() throws Exception { public void testDeepCopy() { IngestDocument copiedDoc = new IngestDocument( - IngestDocument.deepCopyMap(ingestDocument.getSourceAndMetadata()), + IngestDocument.deepCopyMap(ingestDocument.getIngestContext()), IngestDocument.deepCopyMap(ingestDocument.getIngestMetadata()) ); assertArrayEquals( diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 4471efe555157..0a082f34ee95a 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -524,7 +524,7 @@ public String getType() { ingestService.getPipeline("_id1").execute(ingestDocument, (ingestDocument1, e) -> exceptionHolder[0] = e); assertThat(exceptionHolder[0], notNullValue()); assertThat(exceptionHolder[0].getMessage(), containsString("reload me")); - assertThat(ingestDocument.getSourceAndMetadata().get("_field"), nullValue()); + assertThat(ingestDocument.getIngestContext().get("_field"), nullValue()); } externalProperty[0] = true; @@ -535,7 +535,7 @@ public String getType() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestService.getPipeline("_id1").execute(ingestDocument, (ingestDocument1, e) -> holder[0] = e); assertThat(holder[0], nullValue()); - assertThat(ingestDocument.getSourceAndMetadata().get("_field"), equalTo("_value")); + assertThat(ingestDocument.getIngestContext().get("_field"), equalTo("_value")); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestSourceAndMetadataTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestSourceAndMetadataTests.java index f0085abda82ee..640c0ec34844b 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestSourceAndMetadataTests.java +++ 
b/server/src/test/java/org/elasticsearch/ingest/IngestSourceAndMetadataTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.index.VersionType; +import org.elasticsearch.script.SourceAndMetadataMap; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -76,7 +77,7 @@ public String toString() { } }); source.put("missing", null); - map = new IngestSourceAndMetadata(source, metadata, null, replaceValidator("_version", IngestSourceAndMetadata::longValidator)); + map = new IngestSourceAndMetadata(source, metadata, null, replaceValidator("_version", SourceAndMetadataMap::longValidator)); assertNull(map.getString("missing")); assertNull(map.getString("no key")); assertEquals("myToString()", map.getString("toStr")); @@ -143,11 +144,11 @@ public void testRemove() { String canRemove = "canRemove"; Map metadata = new HashMap<>(); metadata.put(cannotRemove, "value"); - map = new IngestSourceAndMetadata(new HashMap<>(), metadata, null, Map.of(cannotRemove, (k, v) -> { + map = new IngestSourceAndMetadata(new HashMap<>(), metadata, null, Map.of(cannotRemove, (o, k, v) -> { if (v == null) { throw new IllegalArgumentException(k + " cannot be null or removed"); } - }, canRemove, (k, v) -> {})); + }, canRemove, (o, k, v) -> {})); String msg = "cannotRemove cannot be null or removed"; IllegalArgumentException err = expectThrows(IllegalArgumentException.class, () -> map.remove(cannotRemove)); assertEquals(msg, err.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java index 68ec921186599..445c04c8d737c 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java @@ -197,7 +197,7 @@ public void testPipelineProcessorWithPipelineChain() throws Exception { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); // start the chain ingestDocument.executePipeline(pipeline1, (result, e) -> {}); - assertNotNull(ingestDocument.getSourceAndMetadata().get(key1)); + assertNotNull(ingestDocument.getIngestContext().get(key1)); // check the stats IngestStats.Stats pipeline1Stats = pipeline1.getMetrics().createStats(); diff --git a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java index 7054778b8528a..70c8a44d19ca9 100644 --- a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java @@ -763,8 +763,8 @@ public void testActualPipelineProcessorRepeatedInvocation() throws Exception { // each invocation updates key1 with a random int assertNotEquals( - resultList.get(1).getIngestDocument().getSourceAndMetadata().get(key1), - resultList.get(3).getIngestDocument().getSourceAndMetadata().get(key1) + resultList.get(1).getIngestDocument().getIngestContext().get(key1), + resultList.get(3).getIngestDocument().getIngestContext().get(key1) ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java b/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java index e2bea702baff5..69a50d8f6d04a 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java @@ -21,7 +21,7 @@ public class IngestDocumentMatcher { */ public static void assertIngestDocument(IngestDocument docA, IngestDocument docB) { if ((deepEquals(docA.getIngestMetadata(), docB.getIngestMetadata(), true) - && deepEquals(docA.getSourceAndMetadata(), docB.getSourceAndMetadata(), false)) == false) { + && deepEquals(docA.getIngestContext(), docB.getIngestContext(), false)) == 
false) { throw new AssertionError("Expected [" + docA + "] but received [" + docB + "]."); } } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java index d9cc17e88adb5..166ab3238dae2 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -62,7 +62,7 @@ public static String randomLeafFieldName(Random random) { * field. */ public static String randomExistingFieldName(Random random, IngestDocument ingestDocument) { - Map source = new TreeMap<>(ingestDocument.getSourceAndMetadata()); + Map source = new TreeMap<>(ingestDocument.getIngestContext()); Map.Entry randomEntry = getRandomEntry(random, source.entrySet()); String key = randomEntry.getKey(); while (randomEntry.getValue() instanceof Map) { @@ -111,7 +111,7 @@ public static String addRandomField(Random random, IngestDocument ingestDocument */ public static boolean canAddField(String path, IngestDocument ingestDocument) { String[] pathElements = path.split("\\."); - Map innerMap = ingestDocument.getSourceAndMetadata(); + Map innerMap = ingestDocument.getIngestContext(); if (pathElements.length > 1) { for (int i = 0; i < pathElements.length - 1; i++) { Object currentLevel = innerMap.get(pathElements[i]); diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java index b6b6949a07290..8a669785cbee7 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java @@ -11,11 +11,11 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.VersionType; +import 
org.elasticsearch.script.SourceAndMetadataMap; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; import java.util.Map; -import java.util.function.BiConsumer; /** * Construct ingest documents for testing purposes @@ -36,7 +36,7 @@ public static IngestDocument withNullableVersion(Map sourceAndMe * _versions. Normally null _version is not allowed, but many tests don't care about that invariant. */ public static IngestDocument ofIngestWithNullableVersion(Map sourceAndMetadata, Map ingestMetadata) { - Map> validators = replaceValidator(VERSION, IngestSourceAndMetadata::longValidator); + Map validators = replaceValidator(VERSION, SourceAndMetadataMap::longValidator); Tuple, Map> sm = IngestSourceAndMetadata.splitSourceAndMetadata(sourceAndMetadata); IngestSourceAndMetadata withNullableVersion = new IngestSourceAndMetadata(sm.v1(), sm.v2(), null, validators); return new IngestDocument(withNullableVersion, ingestMetadata); @@ -57,8 +57,8 @@ public static IngestDocument withDefaultVersion(Map sourceAndMet * Return the default validator map with a single validator replaced, if that validator was already present in the default validators * map */ - protected static Map> replaceValidator(String key, BiConsumer validator) { - Map> validators = new HashMap<>(IngestSourceAndMetadata.VALIDATORS); + protected static Map replaceValidator(String key, SourceAndMetadataMap.Validator validator) { + Map validators = new HashMap<>(IngestSourceAndMetadata.VALIDATORS); validators.computeIfPresent(key, (k, v) -> validator); return validators; } @@ -67,7 +67,7 @@ protected static Map> replaceValidator(String * Create an IngestDocument with a metadata map and validators. The metadata map is passed by reference, not copied, so callers * can observe changes to the map directly. 
*/ - public static IngestDocument ofMetadataWithValidator(Map metadata, Map> validators) { + public static IngestDocument ofMetadataWithValidator(Map metadata, Map validators) { return new IngestDocument(new IngestSourceAndMetadata(new HashMap<>(), metadata, null, validators), new HashMap<>()); } diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 0b9f7f8972620..0e4b950214a46 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -166,7 +166,7 @@ public boolean execute(Map ctx) { }; return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(UpdateScript.class)) { - UpdateScript.Factory factory = (parameters, ctx) -> new UpdateScript(parameters, ctx) { + UpdateScript.Factory factory = (parameters, ctx, md) -> new UpdateScript(parameters, ctx, md) { @Override public void execute() { final Map vars = new HashMap<>(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java index 1663c63eeeae3..1a3958a7b03cd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java @@ -61,7 +61,7 @@ public void testWriteToDocAndSerialize() throws IOException { builder.field(metadata.getKey(), metadata.getValue().toString()); } } - Map source = IngestDocument.deepCopyMap(document.getSourceAndMetadata()); + Map source = IngestDocument.deepCopyMap(document.getIngestContext()); metadataMap.keySet().forEach(mD -> source.remove(mD)); builder.field("_source", source); 
builder.field("_ingest", document.getIngestMetadata()); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java index 0d7f900188ba1..ca2132a00c380 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java @@ -112,7 +112,7 @@ public void testNoMatch() throws Exception { VersionType.INTERNAL, Map.of("domain", "elastic.com") ); - int numProperties = ingestDocument.getSourceAndMetadata().size(); + int numProperties = ingestDocument.getIngestContext().size(); // Run IngestDocument[] holder = new IngestDocument[1]; processor.execute(ingestDocument, (result, e) -> holder[0] = result); @@ -133,7 +133,7 @@ public void testNoMatch() throws Exception { assertThat(termQueryBuilder.fieldName(), equalTo("domain")); assertThat(termQueryBuilder.value(), equalTo("elastic.com")); // Check result - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(numProperties)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(numProperties)); } public void testSearchFailure() throws Exception { @@ -204,11 +204,11 @@ public void testIgnoreKeyMissing() throws Exception { ); IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1L, "_routing", VersionType.INTERNAL, Map.of()); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(5)); IngestDocument[] holder = new IngestDocument[1]; processor.execute(ingestDocument, (result, e) -> holder[0] = result); assertThat(holder[0], notNullValue()); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5)); + assertThat(ingestDocument.getIngestContext().size(), equalTo(5)); } { MatchProcessor processor = new MatchProcessor( diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java index 48d98c981e14e..2d2c3bcb1a23c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java @@ -144,7 +144,7 @@ void handleResponse(InferModelAction.Response response, IngestDocument ingestDoc } InferModelAction.Request buildRequest(IngestDocument ingestDocument) { - Map fields = new HashMap<>(ingestDocument.getSourceAndMetadata()); + Map fields = new HashMap<>(ingestDocument.getIngestContext()); // Add ingestMetadata as previous processors might have added metadata from which we are predicting (see: foreach processor) if (ingestDocument.getIngestMetadata().isEmpty() == false) { fields.put(INGEST_KEY, ingestDocument.getIngestMetadata()); From 7d52c593eaa2374fad2c26c30d7aba89a09a47de Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 15:32:48 -0500 Subject: [PATCH 02/36] Create validators --- .../ingest/common/RenameProcessorTests.java | 6 +- .../geoip/GeoIpProcessorFactoryTests.java | 5 +- .../action/update/UpdateHelper.java | 89 +++++++++---- .../elasticsearch/ingest/IngestDocument.java | 2 +- .../ingest/IngestSourceAndMetadata.java | 99 +++++++++++--- .../script/SourceAndMetadataMap.java | 124 ++---------------- ...data.java => UpdateSourceAndMetadata.java} | 48 +++++-- .../elasticsearch/script/UpsertMetadata.java | 15 --- .../script/UpsertSourceAndMetadata.java | 60 +++++++++ .../ingest/TestIngestDocument.java | 5 +- 10 files changed, 263 insertions(+), 190 deletions(-) rename server/src/main/java/org/elasticsearch/script/{UpdateMetadata.java => UpdateSourceAndMetadata.java} (74%) delete mode 100644 server/src/main/java/org/elasticsearch/script/UpsertMetadata.java create mode 100644 
server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index 47461df98fc9b..d186e9ff72655 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -140,11 +140,11 @@ public void testRenameAtomicOperationSetFails() throws Exception { Map metadata = new HashMap<>(); metadata.put("list", Collections.singletonList("item")); - IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator(metadata, Map.of("new_field", (k, v) -> { + IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator(metadata, Map.of("new_field", (o, k, v) -> { if (v != null) { throw new UnsupportedOperationException(); } - }, "list", (k, v) -> {})); + }, "list", (o, k, v) -> {})); Processor processor = createRenameProcessor("list", "new_field", false); try { processor.execute(ingestDocument); @@ -160,7 +160,7 @@ public void testRenameAtomicOperationRemoveFails() throws Exception { Map metadata = new HashMap<>(); metadata.put("list", Collections.singletonList("item")); - IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator(metadata, Map.of("list", (k, v) -> { + IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator(metadata, Map.of("list", (o, k, v) -> { if (v == null) { throw new UnsupportedOperationException(); } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 9a421e36bddf6..b52bcd76eb6f0 100644 --- 
a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -473,10 +473,7 @@ public void testDatabaseNotReadyYet() throws Exception { ); processor.execute(ingestDocument); assertThat(ingestDocument.getIngestContext().get("geoip"), nullValue()); - assertThat( - ingestDocument.getIngestContext().get("tags"), - equalTo(List.of("_geoip_database_unavailable_GeoLite2-City.mmdb")) - ); + assertThat(ingestDocument.getIngestContext().get("tags"), equalTo(List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); } copyDatabaseFile(geoipTmpDir, "GeoLite2-City-Test.mmdb"); diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index f25bc89c54b37..2496278605e1d 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -32,6 +32,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.UpdateScript; +import org.elasticsearch.script.UpdateSourceAndMetadata; +import org.elasticsearch.script.UpsertSourceAndMetadata; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -86,10 +88,17 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult * Execute a scripted upsert, where there is an existing upsert document and a script to be executed. The script is executed and a new * Tuple of operation and updated {@code _source} is returned. 
*/ - Tuple> executeScriptedUpsert(Script script, UpdateScript.Metadata metadata) { + Tuple> executeScriptedUpsert(Script script, UpsertSourceAndMetadata srcAndMeta) { // Tell the script that this is a create and not an update (insert from upsert) - UpdateScript.Metadata md = executeScript(script, metadata); - return new Tuple<>(lenientGetOp(md, logger, script.getIdOrCode()), md.getSource()); + srcAndMeta = executeScript(script, srcAndMeta); + UpdateOpType operation = UpdateOpType.lenientFromString(srcAndMeta.getOp(), logger, script.getIdOrCode()); + if (operation != UpdateOpType.CREATE && operation != UpdateOpType.NONE) { + // Only valid options for an upsert script are "create" (the default) or "none", meaning abort upsert + logger.warn("Invalid upsert operation [{}] for script [{}], doing nothing...", operation, script.getIdOrCode()); + operation = UpdateOpType.NONE; + } + + return new Tuple<>(operation, srcAndMeta.getSource()); } /** @@ -104,13 +113,17 @@ Result prepareUpsert(ShardId shardId, UpdateRequest request, final GetResult get if (request.scriptedUpsert() && request.script() != null) { // Run the script to perform the create logic IndexRequest upsert = request.upsertRequest(); - Tuple> upsertResult = executeScriptedUpsert( - request.script, - UpdateScript.insert(getResult.getIndex(), getResult.getId(), Op.CREATE, nowInMillis.getAsLong(), upsert.sourceAsMap()) + UpsertSourceAndMetadata srcAndMeta = new UpsertSourceAndMetadata( + getResult.getIndex(), + getResult.getId(), + UpdateOpType.CREATE.toString(), + nowInMillis.getAsLong(), + upsert.sourceAsMap() ); + Tuple> upsertResult = executeScriptedUpsert(request.script, srcAndMeta); switch (upsertResult.v1()) { case CREATE -> indexRequest = Requests.indexRequest(request.index()).source(upsertResult.v2()); - case NOOP -> { + case NONE -> { UpdateResponse update = new UpdateResponse( shardId, getResult.getId(), @@ -221,23 +234,23 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, 
GetRes final Tuple> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); final XContentType updateSourceContentType = sourceAndContent.v1(); - UpdateScript.Metadata md = executeScript( + UpdateSourceAndMetadata srcAndMeta = executeScript( request.script, - UpdateScript.update( + new UpdateSourceAndMetadata( getResult.getIndex(), getResult.getId(), getResult.getVersion(), routing, - Op.INDEX, // The default operation is "index" - nowInMillis.getAsLong(), MapperService.SINGLE_MAPPING_NAME, + UpdateOpType.INDEX.toString(), // The default operation is "index" + nowInMillis.getAsLong(), sourceAndContent.v2() ) ); - Op op = lenientGetOp(md, logger, request.script.getIdOrCode()); - final Map updatedSourceAsMap = md.getSource(); + UpdateOpType operation = UpdateOpType.lenientFromString(srcAndMeta.getOp(), logger, request.script.getIdOrCode()); + final Map updatedSourceAsMap = srcAndMeta.getSource(); - switch (op) { + switch (operation) { case INDEX -> { final IndexRequest indexRequest = Requests.indexRequest(request.index()) .id(request.id()) @@ -288,11 +301,11 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes } } - private Metadata executeScript(Script script, Map ctx) { + private T executeScript(Script script, T metadata) { try { if (scriptService != null) { UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT); - UpdateScript executableScript = factory.newInstance(script.getParams(), metadata); + UpdateScript executableScript = factory.newInstance(script.getParams(), metadata, metadata); executableScript.execute(); } } catch (Exception e) { @@ -386,13 +399,43 @@ public XContentType updateSourceContentType() { } } - protected Op lenientGetOp(UpdateScript.Metadata md, Logger logger, String scriptId) { - try { - return md.getOp(); - } catch (IllegalArgumentException err) { - // TODO: can we remove this leniency yet?? 
(this comment from 1907c466, April 2017 -@stu) - logger.warn("[{}] for script [{}], doing nothing...", err.getMessage(), scriptId); - return Op.NOOP; + /** + * After executing the script, this is the type of operation that will be used for subsequent actions. This corresponds to the "ctx.op" + * variable inside of scripts. + */ + enum UpdateOpType { + CREATE("create"), + INDEX("index"), + DELETE("delete"), + NONE("none"); + + private final String name; + + UpdateOpType(String name) { + this.name = name; + } + + public static UpdateOpType lenientFromString(String operation, Logger logger, String scriptId) { + switch (operation) { + case "create": + return UpdateOpType.CREATE; + case "index": + return UpdateOpType.INDEX; + case "delete": + return UpdateOpType.DELETE; + case "noop": + case "none": + return UpdateOpType.NONE; + default: + // TODO: can we remove this leniency yet?? + logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", operation, scriptId); + return UpdateOpType.NONE; + } + } + + @Override + public String toString() { + return name; } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 513919a88d137..65efd335cfefa 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -80,7 +80,7 @@ public IngestDocument(IngestDocument other) { deepCopyMap(other.ingestContext.getSource()), deepCopyMap(other.ingestContext.getMetadata()), other.getIngestSourceAndMetadata().timestamp, - other.getIngestSourceAndMetadata().validators + other.getIngestSourceAndMetadata().getValidators() ), deepCopyMap(other.ingestMetadata) ); diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java index 598392f561d46..3833efb1e5bf3 100644 --- 
a/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java @@ -11,22 +11,12 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.VersionType; -import org.elasticsearch.script.Metadata; import org.elasticsearch.script.SourceAndMetadataMap; import java.time.ZonedDateTime; -import java.util.AbstractCollection; -import java.util.AbstractMap; -import java.util.AbstractSet; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.BiConsumer; import java.util.stream.Collectors; /** @@ -43,29 +33,34 @@ * The map is expected to be used by processors, server code should the typed getter and setters where possible. */ class IngestSourceAndMetadata extends SourceAndMetadataMap { + public static final String VERSION_TYPE = "_version_type"; + public static final String IF_SEQ_NO = "_if_seq_no"; + public static final String IF_PRIMARY_TERM = "_if_primary_term"; + public static final String DYNAMIC_TEMPLATES = "_dynamic_templates"; + protected final ZonedDateTime timestamp; /** * map of key to validating function. 
Should throw {@link IllegalArgumentException} on invalid value */ protected static final Map VALIDATORS = Map.of( - IngestDocument.Metadata.INDEX.getFieldName(), + INDEX, SourceAndMetadataMap::stringValidator, - IngestDocument.Metadata.ID.getFieldName(), + ID, SourceAndMetadataMap::stringValidator, - IngestDocument.Metadata.ROUTING.getFieldName(), + ROUTING, SourceAndMetadataMap::stringValidator, - IngestDocument.Metadata.VERSION.getFieldName(), + VERSION, SourceAndMetadataMap::nonNullLongValidator, - IngestDocument.Metadata.VERSION_TYPE.getFieldName(), - SourceAndMetadataMap::versionTypeValidator, - IngestDocument.Metadata.DYNAMIC_TEMPLATES.getFieldName(), + VERSION_TYPE, + IngestSourceAndMetadata::versionTypeValidator, + DYNAMIC_TEMPLATES, SourceAndMetadataMap::mapValidator, - IngestDocument.Metadata.IF_SEQ_NO.getFieldName(), + IF_SEQ_NO, SourceAndMetadataMap::longValidator, - IngestDocument.Metadata.IF_PRIMARY_TERM.getFieldName(), + IF_PRIMARY_TERM, SourceAndMetadataMap::longValidator, - IngestDocument.Metadata.TYPE.getFieldName(), + TYPE, SourceAndMetadataMap::stringValidator ); @@ -162,4 +157,68 @@ public static ZonedDateTime getTimestamp(Map ingestMetadata) { public ZonedDateTime getTimestamp() { return timestamp; } + + public long getVersion() { + Number version = getNumber(VERSION); + assert version != null : VERSION + " validation allowed null version"; + return version.longValue(); + } + + @Override + public String getVersionType() { + return getString(VERSION_TYPE); + } + + @Override + public void setVersionType(String versionType) { + put(VERSION_TYPE, versionType); + } + + @Override + public String getType() { + return getString(TYPE); + } + + public Number getIfSeqNo() { + return getNumber(IF_SEQ_NO); + } + + public Number getIfPrimaryTerm() { + return getNumber(IF_PRIMARY_TERM); + } + + @SuppressWarnings("unchecked") + public Map getDynamicTemplates() { + return (Map) metadata.get(DYNAMIC_TEMPLATES); + } + + public Map getValidators() { + return 
validators; + } + + /** + * Allow lower case Strings that map to VersionType values, or null + */ + public static void versionTypeValidator(MapOperation op, String key, Object value) { + if (op == MapOperation.REMOVE || value == null) { + return; + } + if (value instanceof String versionType) { + try { + VersionType.fromString(versionType); + return; + } catch (IllegalArgumentException ignored) {} + } + throw new IllegalArgumentException( + key + + " must be a null or one of [" + + Arrays.stream(VersionType.values()).map(vt -> VersionType.toString(vt)).collect(Collectors.joining(", ")) + + "] but was [" + + value + + "] with type [" + + value.getClass().getName() + + "]" + ); + } + } diff --git a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java index f8af1f46cb746..7955b0060ac1a 100644 --- a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java +++ b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java @@ -8,52 +8,32 @@ package org.elasticsearch.script; -import org.elasticsearch.index.VersionType; -import org.elasticsearch.ingest.IngestDocument; - import java.time.Instant; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.AbstractCollection; import java.util.AbstractMap; import java.util.AbstractSet; -import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.function.BiConsumer; import java.util.stream.Collectors; /** - * Check if the key is metadata and, if so, that it can be set to value - * @param key the key to check - * @param value the value to check, if key is being deleted, value should be null. This means there - * is no difference between setting a key to null and deleting. 
- * @return {@code true} if the {@param key} is metdata and may be set to {@param value} - * {@code false} if the {@param key} is not metadata - * @throws IllegalArgumentException if {@param key} is metadata but may not be set to {@param value}. - */ - -/** + * Container that holds the source and metadata for write scripts. Acts like a map for backwards compatibilty with + * ctx and implements {@link Metadata} so that scripts can use the newer API for setting. * + * Common metadata keys are */ -public class SourceAndMetadataMap extends AbstractMap implements Metadata { +public abstract class SourceAndMetadataMap extends AbstractMap implements Metadata { public static final String INDEX = "_index"; public static final String ID = "_id"; public static final String TYPE = "_type"; public static final String ROUTING = "_routing"; public static final String VERSION = "_version"; - public static final String VERSION_TYPE = "_version_type"; - - public static final String IF_SEQ_NO = "_if_seq_no"; - public static final String IF_PRIMARY_TERM = "_if_primary_term"; - public static final String DYNAMIC_TEMPLATES = "_dynamic_templates"; - - public String timestampKey; - public String opKey; protected final Map validators; protected final Map source; @@ -66,15 +46,6 @@ protected SourceAndMetadataMap(Map source, Map m validateMetadata(); } - protected SourceAndMetadataMap(Map source, Map metadata, Map validators, String timestampKey, String opKey) { - this.source = source; - this.metadata = metadata; - this.validators = validators; - this.timestampKey = timestampKey; - this.opKey = opKey; - validateMetadata(); - } - protected AbstractSet> entrySet; // cache to avoid recreation /** @@ -242,7 +213,13 @@ public ZonedDateTime getZonedDateTime(Object key) { return ZonedDateTime.ofInstant(Instant.ofEpochMilli(number.longValue()), ZoneOffset.UTC); } throw new IllegalArgumentException( - "unexpected type for [" + key + "] with value [" + value + "], expected Number or ZonedDateTime, got 
[" + value.getClass().getName() + "]" + "unexpected type for [" + + key + + "] with value [" + + value + + "], expected Number or ZonedDateTime, got [" + + value.getClass().getName() + + "]" ); } @@ -438,60 +415,6 @@ public boolean hasVersion() { return get(VERSION) == null; } - @Override - public String getVersionType() { - return getString(VERSION_TYPE); - } - - @Override - public void setVersionType(String versionType) { - Metadata.super.setVersionType(versionType); - } - - @Override - public String getOp() { - if (opKey == null) { - throw new UnsupportedOperationException(); - } - return getString(opKey); - } - - @Override - public void setOp(String op) { - if (opKey == null) { - throw new UnsupportedOperationException(); - } - - put(opKey, op); - } - - @Override - public String getType() { - return getString(TYPE); - } - - @Override - public ZonedDateTime getTimestamp() { - if (timestampKey == null) { - throw new UnsupportedOperationException(); - } - - return getZonedDateTime(timestampKey); - } - - public Number getIfSeqNo() { - return getNumber(IF_SEQ_NO); - } - - public Number getIfPrimaryTerm() { - return getNumber(IF_PRIMARY_TERM); - } - - @SuppressWarnings("unchecked") - public Map getDynamicTemplates() { - return (Map) metadata.get(DYNAMIC_TEMPLATES); - } - /** * Allow a String or null */ @@ -533,31 +456,6 @@ public static void nonNullLongValidator(MapOperation op, String key, Object valu longValidator(op, key, value); } - /** - * Allow lower case Strings that map to VersionType values, or null - */ - public static void versionTypeValidator(MapOperation op, String key, Object value) { - if (op == MapOperation.REMOVE || value == null) { - return; - } - if (value instanceof String versionType) { - try { - VersionType.fromString(versionType); - return; - } catch (IllegalArgumentException ignored) {} - } - throw new IllegalArgumentException( - key - + " must be a null or one of [" - + Arrays.stream(VersionType.values()).map(vt -> 
VersionType.toString(vt)).collect(Collectors.joining(", ")) - + "] but was [" - + value - + "] with type [" - + value.getClass().getName() - + "]" - ); - } - /** * Allow maps */ diff --git a/server/src/main/java/org/elasticsearch/script/UpdateMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java similarity index 74% rename from server/src/main/java/org/elasticsearch/script/UpdateMetadata.java rename to server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java index 88b5b27cd5634..b06ff22f88db7 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java @@ -10,37 +10,43 @@ import org.elasticsearch.common.util.Maps; +import java.time.ZonedDateTime; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -public class UpdateMetadata extends SourceAndMetadataMap { +/** + * Source and metadata for update (as opposed to insert via upsert) in the Update context. 
+ */ +public class UpdateSourceAndMetadata extends SourceAndMetadataMap { protected static final String OP = "op"; protected static final String TIMESTAMP = "_now"; protected static final String SOURCE = "_source"; - private static final String LEGACY_NOOP_STRING = "none"; + // AbstractAsyncBulkByScrollAction.OpType uses 'noop' rather than 'none', so unify on 'noop' but allow 'none' in + // the ctx map + protected static final String LEGACY_NOOP_STRING = "none"; protected static final Set VALID_OPS = Set.of("noop", "index", "delete", LEGACY_NOOP_STRING); public static Map VALIDATORS = Map.of( INDEX, - UpdateMetadata::setOnceStringValidator, + UpdateSourceAndMetadata::setOnceStringValidator, ID, - UpdateMetadata::setOnceStringValidator, + UpdateSourceAndMetadata::setOnceStringValidator, VERSION, - UpdateMetadata::setOnceLongValidator, + UpdateSourceAndMetadata::setOnceLongValidator, ROUTING, - UpdateMetadata::setOnceStringValidator, + UpdateSourceAndMetadata::setOnceStringValidator, TYPE, - UpdateMetadata::setOnceStringValidator, + UpdateSourceAndMetadata::setOnceStringValidator, OP, opValidatorFromValidOps(VALID_OPS), TIMESTAMP, - UpdateMetadata::setOnceLongValidator + UpdateSourceAndMetadata::setOnceLongValidator ); - public UpdateMetadata( + public UpdateSourceAndMetadata( String index, String id, long version, @@ -50,7 +56,11 @@ public UpdateMetadata( long timestamp, Map source ) { - super(wrapSource(source), metadataMap(index, id, version, routing, type, op, timestamp), VALIDATORS, TIMESTAMP, OP); + super(wrapSource(source), metadataMap(index, id, version, routing, type, op, timestamp), VALIDATORS); + } + + protected UpdateSourceAndMetadata(Map source, Map metadata, Map validators) { + super(wrapSource(source), metadata, validators); } protected static Map wrapSource(Map source) { @@ -109,6 +119,24 @@ public void setOp(String op) { super.setOp(op); } + @Override + @SuppressWarnings("unchecked") + public Map getSource() { + Map wrapped = super.getSource(); + 
Object rawSource = wrapped.get(SOURCE); + if (rawSource instanceof Map map) { + return (Map) map; + } + throw new IllegalArgumentException( + "Expected source to be a map, instead was [" + rawSource + "] with type [" + rawSource.getClass().getCanonicalName() + "]" + ); + } + + @Override + public ZonedDateTime getTimestamp() { + return getZonedDateTime(TIMESTAMP); + } + public static void setOnceStringValidator(MapOperation op, String key, Object value) { if (op != MapOperation.INIT) { throw new IllegalArgumentException("Cannot " + op.name().toLowerCase(Locale.ROOT) + " key [" + key + "]"); diff --git a/server/src/main/java/org/elasticsearch/script/UpsertMetadata.java b/server/src/main/java/org/elasticsearch/script/UpsertMetadata.java deleted file mode 100644 index 47871a85a6421..0000000000000 --- a/server/src/main/java/org/elasticsearch/script/UpsertMetadata.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.script; - -/** - * Metadata for insert via upsert in the Update context - */ -public class UpsertMetadata /*extends UpdateMetadata*/ { -} diff --git a/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java new file mode 100644 index 0000000000000..36360b1ccc620 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.script; + +import org.elasticsearch.common.util.Maps; + +import java.util.Map; +import java.util.Set; + +/** + * Metadata for insert via upsert in the Update context + */ +public class UpsertSourceAndMetadata extends UpdateSourceAndMetadata { + protected static final Set VALID_OPS = Set.of("noop", "create", LEGACY_NOOP_STRING); + + public static Map VALIDATORS = Map.of( + INDEX, + UpdateSourceAndMetadata::setOnceStringValidator, + ID, + UpdateSourceAndMetadata::setOnceStringValidator, + OP, + opValidatorFromValidOps(VALID_OPS), + TIMESTAMP, + UpdateSourceAndMetadata::setOnceLongValidator + ); + + public UpsertSourceAndMetadata(String index, String id, String op, long timestamp, Map source) { + super(source, metadataMap(index, id, op, timestamp), VALIDATORS); + } + + protected static Map metadataMap(String index, String id, String op, long timestamp) { + Map metadata = Maps.newHashMapWithExpectedSize(VALIDATORS.size()); + metadata.put(INDEX, index); + metadata.put(ID, id); + metadata.put(OP, op); + metadata.put(TIMESTAMP, timestamp); + return metadata; + } + + @Override + public String getRouting() { + throw new IllegalStateException("routing is unavailable for insert"); + } + + @Override + public long getVersion() { + throw new IllegalStateException("version is unavailable for insert"); + } + + @Override + public boolean hasVersion() { + throw new IllegalStateException("version is unavailable for insert"); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java index 8a669785cbee7..dda921946b1eb 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java +++ 
b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java @@ -67,7 +67,10 @@ protected static Map replaceValidator(St * Create an IngestDocument with a metadata map and validators. The metadata map is passed by reference, not copied, so callers * can observe changes to the map directly. */ - public static IngestDocument ofMetadataWithValidator(Map metadata, Map validators) { + public static IngestDocument ofMetadataWithValidator( + Map metadata, + Map validators + ) { return new IngestDocument(new IngestSourceAndMetadata(new HashMap<>(), metadata, null, validators), new HashMap<>()); } From 3fab6628ef1c4f8a3d2121d0d434c282da87c3d3 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 15:39:20 -0500 Subject: [PATCH 03/36] revert "sourceAndMetadata" from "ingestContext" --- .../attachment/AttachmentProcessorTests.java | 4 +- .../ingest/common/DateProcessor.java | 2 +- .../ingest/common/DotExpanderProcessor.java | 2 +- .../ingest/common/JsonProcessor.java | 2 +- .../ingest/common/RemoveProcessor.java | 2 +- .../ingest/common/ScriptProcessor.java | 2 +- .../ingest/common/CsvProcessorTests.java | 4 +- .../common/DateIndexNameProcessorTests.java | 12 +- .../common/DotExpanderProcessorTests.java | 6 +- .../ingest/common/ForEachProcessorTests.java | 4 +- .../ingest/common/JsonProcessorTests.java | 4 +- .../ingest/common/RenameProcessorTests.java | 12 +- .../ingest/common/ScriptProcessorTests.java | 10 +- .../ingest/common/UriPartsProcessorTests.java | 4 +- .../ingest/geoip/GeoIpDownloaderIT.java | 10 +- ...gDatabasesWhilePerformingGeoLookupsIT.java | 4 +- .../geoip/GeoIpProcessorFactoryTests.java | 16 +-- .../ingest/geoip/GeoIpProcessorTests.java | 42 +++--- .../useragent/UserAgentProcessorTests.java | 14 +- .../action/ingest/AsyncIngestProcessorIT.java | 4 +- .../elasticsearch/index/FinalPipelineIT.java | 8 +- .../ingest/WriteableIngestDocument.java | 2 +- .../ingest/ConditionalProcessor.java | 2 +- 
.../elasticsearch/ingest/IngestDocument.java | 46 +++---- .../elasticsearch/ingest/IngestService.java | 2 +- .../ingest/SimulateExecutionServiceTests.java | 8 +- .../ingest/ConditionalProcessorTests.java | 8 +- .../ingest/IngestDocumentTests.java | 120 +++++++++--------- .../ingest/IngestServiceTests.java | 4 +- .../ingest/PipelineProcessorTests.java | 2 +- .../ingest/TrackingResultProcessorTests.java | 4 +- .../ingest/IngestDocumentMatcher.java | 2 +- .../ingest/RandomDocumentPicks.java | 4 +- .../results/InferenceResultsTestCase.java | 2 +- .../xpack/enrich/MatchProcessorTests.java | 8 +- .../inference/ingest/InferenceProcessor.java | 2 +- 36 files changed, 192 insertions(+), 192 deletions(-) diff --git a/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 553a3937e40a4..1fead50a600e7 100644 --- a/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -326,7 +326,7 @@ public void testParseAsBytesArray() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map attachmentData = (Map) ingestDocument.getIngestContext().get("target_field"); + Map attachmentData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(attachmentData.keySet(), containsInAnyOrder("language", "content", "content_type", "content_length")); assertThat(attachmentData.get("language"), is("en")); @@ -443,7 +443,7 @@ private Map parseDocument( attachmentProcessor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map attachmentData = (Map) ingestDocument.getIngestContext().get("target_field"); + Map attachmentData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); return attachmentData; } 
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java index 2fa344456dd71..031ed9cf86bf7 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java @@ -101,7 +101,7 @@ public IngestDocument execute(IngestDocument ingestDocument) { Exception lastException = null; for (Function, Function> dateParser : dateParsers) { try { - dateTime = dateParser.apply(ingestDocument.getIngestContext()).apply(value); + dateTime = dateParser.apply(ingestDocument.getSourceAndMetadata()).apply(value); break; } catch (Exception e) { // try the next parser and keep track of the exceptions diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java index 586e97801a0bb..5174ea614c0ee 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java @@ -45,7 +45,7 @@ public IngestDocument execute(IngestDocument ingestDocument) throws Exception { map = ingestDocument.getFieldValue(this.path, Map.class); } else { pathToExpand = field; - map = ingestDocument.getIngestContext(); + map = ingestDocument.getSourceAndMetadata(); } if (this.field.equals("*")) { diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java index 8bcce51b9d92c..83cd59bc1b4be 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java +++ 
b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java @@ -145,7 +145,7 @@ public static void recursiveMerge(Map target, Map IngestDocument.Metadata.isMetadata(documentField) == false) .filter(documentField -> shouldKeep(documentField, fieldsToKeep, document) == false) diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index 2c76c9b054be8..44d4edec73670 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -75,7 +75,7 @@ public IngestDocument execute(IngestDocument document) { if (factory == null) { factory = scriptService.compile(script, IngestScript.CONTEXT); } - factory.newInstance(script.getParams(), document.getContextMetadata(), document.getIngestContext()).execute(); + factory.newInstance(script.getParams(), document.getContextMetadata(), document.getSourceAndMetadata()).execute(); return document; } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/CsvProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/CsvProcessorTests.java index 7e9d868d1d037..e111d5f18d210 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/CsvProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/CsvProcessorTests.java @@ -310,13 +310,13 @@ public void testEmptyHeaders() throws Exception { assumeTrue("single run only", quote.isEmpty()); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "abc,abc"); - HashMap metadata = new HashMap<>(ingestDocument.getIngestContext()); + HashMap metadata = new 
HashMap<>(ingestDocument.getSourceAndMetadata()); CsvProcessor processor = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, new String[0], false, ',', '"', false, null); processor.execute(ingestDocument); - assertEquals(metadata, ingestDocument.getIngestContext()); + assertEquals(metadata, ingestDocument.getSourceAndMetadata()); } private IngestDocument processDocument(String[] headers, String csv) { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java index 48e64f3984345..a79580d743a0b 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java @@ -43,7 +43,7 @@ public void testJavaPattern() throws Exception { Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z") ); processor.execute(document); - assertThat(document.getIngestContext().get("_index"), equalTo("")); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } public void testTAI64N() throws Exception { @@ -65,7 +65,7 @@ public void testTAI64N() throws Exception { Collections.singletonMap("_field", (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024") ); dateProcessor.execute(document); - assertThat(document.getIngestContext().get("_index"), equalTo("")); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } public void testUnixMs() throws Exception { @@ -80,11 +80,11 @@ public void testUnixMs() throws Exception { ); IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Collections.singletonMap("_field", "1000500")); dateProcessor.execute(document); - assertThat(document.getIngestContext().get("_index"), equalTo("")); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); document = new IngestDocument("_index", "_id", 1, null, null, Collections.singletonMap("_field", 1000500L)); dateProcessor.execute(document); - assertThat(document.getIngestContext().get("_index"), equalTo("")); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } public void testUnix() throws Exception { @@ -99,7 +99,7 @@ public void testUnix() throws Exception { ); IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Collections.singletonMap("_field", "1000.5")); dateProcessor.execute(document); - assertThat(document.getIngestContext().get("_index"), equalTo("")); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } public void testTemplatedFields() throws Exception { @@ -122,7 +122,7 @@ public void testTemplatedFields() throws Exception { dateProcessor.execute(document); assertThat( - document.getIngestContext().get("_index"), + document.getSourceAndMetadata().get("_index"), equalTo( "<" + indexNamePrefix diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java index c7b7211ee8a91..1714717d0e6d3 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java +++ 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java @@ -151,11 +151,11 @@ public void testEscapeFields_doNothingIfFieldNotInSourceDoc() throws Exception { // hasField returns false since it requires the expanded form, which is not expanded since we did not ask for it to be assertFalse(document.hasField("foo.bar")); // nothing has changed - assertEquals(document.getIngestContext().get("foo.bar"), "baz1"); + assertEquals(document.getSourceAndMetadata().get("foo.bar"), "baz1"); // abc.def is not found anywhere assertFalse(document.hasField("abc.def")); - assertFalse(document.getIngestContext().containsKey("abc")); - assertFalse(document.getIngestContext().containsKey("abc.def")); + assertFalse(document.getSourceAndMetadata().containsKey("abc")); + assertFalse(document.getSourceAndMetadata().containsKey("abc.def")); // asking to expand a (literal) field that does not exist, but the nested field does exist source = new HashMap<>(); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 6b914b7815215..0d96e4c6680b7 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -107,8 +107,8 @@ public void testMetadataAvailable() { IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Collections.singletonMap("values", values)); TestProcessor innerProcessor = new TestProcessor(id -> { - id.setFieldValue("_ingest._value.index", id.getIngestContext().get("_index")); - id.setFieldValue("_ingest._value.id", id.getIngestContext().get("_id")); + id.setFieldValue("_ingest._value.index", id.getSourceAndMetadata().get("_index")); + id.setFieldValue("_ingest._value.id", 
id.getSourceAndMetadata().get("_id")); }); ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", innerProcessor, false); execProcessor(processor, ingestDocument, (result, e) -> {}); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 96c1eb2c9a830..a9596fb0083ab 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -157,7 +157,7 @@ public void testAddToRoot() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); jsonProcessor.execute(ingestDocument); - Map sourceAndMetadata = ingestDocument.getIngestContext(); + Map sourceAndMetadata = ingestDocument.getSourceAndMetadata(); assertEquals(1, sourceAndMetadata.get("a")); assertEquals(2, sourceAndMetadata.get("b")); assertEquals("see", sourceAndMetadata.get("c")); @@ -175,7 +175,7 @@ public void testDuplicateKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); lenientJsonProcessor.execute(ingestDocument); - Map sourceAndMetadata = ingestDocument.getIngestContext(); + Map sourceAndMetadata = ingestDocument.getSourceAndMetadata(); assertEquals(2, sourceAndMetadata.get("a")); assertEquals("see", sourceAndMetadata.get("c")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index d186e9ff72655..32566e82baf80 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -56,14 +56,14 @@ 
public void testRenameArrayElement() throws Exception { Processor processor = createRenameProcessor("list.0", "item", false); processor.execute(ingestDocument); - Object actualObject = ingestDocument.getIngestContext().get("list"); + Object actualObject = ingestDocument.getSourceAndMetadata().get("list"); assertThat(actualObject, instanceOf(List.class)); @SuppressWarnings("unchecked") List actualList = (List) actualObject; assertThat(actualList.size(), equalTo(2)); assertThat(actualList.get(0), equalTo("item2")); assertThat(actualList.get(1), equalTo("item3")); - actualObject = ingestDocument.getIngestContext().get("item"); + actualObject = ingestDocument.getSourceAndMetadata().get("item"); assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); @@ -151,8 +151,8 @@ public void testRenameAtomicOperationSetFails() throws Exception { fail("processor execute should have failed"); } catch (UnsupportedOperationException e) { // the set failed, the old field has not been removed - assertThat(ingestDocument.getIngestContext().containsKey("list"), equalTo(true)); - assertThat(ingestDocument.getIngestContext().containsKey("new_field"), equalTo(false)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); } } @@ -171,8 +171,8 @@ public void testRenameAtomicOperationRemoveFails() throws Exception { fail("processor execute should have failed"); } catch (UnsupportedOperationException e) { // the set failed, the old field has not been removed - assertThat(ingestDocument.getIngestContext().containsKey("list"), equalTo(true)); - assertThat(ingestDocument.getIngestContext().containsKey("new_field"), equalTo(false)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); } } diff --git 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java index 975841a3f7f53..972ca029b7b03 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java @@ -78,11 +78,11 @@ private IngestDocument randomDocument() { } private void assertIngestDocument(IngestDocument ingestDocument) { - assertThat(ingestDocument.getIngestContext(), hasKey("bytes_in")); - assertThat(ingestDocument.getIngestContext(), hasKey("bytes_out")); - assertThat(ingestDocument.getIngestContext(), hasKey("bytes_total")); + assertThat(ingestDocument.getSourceAndMetadata(), hasKey("bytes_in")); + assertThat(ingestDocument.getSourceAndMetadata(), hasKey("bytes_out")); + assertThat(ingestDocument.getSourceAndMetadata(), hasKey("bytes_total")); int bytesTotal = ingestDocument.getFieldValue("bytes_in", Integer.class) + ingestDocument.getFieldValue("bytes_out", Integer.class); - assertThat(ingestDocument.getIngestContext().get("bytes_total"), is(bytesTotal)); - assertThat(ingestDocument.getIngestContext().get("_dynamic_templates"), equalTo(Map.of("foo", "bar"))); + assertThat(ingestDocument.getSourceAndMetadata().get("bytes_total"), is(bytesTotal)); + assertThat(ingestDocument.getSourceAndMetadata().get("_dynamic_templates"), equalTo(Map.of("foo", "bar"))); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java index 6054484e32af9..88181446b9e11 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java @@ -192,7 +192,7 @@ 
public void testRemoveIfSuccessfulDoesNotRemoveTargetField() throws Exception { Map expectedSourceAndMetadata = new HashMap<>(); expectedSourceAndMetadata.put(field, Map.of("scheme", "http", "domain", "www.google.com", "path", "")); for (Map.Entry entry : expectedSourceAndMetadata.entrySet()) { - assertThat(output.getIngestContext(), hasEntry(entry.getKey(), entry.getValue())); + assertThat(output.getSourceAndMetadata(), hasEntry(entry.getKey(), entry.getValue())); } } @@ -237,7 +237,7 @@ private void testUriParsing(boolean keepOriginal, boolean removeIfSuccessful, St expectedSourceAndMetadata.put("url", values); for (Map.Entry entry : expectedSourceAndMetadata.entrySet()) { - assertThat(output.getIngestContext(), hasEntry(entry.getKey(), entry.getValue())); + assertThat(output.getSourceAndMetadata(), hasEntry(entry.getKey(), entry.getValue())); } } diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 3a717eb0361b3..71169e504f8ba 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -298,9 +298,9 @@ public void testUseGeoIpProcessorWithDownloadedDBs() throws Exception { assertThat(result.getIngestDocument(), notNullValue()); IngestDocument doc = result.getIngestDocument(); - assertThat(doc.getIngestContext(), hasKey("ip-city")); - assertThat(doc.getIngestContext(), hasKey("ip-asn")); - assertThat(doc.getIngestContext(), hasKey("ip-country")); + assertThat(doc.getSourceAndMetadata(), hasKey("ip-city")); + assertThat(doc.getSourceAndMetadata(), hasKey("ip-asn")); + assertThat(doc.getSourceAndMetadata(), hasKey("ip-country")); assertThat(doc.getFieldValue("ip-city.city_name", String.class), equalTo("Tumba")); 
assertThat(doc.getFieldValue("ip-asn.organization_name", String.class), equalTo("Bredband2 AB")); @@ -362,7 +362,7 @@ public void testStartWithNoDatabases() throws Exception { SimulateDocumentBaseResult result = simulatePipeline(); assertThat(result.getFailure(), nullValue()); assertThat(result.getIngestDocument(), notNullValue()); - Map source = result.getIngestDocument().getIngestContext(); + Map source = result.getIngestDocument().getSourceAndMetadata(); assertThat( source, hasEntry( @@ -388,7 +388,7 @@ private void verifyUpdatedDatabase() throws Exception { assertThat(result.getFailure(), nullValue()); assertThat(result.getIngestDocument(), notNullValue()); - Map source = result.getIngestDocument().getIngestContext(); + Map source = result.getIngestDocument().getSourceAndMetadata(); assertThat(source, not(hasKey("tags"))); assertThat(source, hasKey("ip-city")); assertThat(source, hasKey("ip-asn")); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java index e021f22198d5f..fffd1343a3392 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java @@ -102,7 +102,7 @@ public void test() throws Exception { Map.of("_field", "89.160.20.128") ); processor1.execute(document1); - assertThat(document1.getIngestContext().get("geoip"), notNullValue()); + assertThat(document1.getSourceAndMetadata().get("geoip"), notNullValue()); IngestDocument document2 = new IngestDocument( "index", "id", @@ -112,7 +112,7 @@ public void test() throws Exception { Map.of("_field", "89.160.20.128") ); processor2.execute(document2); - 
assertThat(document2.getIngestContext().get("geoip"), notNullValue()); + assertThat(document2.getSourceAndMetadata().get("geoip"), notNullValue()); numberOfIngestRuns.incrementAndGet(); } catch (Exception | AssertionError e) { logger.error("error in ingest thread after run [" + numberOfIngestRuns.get() + "]", e); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index b52bcd76eb6f0..356d1b8cb23f7 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -423,7 +423,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map geoData = (Map) ingestDocument.getIngestContext().get("geoip"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); assertThat(geoData.get("city_name"), equalTo("Tumba")); } { @@ -431,7 +431,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); databaseNodeService.updateDatabase("GeoLite2-City.mmdb", "md5", geoipTmpDir.resolve("GeoLite2-City-Test.mmdb")); processor.execute(ingestDocument); - Map geoData = (Map) ingestDocument.getIngestContext().get("geoip"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); assertThat(geoData.get("city_name"), equalTo("Linköping")); } { @@ -440,7 +440,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { databaseNodeService.removeStaleEntries(List.of("GeoLite2-City.mmdb")); configDatabases.updateDatabase(geoIpConfigDir.resolve("GeoLite2-City.mmdb"), false); 
processor.execute(ingestDocument); - Map geoData = (Map) ingestDocument.getIngestContext().get("geoip"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); assertThat(geoData, nullValue()); } { @@ -448,7 +448,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { databaseNodeService.updateDatabase("GeoLite2-City-Test.mmdb", "md5", geoipTmpDir.resolve("GeoLite2-City-Test.mmdb")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); + assertThat(ingestDocument.getSourceAndMetadata(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); } } @@ -472,8 +472,8 @@ public void testDatabaseNotReadyYet() throws Exception { config ); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().get("geoip"), nullValue()); - assertThat(ingestDocument.getIngestContext().get("tags"), equalTo(List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); + assertThat(ingestDocument.getSourceAndMetadata().get("geoip"), nullValue()); + assertThat(ingestDocument.getSourceAndMetadata().get("tags"), equalTo(List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); } copyDatabaseFile(geoipTmpDir, "GeoLite2-City-Test.mmdb"); @@ -490,8 +490,8 @@ public void testDatabaseNotReadyYet() throws Exception { GeoIpProcessor processor = (GeoIpProcessor) factory.create(null, null, null, config); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().get("tags"), nullValue()); - Map geoData = (Map) ingestDocument.getIngestContext().get("geoip"); + assertThat(ingestDocument.getSourceAndMetadata().get("tags"), nullValue()); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); assertThat(geoData.get("city_name"), equalTo("Linköping")); } } diff --git 
a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 69f4a8d6ecdb8..bbefc307e800f 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -54,9 +54,9 @@ public void testCity() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo("8.8.8.8")); + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("8.8.8.8")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(6)); assertThat(geoData.get("ip"), equalTo("8.8.8.8")); assertThat(geoData.get("country_iso_code"), equalTo("US")); @@ -171,9 +171,9 @@ public void testCity_withIpV6() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo(address)); + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(address)); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(9)); assertThat(geoData.get("ip"), equalTo(address)); assertThat(geoData.get("country_iso_code"), equalTo("US")); @@ -208,9 +208,9 @@ public void testCityWithMissingLocation() throws Exception { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo("80.231.5.0")); + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("80.231.5.0")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(1)); assertThat(geoData.get("ip"), equalTo("80.231.5.0")); } @@ -234,9 +234,9 @@ public void testCountry() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo("82.170.213.79")); + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("82.170.213.79")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(4)); assertThat(geoData.get("ip"), equalTo("82.170.213.79")); assertThat(geoData.get("country_iso_code"), equalTo("NL")); @@ -263,9 +263,9 @@ public void testCountryWithMissingLocation() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo("80.231.5.0")); + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("80.231.5.0")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(1)); assertThat(geoData.get("ip"), equalTo("80.231.5.0")); } @@ -290,9 
+290,9 @@ public void testAsn() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().get("source_field"), equalTo(ip)); + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getIngestContext().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(4)); assertThat(geoData.get("ip"), equalTo(ip)); assertThat(geoData.get("asn"), equalTo(1136L)); @@ -318,7 +318,7 @@ public void testAddressIsNotInTheDatabase() throws Exception { document.put("source_field", "127.0.0.1"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().containsKey("target_field"), is(false)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); } /** @@ -365,7 +365,7 @@ public void testListAllValid() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - List> geoData = (List>) ingestDocument.getIngestContext().get("target_field"); + List> geoData = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); Map location = new HashMap<>(); location.put("lat", 37.751d); @@ -395,7 +395,7 @@ public void testListPartiallyValid() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - List> geoData = (List>) ingestDocument.getIngestContext().get("target_field"); + List> geoData = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); Map location = new HashMap<>(); location.put("lat", 37.751d); @@ -447,7 +447,7 @@ public void testListFirstOnly() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map geoData = 
(Map) ingestDocument.getIngestContext().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); Map location = new HashMap<>(); location.put("lat", 37.751d); @@ -474,7 +474,7 @@ public void testListFirstOnlyNoMatches() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().containsKey("target_field"), is(false)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); } public void testInvalidDatabase() throws Exception { @@ -496,8 +496,8 @@ public void testInvalidDatabase() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().containsKey("target_field"), is(false)); - assertThat(ingestDocument.getIngestContext(), hasEntry("tags", List.of("_geoip_expired_database"))); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); + assertThat(ingestDocument.getSourceAndMetadata(), hasEntry("tags", List.of("_geoip_expired_database"))); } public void testNoDatabase() throws Exception { @@ -519,8 +519,8 @@ public void testNoDatabase() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); - assertThat(ingestDocument.getIngestContext().containsKey("target_field"), is(false)); - assertThat(ingestDocument.getIngestContext(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City"))); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); + assertThat(ingestDocument.getSourceAndMetadata(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City"))); } 
public void testNoDatabase_ignoreMissing() throws Exception { diff --git a/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java b/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java index b0f7a169bb4ec..4da6ac77b50f8 100644 --- a/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java +++ b/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java @@ -135,7 +135,7 @@ public void testCommonBrowser() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getIngestContext(); + Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -164,7 +164,7 @@ public void testWindowsOS() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getIngestContext(); + Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -194,7 +194,7 @@ public void testUncommonDevice() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getIngestContext(); + Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -221,7 +221,7 @@ public void testSpider() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getIngestContext(); + Map data = ingestDocument.getSourceAndMetadata(); 
assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -248,7 +248,7 @@ public void testTablet() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getIngestContext(); + Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -276,7 +276,7 @@ public void testUnknown() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - Map data = ingestDocument.getIngestContext(); + Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); @@ -314,7 +314,7 @@ public void testExtractDeviceTypeDisabled() { false ); userAgentProcessor.execute(ingestDocument); - Map data = ingestDocument.getIngestContext(); + Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java index 1bb5f6816b1ed..2cd64faf9e101 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java @@ -108,7 +108,7 @@ public Map getProcessors(Processor.Parameters paramet @Override public void execute(IngestDocument ingestDocument, BiConsumer handler) { threadPool.generic().execute(() -> { - String id = (String) ingestDocument.getIngestContext().get("_id"); + String id = (String) ingestDocument.getSourceAndMetadata().get("_id"); if (usually()) { try { Thread.sleep(10); @@ -134,7 
+134,7 @@ public boolean isAsync() { }, "test", (processorFactories, tag, description, config) -> new AbstractProcessor(tag, description) { @Override public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - String id = (String) ingestDocument.getIngestContext().get("_id"); + String id = (String) ingestDocument.getSourceAndMetadata().get("_id"); ingestDocument.setFieldValue("bar", "baz-" + id); return ingestDocument; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 2f807c2c7d567..9c0298ec6dacb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -437,14 +437,14 @@ public void execute(IngestDocument ingestDocument, BiConsumer { if (exists != null) { - if (ingestDocument.getIngestContext().containsKey(exists) == false) { + if (ingestDocument.getSourceAndMetadata().containsKey(exists) == false) { handler.accept( null, new IllegalStateException( "expected document to contain [" + exists + "] but was [" - + ingestDocument.getIngestContext() + + ingestDocument.getSourceAndMetadata() ) ); } @@ -464,9 +464,9 @@ public IngestDocument execute(final IngestDocument ingestDocument) throws Except } else { // this asserts that this pipeline is the final pipeline executed if (exists != null) { - if (ingestDocument.getIngestContext().containsKey(exists) == false) { + if (ingestDocument.getSourceAndMetadata().containsKey(exists) == false) { throw new AssertionError( - "expected document to contain [" + exists + "] but was [" + ingestDocument.getIngestContext() + "expected document to contain [" + exists + "] but was [" + ingestDocument.getSourceAndMetadata() ); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java 
b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 96c2b428bfed9..e9e2882763e33 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -96,7 +96,7 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeGenericMap(ingestDocument.getIngestContext()); + out.writeGenericMap(ingestDocument.getSourceAndMetadata()); out.writeGenericMap(ingestDocument.getIngestMetadata()); } diff --git a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java index 82b4458e59e2f..528bb402a59e8 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java @@ -142,7 +142,7 @@ boolean evaluate(IngestDocument ingestDocument) { IngestConditionalScript.Factory factory = scriptService.compile(condition, IngestConditionalScript.CONTEXT); script = factory.newInstance(condition.getParams()); } - return script.execute(new UnmodifiableIngestData(new DynamicMap(ingestDocument.getIngestContext(), FUNCTIONS))); + return script.execute(new UnmodifiableIngestData(new DynamicMap(ingestDocument.getSourceAndMetadata(), FUNCTIONS))); } public Processor getInnerProcessor() { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 65efd335cfefa..695f7ec8c7d32 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -49,7 +49,7 @@ public final class IngestDocument { static final String TIMESTAMP = "timestamp"; - private final IngestSourceAndMetadata ingestContext; 
+ private final IngestSourceAndMetadata sourceAndMetadata; private final Map ingestMetadata; // Contains all pipelines that have been executed for this document @@ -58,7 +58,7 @@ public final class IngestDocument { private boolean doNoSelfReferencesCheck = false; public IngestDocument(String index, String id, long version, String routing, VersionType versionType, Map source) { - this.ingestContext = new IngestSourceAndMetadata( + this.sourceAndMetadata = new IngestSourceAndMetadata( index, id, version, @@ -68,7 +68,7 @@ public IngestDocument(String index, String id, long version, String routing, Ver source ); this.ingestMetadata = new HashMap<>(); - this.ingestMetadata.put(TIMESTAMP, ingestContext.getTimestamp()); + this.ingestMetadata.put(TIMESTAMP, sourceAndMetadata.getTimestamp()); } /** @@ -77,8 +77,8 @@ public IngestDocument(String index, String id, long version, String routing, Ver public IngestDocument(IngestDocument other) { this( new IngestSourceAndMetadata( - deepCopyMap(other.ingestContext.getSource()), - deepCopyMap(other.ingestContext.getMetadata()), + deepCopyMap(other.sourceAndMetadata.getSource()), + deepCopyMap(other.sourceAndMetadata.getMetadata()), other.getIngestSourceAndMetadata().timestamp, other.getIngestSourceAndMetadata().getValidators() ), @@ -89,9 +89,9 @@ public IngestDocument(IngestDocument other) { /** * Constructor to create an IngestDocument from its constituent maps. The maps are shallow copied. 
*/ - public IngestDocument(Map ingestContext, Map ingestMetadata) { - Tuple, Map> sm = IngestSourceAndMetadata.splitSourceAndMetadata(ingestContext); - this.ingestContext = new IngestSourceAndMetadata( + public IngestDocument(Map sourceAndMetadata, Map ingestMetadata) { + Tuple, Map> sm = IngestSourceAndMetadata.splitSourceAndMetadata(sourceAndMetadata); + this.sourceAndMetadata = new IngestSourceAndMetadata( sm.v1(), sm.v2(), IngestSourceAndMetadata.getTimestamp(ingestMetadata), @@ -100,7 +100,7 @@ public IngestDocument(Map ingestContext, Map ing this.ingestMetadata = new HashMap<>(ingestMetadata); this.ingestMetadata.computeIfPresent(TIMESTAMP, (k, v) -> { if (v instanceof String) { - return this.ingestContext.getTimestamp(); + return this.sourceAndMetadata.getTimestamp(); } return v; }); @@ -109,8 +109,8 @@ public IngestDocument(Map ingestContext, Map ing /** * Constructor to create an IngestDocument from its constituent maps */ - IngestDocument(IngestSourceAndMetadata ingestContext, Map ingestMetadata) { - this.ingestContext = ingestContext; + IngestDocument(IngestSourceAndMetadata sourceAndMetadata, Map ingestMetadata) { + this.sourceAndMetadata = sourceAndMetadata; this.ingestMetadata = ingestMetadata; } @@ -714,8 +714,8 @@ public String renderTemplate(TemplateScript.Factory template) { private Map createTemplateModel() { return new LazyMap<>(() -> { - Map model = new HashMap<>(ingestContext); - model.put(SourceFieldMapper.NAME, ingestContext); + Map model = new HashMap<>(sourceAndMetadata); + model.put(SourceFieldMapper.NAME, sourceAndMetadata); // If there is a field in the source with the name '_ingest' it gets overwritten here, // if access to that field is required then it get accessed via '_source._ingest' model.put(INGEST_KEY, ingestMetadata); @@ -726,36 +726,36 @@ private Map createTemplateModel() { /** * Get source and metadata map */ - public Map getIngestContext() { - return ingestContext; + public Map getSourceAndMetadata() { + return 
sourceAndMetadata; } /** * Get source and metadata map as {@link IngestSourceAndMetadata} */ public IngestSourceAndMetadata getIngestSourceAndMetadata() { - return ingestContext; + return sourceAndMetadata; } /** * Get all Metadata values in a Map */ public Map getMetadataMap() { - return ingestContext.getMetadata(); + return sourceAndMetadata.getMetadata(); } /** * Get the strongly typed metadata */ public org.elasticsearch.script.Metadata getContextMetadata() { - return ingestContext; + return sourceAndMetadata; } /** * Get all source values in a Map */ public Map getSource() { - return ingestContext.getSource(); + return sourceAndMetadata.getSource(); } /** @@ -899,17 +899,17 @@ public boolean equals(Object obj) { } IngestDocument other = (IngestDocument) obj; - return Objects.equals(ingestContext, other.ingestContext) && Objects.equals(ingestMetadata, other.ingestMetadata); + return Objects.equals(sourceAndMetadata, other.sourceAndMetadata) && Objects.equals(ingestMetadata, other.ingestMetadata); } @Override public int hashCode() { - return Objects.hash(ingestContext, ingestMetadata); + return Objects.hash(sourceAndMetadata, ingestMetadata); } @Override public String toString() { - return "IngestDocument{" + " sourceAndMetadata=" + ingestContext + ", ingestMetadata=" + ingestMetadata + '}'; + return "IngestDocument{" + " sourceAndMetadata=" + sourceAndMetadata + ", ingestMetadata=" + ingestMetadata + '}'; } public enum Metadata { @@ -956,7 +956,7 @@ private FieldPath(String path) { initialContext = ingestMetadata; newPath = path.substring(INGEST_KEY_PREFIX.length(), path.length()); } else { - initialContext = ingestContext; + initialContext = sourceAndMetadata; if (path.startsWith(SOURCE_PREFIX)) { newPath = path.substring(SOURCE_PREFIX.length(), path.length()); } else { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 4edf4bf62d0ec..67f0abae7b23d 100644 --- 
a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -947,7 +947,7 @@ private void innerExecute( private void postIngest(IngestDocument ingestDocument, IndexRequest indexRequest) { // cache timestamp from ingest source map - Object rawTimestamp = ingestDocument.getIngestContext().get(TimestampField.FIXED_TIMESTAMP_FIELD); + Object rawTimestamp = ingestDocument.getSourceAndMetadata().get(TimestampField.FIXED_TIMESTAMP_FIELD); if (rawTimestamp != null && indexRequest.getRawTimestamp() == null) { indexRequest.setRawTimestamp(rawTimestamp); } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index 7bf2d75ba3ef0..958c59f67a838 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -88,8 +88,8 @@ public void testExecuteVerboseItem() throws Exception { assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorTag(), equalTo("test-id")); assertVerboseResult(simulateDocumentVerboseResult.getProcessorResults().get(1), pipeline.getId(), ingestDocument); assertThat( - simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getIngestContext(), - not(sameInstance(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getIngestContext())) + simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getSourceAndMetadata(), + not(sameInstance(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata())) ); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); } @@ -381,7 +381,7 @@ public boolean isAsync() { 
result.getIngestDocument().getMetadataMap().get(IngestDocument.Metadata.ID.getFieldName()), equalTo(Integer.toString(id)) ); - assertThat(result.getIngestDocument().getIngestContext().get("processed"), is(true)); + assertThat(result.getIngestDocument().getSourceAndMetadata().get("processed"), is(true)); } } @@ -399,7 +399,7 @@ private static void assertVerboseResult( assertThat(simulateVerboseIngestDocument, not(sameInstance(expectedIngestDocument))); assertIngestDocument(simulateVerboseIngestDocument, expectedIngestDocument); - assertThat(simulateVerboseIngestDocument.getIngestContext(), not(sameInstance(expectedIngestDocument.getIngestContext()))); + assertThat(simulateVerboseIngestDocument.getSourceAndMetadata(), not(sameInstance(expectedIngestDocument.getSourceAndMetadata()))); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java index 27606163f477c..e505dfc2ce64a 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java @@ -109,8 +109,8 @@ public boolean isAsync() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); ingestDocument.setFieldValue(conditionalField, falseValue); execProcessor(processor, ingestDocument, (result, e) -> {}); - assertThat(ingestDocument.getIngestContext().get(conditionalField), is(falseValue)); - assertThat(ingestDocument.getIngestContext(), not(hasKey("foo"))); + assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(falseValue)); + assertThat(ingestDocument.getSourceAndMetadata(), not(hasKey("foo"))); assertStats(processor, 0, 0, 0); assertEquals(scriptName, processor.getCondition()); @@ -124,8 +124,8 @@ public boolean isAsync() { ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); 
ingestDocument.setFieldValue(conditionalField, trueValue); execProcessor(processor, ingestDocument, (result, e) -> {}); - assertThat(ingestDocument.getIngestContext().get(conditionalField), is(trueValue)); - assertThat(ingestDocument.getIngestContext().get("foo"), is("bar")); + assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(trueValue)); + assertThat(ingestDocument.getSourceAndMetadata().get("foo"), is("bar")); assertStats(processor, 1, 0, 1); ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 02f0db13f3524..3f757cb490984 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -328,11 +328,11 @@ public void testHasFieldEmptyPathAfterStrippingOutPrefix() { public void testSimpleSetFieldValue() { ingestDocument.setFieldValue("new_field", "foo"); - assertThat(ingestDocument.getIngestContext().get("new_field"), equalTo("foo")); + assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), equalTo("foo")); ingestDocument.setFieldValue("_ttl", "ttl"); - assertThat(ingestDocument.getIngestContext().get("_ttl"), equalTo("ttl")); + assertThat(ingestDocument.getSourceAndMetadata().get("_ttl"), equalTo("ttl")); ingestDocument.setFieldValue("_source.another_field", "bar"); - assertThat(ingestDocument.getIngestContext().get("another_field"), equalTo("bar")); + assertThat(ingestDocument.getSourceAndMetadata().get("another_field"), equalTo("bar")); ingestDocument.setFieldValue("_ingest.new_field", "new_value"); assertThat(ingestDocument.getIngestMetadata().size(), equalTo(2)); assertThat(ingestDocument.getIngestMetadata().get("new_field"), equalTo("new_value")); @@ -342,15 +342,15 @@ public void testSimpleSetFieldValue() { public void 
testSetFieldValueNullValue() { ingestDocument.setFieldValue("new_field", null); - assertThat(ingestDocument.getIngestContext().containsKey("new_field"), equalTo(true)); - assertThat(ingestDocument.getIngestContext().get("new_field"), nullValue()); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), nullValue()); } @SuppressWarnings("unchecked") public void testNestedSetFieldValue() { ingestDocument.setFieldValue("a.b.c.d", "foo"); - assertThat(ingestDocument.getIngestContext().get("a"), instanceOf(Map.class)); - Map a = (Map) ingestDocument.getIngestContext().get("a"); + assertThat(ingestDocument.getSourceAndMetadata().get("a"), instanceOf(Map.class)); + Map a = (Map) ingestDocument.getSourceAndMetadata().get("a"); assertThat(a.get("b"), instanceOf(Map.class)); Map b = (Map) a.get("b"); assertThat(b.get("c"), instanceOf(Map.class)); @@ -362,14 +362,14 @@ public void testNestedSetFieldValue() { public void testSetFieldValueOnExistingField() { ingestDocument.setFieldValue("foo", "newbar"); - assertThat(ingestDocument.getIngestContext().get("foo"), equalTo("newbar")); + assertThat(ingestDocument.getSourceAndMetadata().get("foo"), equalTo("newbar")); } @SuppressWarnings("unchecked") public void testSetFieldValueOnExistingParent() { ingestDocument.setFieldValue("fizz.new", "bar"); - assertThat(ingestDocument.getIngestContext().get("fizz"), instanceOf(Map.class)); - Map innerMap = (Map) ingestDocument.getIngestContext().get("fizz"); + assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class)); + Map innerMap = (Map) ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(innerMap.get("new"), instanceOf(String.class)); String value = (String) innerMap.get("new"); assertThat(value, equalTo("bar")); @@ -407,12 +407,12 @@ public void testSetFieldValueNullName() { public void testSetSourceObject() { 
ingestDocument.setFieldValue("_source", "value"); - assertThat(ingestDocument.getIngestContext().get("_source"), equalTo("value")); + assertThat(ingestDocument.getSourceAndMetadata().get("_source"), equalTo("value")); } public void testSetIngestObject() { ingestDocument.setFieldValue("_ingest", "value"); - assertThat(ingestDocument.getIngestContext().get("_ingest"), equalTo("value")); + assertThat(ingestDocument.getSourceAndMetadata().get("_ingest"), equalTo("value")); } public void testSetIngestSourceObject() { @@ -439,14 +439,14 @@ public void testSetEmptyPathAfterStrippingOutPrefix() { public void testListSetFieldValueNoIndexProvided() { ingestDocument.setFieldValue("list", "value"); - Object object = ingestDocument.getIngestContext().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value")); } public void testListAppendFieldValue() { ingestDocument.appendFieldValue("list", "new_value"); - Object object = ingestDocument.getIngestContext().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -458,7 +458,7 @@ public void testListAppendFieldValue() { public void testListAppendFieldValueWithDuplicate() { ingestDocument.appendFieldValue("list2", "foo", false); - Object object = ingestDocument.getIngestContext().get("list2"); + Object object = ingestDocument.getSourceAndMetadata().get("list2"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -468,7 +468,7 @@ public void testListAppendFieldValueWithDuplicate() { public void testListAppendFieldValueWithoutDuplicate() { ingestDocument.appendFieldValue("list2", "foo2", false); - Object object = ingestDocument.getIngestContext().get("list2"); + Object object = ingestDocument.getSourceAndMetadata().get("list2"); assertThat(object, 
instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -478,7 +478,7 @@ public void testListAppendFieldValueWithoutDuplicate() { public void testListAppendFieldValues() { ingestDocument.appendFieldValue("list", Arrays.asList("item1", "item2", "item3")); - Object object = ingestDocument.getIngestContext().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -492,7 +492,7 @@ public void testListAppendFieldValues() { public void testListAppendFieldValuesWithoutDuplicates() { ingestDocument.appendFieldValue("list2", List.of("foo", "bar", "baz", "foo2"), false); - Object object = ingestDocument.getIngestContext().get("list2"); + Object object = ingestDocument.getSourceAndMetadata().get("list2"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -505,7 +505,7 @@ public void testListAppendFieldValuesWithoutDuplicates() { public void testAppendFieldValueToNonExistingList() { ingestDocument.appendFieldValue("non_existing_list", "new_value"); - Object object = ingestDocument.getIngestContext().get("non_existing_list"); + Object object = ingestDocument.getSourceAndMetadata().get("non_existing_list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -515,7 +515,7 @@ public void testAppendFieldValueToNonExistingList() { public void testAppendFieldValuesToNonExistingList() { ingestDocument.appendFieldValue("non_existing_list", Arrays.asList("item1", "item2", "item3")); - Object object = ingestDocument.getIngestContext().get("non_existing_list"); + Object object = ingestDocument.getSourceAndMetadata().get("non_existing_list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -527,7 +527,7 @@ public void testAppendFieldValuesToNonExistingList() { public 
void testAppendFieldValueConvertStringToList() { ingestDocument.appendFieldValue("fizz.buzz", "new_value"); - Object object = ingestDocument.getIngestContext().get("fizz"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -542,7 +542,7 @@ public void testAppendFieldValueConvertStringToList() { public void testAppendFieldValuesConvertStringToList() { ingestDocument.appendFieldValue("fizz.buzz", Arrays.asList("item1", "item2", "item3")); - Object object = ingestDocument.getIngestContext().get("fizz"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -559,7 +559,7 @@ public void testAppendFieldValuesConvertStringToList() { public void testAppendFieldValueConvertIntegerToList() { ingestDocument.appendFieldValue("int", 456); - Object object = ingestDocument.getIngestContext().get("int"); + Object object = ingestDocument.getSourceAndMetadata().get("int"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -570,7 +570,7 @@ public void testAppendFieldValueConvertIntegerToList() { public void testAppendFieldValuesConvertIntegerToList() { ingestDocument.appendFieldValue("int", Arrays.asList(456, 789)); - Object object = ingestDocument.getIngestContext().get("int"); + Object object = ingestDocument.getSourceAndMetadata().get("int"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -582,7 +582,7 @@ public void testAppendFieldValuesConvertIntegerToList() { public void testAppendFieldValueConvertMapToList() { ingestDocument.appendFieldValue("fizz", Collections.singletonMap("field", "value")); - Object object = ingestDocument.getIngestContext().get("fizz"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); 
assertThat(object, instanceOf(List.class)); List list = (List) object; assertThat(list.size(), equalTo(2)); @@ -595,7 +595,7 @@ public void testAppendFieldValueConvertMapToList() { public void testAppendFieldValueToNull() { ingestDocument.appendFieldValue("fizz.foo_null", "new_value"); - Object object = ingestDocument.getIngestContext().get("fizz"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -609,7 +609,7 @@ public void testAppendFieldValueToNull() { public void testAppendFieldValueToListElement() { ingestDocument.appendFieldValue("fizz.list.0", "item2"); - Object object = ingestDocument.getIngestContext().get("fizz"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -629,7 +629,7 @@ public void testAppendFieldValueToListElement() { public void testAppendFieldValuesToListElement() { ingestDocument.appendFieldValue("fizz.list.0", Arrays.asList("item2", "item3", "item4")); - Object object = ingestDocument.getIngestContext().get("fizz"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -651,7 +651,7 @@ public void testAppendFieldValuesToListElement() { public void testAppendFieldValueConvertStringListElementToList() { ingestDocument.appendFieldValue("fizz.list.0.0", "new_value"); - Object object = ingestDocument.getIngestContext().get("fizz"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -675,7 +675,7 @@ public void testAppendFieldValueConvertStringListElementToList() { public void testAppendFieldValuesConvertStringListElementToList() { 
ingestDocument.appendFieldValue("fizz.list.0.0", Arrays.asList("item2", "item3", "item4")); - Object object = ingestDocument.getIngestContext().get("fizz"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(object, instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) object; @@ -701,7 +701,7 @@ public void testAppendFieldValuesConvertStringListElementToList() { public void testAppendFieldValueListElementConvertMapToList() { ingestDocument.appendFieldValue("list.0", Collections.singletonMap("item2", "value2")); - Object object = ingestDocument.getIngestContext().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); List list = (List) object; assertThat(list.size(), equalTo(2)); @@ -715,7 +715,7 @@ public void testAppendFieldValueListElementConvertMapToList() { public void testAppendFieldValueToNullListElement() { ingestDocument.appendFieldValue("list.1", "new_value"); - Object object = ingestDocument.getIngestContext().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); List list = (List) object; assertThat(list.get(1), instanceOf(List.class)); @@ -727,7 +727,7 @@ public void testAppendFieldValueToNullListElement() { public void testAppendFieldValueToListOfMaps() { ingestDocument.appendFieldValue("list", Collections.singletonMap("item2", "value2")); - Object object = ingestDocument.getIngestContext().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -739,7 +739,7 @@ public void testAppendFieldValueToListOfMaps() { public void testListSetFieldValueIndexProvided() { ingestDocument.setFieldValue("list.1", "value"); - Object object = ingestDocument.getIngestContext().get("list"); + Object object = 
ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -750,7 +750,7 @@ public void testListSetFieldValueIndexProvided() { public void testSetFieldValueListAsPartOfPath() { ingestDocument.setFieldValue("list.0.field", "new_value"); - Object object = ingestDocument.getIngestContext().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -798,43 +798,43 @@ public void testSetFieldValueEmptyName() { public void testRemoveField() { ingestDocument.removeField("foo"); - assertThat(ingestDocument.getIngestContext().size(), equalTo(10)); - assertThat(ingestDocument.getIngestContext().containsKey("foo"), equalTo(false)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(10)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("foo"), equalTo(false)); ingestDocument.removeField("_index"); - assertThat(ingestDocument.getIngestContext().size(), equalTo(9)); - assertThat(ingestDocument.getIngestContext().containsKey("_index"), equalTo(false)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(9)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("_index"), equalTo(false)); ingestDocument.removeField("_source.fizz"); - assertThat(ingestDocument.getIngestContext().size(), equalTo(8)); - assertThat(ingestDocument.getIngestContext().containsKey("fizz"), equalTo(false)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(false)); assertThat(ingestDocument.getIngestMetadata().size(), equalTo(1)); ingestDocument.removeField("_ingest.timestamp"); - assertThat(ingestDocument.getIngestContext().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); 
assertThat(ingestDocument.getIngestMetadata().size(), equalTo(0)); } public void testRemoveInnerField() { ingestDocument.removeField("fizz.buzz"); - assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); - assertThat(ingestDocument.getIngestContext().get("fizz"), instanceOf(Map.class)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); + assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class)); @SuppressWarnings("unchecked") - Map map = (Map) ingestDocument.getIngestContext().get("fizz"); + Map map = (Map) ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(map.size(), equalTo(3)); assertThat(map.containsKey("buzz"), equalTo(false)); ingestDocument.removeField("fizz.foo_null"); assertThat(map.size(), equalTo(2)); - assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); - assertThat(ingestDocument.getIngestContext().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); ingestDocument.removeField("fizz.1"); assertThat(map.size(), equalTo(1)); - assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); - assertThat(ingestDocument.getIngestContext().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); ingestDocument.removeField("fizz.list"); assertThat(map.size(), equalTo(0)); - assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); - assertThat(ingestDocument.getIngestContext().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); } public void testRemoveNonExistingField() { @@ -869,8 +869,8 @@ public void testRemoveSourceObject() { public 
void testRemoveIngestObject() { ingestDocument.removeField("_ingest"); - assertThat(ingestDocument.getIngestContext().size(), equalTo(10)); - assertThat(ingestDocument.getIngestContext().containsKey("_ingest"), equalTo(false)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(10)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("_ingest"), equalTo(false)); } public void testRemoveEmptyPathAfterStrippingOutPrefix() { @@ -891,9 +891,9 @@ public void testRemoveEmptyPathAfterStrippingOutPrefix() { public void testListRemoveField() { ingestDocument.removeField("list.0.field"); - assertThat(ingestDocument.getIngestContext().size(), equalTo(11)); - assertThat(ingestDocument.getIngestContext().containsKey("list"), equalTo(true)); - Object object = ingestDocument.getIngestContext().get("list"); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(11)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -1037,7 +1037,7 @@ public void testIngestMetadataTimestamp() throws Exception { public void testCopyConstructor() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); IngestDocument copy = new IngestDocument(ingestDocument); - assertThat(ingestDocument.getIngestContext(), not(sameInstance(copy.getIngestContext()))); + assertThat(ingestDocument.getSourceAndMetadata(), not(sameInstance(copy.getSourceAndMetadata()))); assertIngestDocument(ingestDocument, copy); } @@ -1051,8 +1051,8 @@ public void testCopyConstructorWithZonedDateTime() { IngestDocument original = TestIngestDocument.withDefaultVersion(sourceAndMetadata); IngestDocument copy = new IngestDocument(original); - assertThat(copy.getIngestContext().get("beforeClockChange"), 
equalTo(original.getIngestContext().get("beforeClockChange"))); - assertThat(copy.getIngestContext().get("afterClockChange"), equalTo(original.getIngestContext().get("afterClockChange"))); + assertThat(copy.getSourceAndMetadata().get("beforeClockChange"), equalTo(original.getSourceAndMetadata().get("beforeClockChange"))); + assertThat(copy.getSourceAndMetadata().get("afterClockChange"), equalTo(original.getSourceAndMetadata().get("afterClockChange"))); } public void testSetInvalidSourceField() throws Exception { @@ -1076,7 +1076,7 @@ public void testSetInvalidSourceField() throws Exception { public void testDeepCopy() { IngestDocument copiedDoc = new IngestDocument( - IngestDocument.deepCopyMap(ingestDocument.getIngestContext()), + IngestDocument.deepCopyMap(ingestDocument.getSourceAndMetadata()), IngestDocument.deepCopyMap(ingestDocument.getIngestMetadata()) ); assertArrayEquals( diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index cfd2f7003dc3a..7fcb2b07a1f81 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -523,7 +523,7 @@ public String getType() { ingestService.getPipeline("_id1").execute(ingestDocument, (ingestDocument1, e) -> exceptionHolder[0] = e); assertThat(exceptionHolder[0], notNullValue()); assertThat(exceptionHolder[0].getMessage(), containsString("reload me")); - assertThat(ingestDocument.getIngestContext().get("_field"), nullValue()); + assertThat(ingestDocument.getSourceAndMetadata().get("_field"), nullValue()); } externalProperty[0] = true; @@ -534,7 +534,7 @@ public String getType() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestService.getPipeline("_id1").execute(ingestDocument, (ingestDocument1, e) -> holder[0] = e); assertThat(holder[0], nullValue()); - 
assertThat(ingestDocument.getIngestContext().get("_field"), equalTo("_value")); + assertThat(ingestDocument.getSourceAndMetadata().get("_field"), equalTo("_value")); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java index 445c04c8d737c..68ec921186599 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java @@ -197,7 +197,7 @@ public void testPipelineProcessorWithPipelineChain() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); // start the chain ingestDocument.executePipeline(pipeline1, (result, e) -> {}); - assertNotNull(ingestDocument.getIngestContext().get(key1)); + assertNotNull(ingestDocument.getSourceAndMetadata().get(key1)); // check the stats IngestStats.Stats pipeline1Stats = pipeline1.getMetrics().createStats(); diff --git a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java index 70c8a44d19ca9..7054778b8528a 100644 --- a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java @@ -763,8 +763,8 @@ public void testActualPipelineProcessorRepeatedInvocation() throws Exception { // each invocation updates key1 with a random int assertNotEquals( - resultList.get(1).getIngestDocument().getIngestContext().get(key1), - resultList.get(3).getIngestDocument().getIngestContext().get(key1) + resultList.get(1).getIngestDocument().getSourceAndMetadata().get(key1), + resultList.get(3).getIngestDocument().getSourceAndMetadata().get(key1) ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java 
b/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java index 69a50d8f6d04a..e2bea702baff5 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java @@ -21,7 +21,7 @@ public class IngestDocumentMatcher { */ public static void assertIngestDocument(IngestDocument docA, IngestDocument docB) { if ((deepEquals(docA.getIngestMetadata(), docB.getIngestMetadata(), true) - && deepEquals(docA.getIngestContext(), docB.getIngestContext(), false)) == false) { + && deepEquals(docA.getSourceAndMetadata(), docB.getSourceAndMetadata(), false)) == false) { throw new AssertionError("Expected [" + docA + "] but received [" + docB + "]."); } } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java index 166ab3238dae2..d9cc17e88adb5 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -62,7 +62,7 @@ public static String randomLeafFieldName(Random random) { * field. 
*/ public static String randomExistingFieldName(Random random, IngestDocument ingestDocument) { - Map source = new TreeMap<>(ingestDocument.getIngestContext()); + Map source = new TreeMap<>(ingestDocument.getSourceAndMetadata()); Map.Entry randomEntry = getRandomEntry(random, source.entrySet()); String key = randomEntry.getKey(); while (randomEntry.getValue() instanceof Map) { @@ -111,7 +111,7 @@ public static String addRandomField(Random random, IngestDocument ingestDocument */ public static boolean canAddField(String path, IngestDocument ingestDocument) { String[] pathElements = path.split("\\."); - Map innerMap = ingestDocument.getIngestContext(); + Map innerMap = ingestDocument.getSourceAndMetadata(); if (pathElements.length > 1) { for (int i = 0; i < pathElements.length - 1; i++) { Object currentLevel = innerMap.get(pathElements[i]); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java index 1a3958a7b03cd..1663c63eeeae3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java @@ -61,7 +61,7 @@ public void testWriteToDocAndSerialize() throws IOException { builder.field(metadata.getKey(), metadata.getValue().toString()); } } - Map source = IngestDocument.deepCopyMap(document.getIngestContext()); + Map source = IngestDocument.deepCopyMap(document.getSourceAndMetadata()); metadataMap.keySet().forEach(mD -> source.remove(mD)); builder.field("_source", source); builder.field("_ingest", document.getIngestMetadata()); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java 
b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java index ca2132a00c380..0d7f900188ba1 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java @@ -112,7 +112,7 @@ public void testNoMatch() throws Exception { VersionType.INTERNAL, Map.of("domain", "elastic.com") ); - int numProperties = ingestDocument.getIngestContext().size(); + int numProperties = ingestDocument.getSourceAndMetadata().size(); // Run IngestDocument[] holder = new IngestDocument[1]; processor.execute(ingestDocument, (result, e) -> holder[0] = result); @@ -133,7 +133,7 @@ public void testNoMatch() throws Exception { assertThat(termQueryBuilder.fieldName(), equalTo("domain")); assertThat(termQueryBuilder.value(), equalTo("elastic.com")); // Check result - assertThat(ingestDocument.getIngestContext().size(), equalTo(numProperties)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(numProperties)); } public void testSearchFailure() throws Exception { @@ -204,11 +204,11 @@ public void testIgnoreKeyMissing() throws Exception { ); IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1L, "_routing", VersionType.INTERNAL, Map.of()); - assertThat(ingestDocument.getIngestContext().size(), equalTo(5)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5)); IngestDocument[] holder = new IngestDocument[1]; processor.execute(ingestDocument, (result, e) -> holder[0] = result); assertThat(holder[0], notNullValue()); - assertThat(ingestDocument.getIngestContext().size(), equalTo(5)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5)); } { MatchProcessor processor = new MatchProcessor( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java index 2d2c3bcb1a23c..48d98c981e14e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java @@ -144,7 +144,7 @@ void handleResponse(InferModelAction.Response response, IngestDocument ingestDoc } InferModelAction.Request buildRequest(IngestDocument ingestDocument) { - Map fields = new HashMap<>(ingestDocument.getIngestContext()); + Map fields = new HashMap<>(ingestDocument.getSourceAndMetadata()); // Add ingestMetadata as previous processors might have added metadata from which we are predicting (see: foreach processor) if (ingestDocument.getIngestMetadata().isEmpty() == false) { fields.put(INGEST_KEY, ingestDocument.getIngestMetadata()); From 205df38dab4652bd6d3fba3bd6648c2d16324331 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 15:41:13 -0500 Subject: [PATCH 04/36] revert "getContextMetadata" -> "getMetadata" --- .../java/org/elasticsearch/ingest/common/ScriptProcessor.java | 2 +- .../ingest/common/ScriptProcessorFactoryTests.java | 2 +- .../src/main/java/org/elasticsearch/ingest/IngestDocument.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index 44d4edec73670..2586b9aed919d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -75,7 +75,7 @@ public IngestDocument execute(IngestDocument document) { if (factory == null) { factory = scriptService.compile(script, IngestScript.CONTEXT); } - factory.newInstance(script.getParams(), 
document.getContextMetadata(), document.getSourceAndMetadata()).execute(); + factory.newInstance(script.getParams(), document.getMetadata(), document.getSourceAndMetadata()).execute(); return document; } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java index 99c97b5e67fd0..6fb39fa0fb803 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java @@ -160,7 +160,7 @@ public void testInlineIsCompiled() throws Exception { assertNotNull(processor.getPrecompiledIngestScriptFactory()); IngestDocument doc = TestIngestDocument.emptyIngestDocument(); Map ctx = TestIngestDocument.emptyIngestDocument().getIngestSourceAndMetadata(); - processor.getPrecompiledIngestScriptFactory().newInstance(null, doc.getContextMetadata(), ctx).execute(); + processor.getPrecompiledIngestScriptFactory().newInstance(null, doc.getMetadata(), ctx).execute(); assertThat(ctx.get("foo"), equalTo("bar")); } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 695f7ec8c7d32..b517d2a6f4153 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -747,7 +747,7 @@ public Map getMetadataMap() { /** * Get the strongly typed metadata */ - public org.elasticsearch.script.Metadata getContextMetadata() { + public org.elasticsearch.script.Metadata getMetadata() { return sourceAndMetadata; } From c35d5a753a3d1fbba42785f095161e64d0ddb187 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 16:23:34 -0500 Subject: [PATCH 05/36] Update javadoc --- 
.../geoip/GeoIpProcessorFactoryTests.java | 5 +++- .../ingest/IngestSourceAndMetadata.java | 8 +++--- .../org/elasticsearch/script/Metadata.java | 26 ++++++++++++++----- .../script/SourceAndMetadataMap.java | 18 +++++++++++-- .../script/UpdateSourceAndMetadata.java | 9 +++++++ 5 files changed, 52 insertions(+), 14 deletions(-) diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 356d1b8cb23f7..0f97f212c42c0 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -473,7 +473,10 @@ public void testDatabaseNotReadyYet() throws Exception { ); processor.execute(ingestDocument); assertThat(ingestDocument.getSourceAndMetadata().get("geoip"), nullValue()); - assertThat(ingestDocument.getSourceAndMetadata().get("tags"), equalTo(List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); + assertThat( + ingestDocument.getSourceAndMetadata().get("tags"), + equalTo(List.of("_geoip_database_unavailable_GeoLite2-City.mmdb")) + ); } copyDatabaseFile(geoipTmpDir, "GeoLite2-City-Test.mmdb"); diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java index 3833efb1e5bf3..026713575ddfc 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java @@ -154,10 +154,12 @@ public static ZonedDateTime getTimestamp(Map ingestMetadata) { } // timestamp isn't backed by the map + @Override public ZonedDateTime getTimestamp() { return timestamp; } + @Override public long getVersion() { Number version = getNumber(VERSION); assert version != null : VERSION + " 
validation allowed null version"; @@ -174,11 +176,7 @@ public void setVersionType(String versionType) { put(VERSION_TYPE, versionType); } - @Override - public String getType() { - return getString(TYPE); - } - + // These getters are inaccessible to scripts but live in the same map public Number getIfSeqNo() { return getNumber(IF_SEQ_NO); } diff --git a/server/src/main/java/org/elasticsearch/script/Metadata.java b/server/src/main/java/org/elasticsearch/script/Metadata.java index ada69e394361f..811889ed6e4f7 100644 --- a/server/src/main/java/org/elasticsearch/script/Metadata.java +++ b/server/src/main/java/org/elasticsearch/script/Metadata.java @@ -11,7 +11,10 @@ import java.time.ZonedDateTime; /** - * Ingest and update metadata available to write scripts + * Ingest and update metadata available to write scripts. + * + * This interface is a super-set of all metadata for the write contexts. A write context should only + * whitelist the relevant getters and setters. */ public interface Metadata { /** @@ -48,6 +51,7 @@ default boolean hasVersion() { /** * The version type of the document, {@link org.elasticsearch.index.VersionType} as a lower-case string. + * Since update does not have this metadata, defaults to throwing {@link UnsupportedOperationException}. */ default String getVersionType() { throw new UnsupportedOperationException(); @@ -55,26 +59,36 @@ default String getVersionType() { /** * Set the version type of the document. + * + * Since update does not have this metadata, defaults to throwing {@link UnsupportedOperationException} * @param versionType {@link org.elasticsearch.index.VersionType} as a lower-case string */ default void setVersionType(String versionType) { throw new UnsupportedOperationException(); } + /** + * Get the update operation for this document, e.g. "create", "index", "noop". + * + * Since ingest does not have this metadata, defaults to throwing {@link UnsupportedOperationException}.
+ */ default String getOp() { throw new UnsupportedOperationException(); } + /** + * Set the update operation for this document. See {@code org.elasticsearch.action.update.UpdateHelper.UpdateOpType} and + * {@code org.elasticsearch.reindex.AbstractAsyncBulkByScrollAction.OpType} + * + * Since ingest does not have this metadata, defaults to throwing {@link UnsupportedOperationException}. + * @param op the op type as a string. + */ default void setOp(String op) { throw new UnsupportedOperationException(); } - default String getType() { - throw new UnsupportedOperationException(); - } - /** - * Timestamp of this ingestion or update + * Timestamp of this ingestion or update. */ ZonedDateTime getTimestamp(); } diff --git a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java index 7955b0060ac1a..d3a26838825d7 100644 --- a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java +++ b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java @@ -24,9 +24,12 @@ /** * Container that holds the source and metadata for write scripts. Acts like a map for backwards compatibilty with - * ctx and implements {@link Metadata} so that scripts can use the newer API for setting. + * ctx and implements {@link Metadata} so that scripts can use the newer API for reading and writing metadata values. * - * Common metadata keys are + * Keeps two maps, a {@link #metadata} map for metadata mappings and a {@link #source} map for all other mappings. + * A mapping belongs in metadata if and only if there is a {@link Validator} for the mapping. + * + * Validators ensure modifications to the metadata map are always valid so getters are exception free. 
*/ public abstract class SourceAndMetadataMap extends AbstractMap implements Metadata { public static final String INDEX = "_index"; @@ -468,12 +471,23 @@ public static void mapValidator(MapOperation op, String key, Object value) { ); } + /** + * The operation being performed on the value in the map. + * INIT: Initial value - the metadata value as passed into this class + * UPDATE: the metadata is being set to a different value + * REMOVE: the metadata mapping is being removed + */ public enum MapOperation { INIT, UPDATE, REMOVE } + /** + * A "TriConsumer" that tests if the {@link MapOperation}, the metadata key and value are valid. + * + * @throws IllegalArgumentException if the given triple is invalid + */ @FunctionalInterface public interface Validator { void accept(MapOperation op, String key, Object value); diff --git a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java index b06ff22f88db7..66eaaa34d8da8 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java @@ -137,6 +137,9 @@ public ZonedDateTime getTimestamp() { return getZonedDateTime(TIMESTAMP); } + /** + * A validator that allows string values to be set once but neither updated nor removed. + */ public static void setOnceStringValidator(MapOperation op, String key, Object value) { if (op != MapOperation.INIT) { throw new IllegalArgumentException("Cannot " + op.name().toLowerCase(Locale.ROOT) + " key [" + key + "]"); @@ -144,6 +147,9 @@ public static void setOnceStringValidator(MapOperation op, String key, Object va stringValidator(op, key, value); } + /** + * Same as {@link #longValidator(MapOperation, String, Object)} but allows longs to be set once and neither updated nor removed. 
+ */ public static void setOnceLongValidator(MapOperation op, String key, Object value) { if (op != MapOperation.INIT) { throw new IllegalArgumentException("Cannot " + op.name().toLowerCase(Locale.ROOT) + " key [" + key + "]"); @@ -151,6 +157,9 @@ public static void setOnceLongValidator(MapOperation op, String key, Object valu longValidator(op, key, value); } + /** + * + */ public static Validator opValidatorFromValidOps(Set validOps) { return new Validator() { @Override From 5fd7d3fc006e02c7f14a2e109862f8c9a8350d54 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 16:32:37 -0500 Subject: [PATCH 06/36] rest tests --- .../rest-api-spec/test/painless/15_update.yml | 4 ++-- .../test/painless/25_script_upsert.yml | 12 ++++++------ .../elasticsearch/script/SourceAndMetadataMap.java | 13 +------------ 3 files changed, 9 insertions(+), 20 deletions(-) diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml index 7a761d34ee593..c29dbb87d5d22 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml @@ -136,7 +136,7 @@ id: "2" body: script: - source: "ctx._source.bar = meta().id + '-extra'" + source: "ctx._source.bar = metadata().id + '-extra'" lang: "painless" upsert: {} scripted_upsert: true @@ -155,7 +155,7 @@ id: "2" body: script: - source: "meta().op = Op.DELETE" + source: "metadata().op = Op.DELETE" lang: "painless" upsert: {} scripted_upsert: true diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml index 4145f6d7a7964..4d04f283b7f29 100644 --- 
a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml @@ -101,13 +101,13 @@ reason: "update metadata introduced in 8.4.0" - do: - catch: /type unavailable for insert/ + catch: /routing unavailable for insert/ update: index: test_1 id: "1" body: script: - source: "ctx._source.foo = meta().type" + source: "ctx._source.foo = metadata().routing" lang: "painless" upsert: {} scripted_upsert: true @@ -118,7 +118,7 @@ id: "2" body: script: - source: "ctx._source.foo = meta().index + '_1'; ctx._source.bar = 'nothing'" + source: "ctx._source.foo = metadata().index + '_1'; ctx._source.bar = 'nothing'" lang: "painless" upsert: {} scripted_upsert: true @@ -137,7 +137,7 @@ id: "3" body: script: - source: "meta().op = Op.NOOP; ctx._source.bar = 'skipped?'" + source: "metadata().op = 'noop'; ctx._source.bar = 'skipped?'" lang: "painless" upsert: {} scripted_upsert: true @@ -156,7 +156,7 @@ id: "3" body: script: - source: "meta().op = Op.CREATE; ctx._source.bar = 'skipped?'" + source: "metadata().op = 'create'; ctx._source.bar = 'skipped?'" lang: "painless" upsert: {} scripted_upsert: true @@ -176,7 +176,7 @@ id: "2" body: script: - source: "ctx._source.bar = meta().type + '-extra'" + source: "ctx._source.bar = metadata().op + '-extra'" lang: "painless" upsert: {} scripted_upsert: true diff --git a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java index d3a26838825d7..92c529b210868 100644 --- a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java +++ b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java @@ -226,17 +226,6 @@ public ZonedDateTime getZonedDateTime(Object key) { ); } - /** - * Puts the {@link ZonedDateTime} as a long representing milliseconds from epoch. 
- */ - public void putEpochMilli(String key, ZonedDateTime value) { - if (value == null) { - put(key, null); - return; - } - put(key, value.toInstant().toEpochMilli()); - } - /** * Set of entries of the wrapped map that calls the appropriate validator before changing an entries value or removing an entry. * @@ -486,7 +475,7 @@ public enum MapOperation { /** * A "TriConsumer" that tests if the {@link MapOperation}, the metadata key and value are valid. * - * @throws IllegalArgumentException if the given triple is invalid + * throws IllegalArgumentException if the given triple is invalid */ @FunctionalInterface public interface Validator { From bd3e4f29094245c0ccb69287dba629c9bcd56ddb Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 16:51:24 -0500 Subject: [PATCH 07/36] Script: Metadata for update context Adds the metadata() API call and a Metadata class for the Update context There are different metadata available in the update context depending on whether it is an update or an insert (via upsert). For update, scripts can read index, id, routing, version and timestamp. For insert, scripts can read index, id and timestamp. Scripts can always read and write the op but the available ops are different. Updates allow 'noop', 'index' and 'delete'. Inserts allow 'noop' and 'create'. 
Refs: #86472 --- .../painless/org.elasticsearch.script.update.txt | 1 - .../resources/rest-api-spec/test/painless/15_update.yml | 2 +- .../rest-api-spec/test/painless/25_script_upsert.yml | 4 ++-- .../org/elasticsearch/script/UpdateSourceAndMetadata.java | 4 ++-- .../org/elasticsearch/script/UpsertSourceAndMetadata.java | 6 +++--- 5 files changed, 8 insertions(+), 9 deletions(-) diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.update.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.update.txt index bec70394a6d87..ee2846331ee0f 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.update.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.update.txt @@ -27,7 +27,6 @@ class org.elasticsearch.script.Metadata { String getOp() void setOp(String) ZonedDateTime getTimestamp() - String getType() } class org.elasticsearch.script.UpdateScript { diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml index c29dbb87d5d22..e00f82af7cc76 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml @@ -155,7 +155,7 @@ id: "2" body: script: - source: "metadata().op = Op.DELETE" + source: "metadata().op = 'delete'" lang: "painless" upsert: {} scripted_upsert: true diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml index 4d04f283b7f29..865bed8de24e9 100644 --- 
a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml @@ -101,7 +101,7 @@ reason: "update metadata introduced in 8.4.0" - do: - catch: /routing unavailable for insert/ + catch: /routing is unavailable for insert/ update: index: test_1 id: "1" @@ -186,4 +186,4 @@ index: test_1 id: "2" - - match: { _source.bar: _doc-extra } + - match: { _source.bar: index-extra } diff --git a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java index 66eaaa34d8da8..954db65dea74b 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java @@ -104,7 +104,7 @@ public long getVersion() { @Override public String getOp() { - String op = super.getOp(); + String op = getString(OP); if (LEGACY_NOOP_STRING.equals(op)) { return "noop"; } @@ -116,7 +116,7 @@ public void setOp(String op) { if (LEGACY_NOOP_STRING.equals(op)) { throw new IllegalArgumentException(LEGACY_NOOP_STRING + " is deprecated, use 'noop' instead"); } - super.setOp(op); + put(OP, op); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java index 36360b1ccc620..1ba351359a734 100644 --- a/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java @@ -45,16 +45,16 @@ protected static Map metadataMap(String index, String id, String @Override public String getRouting() { - throw new IllegalStateException("routing is unavailable for insert"); + throw new IllegalArgumentException("routing is unavailable for insert"); } @Override public long getVersion() { - throw new 
IllegalStateException("version is unavailable for insert"); + throw new IllegalArgumentException("version is unavailable for insert"); } @Override public boolean hasVersion() { - throw new IllegalStateException("version is unavailable for insert"); + throw new IllegalArgumentException("version is unavailable for insert"); } } From 73e1743d08046d74457238fda38b0b6c658900ec Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 17:12:40 -0500 Subject: [PATCH 08/36] add opValidator javadocs --- .../java/org/elasticsearch/script/UpdateSourceAndMetadata.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java index 954db65dea74b..e7640a930d0e2 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java @@ -158,7 +158,7 @@ public static void setOnceLongValidator(MapOperation op, String key, Object valu } /** - * + * A {@link #stringValidator(MapOperation, String, Object)} that validates against a set of valid strings and forbids removal. 
*/ public static Validator opValidatorFromValidOps(Set validOps) { return new Validator() { @Override From 08042273a6cd42ffb29358e0e59b3bff50f2f8ab Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 17:55:14 -0500 Subject: [PATCH 09/36] Add hashCode for WriteableIngestDocumentTests.testFromXContent and leniency for UpdateRequestTests.testUpdateScript --- .../script/SourceAndMetadataMap.java | 14 ++++++ .../script/UpdateSourceAndMetadata.java | 49 ++++++++++--------- .../action/update/UpdateRequestTests.java | 14 +++--- 3 files changed, 47 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java index 92c529b210868..b602a20807ef2 100644 --- a/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java +++ b/server/src/main/java/org/elasticsearch/script/SourceAndMetadataMap.java @@ -481,4 +481,18 @@ public enum MapOperation { public interface Validator { void accept(MapOperation op, String key, Object value); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if ((o instanceof SourceAndMetadataMap) == false) return false; + if (super.equals(o) == false) return false; + SourceAndMetadataMap that = (SourceAndMetadataMap) o; + return validators.equals(that.validators) && source.equals(that.source) && metadata.equals(that.metadata); + } + + @Override + public int hashCode() { + return Objects.hash(source, metadata); + } } diff --git a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java index e7640a930d0e2..404fc65c9e89a 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java @@ -41,7 +41,7 @@ public class UpdateSourceAndMetadata extends SourceAndMetadataMap {
UpdateSourceAndMetadata::setOnceStringValidator, OP, - opValidatorFromValidOps(VALID_OPS), + UpdateSourceAndMetadata::stringValidator, TIMESTAMP, UpdateSourceAndMetadata::setOnceLongValidator ); @@ -105,7 +105,8 @@ public long getVersion() { @Override public String getOp() { String op = getString(OP); - if (LEGACY_NOOP_STRING.equals(op)) { + if (LEGACY_NOOP_STRING.equals(op) || VALID_OPS.contains(op) == false) { + // UpdateHelper.UpdateOpType.lenientFromString allows anything into the map return "noop"; } return op; @@ -116,6 +117,11 @@ public void setOp(String op) { if (LEGACY_NOOP_STRING.equals(op)) { throw new IllegalArgumentException(LEGACY_NOOP_STRING + " is deprecated, use 'noop' instead"); } + if (VALID_OPS.contains(op) == false) { + throw new IllegalArgumentException( + "[" + op + "] must be one of " + VALID_OPS.stream().sorted().collect(Collectors.joining(", ")) + ", not [" + op + "]" + ); + } put(OP, op); } @@ -161,31 +167,28 @@ public static void setOnceLongValidator(MapOperation op, String key, Object valu * A {@link #stringValidator(MapOperation, String, Object)} that validates against a set of valid strings and forbids removal. 
*/ public static Validator opValidatorFromValidOps(Set validOps) { - return new Validator() { - @Override - public void accept(MapOperation op, String key, Object value) { - if (op == MapOperation.REMOVE) { - throw new IllegalArgumentException("Cannot remove [" + key + "]"); - } - if (value instanceof String opStr) { - if (validOps.contains(opStr)) { - return; - } - throw new IllegalArgumentException( - key + " must be one of " + validOps.stream().sorted().collect(Collectors.joining(",")) + ", not [" + opStr + "]" - ); + return (op, key, value) -> { + if (op == MapOperation.REMOVE) { + throw new IllegalArgumentException("Cannot remove [" + key + "]"); + } + if (value instanceof String opStr) { + if (validOps.contains(opStr)) { + return; } throw new IllegalArgumentException( - key - + " must be String and one of " - + validOps.stream().sorted().collect(Collectors.joining(",")) - + " but was [" - + value - + "] with type [" - + value.getClass().getName() - + "]" + key + " must be one of " + validOps.stream().sorted().collect(Collectors.joining(", ")) + ", not [" + opStr + "]" ); } + throw new IllegalArgumentException( + key + + " must be String and one of " + + validOps.stream().sorted().collect(Collectors.joining(",")) + + " but was [" + + value + + "] with type [" + + value.getClass().getName() + + "]" + ); }; } } diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 5ee90a2f38684..d05b9c48e5766 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -91,19 +91,19 @@ public void setUp() throws Exception { ctx.put("_timestamp", ctx.get("_now")); return null; }); - scripts.put("ctx.op = delete", vars -> { + scripts.put("ctx.op = 'delete'", vars -> { @SuppressWarnings("unchecked") final Map ctx = (Map) vars.get("ctx"); 
ctx.put("op", "delete"); return null; }); - scripts.put("ctx.op = bad", vars -> { + scripts.put("ctx.op = 'bad'", vars -> { @SuppressWarnings("unchecked") final Map ctx = (Map) vars.get("ctx"); ctx.put("op", "bad"); return null; }); - scripts.put("ctx.op = none", vars -> { + scripts.put("ctx.op = 'none'", vars -> { @SuppressWarnings("unchecked") final Map ctx = (Map) vars.get("ctx"); ctx.put("op", "none"); @@ -381,7 +381,7 @@ public void testIndexTimeout() { public void testDeleteTimeout() { final GetResult getResult = new GetResult("test", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null, null); - final UpdateRequest updateRequest = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = delete")) + final UpdateRequest updateRequest = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = 'delete'")) .timeout(randomTimeValue()); runTimeoutTest(getResult, updateRequest); } @@ -598,7 +598,7 @@ public void testUpdateScript() throws Exception { assertThat(result.updatedSourceAsMap().get("body").toString(), equalTo("foo")); // Now where the script changes the op to "delete" - request = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = delete")); + request = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = 'delete'")); result = updateHelper.prepareUpdateScriptRequest(shardId, request, getResult, ESTestCase::randomNonNegativeLong); @@ -608,9 +608,9 @@ public void testUpdateScript() throws Exception { // We treat everything else as a No-op boolean goodNoop = randomBoolean(); if (goodNoop) { - request = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = none")); + request = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = 'none'")); } else { - request = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = bad")); + request = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = 'bad'")); } result = updateHelper.prepareUpdateScriptRequest(shardId, request, 
getResult, ESTestCase::randomNonNegativeLong); From c86cd15d81d3d40e6613ab0b025105484641aaa7 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 18:06:26 -0500 Subject: [PATCH 10/36] create is valid in scripts --- .../java/org/elasticsearch/script/UpdateSourceAndMetadata.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java index 404fc65c9e89a..7ba028b46cf50 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java @@ -27,7 +27,7 @@ public class UpdateSourceAndMetadata extends SourceAndMetadataMap { // AbstractAsyncBulkByScrollAction.OpType uses 'noop' rather than 'none', so unify on 'noop' but allow 'none' in // the ctx map protected static final String LEGACY_NOOP_STRING = "none"; - protected static final Set VALID_OPS = Set.of("noop", "index", "delete", LEGACY_NOOP_STRING); + protected static final Set VALID_OPS = Set.of("noop", "create", "index", "delete", LEGACY_NOOP_STRING); public static Map VALIDATORS = Map.of( INDEX, From 4ee36edd3c4f1cdaaf837f5773a02c8a90859eb3 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 18:13:58 -0500 Subject: [PATCH 11/36] Pass valid ops --- .../script/UpdateSourceAndMetadata.java | 14 +++++++++----- .../script/UpsertSourceAndMetadata.java | 6 +++--- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java index 7ba028b46cf50..d71975ee7ed13 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java @@ -27,7 +27,7 @@ public class UpdateSourceAndMetadata 
extends SourceAndMetadataMap { // AbstractAsyncBulkByScrollAction.OpType uses 'noop' rather than 'none', so unify on 'noop' but allow 'none' in // the ctx map protected static final String LEGACY_NOOP_STRING = "none"; - protected static final Set VALID_OPS = Set.of("noop", "create", "index", "delete", LEGACY_NOOP_STRING); + protected static final Set VALID_UPDATE_OPS = Set.of("noop", "index", "delete", LEGACY_NOOP_STRING); public static Map VALIDATORS = Map.of( INDEX, @@ -46,6 +46,8 @@ public class UpdateSourceAndMetadata extends SourceAndMetadataMap { UpdateSourceAndMetadata::setOnceLongValidator ); + protected final Set validOps; + public UpdateSourceAndMetadata( String index, String id, @@ -57,10 +59,12 @@ public UpdateSourceAndMetadata( Map source ) { super(wrapSource(source), metadataMap(index, id, version, routing, type, op, timestamp), VALIDATORS); + validOps = VALID_UPDATE_OPS; } - protected UpdateSourceAndMetadata(Map source, Map metadata, Map validators) { + protected UpdateSourceAndMetadata(Map source, Map metadata, Map validators, Set validOps) { super(wrapSource(source), metadata, validators); + this.validOps = validOps; } protected static Map wrapSource(Map source) { @@ -105,7 +109,7 @@ public long getVersion() { @Override public String getOp() { String op = getString(OP); - if (LEGACY_NOOP_STRING.equals(op) || VALID_OPS.contains(op) == false) { + if (LEGACY_NOOP_STRING.equals(op) || validOps.contains(op) == false) { // UpdateHelper.UpdateOpType.lenientFromString allows anything into the map return "noop"; } @@ -117,9 +121,9 @@ public void setOp(String op) { if (LEGACY_NOOP_STRING.equals(op)) { throw new IllegalArgumentException(LEGACY_NOOP_STRING + " is deprecated, use 'noop' instead"); } - if (VALID_OPS.contains(op) == false) { + if (validOps.contains(op) == false) { throw new IllegalArgumentException( - "[" + op + "] must be one of " + VALID_OPS.stream().sorted().collect(Collectors.joining(", ")) + ", not [" + op + "]" + "[" + op + "] must be one 
of " + VALID_UPDATE_OPS.stream().sorted().collect(Collectors.joining(", ")) + ", not [" + op + "]" ); } put(OP, op); diff --git a/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java index 1ba351359a734..bc1eccd733515 100644 --- a/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpsertSourceAndMetadata.java @@ -17,7 +17,7 @@ * Metadata for insert via upsert in the Update context */ public class UpsertSourceAndMetadata extends UpdateSourceAndMetadata { - protected static final Set VALID_OPS = Set.of("noop", "create", LEGACY_NOOP_STRING); + protected static final Set VALID_UPSERT_OPS = Set.of("noop", "create", LEGACY_NOOP_STRING); public static Map VALIDATORS = Map.of( INDEX, @@ -25,13 +25,13 @@ public class UpsertSourceAndMetadata extends UpdateSourceAndMetadata { ID, UpdateSourceAndMetadata::setOnceStringValidator, OP, - opValidatorFromValidOps(VALID_OPS), + opValidatorFromValidOps(VALID_UPSERT_OPS), TIMESTAMP, UpdateSourceAndMetadata::setOnceLongValidator ); public UpsertSourceAndMetadata(String index, String id, String op, long timestamp, Map source) { - super(source, metadataMap(index, id, op, timestamp), VALIDATORS); + super(source, metadataMap(index, id, op, timestamp), VALIDATORS, VALID_UPSERT_OPS); } protected static Map metadataMap(String index, String id, String op, long timestamp) { From c9359d52b555b3839b1f8fb19fe173a6d15f127e Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 18:37:20 -0500 Subject: [PATCH 12/36] spotless UpdateSourceAndMetadata --- .../org/elasticsearch/script/UpdateSourceAndMetadata.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java index d71975ee7ed13..0687515aed9b1 
100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateSourceAndMetadata.java @@ -62,7 +62,12 @@ public UpdateSourceAndMetadata( validOps = VALID_UPDATE_OPS; } - protected UpdateSourceAndMetadata(Map source, Map metadata, Map validators, Set validOps) { + protected UpdateSourceAndMetadata( + Map source, + Map metadata, + Map validators, + Set validOps + ) { super(wrapSource(source), metadata, validators); this.validOps = validOps; } From 7c35fc26548b41e9bca082a057e6adb5c85a1822 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 6 Jul 2022 20:37:15 -0500 Subject: [PATCH 13/36] Update docs/changelog/88333.yaml --- docs/changelog/88333.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 docs/changelog/88333.yaml diff --git a/docs/changelog/88333.yaml b/docs/changelog/88333.yaml new file mode 100644 index 0000000000000..f72dbe19ff1b7 --- /dev/null +++ b/docs/changelog/88333.yaml @@ -0,0 +1,5 @@ +pr: 88333 +summary: "Script: Metadata for update context" +area: Infra/Scripting +type: enhancement +issues: [] From 4cc8484d5893a98fb6d25e9fd99228424bdd0df6 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Mon, 11 Jul 2022 12:25:39 -0400 Subject: [PATCH 14/36] Improve error when sorting on incompatible types (#88399) Currently when sorting on incompatible types, we get class_cast_exception error (code 500). This patch improves the error to explain that the problem is because of incompatible sort types for the field across different shards and returns user error (code 400). 
Closes #73146 --- docs/changelog/88399.yaml | 6 +++ .../search/sort/FieldSortIT.java | 39 ++++++++++++++ .../action/search/SearchPhaseController.java | 53 +++++++++++++++++++ 3 files changed, 98 insertions(+) create mode 100644 docs/changelog/88399.yaml diff --git a/docs/changelog/88399.yaml b/docs/changelog/88399.yaml new file mode 100644 index 0000000000000..f38fc092ae629 --- /dev/null +++ b/docs/changelog/88399.yaml @@ -0,0 +1,6 @@ +pr: 88399 +summary: Improve error when sorting on incompatible types +area: Search +type: enhancement +issues: + - 73146 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 1593aed61df44..4e4997134fdcc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -2122,4 +2122,43 @@ public void testLongSortOptimizationCorrectResults() { } } + public void testSortMixedFieldTypes() { + assertAcked(prepareCreate("index_long").setMapping("foo", "type=long").get()); + assertAcked(prepareCreate("index_integer").setMapping("foo", "type=integer").get()); + assertAcked(prepareCreate("index_double").setMapping("foo", "type=double").get()); + assertAcked(prepareCreate("index_keyword").setMapping("foo", "type=keyword").get()); + + client().prepareIndex("index_long").setId("1").setSource("foo", "123").get(); + client().prepareIndex("index_integer").setId("1").setSource("foo", "123").get(); + client().prepareIndex("index_double").setId("1").setSource("foo", "123").get(); + client().prepareIndex("index_keyword").setId("1").setSource("foo", "123").get(); + refresh(); + + { // mixing long and integer types is ok, as we convert integer sort to long sort + SearchResponse searchResponse = client().prepareSearch("index_long", "index_integer") + .addSort(new FieldSortBuilder("foo")) + .setSize(10) + 
.get(); + assertSearchResponse(searchResponse); + } + + String errMsg = "Can't sort on field [foo]; the field has incompatible sort types"; + + { // mixing long and double types is not allowed + SearchPhaseExecutionException exc = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("index_long", "index_double").addSort(new FieldSortBuilder("foo")).setSize(10).get() + ); + assertThat(exc.getCause().toString(), containsString(errMsg)); + } + + { // mixing long and keyword types is not allowed + SearchPhaseExecutionException exc = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("index_long", "index_keyword").addSort(new FieldSortBuilder("foo")).setSize(10).get() + ); + assertThat(exc.getCause().toString(), containsString(errMsg)); + } + } + } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index b119fcfb45bc3..aa44ab318b6f1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -14,6 +14,8 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSortField; +import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; @@ -197,6 +199,7 @@ static TopDocs mergeTopDocs(Collection results, int topN, int from) { final TopFieldGroups[] shardTopDocs = results.toArray(new TopFieldGroups[numShards]); mergedTopDocs = TopFieldGroups.merge(sort, from, topN, shardTopDocs, false); } else if (topDocs instanceof TopFieldDocs firstTopDocs) { + checkSameSortTypes(results, firstTopDocs.fields); final Sort sort = new 
Sort(firstTopDocs.fields); final TopFieldDocs[] shardTopDocs = results.toArray(new TopFieldDocs[numShards]); mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs); @@ -207,6 +210,56 @@ static TopDocs mergeTopDocs(Collection results, int topN, int from) { return mergedTopDocs; } + private static void checkSameSortTypes(Collection results, SortField[] firstSortFields) { + if (results.size() < 2) return; + + SortField.Type[] firstTypes = new SortField.Type[firstSortFields.length]; + boolean isFirstResult = true; + for (TopDocs topDocs : results) { + SortField[] curSortFields = ((TopFieldDocs) topDocs).fields; + if (isFirstResult) { + for (int i = 0; i < curSortFields.length; i++) { + firstTypes[i] = getType(firstSortFields[i]); + if (firstTypes[i] == SortField.Type.CUSTOM) { + // for custom types that we can't resolve, we can't do the check + return; + } + } + isFirstResult = false; + } else { + for (int i = 0; i < curSortFields.length; i++) { + SortField.Type curType = getType(curSortFields[i]); + if (curType != firstTypes[i]) { + if (curType == SortField.Type.CUSTOM) { + // for custom types that we can't resolve, we can't do the check + return; + } + throw new IllegalArgumentException( + "Can't sort on field [" + + curSortFields[i].getField() + + "]; the field has incompatible sort types: [" + + firstTypes[i] + + "] and [" + + curType + + "] across shards!" 
+ ); + } + } + } + } + } + + private static SortField.Type getType(SortField sortField) { + if (sortField instanceof SortedNumericSortField) { + return ((SortedNumericSortField) sortField).getNumericType(); + } + if (sortField instanceof SortedSetSortField) { + return SortField.Type.STRING; + } else { + return sortField.getType(); + } + } + static void setShardIndex(TopDocs topDocs, int shardIndex) { assert topDocs.scoreDocs.length == 0 || topDocs.scoreDocs[0].shardIndex == -1 : "shardIndex is already set"; for (ScoreDoc doc : topDocs.scoreDocs) { From f4509dbbff4383366cf4f7022898f029470423d9 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Mon, 11 Jul 2022 18:28:54 -0500 Subject: [PATCH 15/36] Add UpdateCtxMap and UpsertCtxMap --- ...AsyncBulkByScrollActionScriptTestCase.java | 2 +- .../action/update/UpdateHelper.java | 18 +- .../elasticsearch/script/UpdateCtxMap.java | 195 ++++++------------ .../elasticsearch/script/UpdateScript.java | 23 ++- .../elasticsearch/script/UpsertCtxMap.java | 77 +++---- 5 files changed, 131 insertions(+), 184 deletions(-) diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java index 8914647574f0c..02d5aaef31098 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java @@ -50,7 +50,7 @@ protected T applyScript(Consumer> IndexRequest index = new IndexRequest("index").id("1").source(singletonMap("foo", "bar")); ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "id", 0); when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn( - (params, ctx, md) -> new UpdateScript(Collections.emptyMap(), ctx, md) { + (params, ctx) -> new 
UpdateScript(Collections.emptyMap(), ctx) { @Override public void execute() { scriptBody.accept(ctx); diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 2496278605e1d..83bceeaceedf2 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -32,8 +32,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.UpdateScript; -import org.elasticsearch.script.UpdateSourceAndMetadata; -import org.elasticsearch.script.UpsertSourceAndMetadata; +import org.elasticsearch.script.UpdateCtxMap; +import org.elasticsearch.script.UpsertCtxMap; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -88,7 +88,7 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult * Execute a scripted upsert, where there is an existing upsert document and a script to be executed. The script is executed and a new * Tuple of operation and updated {@code _source} is returned. 
*/ - Tuple> executeScriptedUpsert(Script script, UpsertSourceAndMetadata srcAndMeta) { + Tuple> executeScriptedUpsert(Script script, UpsertCtxMap srcAndMeta) { // Tell the script that this is a create and not an update (insert from upsert) srcAndMeta = executeScript(script, srcAndMeta); UpdateOpType operation = UpdateOpType.lenientFromString(srcAndMeta.getOp(), logger, script.getIdOrCode()); @@ -113,7 +113,7 @@ Result prepareUpsert(ShardId shardId, UpdateRequest request, final GetResult get if (request.scriptedUpsert() && request.script() != null) { // Run the script to perform the create logic IndexRequest upsert = request.upsertRequest(); - UpsertSourceAndMetadata srcAndMeta = new UpsertSourceAndMetadata( + UpsertCtxMap srcAndMeta = new UpsertCtxMap( getResult.getIndex(), getResult.getId(), UpdateOpType.CREATE.toString(), @@ -234,9 +234,9 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes final Tuple> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); final XContentType updateSourceContentType = sourceAndContent.v1(); - UpdateSourceAndMetadata srcAndMeta = executeScript( + UpdateCtxMap srcAndMeta = executeScript( request.script, - new UpdateSourceAndMetadata( + new UpdateCtxMap( getResult.getIndex(), getResult.getId(), getResult.getVersion(), @@ -301,17 +301,17 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes } } - private T executeScript(Script script, T metadata) { + private T executeScript(Script script, T ctxMap) { try { if (scriptService != null) { UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT); - UpdateScript executableScript = factory.newInstance(script.getParams(), metadata, metadata); + UpdateScript executableScript = factory.newInstance(script.getParams(), ctxMap); executableScript.execute(); } } catch (Exception e) { throw new IllegalArgumentException("failed to execute script", e); } - return metadata; + return 
ctxMap; } /** diff --git a/server/src/main/java/org/elasticsearch/script/UpdateCtxMap.java b/server/src/main/java/org/elasticsearch/script/UpdateCtxMap.java index 0687515aed9b1..a8da53d4eb247 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateCtxMap.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateCtxMap.java @@ -10,45 +10,23 @@ import org.elasticsearch.common.util.Maps; -import java.time.ZonedDateTime; -import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.BiConsumer; import java.util.stream.Collectors; /** * Source and metadata for update (as opposed to insert via upsert) in the Update context. */ -public class UpdateSourceAndMetadata extends SourceAndMetadataMap { +public class UpdateCtxMap extends CtxMap { protected static final String OP = "op"; - protected static final String TIMESTAMP = "_now"; protected static final String SOURCE = "_source"; // AbstractAsyncBulkByScrollAction.OpType uses 'noop' rather than 'none', so unify on 'noop' but allow 'none' in // the ctx map protected static final String LEGACY_NOOP_STRING = "none"; - protected static final Set VALID_UPDATE_OPS = Set.of("noop", "index", "delete", LEGACY_NOOP_STRING); - - public static Map VALIDATORS = Map.of( - INDEX, - UpdateSourceAndMetadata::setOnceStringValidator, - ID, - UpdateSourceAndMetadata::setOnceStringValidator, - VERSION, - UpdateSourceAndMetadata::setOnceLongValidator, - ROUTING, - UpdateSourceAndMetadata::setOnceStringValidator, - TYPE, - UpdateSourceAndMetadata::setOnceStringValidator, - OP, - UpdateSourceAndMetadata::stringValidator, - TIMESTAMP, - UpdateSourceAndMetadata::setOnceLongValidator - ); - - protected final Set validOps; - - public UpdateSourceAndMetadata( + + public UpdateCtxMap( String index, String id, long version, @@ -58,18 +36,11 @@ public UpdateSourceAndMetadata( long timestamp, Map source ) { - super(wrapSource(source), metadataMap(index, id, version, routing, type, op, timestamp), 
VALIDATORS); - validOps = VALID_UPDATE_OPS; + super(wrapSource(source), new UpdateMetadata(index, id, version, routing, type, op, timestamp)); } - protected UpdateSourceAndMetadata( - Map source, - Map metadata, - Map validators, - Set validOps - ) { - super(wrapSource(source), metadata, validators); - this.validOps = validOps; + protected UpdateCtxMap(Map source, Metadata metadata) { + super(wrapSource(source), metadata); } protected static Map wrapSource(Map source) { @@ -78,60 +49,20 @@ protected static Map wrapSource(Map source) { return wrapper; } - protected static Map metadataMap( - String index, - String id, - long version, - String routing, - String type, - String op, - long timestamp - ) { - Map metadata = Maps.newHashMapWithExpectedSize(VALIDATORS.size()); - metadata.put(INDEX, index); - metadata.put(ID, id); - metadata.put(VERSION, version); - metadata.put(ROUTING, routing); - metadata.put(TYPE, type); - metadata.put(OP, op); - metadata.put(TIMESTAMP, timestamp); - return metadata; - } - - @Override - public boolean hasVersion() { - return metadata.get(VERSION) != null; - } - - @Override - public long getVersion() { - if (hasVersion() == false) { - return Long.MIN_VALUE; - } - return super.getVersion(); - } - - @Override public String getOp() { - String op = getString(OP); - if (LEGACY_NOOP_STRING.equals(op) || validOps.contains(op) == false) { - // UpdateHelper.UpdateOpType.lenientFromString allows anything into the map + String op = metadata.getString(OP); + if (LEGACY_NOOP_STRING.equals(op)) { + // TODO(stu): UpdateHelper.UpdateOpType.lenientFromString allows anything into the map return "noop"; } return op; } - @Override public void setOp(String op) { if (LEGACY_NOOP_STRING.equals(op)) { throw new IllegalArgumentException(LEGACY_NOOP_STRING + " is deprecated, use 'noop' instead"); } - if (validOps.contains(op) == false) { - throw new IllegalArgumentException( - "[" + op + "] must be one of " + 
VALID_UPDATE_OPS.stream().sorted().collect(Collectors.joining(", ")) + ", not [" + op + "]" - ); - } - put(OP, op); + metadata.put(OP, op); } @Override @@ -147,57 +78,67 @@ public Map getSource() { ); } - @Override - public ZonedDateTime getTimestamp() { - return getZonedDateTime(TIMESTAMP); - } + public static class UpdateMetadata extends Metadata { + protected static final String TIMESTAMP = "_now"; + protected static final FieldProperty SET_ONCE_STRING = new FieldProperty<>(String.class, true, false, null); + protected static final FieldProperty SET_ONCE_LONG = new FieldProperty<>( + Number.class, + false, + false, + FieldProperty.LONGABLE_NUMBER + ); - /** - * A validator that allows string values to be set once but neither updated nor removed. - */ - public static void setOnceStringValidator(MapOperation op, String key, Object value) { - if (op != MapOperation.INIT) { - throw new IllegalArgumentException("Cannot " + op.name().toLowerCase(Locale.ROOT) + " key [" + key + "]"); + protected static final Set VALID_UPDATE_OPS = Set.of("noop", "index", "delete", LEGACY_NOOP_STRING); + + static final Map> PROPERTIES = Map.of( + INDEX, + SET_ONCE_STRING, + ID, + SET_ONCE_STRING, + VERSION, + SET_ONCE_LONG, + OP, + new FieldProperty<>(String.class, true, true, setValidator(VALID_UPDATE_OPS)), + TIMESTAMP, + SET_ONCE_LONG + ); + + protected static BiConsumer setValidator(Set valid) { + return (k, v) -> { + if (valid.contains(v) == false) { + throw new IllegalArgumentException( + "[" + k + "] must be one of " + valid.stream().sorted().collect(Collectors.joining(", ")) + ", not [" + v + "]" + ); + } + }; } - stringValidator(op, key, value); - } - /** - * Same as {@link #longValidator(MapOperation, String, Object)} but allows longs to be set once and neither updated nor removed. 
- */ - public static void setOnceLongValidator(MapOperation op, String key, Object value) { - if (op != MapOperation.INIT) { - throw new IllegalArgumentException("Cannot " + op.name().toLowerCase(Locale.ROOT) + " key [" + key + "]"); + public UpdateMetadata(String index, String id, long version, String routing, String type, String op, long timestamp) { + this(metadataMap(index, id, version, routing, type, op, timestamp), PROPERTIES); } - longValidator(op, key, value); - } - /** - * A {@link #stringValidator(MapOperation, String, Object)} that validates against a set of valid strings and forbids removal. - */ - public static Validator opValidatorFromValidOps(Set validOps) { - return (op, key, value) -> { - if (op == MapOperation.REMOVE) { - throw new IllegalArgumentException("Cannot remove [" + key + "]"); - } - if (value instanceof String opStr) { - if (validOps.contains(opStr)) { - return; - } - throw new IllegalArgumentException( - key + " must be one of " + validOps.stream().sorted().collect(Collectors.joining(", ")) + ", not [" + opStr + "]" - ); - } - throw new IllegalArgumentException( - key - + " must be String and one of " - + validOps.stream().sorted().collect(Collectors.joining(",")) - + " but was [" - + value - + "] with type [" - + value.getClass().getName() - + "]" - ); - }; + protected UpdateMetadata(Map metadata, Map> properties) { + super(metadata, properties); + } + + protected static Map metadataMap( + String index, + String id, + long version, + String routing, + String type, + String op, + long timestamp + ) { + Map metadata = Maps.newHashMapWithExpectedSize(PROPERTIES.size()); + metadata.put(INDEX, index); + metadata.put(ID, id); + metadata.put(VERSION, version); + metadata.put(ROUTING, routing); + metadata.put(TYPE, type); + metadata.put(OP, op); + metadata.put(TIMESTAMP, timestamp); + return metadata; + } } } diff --git a/server/src/main/java/org/elasticsearch/script/UpdateScript.java 
b/server/src/main/java/org/elasticsearch/script/UpdateScript.java index ac7edd726d5de..4784b6f6067c9 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateScript.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateScript.java @@ -24,14 +24,11 @@ public abstract class UpdateScript { /** The generic runtime parameters for the script. */ private final Map params; - private final Metadata metadata; + private final UpdateCtxMap ctxMap; - private final Map ctx; - - public UpdateScript(Map params, Map ctx, Metadata metadata) { + public UpdateScript(Map params, UpdateCtxMap ctxMap) { this.params = params; - this.ctx = ctx; - this.metadata = metadata; + this.ctxMap = ctxMap; } /** Return the parameters for this script. */ @@ -41,17 +38,25 @@ public Map getParams() { /** Return the update context for this script. */ public Map getCtx() { - return ctx; + return ctxMap; } /** Return the update metadata for this script */ public Metadata metadata() { - return metadata; + return ctxMap.getMetadata(); + } + + public void setOp(String op) { + ctxMap.setOp(op); + } + + public String getOp() { + return ctxMap.getOp(); } public abstract void execute(); public interface Factory { - UpdateScript newInstance(Map params, Map ctx, Metadata metadata); + UpdateScript newInstance(Map params, UpdateCtxMap ctxMap); } } diff --git a/server/src/main/java/org/elasticsearch/script/UpsertCtxMap.java b/server/src/main/java/org/elasticsearch/script/UpsertCtxMap.java index bc1eccd733515..4950d2965ad20 100644 --- a/server/src/main/java/org/elasticsearch/script/UpsertCtxMap.java +++ b/server/src/main/java/org/elasticsearch/script/UpsertCtxMap.java @@ -16,45 +16,46 @@ /** * Metadata for insert via upsert in the Update context */ -public class UpsertSourceAndMetadata extends UpdateSourceAndMetadata { - protected static final Set VALID_UPSERT_OPS = Set.of("noop", "create", LEGACY_NOOP_STRING); - - public static Map VALIDATORS = Map.of( - INDEX, - 
UpdateSourceAndMetadata::setOnceStringValidator, - ID, - UpdateSourceAndMetadata::setOnceStringValidator, - OP, - opValidatorFromValidOps(VALID_UPSERT_OPS), - TIMESTAMP, - UpdateSourceAndMetadata::setOnceLongValidator - ); - - public UpsertSourceAndMetadata(String index, String id, String op, long timestamp, Map source) { - super(source, metadataMap(index, id, op, timestamp), VALIDATORS, VALID_UPSERT_OPS); +public class UpsertCtxMap extends UpdateCtxMap { + public UpsertCtxMap(String index, String id, String op, long timestamp, Map source) { + super(source, new UpsertMetadata(index, id, op, timestamp)); } - protected static Map metadataMap(String index, String id, String op, long timestamp) { - Map metadata = Maps.newHashMapWithExpectedSize(VALIDATORS.size()); - metadata.put(INDEX, index); - metadata.put(ID, id); - metadata.put(OP, op); - metadata.put(TIMESTAMP, timestamp); - return metadata; - } - - @Override - public String getRouting() { - throw new IllegalArgumentException("routing is unavailable for insert"); - } - - @Override - public long getVersion() { - throw new IllegalArgumentException("version is unavailable for insert"); - } - - @Override - public boolean hasVersion() { - throw new IllegalArgumentException("version is unavailable for insert"); + static class UpsertMetadata extends UpdateMetadata { + protected static final Set VALID_UPSERT_OPS = Set.of("noop", "create", LEGACY_NOOP_STRING); + + static final Map> PROPERTIES = Map.of( + INDEX, + SET_ONCE_STRING, + ID, + SET_ONCE_STRING, + OP, + new FieldProperty<>(String.class, true, true, setValidator(VALID_UPSERT_OPS)), + TIMESTAMP, + SET_ONCE_LONG + ); + + public UpsertMetadata(String index, String id, String op, long timestamp) { + super(metadataMap(index, id, op, timestamp), PROPERTIES); + } + + protected static Map metadataMap(String index, String id, String op, long timestamp) { + Map metadata = Maps.newHashMapWithExpectedSize(PROPERTIES.size()); + metadata.put(INDEX, index); + metadata.put(ID, 
id); + metadata.put(OP, op); + metadata.put(TIMESTAMP, timestamp); + return metadata; + } + + @Override + public String getRouting() { + throw new IllegalArgumentException("routing is unavailable for insert"); + } + + @Override + public long getVersion() { + throw new IllegalArgumentException("version is unavailable for insert"); + } } } From b55a0bc0aadadb655015f171b853d6d62a584d7e Mon Sep 17 00:00:00 2001 From: Nikola Grcevski <6207777+grcevski@users.noreply.github.com> Date: Mon, 11 Jul 2022 21:58:53 -0400 Subject: [PATCH 16/36] Fix test memory leak (#88362) --- .../elasticsearch/plugins/PluginsService.java | 13 ++++++++++--- .../plugins/PluginsServiceTests.java | 4 ++++ .../ilm/ILMImmutableStateHandlerProvider.java | 19 ++++++++++--------- .../xpack/ilm/IndexLifecycle.java | 10 +++++++--- 4 files changed, 31 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index 058885de79f81..7e2e13d5343f5 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -309,7 +309,7 @@ public List loadServiceProviders(Class service) { List result = new ArrayList<>(); for (LoadedPlugin pluginTuple : plugins()) { - ServiceLoader.load(service, pluginTuple.loader()).iterator().forEachRemaining(c -> result.add(c)); + result.addAll(createExtensions(service, pluginTuple.instance)); } return Collections.unmodifiableList(result); @@ -348,11 +348,18 @@ static T createExtension(Class extensionClass, Class extensi throw new IllegalStateException("no public " + extensionConstructorMessage(extensionClass, extensionPointType)); } - if (constructors.length > 1) { + Constructor constructor = constructors[0]; + // Using modules and SPI requires that we declare the default no-arg constructor apart from our custom + // one arg constructor with a plugin. 
+ if (constructors.length == 2) { + // we prefer the one arg constructor in this case + if (constructors[1].getParameterCount() > 0) { + constructor = constructors[1]; + } + } else if (constructors.length > 1) { throw new IllegalStateException("no unique public " + extensionConstructorMessage(extensionClass, extensionPointType)); } - final Constructor constructor = constructors[0]; if (constructor.getParameterCount() > 1) { throw new IllegalStateException(extensionSignatureMessage(extensionClass, extensionPointType, plugin)); } diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index ad334a2d42f8a..ac0e967face4f 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -531,6 +531,10 @@ public TestExtension() {} public TestExtension(TestPlugin plugin) { } + + public TestExtension(TestPlugin plugin, String anotherArg) { + + } } IllegalStateException e = expectThrows( IllegalStateException.class, diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/ILMImmutableStateHandlerProvider.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/ILMImmutableStateHandlerProvider.java index afd2397b8fb6f..fb69802fbf09f 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/ILMImmutableStateHandlerProvider.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/ILMImmutableStateHandlerProvider.java @@ -10,23 +10,24 @@ import org.elasticsearch.immutablestate.ImmutableClusterStateHandler; import org.elasticsearch.immutablestate.ImmutableClusterStateHandlerProvider; -import java.util.Arrays; import java.util.Collection; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; /** * ILM Provider implementation for the {@link ImmutableClusterStateHandlerProvider} service interface */ public class 
ILMImmutableStateHandlerProvider implements ImmutableClusterStateHandlerProvider { - private static final Set> handlers = ConcurrentHashMap.newKeySet(); + private final IndexLifecycle plugin; - @Override - public Collection> handlers() { - return handlers; + public ILMImmutableStateHandlerProvider() { + throw new IllegalStateException("Provider must be constructed using PluginsService"); + } + + public ILMImmutableStateHandlerProvider(IndexLifecycle plugin) { + this.plugin = plugin; } - public static void registerHandlers(ImmutableClusterStateHandler... stateHandlers) { - handlers.addAll(Arrays.asList(stateHandlers)); + @Override + public Collection> handlers() { + return plugin.immutableClusterStateHandlers(); } } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index b7f824b4c2845..e477442bcc2e4 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -27,6 +27,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.immutablestate.ImmutableClusterStateHandler; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.license.XPackLicenseState; @@ -159,6 +160,7 @@ public class IndexLifecycle extends Plugin implements ActionPlugin, HealthPlugin private final SetOnce snapshotHistoryStore = new SetOnce<>(); private final SetOnce ilmHealthIndicatorService = new SetOnce<>(); private final SetOnce slmHealthIndicatorService = new SetOnce<>(); + private final SetOnce immutableLifecycleAction = new SetOnce<>(); private final Settings settings; public IndexLifecycle(Settings settings) { @@ -268,10 +270,8 @@ public Collection createComponents( 
components.addAll(Arrays.asList(snapshotLifecycleService.get(), snapshotHistoryStore.get(), snapshotRetentionService.get())); ilmHealthIndicatorService.set(new IlmHealthIndicatorService(clusterService)); slmHealthIndicatorService.set(new SlmHealthIndicatorService(clusterService)); + immutableLifecycleAction.set(new ImmutableLifecycleAction(xContentRegistry, client, XPackPlugin.getSharedLicenseState())); - ILMImmutableStateHandlerProvider.registerHandlers( - new ImmutableLifecycleAction(xContentRegistry, client, XPackPlugin.getSharedLicenseState()) - ); return components; } @@ -422,6 +422,10 @@ public List getRestHandlers( return actions; } + List> immutableClusterStateHandlers() { + return List.of(immutableLifecycleAction.get()); + } + @Override public Collection getHealthIndicatorServices() { return List.of(ilmHealthIndicatorService.get(), slmHealthIndicatorService.get()); From 78244b74d4a5eeffc16edb92309578560280ec4d Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 12 Jul 2022 06:45:50 +0200 Subject: [PATCH 17/36] Remove usages of TestGeoShapeFieldMapperPlugin from enrich module (#88440) --- x-pack/plugin/enrich/build.gradle | 1 + .../java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java | 4 ++-- .../elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/enrich/build.gradle b/x-pack/plugin/enrich/build.gradle index 0fb22fe48ced8..ea59ebe2a2a92 100644 --- a/x-pack/plugin/enrich/build.gradle +++ b/x-pack/plugin/enrich/build.gradle @@ -14,6 +14,7 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) testImplementation project(path: ':modules:ingest-common') testImplementation project(path: ':modules:lang-mustache') + testImplementation project(path: ':modules:legacy-geo') testImplementation project(xpackModule('spatial')) testImplementation(testArtifact(project(xpackModule('monitoring')))) } diff --git 
a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java index 48d97fe6d7b35..43e982cec08f1 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; @@ -33,6 +32,7 @@ import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.spatial.SpatialPlugin; import java.util.Arrays; import java.util.Collection; @@ -61,7 +61,7 @@ protected Collection> getPlugins() { ReindexPlugin.class, IngestCommonPlugin.class, MustachePlugin.class, - TestGeoShapeFieldMapperPlugin.class + SpatialPlugin.class ); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index 051ce60e87639..000864758d984 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -52,7 +52,6 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESSingleNodeTestCase; -import 
org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; @@ -63,6 +62,7 @@ import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; import org.elasticsearch.xpack.enrich.action.EnrichReindexAction; +import org.elasticsearch.xpack.spatial.SpatialPlugin; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -87,7 +87,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return Arrays.asList(ReindexPlugin.class, IngestCommonPlugin.class, TestGeoShapeFieldMapperPlugin.class, LocalStateEnrich.class); + return Arrays.asList(ReindexPlugin.class, IngestCommonPlugin.class, SpatialPlugin.class, LocalStateEnrich.class); } private static ThreadPool testThreadPool; From dd1bd8323495814cae713ae67f28b96c63495bd7 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 12 Jul 2022 06:52:14 +0200 Subject: [PATCH 18/36] Don't index geo_shape field in AbstractBuilderTestCase (#88437) This commit stops adding the geo_shape field mapper by default and adds the mapper only when it is needed. 
--- .../extras/RankFeatureQueryBuilderTests.java | 3 +- .../join/aggregations/ChildrenTests.java | 3 +- .../join/aggregations/ParentTests.java | 3 +- .../join/query/HasChildQueryBuilderTests.java | 3 +- .../query/HasParentQueryBuilderTests.java | 3 +- .../join/query/ParentIdQueryBuilderTests.java | 3 +- .../PercolateQueryBuilderTests.java | 3 +- .../GeoBoundingBoxQueryBuilderTests.java | 29 +++++++++++++++++- .../query/GeoDistanceQueryBuilderTests.java | 30 ++++++++++++++++++- .../GeoShapeQueryBuilderGeoShapeTests.java | 24 +++++++++++++++ .../index/query/TermsQueryBuilderTests.java | 1 - .../query/TermsSetQueryBuilderTests.java | 12 ++------ .../FunctionScoreQueryBuilderTests.java | 3 +- .../vectors/KnnVectorQueryBuilderTests.java | 9 ------ .../DelayedShardAggregationBuilderTests.java | 3 +- .../errorquery/ErrorQueryBuilderTests.java | 3 +- .../test/AbstractBuilderTestCase.java | 9 ++---- .../PinnedQueryBuilderTests.java | 2 -- .../index/query/GeoGridQueryBuilderTests.java | 23 ++++++++++++-- 19 files changed, 117 insertions(+), 52 deletions(-) diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java index 4bd2d0714e93d..0e496990efc40 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import java.io.IOException; import java.util.ArrayList; @@ -54,7 +53,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected Collection> getPlugins() { 
- return Arrays.asList(MapperExtrasPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(MapperExtrasPlugin.class); } @Override diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenTests.java index 96660aea34d23..9d12d1bc3d72f 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import java.util.Arrays; import java.util.Collection; @@ -20,7 +19,7 @@ public class ChildrenTests extends BaseAggregationTestCase> getPlugins() { - return Arrays.asList(ParentJoinPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(ParentJoinPlugin.class); } @Override diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentTests.java index c424dab398803..75fb02eab2dbb 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import java.util.Arrays; import java.util.Collection; @@ -20,7 +19,7 @@ public class ParentTests extends BaseAggregationTestCase> getPlugins() { - return Arrays.asList(ParentJoinPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return 
Arrays.asList(ParentJoinPlugin.class); } @Override diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index a32164322ce63..6f857674b6fe5 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -45,7 +45,6 @@ import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -77,7 +76,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { - return Arrays.asList(ParentJoinPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(ParentJoinPlugin.class); } @Override diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java index 88176cd0f39fc..5801919aa4b24 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -60,7 +59,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { - return 
Arrays.asList(ParentJoinPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(ParentJoinPlugin.class); } @Override diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java index 0340ac991db21..19a268bc73391 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentBuilder; import org.hamcrest.Matchers; @@ -48,7 +47,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { - return Arrays.asList(ParentJoinPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(ParentJoinPlugin.class); } @Override diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 9003e749f0d9c..b0df61bad4c53 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -77,7 +76,7 @@ public class 
PercolateQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { - return Arrays.asList(PercolatorPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(PercolatorPlugin.class); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index 233cd44765214..c0875b88207da 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -15,6 +15,9 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.geo.GeometryTestUtils; @@ -23,9 +26,15 @@ import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; +import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; @@ -35,10 +44,28 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase { /** Randomly generate either NaN or one of the two infinity values. 
*/ private static final Double[] brokenDoubles = { Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY }; + private static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; + protected static final String GEO_SHAPE_ALIAS_FIELD_NAME = "mapped_geo_shape_alias"; + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + final XContentBuilder builder = PutMappingRequest.simpleMapping( + GEO_SHAPE_FIELD_NAME, + "type=geo_shape", + GEO_SHAPE_ALIAS_FIELD_NAME, + "type=alias,path=" + GEO_SHAPE_FIELD_NAME + ); + mapperService.merge("_doc", new CompressedXContent(Strings.toString(builder)), MapperService.MergeReason.MAPPING_UPDATE); + } + + @SuppressWarnings("deprecation") // dependencies in server for geo_shape field should be decoupled + protected Collection> getPlugins() { + return Collections.singletonList(TestGeoShapeFieldMapperPlugin.class); + } @Override protected GeoBoundingBoxQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, GEO_SHAPE_FIELD_NAME); + String fieldName = randomFrom(GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, GEO_SHAPE_FIELD_NAME, GEO_SHAPE_ALIAS_FIELD_NAME); GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(fieldName); // make sure that minX != maxX and minY != maxY after geohash encoding Rectangle box = randomValueOtherThanMany( diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index f3c9090012d44..a564b04122537 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -14,7 +14,10 @@ import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; import 
org.apache.lucene.search.Query; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.unit.DistanceUnit; @@ -22,9 +25,15 @@ import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; +import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; @@ -33,9 +42,28 @@ @SuppressWarnings("checkstyle:MissingJavadocMethod") public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase { + private static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; + protected static final String GEO_SHAPE_ALIAS_FIELD_NAME = "mapped_geo_shape_alias"; + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + final XContentBuilder builder = PutMappingRequest.simpleMapping( + GEO_SHAPE_FIELD_NAME, + "type=geo_shape", + GEO_SHAPE_ALIAS_FIELD_NAME, + "type=alias,path=" + GEO_SHAPE_FIELD_NAME + ); + mapperService.merge("_doc", new CompressedXContent(Strings.toString(builder)), MapperService.MergeReason.MAPPING_UPDATE); + } + + @SuppressWarnings("deprecation") // dependencies in server for geo_shape field should be decoupled + protected Collection> getPlugins() { + return 
Collections.singletonList(TestGeoShapeFieldMapperPlugin.class); + } + @Override protected GeoDistanceQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, GEO_SHAPE_FIELD_NAME); + String fieldName = randomFrom(GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, GEO_SHAPE_FIELD_NAME, GEO_SHAPE_ALIAS_FIELD_NAME); GeoDistanceQueryBuilder qb = new GeoDistanceQueryBuilder(fieldName); String distance = "" + randomDouble(); if (randomBoolean()) { diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderGeoShapeTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderGeoShapeTests.java index 430906c6602e1..f359f4600faaf 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderGeoShapeTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderGeoShapeTests.java @@ -8,13 +8,37 @@ package org.elasticsearch.index.query; import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; public class GeoShapeQueryBuilderGeoShapeTests extends GeoShapeQueryBuilderTests { + private static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + XContentBuilder builder = 
PutMappingRequest.simpleMapping(GEO_SHAPE_FIELD_NAME, "type=geo_shape"); + mapperService.merge("_doc", new CompressedXContent(Strings.toString(builder)), MapperService.MergeReason.MAPPING_UPDATE); + } + + @SuppressWarnings("deprecation") // dependencies in server for geo_shape field should be decoupled + protected Collection> getPlugins() { + return Collections.singletonList(TestGeoShapeFieldMapperPlugin.class); + } + protected String fieldName() { return GEO_SHAPE_FIELD_NAME; } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 5b553a46aed6c..9cc03612ccbe4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -68,7 +68,6 @@ protected TermsQueryBuilder doCreateTestQueryBuilder() { String fieldName = randomValueOtherThanMany( choice -> choice.equals(GEO_POINT_FIELD_NAME) || choice.equals(GEO_POINT_ALIAS_FIELD_NAME) - || choice.equals(GEO_SHAPE_FIELD_NAME) || choice.equals(INT_RANGE_FIELD_NAME) || choice.equals(DATE_RANGE_FIELD_NAME) || choice.equals(DATE_NANOS_FIELD_NAME), // TODO: needs testing for date_nanos type diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 52aafbff72d9a..a9c9ae7f422cf 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -38,7 +38,6 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.rest.ObjectPath; import java.io.IOException; @@ -61,7 +60,7 @@ public 
class TermsSetQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { - return Arrays.asList(CustomScriptPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(CustomScriptPlugin.class); } @Override @@ -76,10 +75,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected TermsSetQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomValueOtherThanMany( - value -> value.equals(GEO_POINT_FIELD_NAME) || value.equals(GEO_SHAPE_FIELD_NAME), - () -> randomFrom(MAPPED_FIELD_NAMES) - ); + String fieldName = randomValueOtherThanMany(value -> value.equals(GEO_POINT_FIELD_NAME), () -> randomFrom(MAPPED_FIELD_NAMES)); List randomTerms = randomValues(fieldName); TermsSetQueryBuilder queryBuilder = new TermsSetQueryBuilder(TEXT_FIELD_NAME, randomTerms); if (randomBoolean()) { @@ -151,9 +147,7 @@ public TermsSetQueryBuilder mutateInstance(final TermsSetQueryBuilder instance) switch (randomIntBetween(0, 3)) { case 0 -> { - Predicate predicate = s -> s.equals(instance.getFieldName()) == false - && s.equals(GEO_POINT_FIELD_NAME) == false - && s.equals(GEO_SHAPE_FIELD_NAME) == false; + Predicate predicate = s -> s.equals(instance.getFieldName()) == false && s.equals(GEO_POINT_FIELD_NAME) == false; fieldName = randomValueOtherThanMany(predicate, () -> randomFrom(MAPPED_FIELD_NAMES)); values = randomValues(fieldName); } diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index b779d7c747b19..75f28e8cdbf3c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -47,7 +47,6 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.MultiValueMode; import 
org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -90,7 +89,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { - return Arrays.asList(TestPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(TestPlugin.class); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilderTests.java index 9e87d2009d626..d52cafc8e6857 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilderTests.java @@ -24,18 +24,14 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractBuilderTestCase; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.List; import static org.hamcrest.Matchers.containsString; @@ -46,11 +42,6 @@ public class KnnVectorQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { - return Arrays.asList(TestGeoShapeFieldMapperPlugin.class); - } - @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder() diff --git 
a/test/external-modules/delayed-aggs/src/test/java/org/elasticsearch/test/delayedshard/DelayedShardAggregationBuilderTests.java b/test/external-modules/delayed-aggs/src/test/java/org/elasticsearch/test/delayedshard/DelayedShardAggregationBuilderTests.java index 7a442920c0f00..e289bec89e8c7 100644 --- a/test/external-modules/delayed-aggs/src/test/java/org/elasticsearch/test/delayedshard/DelayedShardAggregationBuilderTests.java +++ b/test/external-modules/delayed-aggs/src/test/java/org/elasticsearch/test/delayedshard/DelayedShardAggregationBuilderTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import java.util.Arrays; import java.util.Collection; @@ -18,7 +17,7 @@ public class DelayedShardAggregationBuilderTests extends BaseAggregationTestCase { @Override protected Collection> getPlugins() { - return Arrays.asList(DelayedShardAggregationPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(DelayedShardAggregationPlugin.class); } @Override diff --git a/test/external-modules/error-query/src/test/java/org/elasticsearch/test/errorquery/ErrorQueryBuilderTests.java b/test/external-modules/error-query/src/test/java/org/elasticsearch/test/errorquery/ErrorQueryBuilderTests.java index c92c2a5f89362..058e66caad4b9 100644 --- a/test/external-modules/error-query/src/test/java/org/elasticsearch/test/errorquery/ErrorQueryBuilderTests.java +++ b/test/external-modules/error-query/src/test/java/org/elasticsearch/test/errorquery/ErrorQueryBuilderTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import java.io.IOException; import java.util.ArrayList; @@ -24,7 +23,7 @@ public class 
ErrorQueryBuilderTests extends AbstractQueryTestCase { @Override protected Collection> getPlugins() { - return Arrays.asList(ErrorQueryPlugin.class, TestGeoShapeFieldMapperPlugin.class); + return Arrays.asList(ErrorQueryPlugin.class); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index 660554efc6d2c..1a49a898a69f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -110,7 +110,6 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { protected static final String OBJECT_FIELD_NAME = "mapped_object"; protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; protected static final String GEO_POINT_ALIAS_FIELD_NAME = "mapped_geo_point_alias"; - protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; // we don't include the binary field in the arrays below as it is not searchable protected static final String BINARY_FIELD_NAME = "mapped_binary"; protected static final String[] MAPPED_FIELD_NAMES = new String[] { @@ -125,8 +124,7 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, - GEO_POINT_ALIAS_FIELD_NAME, - GEO_SHAPE_FIELD_NAME }; + GEO_POINT_ALIAS_FIELD_NAME }; protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[] { TEXT_FIELD_NAME, TEXT_ALIAS_FIELD_NAME, @@ -159,9 +157,8 @@ protected static Index getIndex() { return index; } - @SuppressWarnings("deprecation") // dependencies in server for geo_shape field should be decoupled protected Collection> getPlugins() { - return Collections.singletonList(TestGeoShapeFieldMapperPlugin.class); + return Collections.emptyList(); } /** @@ -455,8 +452,6 @@ public void onRemoval(ShardId shardId, Accountable 
accountable) { "type=geo_point", GEO_POINT_ALIAS_FIELD_NAME, "type=alias,path=" + GEO_POINT_FIELD_NAME, - GEO_SHAPE_FIELD_NAME, - "type=geo_shape", BINARY_FIELD_NAME, "type=binary" ) diff --git a/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java b/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java index b8880330d3d30..3931af7d770a5 100644 --- a/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java +++ b/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -110,7 +109,6 @@ protected void doAssertLuceneQuery(PinnedQueryBuilder queryBuilder, Query query, protected Collection> getPlugins() { List> classpathPlugins = new ArrayList<>(); classpathPlugins.add(SearchBusinessRules.class); - classpathPlugins.add(TestGeoShapeFieldMapperPlugin.class); return classpathPlugins; } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilderTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilderTests.java index 3016fd92dc33f..cd174d4c6fb0a 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilderTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilderTests.java @@ -12,17 +12,22 @@ import 
org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.geometry.utils.Geohash; import org.elasticsearch.h3.H3; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils; import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import java.io.IOException; @@ -38,6 +43,20 @@ public class GeoGridQueryBuilderTests extends AbstractQueryTestCase { + private static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; + protected static final String GEO_SHAPE_ALIAS_FIELD_NAME = "mapped_geo_shape_alias"; + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + final XContentBuilder builder = PutMappingRequest.simpleMapping( + GEO_SHAPE_FIELD_NAME, + "type=geo_shape", + GEO_SHAPE_ALIAS_FIELD_NAME, + "type=alias,path=" + GEO_SHAPE_FIELD_NAME + ); + mapperService.merge("_doc", new CompressedXContent(Strings.toString(builder)), MapperService.MergeReason.MAPPING_UPDATE); + } + @Override protected Collection> getPlugins() { return Arrays.asList(LocalStateSpatialPlugin.class); @@ -45,11 +64,11 @@ protected Collection> getPlugins() { @Override protected GeoGridQueryBuilder doCreateTestQueryBuilder() { - String 
fieldName = randomFrom(GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, GEO_SHAPE_FIELD_NAME); + String fieldName = randomFrom(GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, GEO_SHAPE_FIELD_NAME, GEO_SHAPE_ALIAS_FIELD_NAME); GeoGridQueryBuilder builder = new GeoGridQueryBuilder(fieldName); // Only use geohex for points - int path = randomIntBetween(0, GEO_SHAPE_FIELD_NAME.equals(fieldName) ? 1 : 2); + int path = randomIntBetween(0, GEO_SHAPE_FIELD_NAME.equals(fieldName) || GEO_SHAPE_ALIAS_FIELD_NAME.equals(fieldName) ? 1 : 3); switch (path) { case 0 -> builder.setGridId(GeoGridQueryBuilder.Grid.GEOHASH, randomGeohash()); case 1 -> builder.setGridId(GeoGridQueryBuilder.Grid.GEOTILE, randomGeotile()); From 47510adab5064c09bc608de40c8737fa47abf14a Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Tue, 12 Jul 2022 08:27:00 +0200 Subject: [PATCH 19/36] Reduce map lookups (#88418) This change replaces 2 hash map operations with a single one and a null check. --- .../allocation/decider/NodeReplacementAllocationDecider.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDecider.java index dbaecd67cf429..485e0656c8163 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDecider.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import java.util.Map; import java.util.Optional; public class NodeReplacementAllocationDecider extends AllocationDecider { @@ -185,8 +184,8 @@ private static boolean isReplacementSource(RoutingAllocation allocation, String if (nodeId ==
null || replacementOngoing(allocation) == false) { return false; } - final Map nodeShutdowns = allocation.nodeShutdowns(); - return nodeShutdowns.containsKey(nodeId) && nodeShutdowns.get(nodeId).getType().equals(SingleNodeShutdownMetadata.Type.REPLACE); + final SingleNodeShutdownMetadata shutdown = allocation.nodeShutdowns().get(nodeId); + return shutdown != null && shutdown.getType().equals(SingleNodeShutdownMetadata.Type.REPLACE); } /** From 9ebbe1c62bf0157d332f20f392544a1921a710df Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 12 Jul 2022 09:03:25 +0200 Subject: [PATCH 20/36] Make ClusterInfo use immutable maps in all cases (#88447) This class's maps are used very hot in the disk threshold allocation decider. Moving them from hppc maps to unmodifiable map wrapping `HashMap` has led to a measurable slowdown in the many-shards benchmark bootstrapping. Let's use immutable map copies here exclusively to make performance outright better and more predictable via a single implementation.
--- .../java/org/elasticsearch/cluster/ClusterInfo.java | 12 ++++++------ .../cluster/InternalClusterInfoService.java | 13 ++++++------- .../allocation/DiskThresholdMonitorTests.java | 2 +- .../decider/DiskThresholdDeciderTests.java | 2 +- .../decider/DiskThresholdDeciderUnitTests.java | 10 +++++----- .../storage/ReactiveStorageDeciderService.java | 9 ++++++++- .../AutoscalingCalculateCapacityServiceTests.java | 4 ++-- .../storage/FrozenStorageDeciderServiceTests.java | 2 +- .../storage/ReactiveStorageDeciderServiceTests.java | 4 ++-- 9 files changed, 32 insertions(+), 26 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java index af931e6d34996..7cc4f1bdca65b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java @@ -68,12 +68,12 @@ public ClusterInfo( Map routingToDataPath, Map reservedSpace ) { - this.leastAvailableSpaceUsage = leastAvailableSpaceUsage; - this.shardSizes = shardSizes; - this.shardDataSetSizes = shardDataSetSizes; - this.mostAvailableSpaceUsage = mostAvailableSpaceUsage; - this.routingToDataPath = routingToDataPath; - this.reservedSpace = reservedSpace; + this.leastAvailableSpaceUsage = Map.copyOf(leastAvailableSpaceUsage); + this.shardSizes = Map.copyOf(shardSizes); + this.shardDataSetSizes = Map.copyOf(shardDataSetSizes); + this.mostAvailableSpaceUsage = Map.copyOf(mostAvailableSpaceUsage); + this.routingToDataPath = Map.copyOf(routingToDataPath); + this.reservedSpace = Map.copyOf(reservedSpace); } public ClusterInfo(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 0bed0fd0f7b6f..4db01fc3f796c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -39,7 +39,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -188,8 +187,8 @@ public void onResponse(NodesStatsResponse nodesStatsResponse) { leastAvailableUsagesBuilder, mostAvailableUsagesBuilder ); - leastAvailableSpaceUsages = Collections.unmodifiableMap(leastAvailableUsagesBuilder); - mostAvailableSpaceUsages = Collections.unmodifiableMap(mostAvailableUsagesBuilder); + leastAvailableSpaceUsages = Map.copyOf(leastAvailableUsagesBuilder); + mostAvailableSpaceUsages = Map.copyOf(mostAvailableUsagesBuilder); } @Override @@ -262,10 +261,10 @@ public void onResponse(IndicesStatsResponse indicesStatsResponse) { reservedSpaceBuilders.forEach((nodeAndPath, builder) -> rsrvdSpace.put(nodeAndPath, builder.build())); indicesStatsSummary = new IndicesStatsSummary( - Collections.unmodifiableMap(shardSizeByIdentifierBuilder), - Collections.unmodifiableMap(shardDataSetSizeBuilder), - Collections.unmodifiableMap(dataPathByShardRoutingBuilder), - Collections.unmodifiableMap(rsrvdSpace) + Map.copyOf(shardSizeByIdentifierBuilder), + Map.copyOf(shardDataSetSizeBuilder), + Map.copyOf(dataPathByShardRoutingBuilder), + Map.copyOf(rsrvdSpace) ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorTests.java index 08e68db23e8e2..1e3be3f54c205 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorTests.java @@ -964,7 +964,7 @@ private static ClusterInfo clusterInfo( Map diskUsages, Map reservedSpace ) { - return new ClusterInfo(diskUsages, null, null, null, null, 
reservedSpace); + return new ClusterInfo(diskUsages, Map.of(), Map.of(), Map.of(), Map.of(), reservedSpace); } private static DiscoveryNode newFrozenOnlyNode(String nodeId) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index b1244ff4f17e5..373d915e37460 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -1308,7 +1308,7 @@ static class DevNullClusterInfo extends ClusterInfo { Map shardSizes, Map reservedSpace ) { - super(leastAvailableSpaceUsage, mostAvailableSpaceUsage, shardSizes, null, null, reservedSpace); + super(leastAvailableSpaceUsage, mostAvailableSpaceUsage, shardSizes, Map.of(), Map.of(), reservedSpace); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index 37d94aba96d1a..cdc68d885c573 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -107,7 +107,7 @@ public void testCanAllocateUsesMaxAvailableSpace() { leastAvailableUsages, mostAvailableUsage, Map.of("[test][0][p]", 10L), // 10 bytes, - null, + Map.of(), Map.of(), Map.of() ); @@ -185,7 +185,7 @@ public void testCannotAllocateDueToLackOfDiskResources() { leastAvailableUsages, mostAvailableUsage, Map.of("[test][0][p]", shardSize), - null, + Map.of(), Map.of(), Map.of() ); @@ -307,7 +307,7 @@ public void testCanRemainUsesLeastAvailableSpace() { leastAvailableUsages, 
mostAvailableUsage, shardSizes, - null, + Map.of(), shardRoutingMap, Map.of() ); @@ -745,7 +745,7 @@ public void testDecidesYesIfWatermarksIgnored() { allFullUsages, allFullUsages, Map.of("[test][0][p]", 10L), - null, + Map.of(), Map.of(), Map.of() ); @@ -815,7 +815,7 @@ public void testCannotForceAllocateOver100PercentUsage() { // bigger than available space final long shardSize = randomIntBetween(1, 10); shardSizes.put("[test][0][p]", shardSize); - ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages, mostAvailableUsage, shardSizes, null, Map.of(), Map.of()); + ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages, mostAvailableUsage, shardSizes, Map.of(), Map.of(), Map.of()); RoutingAllocation allocation = new RoutingAllocation( new AllocationDeciders(Collections.singleton(decider)), clusterState, diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 5d0c1b426a3f7..c168e7b3b08f0 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -793,7 +793,14 @@ private static class ExtendedClusterInfo extends ClusterInfo { private final ClusterInfo delegate; private ExtendedClusterInfo(Map extraShardSizes, ClusterInfo info) { - super(info.getNodeLeastAvailableDiskUsages(), info.getNodeMostAvailableDiskUsages(), extraShardSizes, Map.of(), null, null); + super( + info.getNodeLeastAvailableDiskUsages(), + info.getNodeMostAvailableDiskUsages(), + extraShardSizes, + Map.of(), + Map.of(), + Map.of() + ); this.delegate = info; } diff --git 
a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityServiceTests.java index 0ea41e147cec7..d7d715334a2dc 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityServiceTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityServiceTests.java @@ -260,7 +260,7 @@ public void testContext() { } } state = ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).build(); - info = new ClusterInfo(leastUsages, mostUsages, null, null, null, null); + info = new ClusterInfo(leastUsages, mostUsages, Map.of(), Map.of(), Map.of(), Map.of()); context = new AutoscalingCalculateCapacityService.DefaultAutoscalingDeciderContext( roleNames, state, @@ -306,7 +306,7 @@ public void testContext() { ) ); - info = new ClusterInfo(leastUsages, mostUsages, null, null, null, null); + info = new ClusterInfo(leastUsages, mostUsages, Map.of(), Map.of(), Map.of(), Map.of()); context = new AutoscalingCalculateCapacityService.DefaultAutoscalingDeciderContext( roleNames, state, diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderServiceTests.java index f1a52024dcca2..d88e067777185 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderServiceTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderServiceTests.java @@ -109,7 +109,7 @@ public Tuple sizeAndClusterInfo(IndexMetadata indexMetadata) // add irrelevant shards noise for completeness (should not happen 
IRL). sizes.put(new ShardId(index, i), randomLongBetween(0, Integer.MAX_VALUE)); } - ClusterInfo info = new ClusterInfo(null, null, null, sizes, null, null); + ClusterInfo info = new ClusterInfo(Map.of(), Map.of(), Map.of(), sizes, Map.of(), Map.of()); return Tuple.tuple(totalSize, info); } } diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java index 4483a1bbe9261..0cbb3b27725a3 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java @@ -403,7 +403,7 @@ public void validateSizeOf(ClusterState clusterState, ShardRouting subjectShard, } private ReactiveStorageDeciderService.AllocationState createAllocationState(Map shardSize, ClusterState clusterState) { - ClusterInfo info = new ClusterInfo(null, null, shardSize, null, null, null); + ClusterInfo info = new ClusterInfo(Map.of(), Map.of(), shardSize, Map.of(), Map.of(), Map.of()); ReactiveStorageDeciderService.AllocationState allocationState = new ReactiveStorageDeciderService.AllocationState( clusterState, null, @@ -567,7 +567,7 @@ public void testUnmovableSize() { if (shardsWithSizes.isEmpty() == false) { shardSize.put(shardIdentifier(randomFrom(shardsWithSizes)), ByteSizeUnit.KB.toBytes(minShardSize)); } - ClusterInfo info = new ClusterInfo(diskUsages, diskUsages, shardSize, null, null, null); + ClusterInfo info = new ClusterInfo(diskUsages, diskUsages, shardSize, Map.of(), Map.of(), Map.of()); ReactiveStorageDeciderService.AllocationState allocationState = new ReactiveStorageDeciderService.AllocationState( clusterState, From a4ec9c69cafceabf4694bae370897c217b6809ed Mon Sep 17 00:00:00 2001 From: Pooya 
Salehi Date: Tue, 12 Jul 2022 09:57:05 +0200 Subject: [PATCH 21/36] Correct some typos/mistakes in comments/docs (#88446) --- .../elasticsearch/action/support/ListenableActionFuture.java | 2 +- .../main/java/org/elasticsearch/snapshots/RestoreService.java | 2 +- .../main/java/org/elasticsearch/snapshots/package-info.java | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/ListenableActionFuture.java b/server/src/main/java/org/elasticsearch/action/support/ListenableActionFuture.java index 7da2324bb873b..bbac7cba72b43 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ListenableActionFuture.java +++ b/server/src/main/java/org/elasticsearch/action/support/ListenableActionFuture.java @@ -14,7 +14,7 @@ import java.util.List; /** - * A {@code Future} and {@link ActionListener} against which which other {@link ActionListener}s can be registered later, to support + * A {@code Future} and {@link ActionListener} against which other {@link ActionListener}s can be registered later, to support * fanning-out a result to a dynamic collection of listeners. 
*/ public class ListenableActionFuture extends AdapterActionFuture { diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index c1307e8706833..6a2f49f99a2d7 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -1307,7 +1307,7 @@ public ClusterState execute(ClusterState currentState) { // that will be opened by the restore if (currentIndexMetadata == null) { // Index doesn't exist - create it and start recovery - // Make sure that the index we are about to create has a validate name + // Make sure that the index we are about to create has a valid name ensureValidIndexName(currentState, snapshotIndexMetadata, renamedIndexName); shardLimitValidator.validateShardLimit(snapshotIndexMetadata.getSettings(), currentState); diff --git a/server/src/main/java/org/elasticsearch/snapshots/package-info.java b/server/src/main/java/org/elasticsearch/snapshots/package-info.java index 2df9a96656d01..3972a9ac02d5f 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/package-info.java +++ b/server/src/main/java/org/elasticsearch/snapshots/package-info.java @@ -43,9 +43,9 @@ * {@code UpdateIndexShardSnapshotStatusRequest}. * *
  • If as a result of the received status update requests, all shards in the cluster state are in a completed state, i.e are marked as - * either {@code SUCCESS}, {@code FAILED} or {@code MISSING}, the {@code SnapshotShardsService} will update the state of the {@code Entry} + * either {@code SUCCESS}, {@code FAILED} or {@code MISSING}, the {@code SnapshotsService} will update the state of the {@code Entry} * itself and mark it as {@code SUCCESS}. At the same time {@link org.elasticsearch.snapshots.SnapshotsService#endSnapshot} is executed, - * writing the metadata necessary to finalize the snapshot in the repository to the repository.
  • + * writing to the repository the metadata necessary to finalize the snapshot in the repository. * *
  • After writing the final metadata to the repository, a cluster state update to remove the snapshot from the cluster state is * submitted and the removal of the snapshot's {@code SnapshotsInProgress.Entry} from the cluster state completes the snapshot process. From ecc9605ff6ff38a8bb2e45e723ded36552bb91bd Mon Sep 17 00:00:00 2001 From: weizijun Date: Tue, 12 Jul 2022 16:23:06 +0800 Subject: [PATCH 22/36] [TSDB] Cache rollup bucket timestamp to reduce rounding cost (#88420) --- .../xpack/rollup/v2/RollupShardIndexer.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java index 68e09e470cdce..056e96d51af09 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java @@ -209,6 +209,7 @@ private class TimeSeriesBucketCollector extends BucketCollector { private long bucketsCreated; private final RollupBucketBuilder rollupBucketBuilder = new RollupBucketBuilder(); long lastTimestamp = Long.MAX_VALUE; + long lastHistoTimestamp = Long.MAX_VALUE; BytesRef lastTsid = null; TimeSeriesBucketCollector(BulkProcessor bulkProcessor) { @@ -232,14 +233,18 @@ public void collect(int docId, long owningBucketOrd) throws IOException { final BytesRef tsid = aggCtx.getTsid(); assert tsid != null : "Document without [" + TimeSeriesIdFieldMapper.NAME + "] field was found."; final long timestamp = aggCtx.getTimestamp(); - final long histoTimestamp = rounding.round(timestamp); + + boolean tsidChanged = tsid.equals(rollupBucketBuilder.tsid()) == false; + if (tsidChanged || timestamp < lastHistoTimestamp) { + lastHistoTimestamp = rounding.round(timestamp); + } logger.trace( "Doc: [{}] - _tsid: [{}], @timestamp: [{}}] -> rollup bucket ts: 
[{}]", docId, DocValueFormat.TIME_SERIES_ID.format(tsid), timestampFormat.format(timestamp), - timestampFormat.format(histoTimestamp) + timestampFormat.format(lastHistoTimestamp) ); /* @@ -262,7 +267,7 @@ public void collect(int docId, long owningBucketOrd) throws IOException { lastTsid = BytesRef.deepCopyOf(tsid); lastTimestamp = timestamp; - if (tsid.equals(rollupBucketBuilder.tsid()) == false || rollupBucketBuilder.timestamp() != histoTimestamp) { + if (tsidChanged || rollupBucketBuilder.timestamp() != lastHistoTimestamp) { // Flush rollup doc if not empty if (rollupBucketBuilder.isEmpty() == false) { Map doc = rollupBucketBuilder.buildRollupDocument(); @@ -270,7 +275,7 @@ public void collect(int docId, long owningBucketOrd) throws IOException { } // Create new rollup bucket - rollupBucketBuilder.init(tsid, histoTimestamp); + rollupBucketBuilder.init(tsid, lastHistoTimestamp); bucketsCreated++; } From 48896509d8982bdca59a104e556d350e13deedd9 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 12 Jul 2022 10:53:24 +0200 Subject: [PATCH 23/36] Pass IndexMetadata to AllocationDecider.can_remain (#88453) We need the metadata in a number of allocation deciders and pass it to other allocation methods. Passing it here avoids redundant lookups across deciders. 
--- .../allocation/decider/AllocationDecider.java | 2 +- .../decider/AllocationDeciders.java | 5 +- .../decider/AwarenessAllocationDecider.java | 15 ++++-- .../decider/DiskThresholdDecider.java | 4 +- .../decider/FilterAllocationDecider.java | 18 ++----- .../NodeReplacementAllocationDecider.java | 2 +- .../NodeShutdownAllocationDecider.java | 2 +- .../decider/ShardsLimitAllocationDecider.java | 14 ++++-- .../RandomAllocationDeciderTests.java | 2 +- .../decider/AllocationDecidersTests.java | 47 +++++++++++++++---- .../decider/DiskThresholdDeciderTests.java | 23 +++++++-- .../DiskThresholdDeciderUnitTests.java | 19 +++++--- ...NodeReplacementAllocationDeciderTests.java | 8 ++-- .../NodeShutdownAllocationDeciderTests.java | 4 +- .../ReactiveStorageDeciderDecisionTests.java | 6 +-- .../ReactiveStorageDeciderServiceTests.java | 7 ++- .../allocation/DataTierAllocationDecider.java | 10 ++-- .../DataTierAllocationDeciderTests.java | 7 ++- .../DedicatedFrozenNodeAllocationDecider.java | 4 +- .../HasFrozenCacheAllocationDecider.java | 4 +- ...TransportGetShutdownStatusActionTests.java | 7 ++- 21 files changed, 139 insertions(+), 71 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java index af36ca86ed44c..1b5cf0805a821 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java @@ -42,7 +42,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing * Returns a {@link Decision} whether the given shard routing can be remain * on the given node. The default is {@link Decision#ALWAYS}. 
*/ - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return Decision.ALWAYS; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java index 02546ccbd9b90..ff0634e5ee2f6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java @@ -86,10 +86,11 @@ public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAl } return Decision.NO; } + final IndexMetadata indexMetadata = allocation.metadata().getIndexSafe(shardRouting.index()); if (allocation.debugDecision()) { Decision.Multi ret = new Decision.Multi(); for (AllocationDecider allocationDecider : allocations) { - Decision decision = allocationDecider.canRemain(shardRouting, node, allocation); + Decision decision = allocationDecider.canRemain(indexMetadata, shardRouting, node, allocation); // short track if a NO is returned. 
if (decision.type() == Decision.Type.NO) { maybeTraceLogNoDecision(shardRouting, node, allocationDecider); @@ -103,7 +104,7 @@ public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAl // tighter loop if debug information is not collected: don't collect yes decisions + break out right away on NO Decision ret = Decision.YES; for (AllocationDecider allocationDecider : allocations) { - switch (allocationDecider.canRemain(shardRouting, node, allocation).type()) { + switch (allocationDecider.canRemain(indexMetadata, shardRouting, node, allocation).type()) { case NO -> { maybeTraceLogNoDecision(shardRouting, node, allocationDecider); return Decision.NO; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 5f981123e2c53..73873e4a3d693 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -123,7 +123,7 @@ private void setAwarenessAttributes(List awarenessAttributes) { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return underCapacity(shardRouting, node, allocation, true); + return underCapacity(allocation.metadata().getIndexSafe(shardRouting.index()), shardRouting, node, allocation, true); } @Override @@ -135,8 +135,8 @@ public Decision canForceAllocateDuringReplace(ShardRouting shardRouting, Routing } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return underCapacity(shardRouting, node, allocation, false); + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return 
underCapacity(indexMetadata, shardRouting, node, allocation, false); } private static final Decision YES_NOT_ENABLED = Decision.single( @@ -155,13 +155,18 @@ public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAl private static final Decision YES_ALL_MET = Decision.single(Decision.Type.YES, NAME, "node meets all awareness attribute requirements"); - private Decision underCapacity(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation, boolean moveToNode) { + private Decision underCapacity( + IndexMetadata indexMetadata, + ShardRouting shardRouting, + RoutingNode node, + RoutingAllocation allocation, + boolean moveToNode + ) { if (awarenessAttributes.isEmpty()) { return YES_NOT_ENABLED; } final boolean debug = allocation.debugDecision(); - final IndexMetadata indexMetadata = allocation.metadata().getIndexSafe(shardRouting.index()); if (indexMetadata.getAutoExpandReplicas().expandToAllNodes()) { return YES_AUTO_EXPAND_ALL; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 6917b9d9b7b18..69aae52a20ca4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -461,7 +461,7 @@ public Decision canForceAllocateDuringReplace(ShardRouting shardRouting, Routing ); @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (shardRouting.currentNodeId().equals(node.nodeId()) == false) { throw new IllegalArgumentException("Shard [" + shardRouting + "] is not allocated on node: [" + node.nodeId() + "]"); } @@ -472,7 
+472,7 @@ public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAl return decision; } - if (allocation.metadata().index(shardRouting.index()).ignoreDiskWatermarks()) { + if (indexMetadata.ignoreDiskWatermarks()) { return YES_DISK_WATERMARKS_IGNORED; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index fda55b419ff01..e32fc14bc617b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -90,11 +90,11 @@ public FilterAllocationDecider(Settings settings, ClusterSettings clusterSetting @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + IndexMetadata indexMetadata = allocation.metadata().getIndexSafe(shardRouting.index()); if (shardRouting.unassigned() && shardRouting.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) { // only for unassigned - we filter allocation right after the index creation (for shard shrinking) to ensure // that once it has been allocated post API the replicas can be allocated elsewhere without user interaction // this is a setting that can only be set within the system! 
- IndexMetadata indexMetadata = allocation.metadata().getIndexSafe(shardRouting.index()); DiscoveryNodeFilters initialRecoveryFilters = DiscoveryNodeFilters.trimTier(indexMetadata.getInitialRecoveryFilters()); if (initialRecoveryFilters != null && initialRecoveryFilters.match(node.node()) == false) { String explanation = @@ -102,7 +102,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing return allocation.decision(Decision.NO, NAME, explanation, initialRecoveryFilters); } } - return shouldFilter(shardRouting, node.node(), allocation); + return shouldFilter(indexMetadata, node.node(), allocation); } @Override @@ -111,8 +111,8 @@ public Decision canAllocate(IndexMetadata indexMetadata, RoutingNode node, Routi } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return shouldFilter(shardRouting, node.node(), allocation); + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return shouldFilter(indexMetadata, node.node(), allocation); } @Override @@ -126,16 +126,6 @@ public Decision shouldAutoExpandToNode(IndexMetadata indexMetadata, DiscoveryNod return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); } - private Decision shouldFilter(ShardRouting shardRouting, DiscoveryNode node, RoutingAllocation allocation) { - Decision decision = shouldClusterFilter(node, allocation); - if (decision != null) return decision; - - decision = shouldIndexFilter(allocation.metadata().getIndexSafe(shardRouting.index()), node, allocation); - if (decision != null) return decision; - - return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); - } - private Decision shouldFilter(IndexMetadata indexMd, DiscoveryNode node, RoutingAllocation allocation) { Decision decision = shouldClusterFilter(node, allocation); if (decision != null) return 
decision; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDecider.java index 485e0656c8163..f97e8ce9b5cea 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDecider.java @@ -73,7 +73,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (replacementOngoing(allocation) == false) { return NO_REPLACEMENTS; } else if (isReplacementSource(allocation, node.nodeId())) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDecider.java index ff6cd0796e98e..0c6a481ce03eb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDecider.java @@ -63,7 +63,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing * determine if shards can remain on their current node. 
*/ @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return this.canAllocate(shardRouting, node, allocation); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index f9868bf312224..e53688654e64b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -81,22 +81,28 @@ private void setClusterShardLimit(int clusterShardLimit) { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return doDecide(shardRouting, node, allocation, (count, limit) -> count >= limit); + return doDecide( + allocation.metadata().getIndexSafe(shardRouting.index()), + shardRouting, + node, + allocation, + (count, limit) -> count >= limit + ); } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return doDecide(shardRouting, node, allocation, (count, limit) -> count > limit); + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return doDecide(indexMetadata, shardRouting, node, allocation, (count, limit) -> count > limit); } private Decision doDecide( + IndexMetadata indexMd, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation, BiPredicate decider ) { - IndexMetadata indexMd = allocation.metadata().getIndexSafe(shardRouting.index()); final int indexShardLimit = indexMd.getShardsPerNodeLimit(); // Capture the 
limit here in case it changes during this method's // execution diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java index 59419dd4aa3d0..76b1aba8a39cf 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java @@ -224,7 +224,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return getRandomDecision(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecidersTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecidersTests.java index 7d62cd4da927b..c8a06783ebba6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecidersTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecidersTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; @@ -54,7 +55,12 @@ public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation alloca } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + 
public Decision canRemain( + IndexMetadata indexMetadata, + ShardRouting shardRouting, + RoutingNode node, + RoutingAllocation allocation + ) { return Decision.YES; } @@ -79,18 +85,25 @@ public Decision canRebalance(RoutingAllocation allocation) { } })); - ClusterState clusterState = ClusterState.builder(new ClusterName("test")).build(); + IndexMetadata idx = IndexMetadata.builder("idx").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0).build(); + IndexMetadata testIdx = IndexMetadata.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + ClusterState clusterState = ClusterState.builder(new ClusterName("test")) + .metadata(Metadata.builder().put(idx, false).put(testIdx, false).build()) + .build(); final RoutingAllocation allocation = new RoutingAllocation(deciders, clusterState, null, null, 0L); allocation.setDebugMode(mode); final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "_message"); final ShardRouting shardRouting = ShardRouting.newUnassigned( - new ShardId("test", "testUUID", 0), + new ShardId(testIdx.getIndex(), 0), true, RecoverySource.ExistingStoreRecoverySource.INSTANCE, unassignedInfo ); - IndexMetadata idx = IndexMetadata.builder("idx").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0).build(); RoutingNode routingNode = RoutingNodesHelper.routingNode("testNode", null); verify(deciders.canAllocate(shardRouting, routingNode, allocation), matcher); @@ -130,7 +143,12 @@ public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation alloca } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain( + IndexMetadata indexMetadata, + ShardRouting shardRouting, + RoutingNode node, + RoutingAllocation allocation + ) { return decisionOne; } @@ -171,7 +189,12 @@ public Decision canRebalance(ShardRouting shardRouting, 
RoutingAllocation alloca } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain( + IndexMetadata indexMetadata, + ShardRouting shardRouting, + RoutingNode node, + RoutingAllocation allocation + ) { return decision(allocation); } @@ -208,20 +231,28 @@ private Decision decision(RoutingAllocation allocation) { } })); + IndexMetadata testIdx = IndexMetadata.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + // no debug should just short-circuit to no, no matter what kind of no type return the first decider returns final ShardRouting shardRouting = ShardRouting.newUnassigned( - new ShardId("test", "testUUID", 0), + new ShardId(testIdx.getIndex(), 0), true, RecoverySource.ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "_message") ); final RoutingNode routingNode = RoutingNodesHelper.routingNode("testNode", null); - final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).build(); final IndexMetadata indexMetadata = IndexMetadata.builder("idx") .settings(settings(Version.CURRENT)) .numberOfShards(1) .numberOfReplicas(0) .build(); + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")) + .metadata(Metadata.builder().put(testIdx, false).put(indexMetadata, false).build()) + .build(); final RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, clusterState, null, null, 0L); assertSame(Decision.NO, allocationDeciders.canAllocate(shardRouting, routingNode, allocation)); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 373d915e37460..582521729fdea 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -919,10 +919,15 @@ public void testCanRemainWithShardRelocatingAway() { System.nanoTime() ); routingAllocation.debugDecision(true); - Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); + Decision decision = diskThresholdDecider.canRemain( + routingAllocation.metadata().getIndexSafe(firstRouting.index()), + firstRouting, + firstRoutingNode, + routingAllocation + ); assertThat(decision.type(), equalTo(Decision.Type.NO)); assertThat( - ((Decision.Single) decision).getExplanation(), + decision.getExplanation(), containsString( "the shard cannot remain on this node because it is above the high watermark cluster setting " + "[cluster.routing.allocation.disk.watermark.high=70%] and there is less than the required [30.0%] free disk on node, " @@ -951,7 +956,12 @@ public void testCanRemainWithShardRelocatingAway() { System.nanoTime() ); routingAllocation.debugDecision(true); - decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); + decision = diskThresholdDecider.canRemain( + routingAllocation.metadata().getIndexSafe(firstRouting.index()), + firstRouting, + firstRoutingNode, + routingAllocation + ); assertThat(decision.type(), equalTo(Decision.Type.YES)); assertEquals( "there is enough disk on this node for the shard to remain, free: [60b]", @@ -1109,7 +1119,12 @@ public void testWatermarksEnabledForSingleDataNode() { System.nanoTime() ); routingAllocation.debugDecision(true); - Decision decision = diskThresholdDecider.canRemain(startedShard, clusterState.getRoutingNodes().node("data"), routingAllocation); + Decision decision = diskThresholdDecider.canRemain( + routingAllocation.metadata().getIndexSafe(startedShard.index()), + startedShard, + 
clusterState.getRoutingNodes().node("data"), + routingAllocation + ); assertThat(decision.type(), equalTo(Decision.Type.NO)); assertThat( decision.getExplanation(), diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index cdc68d885c573..dd380fb98e725 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -319,13 +319,13 @@ public void testCanRemainUsesLeastAvailableSpace() { System.nanoTime() ); allocation.debugDecision(true); - Decision decision = decider.canRemain(test_0, RoutingNodesHelper.routingNode("node_0", node_0), allocation); + Decision decision = decider.canRemain(indexMetadata, test_0, RoutingNodesHelper.routingNode("node_0", node_0), allocation); assertEquals(Decision.Type.YES, decision.type()); assertThat( ((Decision.Single) decision).getExplanation(), containsString("there is enough disk on this node for the shard to remain, free: [10b]") ); - decision = decider.canRemain(test_1, RoutingNodesHelper.routingNode("node_1", node_1), allocation); + decision = decider.canRemain(indexMetadata, test_1, RoutingNodesHelper.routingNode("node_1", node_1), allocation); assertEquals(Decision.Type.NO, decision.type()); assertThat( ((Decision.Single) decision).getExplanation(), @@ -336,26 +336,26 @@ public void testCanRemainUsesLeastAvailableSpace() { ) ); try { - decider.canRemain(test_0, RoutingNodesHelper.routingNode("node_1", node_1), allocation); + decider.canRemain(indexMetadata, test_0, RoutingNodesHelper.routingNode("node_1", node_1), allocation); fail("not allocated on this node"); } catch (IllegalArgumentException ex) { // not allocated on that node } try { - decider.canRemain(test_1, 
RoutingNodesHelper.routingNode("node_0", node_0), allocation); + decider.canRemain(indexMetadata, test_1, RoutingNodesHelper.routingNode("node_0", node_0), allocation); fail("not allocated on this node"); } catch (IllegalArgumentException ex) { // not allocated on that node } - decision = decider.canRemain(test_2, RoutingNodesHelper.routingNode("node_1", node_1), allocation); + decision = decider.canRemain(indexMetadata, test_2, RoutingNodesHelper.routingNode("node_1", node_1), allocation); assertEquals("can stay since allocated on a different path with enough space", Decision.Type.YES, decision.type()); assertThat( ((Decision.Single) decision).getExplanation(), containsString("this shard is not allocated on the most utilized disk and can remain") ); - decision = decider.canRemain(test_2, RoutingNodesHelper.routingNode("node_1", node_1), allocation); + decision = decider.canRemain(indexMetadata, test_2, RoutingNodesHelper.routingNode("node_1", node_1), allocation); assertEquals("can stay since we don't have information about this shard", Decision.Type.YES, decision.type()); assertThat( ((Decision.Single) decision).getExplanation(), @@ -762,7 +762,12 @@ public void testDecidesYesIfWatermarksIgnored() { assertThat(decision.type(), equalTo(Decision.Type.YES)); assertThat(decision.getExplanation(), containsString("disk watermarks are ignored on this index")); - decision = decider.canRemain(test_0.initialize(node_0.getId(), null, 0L).moveToStarted(), routingNode, allocation); + decision = decider.canRemain( + metadata.getIndexSafe(test_0.index()), + test_0.initialize(node_0.getId(), null, 0L).moveToStarted(), + routingNode, + allocation + ); assertThat(decision.type(), equalTo(Decision.Type.YES)); assertThat(decision.getExplanation(), containsString("disk watermarks are ignored on this index")); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDeciderTests.java 
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDeciderTests.java index 173fb7619e5ec..824af0be3e0cc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDeciderTests.java @@ -95,7 +95,7 @@ public void testNoReplacements() { assertThat(decision.type(), equalTo(Decision.Type.YES)); assertThat(decision.getExplanation(), equalTo(NodeReplacementAllocationDecider.NO_REPLACEMENTS.getExplanation())); - decision = decider.canRemain(shard, routingNode, allocation); + decision = decider.canRemain(null, shard, routingNode, allocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); assertThat(decision.getExplanation(), equalTo(NodeReplacementAllocationDecider.NO_REPLACEMENTS.getExplanation())); } @@ -149,7 +149,7 @@ public void testCannotRemainOnReplacedNode() { RoutingNode routingNode = RoutingNodesHelper.routingNode(NODE_A.getId(), NODE_A, shard); allocation.debugDecision(true); - Decision decision = decider.canRemain(shard, routingNode, allocation); + Decision decision = decider.canRemain(indexMetadata, shard, routingNode, allocation); assertThat(decision.type(), equalTo(Decision.Type.NO)); assertThat( decision.getExplanation(), @@ -158,13 +158,13 @@ public void testCannotRemainOnReplacedNode() { routingNode = RoutingNodesHelper.routingNode(NODE_B.getId(), NODE_B, shard); - decision = decider.canRemain(shard, routingNode, allocation); + decision = decider.canRemain(indexMetadata, shard, routingNode, allocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); assertThat(decision.getExplanation(), equalTo("node [" + NODE_B.getId() + "] is not being replaced")); routingNode = RoutingNodesHelper.routingNode(NODE_C.getId(), NODE_C, shard); - decision = decider.canRemain(shard, routingNode, allocation); + decision = 
decider.canRemain(indexMetadata, shard, routingNode, allocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); assertThat(decision.getExplanation(), equalTo("node [" + NODE_C.getId() + "] is not being replaced")); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java index f018ef21ac225..ca92761dba516 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java @@ -117,7 +117,7 @@ public void testShardsCanRemainOnRestartingNode() { RoutingNode routingNode = RoutingNodesHelper.routingNode(DATA_NODE.getId(), DATA_NODE, shard); allocation.debugDecision(true); - Decision decision = decider.canRemain(shard, routingNode, allocation); + Decision decision = decider.canRemain(null, shard, routingNode, allocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); assertThat( decision.getExplanation(), @@ -134,7 +134,7 @@ public void testShardsCannotRemainOnRemovingNode() { RoutingNode routingNode = RoutingNodesHelper.routingNode(DATA_NODE.getId(), DATA_NODE, shard); allocation.debugDecision(true); - Decision decision = decider.canRemain(shard, routingNode, allocation); + Decision decision = decider.canRemain(null, shard, routingNode, allocation); assertThat(decision.type(), equalTo(Decision.Type.NO)); assertThat(decision.getExplanation(), equalTo("node [" + DATA_NODE.getId() + "] is preparing to be removed from the cluster")); } diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java 
b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java index c7d81944cdee1..4d13727cd268b 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java @@ -88,7 +88,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing }; private static final AllocationDecider CAN_REMAIN_NO_DECIDER = new AllocationDecider() { @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return Decision.NO; } }; @@ -116,13 +116,13 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing // say NO with disk label for subject shards private final AllocationDecider mockCanRemainDiskDecider = new AllocationDecider() { @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (subjectShards.contains(shardRouting.shardId()) && node.node().getName().startsWith("hot")) return allocation.decision( Decision.NO, DiskThresholdDecider.NAME, "test" ); - return super.canRemain(shardRouting, node, allocation); + return super.canRemain(indexMetadata, shardRouting, node, allocation); } }; diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java index 0cbb3b27725a3..353fa8d789c84 100644 --- 
a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java @@ -615,7 +615,12 @@ public void testCanRemainOnlyHighestTierPreference() { AllocationDecider no = new AllocationDecider() { @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain( + IndexMetadata indexMetadata, + ShardRouting shardRouting, + RoutingNode node, + RoutingAllocation allocation + ) { return Decision.NO; } }; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDecider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDecider.java index 0dc1f22a85c46..cf2f17903960e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDecider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDecider.java @@ -41,7 +41,7 @@ private DataTierAllocationDecider() {} @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return shouldFilter(shardRouting, node.node(), allocation); + return shouldFilter(allocation.metadata().getIndexSafe(shardRouting.index()), node.node(), allocation); } @Override @@ -50,8 +50,8 @@ public Decision canAllocate(IndexMetadata indexMetadata, RoutingNode node, Routi } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return shouldFilter(shardRouting, node.node(), allocation); + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return shouldFilter(indexMetadata, 
node.node(), allocation); } @Override @@ -59,8 +59,8 @@ public Decision shouldAutoExpandToNode(IndexMetadata indexMetadata, DiscoveryNod return shouldFilter(indexMetadata, node.getRoles(), allocation); } - private Decision shouldFilter(ShardRouting shardRouting, DiscoveryNode node, RoutingAllocation allocation) { - return shouldFilter(allocation.metadata().getIndexSafe(shardRouting.index()), node.getRoles(), allocation); + private Decision shouldFilter(IndexMetadata indexMetadata, DiscoveryNode node, RoutingAllocation allocation) { + return shouldFilter(indexMetadata, node.getRoles(), allocation); } private static Decision shouldFilter(IndexMetadata indexMd, Set roles, RoutingAllocation allocation) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java index 712053287c683..bdbdfc4f77f56 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java @@ -646,7 +646,12 @@ private void assertAllocationDecision(ClusterState state, DiscoveryNode node, De } { - final var decision = DataTierAllocationDecider.INSTANCE.canRemain(shard, routingNode, allocation); + final var decision = DataTierAllocationDecider.INSTANCE.canRemain( + allocation.metadata().getIndexSafe(shard.index()), + shard, + routingNode, + allocation + ); assertThat(routingNode.toString(), decision.type(), equalTo(decisionType)); assertThat(routingNode.toString(), decision.getExplanation(), containsString(explanationMessage)); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/decider/DedicatedFrozenNodeAllocationDecider.java 
b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/decider/DedicatedFrozenNodeAllocationDecider.java index 58f2af3a0c944..cf9159d3f5af5 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/decider/DedicatedFrozenNodeAllocationDecider.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/decider/DedicatedFrozenNodeAllocationDecider.java @@ -48,8 +48,8 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return canAllocateToNode(allocation.metadata().getIndexSafe(shardRouting.index()), node.node()); + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return canAllocateToNode(indexMetadata, node.node()); } @Override diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/decider/HasFrozenCacheAllocationDecider.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/decider/HasFrozenCacheAllocationDecider.java index 666b33512f351..d16f5483aab09 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/decider/HasFrozenCacheAllocationDecider.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/decider/HasFrozenCacheAllocationDecider.java @@ -60,8 +60,8 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - return 
canAllocateToNode(allocation.metadata().getIndexSafe(shardRouting.index()), node.node()); + public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return canAllocateToNode(indexMetadata, node.node()); } @Override diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java index a12fe6bd57a15..f1ccfdd77f19b 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java @@ -93,7 +93,12 @@ public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation alloca } @Override - public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + public Decision canRemain( + IndexMetadata indexMetadata, + ShardRouting shardRouting, + RoutingNode node, + RoutingAllocation allocation + ) { return canRemain.get().test(shardRouting, node, allocation); } From a2ee4c5393dbb184255b98f77665265e70c63c57 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 12 Jul 2022 12:48:56 +0200 Subject: [PATCH 24/36] Polish reworked LoggedExec task (#88424) Some polishing of reworked LoggedExec task --- .../gradle/LoggedExecFuncTest.groovy | 6 +++--- .../org/elasticsearch/gradle/LoggedExec.java | 16 +++++++++------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/LoggedExecFuncTest.groovy b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/LoggedExecFuncTest.groovy index 302fb2bcc2257..5a92f61c70d8c 100644 --- a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/LoggedExecFuncTest.groovy +++ 
b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/LoggedExecFuncTest.groovy @@ -35,7 +35,7 @@ class LoggedExecFuncTest extends AbstractGradleFuncTest { import org.elasticsearch.gradle.LoggedExec tasks.register('loggedExec', LoggedExec) { commandLine 'ls', '-lh' - spoolOutput = $spooling + getSpoolOutput().set($spooling) } """ when: @@ -54,7 +54,7 @@ class LoggedExecFuncTest extends AbstractGradleFuncTest { import org.elasticsearch.gradle.LoggedExec tasks.register('loggedExec', LoggedExec) { commandLine 'ls', 'wtf' - spoolOutput = $spooling + getSpoolOutput().set($spooling) } """ when: @@ -97,7 +97,7 @@ class LoggedExecFuncTest extends AbstractGradleFuncTest { tasks.register('loggedExec', LoggedExec) { commandLine 'echo', 'HELLO' getCaptureOutput().set(true) - spoolOutput = true + getSpoolOutput().set(true) } """ when: diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java b/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java index 9740a0c2f5425..acb526cf9a3bb 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java @@ -56,7 +56,6 @@ public abstract class LoggedExec extends DefaultTask implements FileSystemOperat protected FileSystemOperations fileSystemOperations; private ProjectLayout projectLayout; private ExecOperations execOperations; - private boolean spoolOutput; @Input @Optional @@ -84,6 +83,9 @@ public abstract class LoggedExec extends DefaultTask implements FileSystemOperat @Input abstract public Property getWorkingDir(); + @Internal + abstract public Property getSpoolOutput(); + private String output; @Inject @@ -95,14 +97,16 @@ public LoggedExec(ProjectLayout projectLayout, ExecOperations execOperations, Fi // For now mimic default behaviour of Gradle Exec task here getEnvironment().putAll(System.getenv()); getCaptureOutput().convention(false); + getSpoolOutput().convention(false); } @TaskAction public void 
run() { + boolean spoolOutput = getSpoolOutput().get(); if (spoolOutput && getCaptureOutput().get()) { throw new GradleException("Capturing output is not supported when spoolOutput is true."); } - if (getCaptureOutput().getOrElse(false) && getIndentingConsoleOutput().isPresent()) { + if (getCaptureOutput().get() && getIndentingConsoleOutput().isPresent()) { throw new GradleException("Capturing output is not supported when indentingConsoleOutput is configured."); } Consumer outputLogger; @@ -156,7 +160,9 @@ public void run() { if (getLogger().isInfoEnabled() == false) { if (exitValue != 0) { try { - getLogger().error("Output for " + getExecutable().get() + ":"); + if (getIndentingConsoleOutput().isPresent() == false) { + getLogger().error("Output for " + getExecutable().get() + ":"); + } outputLogger.accept(getLogger()); } catch (Exception e) { throw new GradleException("Failed to read exec output", e); @@ -173,10 +179,6 @@ private String byteStreamToString(OutputStream out) { return ((ByteArrayOutputStream) out).toString(StandardCharsets.UTF_8); } - public void setSpoolOutput(boolean spoolOutput) { - this.spoolOutput = spoolOutput; - } - public static ExecResult exec(ExecOperations execOperations, Action action) { return genericExec(execOperations::exec, action); } From c56715f479f56ef738b4a5292f476d115c17a208 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 12 Jul 2022 14:16:11 +0200 Subject: [PATCH 25/36] Updatable API keys - logging audit trail event (#88276) This PR adds a new audit trail event for when API keys are updated. 
--- docs/changelog/88276.yaml | 5 +++ .../en/security/auditing/event-types.asciidoc | 35 ++++++++++++++++--- .../audit/logfile/LoggingAuditTrail.java | 30 +++++++++++++++- .../audit/logfile/LoggingAuditTrailTests.java | 32 ++++++++++++++++- 4 files changed, 96 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/88276.yaml diff --git a/docs/changelog/88276.yaml b/docs/changelog/88276.yaml new file mode 100644 index 0000000000000..942d83375b361 --- /dev/null +++ b/docs/changelog/88276.yaml @@ -0,0 +1,5 @@ +pr: 88276 +summary: Updatable API keys - logging audit trail event +area: Audit +type: enhancement +issues: [] diff --git a/x-pack/docs/en/security/auditing/event-types.asciidoc b/x-pack/docs/en/security/auditing/event-types.asciidoc index 7cc041ff4ccf8..f3ca8a303725b 100644 --- a/x-pack/docs/en/security/auditing/event-types.asciidoc +++ b/x-pack/docs/en/security/auditing/event-types.asciidoc @@ -231,6 +231,29 @@ event action. ["index-b*"],"privileges":["all"]}],"applications":[],"run_as":[]}]}}} ==== +[[event-change-apikey]] +`change_apikey`:: +Logged when the <> API is +invoked to update the attributes of an existing API key. ++ +You must include the `security_config_change` event type to audit the related +event action. 
++ +.Example +[%collapsible%open] +==== +[source,js] +{"type":"audit", "timestamp":"2020-12-31T00:33:52,521+0200", "node.id": +"9clhpgjJRR-iKzOw20xBNQ", "event.type":"security_config_change", "event.action": +"change_apikey", "request.id":"9FteCmovTzWHVI-9Gpa_vQ", "change":{"apikey": +{"id":"zcwN3YEBBmnjw-K-hW5_","role_descriptors":[{"cluster": +["monitor","manage_ilm"],"indices":[{"names":["index-a*"],"privileges": +["read","maintenance"]},{"names":["in*","alias*"],"privileges":["read"], +"field_security":{"grant":["field1*","@timestamp"],"except":["field11"]}}], +"applications":[],"run_as":[]},{"cluster":["all"],"indices":[{"names": +["index-b*"],"privileges":["all"]}],"applications":[],"run_as":[]}]}}} +==== + [[event-delete-privileges]] `delete_privileges`:: Logged when the @@ -535,8 +558,8 @@ In addition, if `event.type` equals <>. The `role_descriptors` objects have the same schema as the `role_descriptor` object that is part of the above `role` config object. +The object for an API key update will differ in that it will not include +a `name` or `expiration`. 
+ `grant` :: An object like: + [source,js] diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index 33f48b65fe9d1..6ea823564db02 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -49,6 +49,8 @@ import org.elasticsearch.xpack.core.security.action.apikey.GrantApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyRequest; +import org.elasticsearch.xpack.core.security.action.apikey.UpdateApiKeyAction; +import org.elasticsearch.xpack.core.security.action.apikey.UpdateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest; import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; @@ -287,7 +289,8 @@ public class LoggingAuditTrail implements AuditTrail, ClusterStateListener { DeleteServiceAccountTokenAction.NAME, ActivateProfileAction.NAME, UpdateProfileDataAction.NAME, - SetProfileEnabledAction.NAME + SetProfileEnabledAction.NAME, + UpdateApiKeyAction.NAME ); private static final String FILTER_POLICY_PREFIX = setting("audit.logfile.events.ignore_filters."); // because of the default wildcard value (*) for the field filter, a policy with @@ -747,6 +750,9 @@ public void accessGranted( } else if (msg instanceof final SetProfileEnabledRequest setProfileEnabledRequest) { assert SetProfileEnabledAction.NAME.equals(action); securityChangeLogEntryBuilder(requestId).withRequestBody(setProfileEnabledRequest).build(); + } else if (msg instanceof final 
UpdateApiKeyRequest updateApiKeyRequest) { + assert UpdateApiKeyAction.NAME.equals(action); + securityChangeLogEntryBuilder(requestId).withRequestBody(updateApiKeyRequest).build(); } else { throw new IllegalStateException( "Unknown message class type [" @@ -1215,6 +1221,16 @@ LogEntryBuilder withRequestBody(GrantApiKeyRequest grantApiKeyRequest) throws IO return this; } + LogEntryBuilder withRequestBody(final UpdateApiKeyRequest updateApiKeyRequest) throws IOException { + logEntry.with(EVENT_ACTION_FIELD_NAME, "change_apikey"); + XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true); + builder.startObject(); + withRequestBody(builder, updateApiKeyRequest); + builder.endObject(); + logEntry.with(CHANGE_CONFIG_FIELD_NAME, Strings.toString(builder)); + return this; + } + private void withRequestBody(XContentBuilder builder, CreateApiKeyRequest createApiKeyRequest) throws IOException { TimeValue expiration = createApiKeyRequest.getExpiration(); builder.startObject("apikey") @@ -1228,6 +1244,18 @@ private void withRequestBody(XContentBuilder builder, CreateApiKeyRequest create .endObject(); // apikey } + private void withRequestBody(final XContentBuilder builder, final UpdateApiKeyRequest updateApiKeyRequest) throws IOException { + builder.startObject("apikey").field("id", updateApiKeyRequest.getId()); + if (updateApiKeyRequest.getRoleDescriptors() != null) { + builder.startArray("role_descriptors"); + for (RoleDescriptor roleDescriptor : updateApiKeyRequest.getRoleDescriptors()) { + withRoleDescriptor(builder, roleDescriptor); + } + builder.endArray(); + } + builder.endObject(); + } + private void withRoleDescriptor(XContentBuilder builder, RoleDescriptor roleDescriptor) throws IOException { builder.startObject().array(RoleDescriptor.Fields.CLUSTER.getPreferredName(), roleDescriptor.getClusterPrivileges()); if (roleDescriptor.getConditionalClusterPrivileges() != null && roleDescriptor.getConditionalClusterPrivileges().length > 0) { diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 4ea4ce42a19eb..540b5fa5c1507 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -46,12 +46,15 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.security.action.apikey.ApiKeyTests; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.GrantApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.GrantApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyRequest; +import org.elasticsearch.xpack.core.security.action.apikey.UpdateApiKeyAction; +import org.elasticsearch.xpack.core.security.action.apikey.UpdateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest; import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; @@ -605,6 +608,32 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException // clear log CapturingLogger.output(logger.getName(), Level.INFO).clear(); + final String keyId = randomAlphaOfLength(10); + final var updateApiKeyRequest = new UpdateApiKeyRequest( + keyId, + randomBoolean() ? 
null : keyRoleDescriptors, + ApiKeyTests.randomMetadata() + ); + auditTrail.accessGranted(requestId, authentication, UpdateApiKeyAction.NAME, updateApiKeyRequest, authorizationInfo); + final var expectedUpdateKeyAuditEventString = """ + "change":{"apikey":{"id":"%s"%s}}\ + """.formatted(keyId, updateApiKeyRequest.getRoleDescriptors() == null ? "" : "," + roleDescriptorsStringBuilder); + output = CapturingLogger.output(logger.getName(), Level.INFO); + assertThat(output.size(), is(2)); + String generatedUpdateKeyAuditEventString = output.get(1); + assertThat(generatedUpdateKeyAuditEventString, containsString(expectedUpdateKeyAuditEventString)); + generatedUpdateKeyAuditEventString = generatedUpdateKeyAuditEventString.replace(", " + expectedUpdateKeyAuditEventString, ""); + checkedFields = new MapBuilder<>(commonFields); + checkedFields.remove(LoggingAuditTrail.ORIGIN_ADDRESS_FIELD_NAME); + checkedFields.remove(LoggingAuditTrail.ORIGIN_TYPE_FIELD_NAME); + checkedFields.put("type", "audit") + .put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, "security_config_change") + .put(LoggingAuditTrail.EVENT_ACTION_FIELD_NAME, "change_apikey") + .put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); + assertMsg(generatedUpdateKeyAuditEventString, checkedFields.map()); + // clear log + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + GrantApiKeyRequest grantApiKeyRequest = new GrantApiKeyRequest(); grantApiKeyRequest.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); grantApiKeyRequest.getGrant().setType(randomFrom(randomAlphaOfLength(8), null)); @@ -1800,7 +1829,8 @@ public void testSecurityConfigChangedEventSelection() { new Tuple<>( SetProfileEnabledAction.NAME, new SetProfileEnabledRequest(randomAlphaOfLength(20), randomBoolean(), WriteRequest.RefreshPolicy.WAIT_UNTIL) - ) + ), + new Tuple<>(UpdateApiKeyAction.NAME, UpdateApiKeyRequest.usingApiKeyId(randomAlphaOfLength(10))) ); auditTrail.accessGranted(requestId, authentication, 
actionAndRequest.v1(), actionAndRequest.v2(), authorizationInfo); List output = CapturingLogger.output(logger.getName(), Level.INFO); From 5628b8792e5b7331bb0df899115396b8751488e9 Mon Sep 17 00:00:00 2001 From: Mat Schaffer Date: Tue, 12 Jul 2022 21:24:49 +0900 Subject: [PATCH 26/36] Bound random negative size test in SearchSourceBuilderTests#testNegativeSizeErrors (#88457) -1 is handled differently by the xcontent code path so this test will fail when `randomIntBetween` lands on -1. To fix, we add another integer for the xcontent test which starts at -2. --- .../search/builder/SearchSourceBuilderTests.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index ffe2d3c1bafbf..9c0c11a179115 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -500,10 +500,12 @@ public void testNegativeSizeErrors() throws IOException { expected = expectThrows(IllegalArgumentException.class, () -> new SearchSourceBuilder().size(-1)); assertEquals("[size] parameter cannot be negative, found [-1]", expected.getMessage()); - String restContent = "{\"size\" : " + randomSize + "}"; + // SearchSourceBuilder.fromXContent treats -1 as not-set + int boundedRandomSize = randomIntBetween(-100000, -2); + String restContent = "{\"size\" : " + boundedRandomSize + "}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> SearchSourceBuilder.fromXContent(parser)); - assertThat(ex.getMessage(), containsString(Integer.toString(randomSize))); + assertThat(ex.getMessage(), containsString(Integer.toString(boundedRandomSize))); } restContent = "{\"size\" : -1}"; From 
24d2520ae48652fa76ae74e4562ac8a7b2147ace Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 12 Jul 2022 22:29:04 +1000 Subject: [PATCH 27/36] Audit API key ID when create or grant API keys (#88456) The API key ID generation is handled by the Request class since #63221. This makes it possible to audit it when creating or granting API keys. This PR makes the necessary changes for it to happen. Relates: #63221 --- docs/changelog/88456.yaml | 5 +++++ .../docs/en/security/auditing/event-types.asciidoc | 2 +- .../security/audit/logfile/LoggingAuditTrail.java | 1 + .../audit/logfile/LoggingAuditTrailTests.java | 13 ++++++++++--- 4 files changed, 17 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/88456.yaml diff --git a/docs/changelog/88456.yaml b/docs/changelog/88456.yaml new file mode 100644 index 0000000000000..bb3a5d1182365 --- /dev/null +++ b/docs/changelog/88456.yaml @@ -0,0 +1,5 @@ +pr: 88456 +summary: Audit API key ID when create or grant API keys +area: Audit +type: enhancement +issues: [] diff --git a/x-pack/docs/en/security/auditing/event-types.asciidoc b/x-pack/docs/en/security/auditing/event-types.asciidoc index f3ca8a303725b..db4209ec60e9d 100644 --- a/x-pack/docs/en/security/auditing/event-types.asciidoc +++ b/x-pack/docs/en/security/auditing/event-types.asciidoc @@ -757,7 +757,7 @@ the <>. 
+ [source,js] ---- -`{"name": , "expiration": , "role_descriptors" []}` +`{"id": , "name": , "expiration": , "role_descriptors" []}` ---- // NOTCONSOLE + diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index 6ea823564db02..867dd495e82a0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -1234,6 +1234,7 @@ LogEntryBuilder withRequestBody(final UpdateApiKeyRequest updateApiKeyRequest) t private void withRequestBody(XContentBuilder builder, CreateApiKeyRequest createApiKeyRequest) throws IOException { TimeValue expiration = createApiKeyRequest.getExpiration(); builder.startObject("apikey") + .field("id", createApiKeyRequest.getId()) .field("name", createApiKeyRequest.getName()) .field("expiration", expiration != null ? 
expiration.toString() : null) .startArray("role_descriptors"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 540b5fa5c1507..f85b28bcbba98 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -590,8 +590,13 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException createApiKeyRequest.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); auditTrail.accessGranted(requestId, authentication, CreateApiKeyAction.NAME, createApiKeyRequest, authorizationInfo); String expectedCreateKeyAuditEventString = """ - "create":{"apikey":{"name":"%s","expiration":%s,%s}}\ - """.formatted(keyName, expiration != null ? "\"" + expiration + "\"" : "null", roleDescriptorsStringBuilder); + "create":{"apikey":{"id":"%s","name":"%s","expiration":%s,%s}}\ + """.formatted( + createApiKeyRequest.getId(), + keyName, + expiration != null ? 
"\"" + expiration + "\"" : "null", + roleDescriptorsStringBuilder + ); List output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); String generatedCreateKeyAuditEventString = output.get(1); @@ -646,7 +651,9 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); String generatedGrantKeyAuditEventString = output.get(1); - StringBuilder grantKeyAuditEventStringBuilder = new StringBuilder().append("\"create\":{\"apikey\":{\"name\":\"") + StringBuilder grantKeyAuditEventStringBuilder = new StringBuilder().append("\"create\":{\"apikey\":{\"id\":\"") + .append(grantApiKeyRequest.getApiKeyRequest().getId()) + .append("\",\"name\":\"") .append(keyName) .append("\",\"expiration\":") .append(expiration != null ? "\"" + expiration + "\"" : "null") From 3ed549b36cae886532a598adf5385054935ca616 Mon Sep 17 00:00:00 2001 From: weizijun Date: Tue, 12 Jul 2022 20:31:58 +0800 Subject: [PATCH 28/36] TSDB: RollupShardIndexer logging improvements (#88416) 1. Add trace log guards to avoid high cost method 2. 
Log the time it took to rollup a shard --- .../xpack/rollup/v2/RollupShardIndexer.java | 35 +++++++++++-------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java index 056e96d51af09..a9498c785971f 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java @@ -127,6 +127,7 @@ class RollupShardIndexer { } public RollupIndexerAction.ShardRollupResponse execute() throws IOException { + long startTime = System.currentTimeMillis(); BulkProcessor bulkProcessor = createBulkProcessor(); try (searcher; bulkProcessor) { // TODO: add cancellations @@ -138,11 +139,12 @@ public RollupIndexerAction.ShardRollupResponse execute() throws IOException { } logger.info( - "Shard {} successfully sent [{}], indexed [{}], failed [{}]", + "Shard [{}] successfully sent [{}], indexed [{}], failed [{}], took [{}]", indexShard.shardId(), numSent.get(), numIndexed.get(), - numFailed.get() + numFailed.get(), + TimeValue.timeValueMillis(System.currentTimeMillis() - startTime) ); if (numIndexed.get() != numSent.get()) { @@ -239,13 +241,15 @@ public void collect(int docId, long owningBucketOrd) throws IOException { lastHistoTimestamp = rounding.round(timestamp); } - logger.trace( - "Doc: [{}] - _tsid: [{}], @timestamp: [{}}] -> rollup bucket ts: [{}]", - docId, - DocValueFormat.TIME_SERIES_ID.format(tsid), - timestampFormat.format(timestamp), - timestampFormat.format(lastHistoTimestamp) - ); + if (logger.isTraceEnabled()) { + logger.trace( + "Doc: [{}] - _tsid: [{}], @timestamp: [{}}] -> rollup bucket ts: [{}]", + docId, + DocValueFormat.TIME_SERIES_ID.format(tsid), + timestampFormat.format(timestamp), + timestampFormat.format(lastHistoTimestamp) + ); + } /* 
* Sanity checks to ensure that we receive documents in the correct order @@ -349,11 +353,14 @@ public RollupBucketBuilder init(BytesRef tsid, long timestamp) { this.timestamp = timestamp; this.docCount = 0; this.metricFieldProducers.values().stream().forEach(p -> p.reset()); - logger.trace( - "New bucket for _tsid: [{}], @timestamp: [{}]", - DocValueFormat.TIME_SERIES_ID.format(tsid), - timestampFormat.format(timestamp) - ); + if (logger.isTraceEnabled()) { + logger.trace( + "New bucket for _tsid: [{}], @timestamp: [{}]", + DocValueFormat.TIME_SERIES_ID.format(tsid), + timestampFormat.format(timestamp) + ); + } + return this; } From 4af02b8c80d8b8f9464e85c3c08ac0157b6c679e Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 12 Jul 2022 14:42:45 +0200 Subject: [PATCH 29/36] Stop registering TestGeoShapeFieldMapperPlugin in ESIntegTestCase (#88460) Instead of registering the plugin by default, implementations that need it are responsible on registering the plugin. --- .../search/GeoBoundingBoxQueryLegacyGeoShapeIT.java | 5 ----- .../legacygeo/search/LegacyGeoShapeIT.java | 5 ----- .../percolator/PercolatorQuerySearchIT.java | 5 ----- .../indices/diskusage/IndexDiskUsageAnalyzerIT.java | 2 ++ .../search/geo/GeoBoundingBoxQueryGeoPointIT.java | 10 ++++++++++ .../search/geo/GeoBoundingBoxQueryGeoShapeIT.java | 10 ++++++++++ .../java/org/elasticsearch/search/geo/GeoShapeIT.java | 10 ++++++++++ .../org/elasticsearch/search/query/QueryStringIT.java | 9 +++++++++ .../search/query/SimpleQueryStringIT.java | 4 ++-- .../search/geo/GeoPointShapeQueryTests.java | 10 ++++++++++ .../elasticsearch/search/geo/GeoShapeQueryTests.java | 10 ++++++++++ .../search/geo/GeoPointShapeQueryTestCase.java | 9 --------- .../java/org/elasticsearch/test/ESIntegTestCase.java | 8 -------- .../integration/DocumentLevelSecurityTests.java | 6 ------ .../integration/FieldLevelSecurityTests.java | 6 ------ .../GeoBoundingBoxQueryGeoShapeWithDocValuesIT.java | 5 ----- 
...oBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java | 5 ----- .../search/GeoGridAggAndQueryConsistencyIT.java | 5 ----- .../xpack/spatial/search/GeoShapeWithDocValuesIT.java | 5 ----- .../spatial/search/LegacyGeoShapeWithDocValuesIT.java | 5 ----- 20 files changed, 63 insertions(+), 71 deletions(-) diff --git a/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/GeoBoundingBoxQueryLegacyGeoShapeIT.java b/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/GeoBoundingBoxQueryLegacyGeoShapeIT.java index ee12e28c35616..b3c888af3327a 100644 --- a/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/GeoBoundingBoxQueryLegacyGeoShapeIT.java +++ b/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/GeoBoundingBoxQueryLegacyGeoShapeIT.java @@ -22,11 +22,6 @@ public class GeoBoundingBoxQueryLegacyGeoShapeIT extends GeoBoundingBoxQueryIntegTestCase { - @Override - protected boolean addMockGeoShapeFieldMapper() { - return false; - } - @Override protected Collection> nodePlugins() { return Collections.singleton(TestLegacyGeoShapeFieldMapperPlugin.class); diff --git a/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeIT.java b/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeIT.java index 263d3840f7333..a92aba3cbee66 100644 --- a/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeIT.java +++ b/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeIT.java @@ -28,11 +28,6 @@ public class LegacyGeoShapeIT extends GeoShapeIntegTestCase { - @Override - protected boolean addMockGeoShapeFieldMapper() { - return false; - } - @Override protected Collection> nodePlugins() { return Collections.singleton(TestLegacyGeoShapeFieldMapperPlugin.class); diff --git 
a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 6f64e697e907e..315ff6492a23c 100644 --- a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -62,11 +62,6 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase { - @Override - protected boolean addMockGeoShapeFieldMapper() { - return false; - } - @Override protected Collection> nodePlugins() { return Arrays.asList(PercolatorPlugin.class, TestGeoShapeFieldMapperPlugin.class); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java index 21677e420dd23..e7ba86ed5bfcd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.plugins.EnginePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentBuilder; @@ -57,6 +58,7 @@ protected Collection> nodePlugins() { List> plugins = new ArrayList<>(super.nodePlugins()); plugins.add(EngineTestPlugin.class); plugins.add(MockTransportService.TestPlugin.class); + plugins.add(TestGeoShapeFieldMapperPlugin.class); return plugins; } diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryGeoPointIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryGeoPointIT.java index 5d4b24d12b63f..f9909f9de5b02 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryGeoPointIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryGeoPointIT.java @@ -9,14 +9,24 @@ package org.elasticsearch.search.geo; import org.elasticsearch.Version; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; public class GeoBoundingBoxQueryGeoPointIT extends GeoBoundingBoxQueryIntegTestCase { + @SuppressWarnings("deprecation") + @Override + protected Collection> nodePlugins() { + return Collections.singleton(TestGeoShapeFieldMapperPlugin.class); + } + @Override public XContentBuilder getMapping() throws IOException { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryGeoShapeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryGeoShapeIT.java index a81801c53c34b..403b890ea3dc6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryGeoShapeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryGeoShapeIT.java @@ -9,14 +9,24 @@ package org.elasticsearch.search.geo; import org.elasticsearch.Version; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.VersionUtils; import 
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; public class GeoBoundingBoxQueryGeoShapeIT extends GeoBoundingBoxQueryIntegTestCase { + @SuppressWarnings("deprecation") + @Override + protected Collection> nodePlugins() { + return Collections.singleton(TestGeoShapeFieldMapperPlugin.class); + } + @Override public XContentBuilder getMapping() throws IOException { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIT.java index 5394232ace8fa..a12de847a62bd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIT.java @@ -9,13 +9,23 @@ package org.elasticsearch.search.geo; import org.elasticsearch.Version; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; public class GeoShapeIT extends GeoShapeIntegTestCase { + @SuppressWarnings("deprecation") + @Override + protected Collection> nodePlugins() { + return Collections.singleton(TestGeoShapeFieldMapperPlugin.class); + } + @Override protected void getGeoShapeMapping(XContentBuilder b) throws IOException { b.field("type", "geo_shape"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java index 2090c4909870a..743cf268caf78 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java @@ -13,14 +13,17 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.Operator; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentType; import org.junit.Before; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -35,6 +38,12 @@ public class QueryStringIT extends ESIntegTestCase { + @SuppressWarnings("deprecation") + @Override + protected Collection> nodePlugins() { + return List.of(TestGeoShapeFieldMapperPlugin.class); + } + @Before public void setup() throws Exception { String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 387123189217c..7a4941d8454df 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -28,13 +28,13 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import 
java.util.HashSet; import java.util.List; import java.util.Set; @@ -65,7 +65,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singletonList(MockAnalysisPlugin.class); + return List.of(MockAnalysisPlugin.class, TestGeoShapeFieldMapperPlugin.class); } public void testSimpleQueryString() throws ExecutionException, InterruptedException { diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java index 63c651107735b..b468e963a1f83 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java @@ -15,10 +15,14 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.WellKnownText; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; import java.util.Map; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -27,6 +31,12 @@ public class GeoPointShapeQueryTests extends GeoPointShapeQueryTestCase { + @SuppressWarnings("deprecation") + @Override + protected Collection> getPlugins() { + return Collections.singleton(TestGeoShapeFieldMapperPlugin.class); + } + @Override protected void createMapping(String indexName, String fieldName, Settings settings) throws Exception { XContentBuilder xcb = XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index f2e9bfcc28f8a..50f209b67790c 100644 --- 
a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -14,10 +14,14 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.MultiPoint; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; @@ -25,6 +29,12 @@ public class GeoShapeQueryTests extends GeoShapeQueryTestCase { + @SuppressWarnings("deprecation") + @Override + protected Collection> getPlugins() { + return Collections.singleton(TestGeoShapeFieldMapperPlugin.class); + } + @Override protected void createMapping(String indexName, String fieldName, Settings settings) throws Exception { XContentBuilder xcb = XContentFactory.jsonBuilder() diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoPointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoPointShapeQueryTestCase.java index fe1b4c6e80602..d21a9f710c3d7 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoPointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoPointShapeQueryTestCase.java @@ -35,15 +35,11 @@ import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; 
import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -57,11 +53,6 @@ public abstract class GeoPointShapeQueryTestCase extends ESSingleNodeTestCase { - @Override - protected Collection> getPlugins() { - return Collections.singleton(TestGeoShapeFieldMapperPlugin.class); - } - protected abstract void createMapping(String indexName, String fieldName, Settings settings) throws Exception; protected void createMapping(String indexName, String fieldName) throws Exception { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 3e45394e70e24..7d52f28d32d6c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -2049,11 +2049,6 @@ protected boolean addMockInternalEngine() { return true; } - /** Returns {@code true} iff this test cluster should use a dummy geo_shape field mapper */ - protected boolean addMockGeoShapeFieldMapper() { - return true; - } - /** * Returns a function that allows to wrap / filter all clients that are exposed by the test cluster. This is useful * for debugging or request / response pre and post processing. 
It also allows to intercept all calls done by the test @@ -2095,9 +2090,6 @@ protected Collection> getMockPlugins() { mocks.add(TestSeedPlugin.class); mocks.add(AssertActionNamePlugin.class); mocks.add(MockScriptService.TestPlugin.class); - if (addMockGeoShapeFieldMapper()) { - mocks.add(TestGeoShapeFieldMapperPlugin.class); - } return Collections.unmodifiableList(mocks); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index 7ddded23b73b1..5beffd910382f 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -119,12 +119,6 @@ protected Collection> nodePlugins() { ); } - @Override - protected boolean addMockGeoShapeFieldMapper() { - // a test requires the real SpatialPlugin because it utilizes the shape query - return false; - } - @Override protected String configUsers() { final String usersPasswdHashed = new String(getFastStoredHashAlgoForTests().hash(USERS_PASSWD)); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 8311ce48df312..0feb8f4b03add 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -104,12 +104,6 @@ protected Collection> nodePlugins() { ); } - @Override - protected boolean addMockGeoShapeFieldMapper() { - // a test requires the real SpatialPlugin because it utilizes the shape query - return 
false; - } - @Override protected String configUsers() { final String usersPasswHashed = new String(getFastStoredHashAlgoForTests().hash(USERS_PASSWD)); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryGeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryGeoShapeWithDocValuesIT.java index 8206454e926b7..2b4400bb5a3fd 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryGeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryGeoShapeWithDocValuesIT.java @@ -21,11 +21,6 @@ public class GeoBoundingBoxQueryGeoShapeWithDocValuesIT extends GeoBoundingBoxQueryIntegTestCase { - @Override - protected boolean addMockGeoShapeFieldMapper() { - return false; - } - @Override protected Collection> nodePlugins() { return Collections.singleton(LocalStateSpatialPlugin.class); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java index 2073eac56218f..9b8814053c066 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java @@ -21,11 +21,6 @@ public class GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT extends GeoBoundingBoxQueryIntegTestCase { - @Override - protected boolean addMockGeoShapeFieldMapper() { - return false; - } - @Override protected Collection> nodePlugins() { return 
Collections.singleton(LocalStateSpatialPlugin.class); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java index 67b009ed9bf86..52497a0cc9b7d 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java @@ -51,11 +51,6 @@ public class GeoGridAggAndQueryConsistencyIT extends ESIntegTestCase { - @Override - protected boolean addMockGeoShapeFieldMapper() { - return false; - } - @Override protected Collection> nodePlugins() { return Collections.singleton(LocalStateSpatialPlugin.class); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java index 16b4735cb55c1..faca74d83c6cc 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java @@ -25,11 +25,6 @@ public class GeoShapeWithDocValuesIT extends GeoShapeIntegTestCase { - @Override - protected boolean addMockGeoShapeFieldMapper() { - return false; - } - @Override protected Collection> nodePlugins() { return Collections.singleton(LocalStateSpatialPlugin.class); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java 
b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java index 660fc7fd95bc1..f6fec0f64f75b 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java @@ -29,11 +29,6 @@ public class LegacyGeoShapeWithDocValuesIT extends GeoShapeIntegTestCase { - @Override - protected boolean addMockGeoShapeFieldMapper() { - return false; - } - @Override protected Collection> nodePlugins() { return Collections.singleton(LocalStateSpatialPlugin.class); From 47ecd204029fafbc2ac57d0bf454595f8e5cef89 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 12 Jul 2022 15:06:54 +0200 Subject: [PATCH 30/36] Use consistent shard map type in IndexService (#88465) It's faster and easier to reason about if we always have an immutable collections map here and not have the type depend on what the last operation on the index service was. 
--- .../java/org/elasticsearch/index/IndexService.java | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index e651b6b7a4da9..02b806406575c 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -96,7 +96,6 @@ import java.util.function.Supplier; import static java.util.Collections.emptyMap; -import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.core.Strings.format; public class IndexService extends AbstractIndexComponent implements IndicesClusterStateService.AllocatedIndex { @@ -118,7 +117,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final SimilarityService similarityService; private final EngineFactory engineFactory; private final IndexWarmer warmer; - private volatile Map shards = emptyMap(); + private volatile Map shards = Map.of(); private final AtomicBoolean closed = new AtomicBoolean(false); private final AtomicBoolean deleted = new AtomicBoolean(false); private final IndexSettings indexSettings; @@ -523,16 +522,13 @@ public synchronized IndexShard createShard( @Override public synchronized void removeShard(int shardId, String reason) { - final ShardId sId = new ShardId(index(), shardId); - final IndexShard indexShard; - if (shards.containsKey(shardId) == false) { + final IndexShard indexShard = shards.get(shardId); + if (indexShard == null) { return; } logger.debug("[{}] closing... 
(reason: [{}])", shardId, reason); - HashMap newShards = new HashMap<>(shards); - indexShard = newShards.remove(shardId); - shards = unmodifiableMap(newShards); - closeShard(reason, sId, indexShard, indexShard.store(), indexShard.getIndexEventListener()); + shards = Maps.copyMapWithRemovedEntry(shards, shardId); + closeShard(reason, indexShard.shardId(), indexShard, indexShard.store(), indexShard.getIndexEventListener()); logger.debug("[{}] closed (reason: [{}])", shardId, reason); } From 67cacde18b0aa08846b158054342cbd126eea626 Mon Sep 17 00:00:00 2001 From: Sean Letendre <32531321+local-ghost-127@users.noreply.github.com> Date: Tue, 12 Jul 2022 09:19:58 -0400 Subject: [PATCH 31/36] Corrected an incomplete sentence. (#86542) * Corrected an incomplete sentence. * Update docs/reference/aggregations/metrics/avg-aggregation.asciidoc Co-authored-by: Christos Soulios <1561376+csoulios@users.noreply.github.com> Co-authored-by: David Kilfoyle <41695641+kilfoyle@users.noreply.github.com> Co-authored-by: Christos Soulios <1561376+csoulios@users.noreply.github.com> --- docs/reference/aggregations/metrics/avg-aggregation.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/metrics/avg-aggregation.asciidoc b/docs/reference/aggregations/metrics/avg-aggregation.asciidoc index 05d112a13ac03..4883eec3e617a 100644 --- a/docs/reference/aggregations/metrics/avg-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/avg-aggregation.asciidoc @@ -4,7 +4,7 @@ Avg ++++ -A `single-value` metrics aggregation that computes the average of numeric values that are extracted from the aggregated documents. These values can be extracted either from specific numeric fields in the documents. +A `single-value` metrics aggregation that computes the average of numeric values that are extracted from the aggregated documents. These values can be extracted either from specific numeric or <> fields in the documents. 
Assuming the data consists of documents representing exams grades (between 0 and 100) of students we can average their scores with: From 28048a5dbe696b458fb225257f7d338a0ba3d770 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 12 Jul 2022 15:34:45 +0200 Subject: [PATCH 32/36] Updatable API keys - noop check (#88346) This PR adds a noop check for API key updates. If we detect a noop update, i.e., an update that does not result in any changes to the existing doc, we skip the index step and return updated = false in the response. This PR also extends test coverage around various corner cases. --- docs/changelog/88346.yaml | 5 + .../core/security/authc/RealmDomain.java | 5 + .../authc/AuthenticationTestHelper.java | 30 ++ .../xpack/security/apikey/ApiKeyRestIT.java | 24 +- .../security/authc/ApiKeyIntegTests.java | 307 +++++++++++++++--- .../xpack/security/authc/ApiKeyService.java | 256 ++++++++++----- .../security/authc/ApiKeyServiceTests.java | 195 ++++++----- .../rest-api-spec/test/api_key/30_update.yml | 2 +- 8 files changed, 623 insertions(+), 201 deletions(-) create mode 100644 docs/changelog/88346.yaml diff --git a/docs/changelog/88346.yaml b/docs/changelog/88346.yaml new file mode 100644 index 0000000000000..ca2537f28a5a9 --- /dev/null +++ b/docs/changelog/88346.yaml @@ -0,0 +1,5 @@ +pr: 88346 +summary: Updatable API keys - noop check +area: Security +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java index 8863953dc844d..53de14b5b68bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java @@ -14,6 +14,7 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import 
org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.List; @@ -48,6 +49,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static RealmDomain fromXContent(final XContentParser parser) { + return REALM_DOMAIN_PARSER.apply(parser, null); + } + @Override public String toString() { return "RealmDomain{" + "name='" + name + '\'' + ", realms=" + realms + '}'; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java index 8cb8b684a64ab..1bbcec5d92ce8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java @@ -90,6 +90,36 @@ public static User randomUser() { ); } + public static User userWithRandomMetadataAndDetails(final String username, final String... 
roles) { + return new User( + username, + roles, + ESTestCase.randomFrom(ESTestCase.randomAlphaOfLengthBetween(1, 10), null), + // Not a very realistic email address, but we don't validate this nor rely on correct format, so keeping it simple + ESTestCase.randomFrom(ESTestCase.randomAlphaOfLengthBetween(1, 10), null), + randomUserMetadata(), + true + ); + } + + public static Map randomUserMetadata() { + return ESTestCase.randomFrom( + Map.of( + "employee_id", + ESTestCase.randomAlphaOfLength(5), + "number", + 1, + "numbers", + List.of(1, 3, 5), + "extra", + Map.of("favorite pizza", "hawaii", "age", 42) + ), + Map.of(ESTestCase.randomAlphaOfLengthBetween(3, 8), ESTestCase.randomAlphaOfLengthBetween(3, 8)), + Map.of(), + null + ); + } + public static RealmDomain randomDomain(boolean includeInternal) { final Supplier randomRealmTypeSupplier = randomRealmTypeSupplier(includeInternal); final Set domainRealms = new HashSet<>( diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index c7f89106338cd..516d3b3d7a3a2 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -198,7 +198,7 @@ public void testGrantApiKeyWithOnlyManageOwnApiKeyPrivilegeFails() throws IOExce public void testUpdateApiKey() throws IOException { final var apiKeyName = "my-api-key-name"; - final Map apiKeyMetadata = Map.of("not", "returned"); + final Map apiKeyMetadata = Map.of("not", "returned"); final Map createApiKeyRequestBody = Map.of("name", apiKeyName, "metadata", apiKeyMetadata); final Request createApiKeyRequest = new Request("POST", "_security/api_key"); @@ -215,7 +215,7 @@ public void 
testUpdateApiKey() throws IOException { assertThat(apiKeyId, not(emptyString())); assertThat(apiKeyEncoded, not(emptyString())); - doTestUpdateApiKey(apiKeyName, apiKeyId, apiKeyEncoded); + doTestUpdateApiKey(apiKeyName, apiKeyId, apiKeyEncoded, apiKeyMetadata); } public void testGrantTargetCanUpdateApiKey() throws IOException { @@ -240,7 +240,7 @@ public void testGrantTargetCanUpdateApiKey() throws IOException { assertThat(apiKeyId, not(emptyString())); assertThat(apiKeyEncoded, not(emptyString())); - doTestUpdateApiKey(apiKeyName, apiKeyId, apiKeyEncoded); + doTestUpdateApiKey(apiKeyName, apiKeyId, apiKeyEncoded, null); } public void testGrantorCannotUpdateApiKeyOfGrantTarget() throws IOException { @@ -283,18 +283,26 @@ private void doTestAuthenticationWithApiKey(final String apiKeyName, final Strin assertThat(authenticate, hasEntry("api_key", Map.of("id", apiKeyId, "name", apiKeyName))); } - private void doTestUpdateApiKey(String apiKeyName, String apiKeyId, String apiKeyEncoded) throws IOException { + private void doTestUpdateApiKey( + final String apiKeyName, + final String apiKeyId, + final String apiKeyEncoded, + final Map oldMetadata + ) throws IOException { final var updateApiKeyRequest = new Request("PUT", "_security/api_key/" + apiKeyId); - final Map expectedApiKeyMetadata = Map.of("not", "returned (changed)", "foo", "bar"); - final Map updateApiKeyRequestBody = Map.of("metadata", expectedApiKeyMetadata); + final boolean updated = randomBoolean(); + final Map expectedApiKeyMetadata = updated ? Map.of("not", "returned (changed)", "foo", "bar") : oldMetadata; + final Map updateApiKeyRequestBody = expectedApiKeyMetadata == null + ? 
Map.of() + : Map.of("metadata", expectedApiKeyMetadata); updateApiKeyRequest.setJsonEntity(XContentTestUtils.convertToXContent(updateApiKeyRequestBody, XContentType.JSON).utf8ToString()); final Response updateApiKeyResponse = doUpdateUsingRandomAuthMethod(updateApiKeyRequest); assertOK(updateApiKeyResponse); final Map updateApiKeyResponseMap = responseAsMap(updateApiKeyResponse); - assertTrue((Boolean) updateApiKeyResponseMap.get("updated")); - expectMetadata(apiKeyId, expectedApiKeyMetadata); + assertEquals(updated, updateApiKeyResponseMap.get("updated")); + expectMetadata(apiKeyId, expectedApiKeyMetadata == null ? Map.of() : expectedApiKeyMetadata); // validate authentication still works after update doTestAuthenticationWithApiKey(apiKeyName, apiKeyId, apiKeyEncoded); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 36d60d6e25fd3..a2624cc1fdca1 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -28,11 +28,13 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.RestStatus; @@ -43,6 +45,8 
@@ import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.ClearSecurityCacheAction; @@ -137,6 +141,13 @@ public class ApiKeyIntegTests extends SecurityIntegTestCase { private static final long DELETE_INTERVAL_MILLIS = 100L; private static final int CRYPTO_THREAD_POOL_QUEUE_SIZE = 10; + private static final RoleDescriptor DEFAULT_API_KEY_ROLE_DESCRIPTOR = new RoleDescriptor( + "role", + new String[] { "monitor" }, + null, + null + ); + @Override public Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return Settings.builder() @@ -1435,6 +1446,7 @@ public void testSecurityIndexStateChangeWillInvalidateApiKeyCaches() throws Exce public void testUpdateApiKey() throws ExecutionException, InterruptedException, IOException { final Tuple> createdApiKey = createApiKey(TEST_USER_NAME, null); final var apiKeyId = createdApiKey.v1().getId(); + final Map oldMetadata = createdApiKey.v2(); final var newRoleDescriptors = randomRoleDescriptors(); final boolean nullRoleDescriptors = newRoleDescriptors == null; // Role descriptor corresponding to SecuritySettingsSource.TEST_ROLE_YML @@ -1449,11 +1461,14 @@ public void testUpdateApiKey() throws ExecutionException, InterruptedException, ); final var request = new UpdateApiKeyRequest(apiKeyId, newRoleDescriptors, ApiKeyTests.randomMetadata()); - final PlainActionFuture listener = new PlainActionFuture<>(); - final UpdateApiKeyResponse response = executeUpdateApiKey(TEST_USER_NAME, request, listener); + final UpdateApiKeyResponse response = executeUpdateApiKey(TEST_USER_NAME, request); assertNotNull(response); - assertTrue(response.isUpdated()); + // In this test, non-null 
roleDescriptors always result in an update since they either update the role name, or associated + // privileges. As such null descriptors (plus matching or null metadata) is the only way we can get a noop here + final boolean metadataChanged = request.getMetadata() != null && false == request.getMetadata().equals(oldMetadata); + final boolean isUpdated = nullRoleDescriptors == false || metadataChanged; + assertEquals(isUpdated, response.isUpdated()); final PlainActionFuture getListener = new PlainActionFuture<>(); client().filterWithHeader( @@ -1475,31 +1490,37 @@ public void testUpdateApiKey() throws ExecutionException, InterruptedException, final var updatedApiKeyDoc = getApiKeyDocument(apiKeyId); expectMetadataForApiKey(expectedMetadata, updatedApiKeyDoc); expectRoleDescriptorsForApiKey("limited_by_role_descriptors", expectedLimitedByRoleDescriptors, updatedApiKeyDoc); - if (nullRoleDescriptors) { - // Default role descriptor assigned to api key in `createApiKey` - final var expectedRoleDescriptor = new RoleDescriptor("role", new String[] { "monitor" }, null, null); - expectRoleDescriptorsForApiKey("role_descriptors", List.of(expectedRoleDescriptor), updatedApiKeyDoc); - - // Create user action unauthorized because we did not update key role; it only has `monitor` cluster priv - final Map authorizationHeaders = Collections.singletonMap( - "Authorization", - "ApiKey " + getBase64EncodedApiKeyValue(createdApiKey.v1().getId(), createdApiKey.v1().getKey()) - ); + final var expectedRoleDescriptors = nullRoleDescriptors ? 
List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR) : newRoleDescriptors; + expectRoleDescriptorsForApiKey("role_descriptors", expectedRoleDescriptors, updatedApiKeyDoc); + final Map expectedCreator = new HashMap<>(); + expectedCreator.put("principal", TEST_USER_NAME); + expectedCreator.put("full_name", null); + expectedCreator.put("email", null); + expectedCreator.put("metadata", Map.of()); + expectedCreator.put("realm_type", "file"); + expectedCreator.put("realm", "file"); + expectCreatorForApiKey(expectedCreator, updatedApiKeyDoc); + + // Check if update resulted in API key role going from `monitor` to `all` cluster privilege and assert that action that requires + // `all` is authorized or denied accordingly + final boolean hasAllClusterPrivilege = expectedRoleDescriptors.stream() + .filter(rd -> Arrays.asList(rd.getClusterPrivileges()).contains("all")) + .toList() + .isEmpty() == false; + final var authorizationHeaders = Collections.singletonMap( + "Authorization", + "ApiKey " + getBase64EncodedApiKeyValue(createdApiKey.v1().getId(), createdApiKey.v1().getKey()) + ); + if (hasAllClusterPrivilege) { + createUserWithRunAsRole(authorizationHeaders); + } else { ExecutionException e = expectThrows(ExecutionException.class, () -> createUserWithRunAsRole(authorizationHeaders)); assertThat(e.getMessage(), containsString("unauthorized")); assertThat(e.getCause(), instanceOf(ElasticsearchSecurityException.class)); - } else { - expectRoleDescriptorsForApiKey("role_descriptors", newRoleDescriptors, updatedApiKeyDoc); - // Create user action authorized because we updated key role to `all` cluster priv - final var authorizationHeaders = Collections.singletonMap( - "Authorization", - "ApiKey " + getBase64EncodedApiKeyValue(createdApiKey.v1().getId(), createdApiKey.v1().getKey()) - ); - createUserWithRunAsRole(authorizationHeaders); } } - public void testUpdateApiKeyAutoUpdatesUserRoles() throws IOException, ExecutionException, InterruptedException { + public void 
testUpdateApiKeyAutoUpdatesUserFields() throws IOException, ExecutionException, InterruptedException { // Create separate native realm user and role for user role change test final var nativeRealmUser = randomAlphaOfLengthBetween(5, 10); final var nativeRealmRole = randomAlphaOfLengthBetween(5, 10); @@ -1536,13 +1557,39 @@ public void testUpdateApiKeyAutoUpdatesUserRoles() throws IOException, Execution newClusterPrivileges.toArray(new String[0]) ); - // Update API key - final PlainActionFuture listener = new PlainActionFuture<>(); - final UpdateApiKeyResponse response = executeUpdateApiKey(nativeRealmUser, UpdateApiKeyRequest.usingApiKeyId(apiKeyId), listener); + UpdateApiKeyResponse response = executeUpdateApiKey(nativeRealmUser, UpdateApiKeyRequest.usingApiKeyId(apiKeyId)); assertNotNull(response); assertTrue(response.isUpdated()); expectRoleDescriptorsForApiKey("limited_by_role_descriptors", Set.of(roleDescriptorAfterUpdate), getApiKeyDocument(apiKeyId)); + + // Update user role name only + final RoleDescriptor roleDescriptorWithNewName = putRoleWithClusterPrivileges( + randomValueOtherThan(nativeRealmRole, () -> randomAlphaOfLength(10)), + // Keep old privileges + newClusterPrivileges.toArray(new String[0]) + ); + final User updatedUser = AuthenticationTestHelper.userWithRandomMetadataAndDetails( + nativeRealmUser, + roleDescriptorWithNewName.getName() + ); + updateUser(updatedUser); + + // Update API key + response = executeUpdateApiKey(nativeRealmUser, UpdateApiKeyRequest.usingApiKeyId(apiKeyId)); + + assertNotNull(response); + assertTrue(response.isUpdated()); + final Map updatedApiKeyDoc = getApiKeyDocument(apiKeyId); + expectRoleDescriptorsForApiKey("limited_by_role_descriptors", Set.of(roleDescriptorWithNewName), updatedApiKeyDoc); + final Map expectedCreator = new HashMap<>(); + expectedCreator.put("principal", updatedUser.principal()); + expectedCreator.put("full_name", updatedUser.fullName()); + expectedCreator.put("email", updatedUser.email()); + 
expectedCreator.put("metadata", updatedUser.metadata()); + expectedCreator.put("realm_type", "native"); + expectedCreator.put("realm", "index"); + expectCreatorForApiKey(expectedCreator, updatedApiKeyDoc); } public void testUpdateApiKeyNotFoundScenarios() throws ExecutionException, InterruptedException { @@ -1552,8 +1599,7 @@ public void testUpdateApiKeyNotFoundScenarios() throws ExecutionException, Inter final var request = new UpdateApiKeyRequest(apiKeyId, List.of(expectedRoleDescriptor), ApiKeyTests.randomMetadata()); // Validate can update own API key - final PlainActionFuture listener = new PlainActionFuture<>(); - final UpdateApiKeyResponse response = executeUpdateApiKey(TEST_USER_NAME, request, listener); + final UpdateApiKeyResponse response = executeUpdateApiKey(TEST_USER_NAME, request); assertNotNull(response); assertTrue(response.isUpdated()); @@ -1646,7 +1692,7 @@ public void testInvalidUpdateApiKeyScenarios() throws ExecutionException, Interr } } - public void testUpdateApiKeyAccountsForSecurityDomains() throws ExecutionException, InterruptedException { + public void testUpdateApiKeyAccountsForSecurityDomains() throws ExecutionException, InterruptedException, IOException { final Tuple> createdApiKey = createApiKey(TEST_USER_NAME, null); final var apiKeyId = createdApiKey.v1().getId(); @@ -1679,6 +1725,147 @@ public void testUpdateApiKeyAccountsForSecurityDomains() throws ExecutionExcepti assertNotNull(response); assertTrue(response.isUpdated()); + final Map expectedCreator = new HashMap<>(); + expectedCreator.put("principal", TEST_USER_NAME); + expectedCreator.put("full_name", null); + expectedCreator.put("email", null); + expectedCreator.put("metadata", Map.of()); + expectedCreator.put("realm_type", authenticatingRealm.getType()); + expectedCreator.put("realm", authenticatingRealm.getName()); + final XContentBuilder builder = realmDomain.toXContent(XContentFactory.jsonBuilder(), null); + expectedCreator.put("realm_domain", 
XContentHelper.convertToMap(BytesReference.bytes(builder), false, XContentType.JSON).v2()); + expectCreatorForApiKey(expectedCreator, getApiKeyDocument(apiKeyId)); + } + + public void testNoopUpdateApiKey() throws ExecutionException, InterruptedException, IOException { + final Tuple> createdApiKey = createApiKey(TEST_USER_NAME, null); + final var apiKeyId = createdApiKey.v1().getId(); + + final var initialRequest = new UpdateApiKeyRequest( + apiKeyId, + List.of(new RoleDescriptor(randomAlphaOfLength(10), new String[] { "all" }, null, null)), + ApiKeyTests.randomMetadata() + ); + UpdateApiKeyResponse response = executeUpdateApiKey(TEST_USER_NAME, initialRequest); + assertNotNull(response); + // First update is not noop, because role descriptors changed and possibly metadata + assertTrue(response.isUpdated()); + + // Update with same request is a noop and does not clear cache + authenticateWithApiKey(apiKeyId, createdApiKey.v1().getKey()); + final var serviceWithNameForDoc1 = Arrays.stream(internalCluster().getNodeNames()) + .map(n -> internalCluster().getInstance(ApiKeyService.class, n)) + .filter(s -> s.getDocCache().get(apiKeyId) != null) + .findFirst() + .orElseThrow(); + final int count = serviceWithNameForDoc1.getDocCache().count(); + response = executeUpdateApiKey(TEST_USER_NAME, initialRequest); + assertNotNull(response); + assertFalse(response.isUpdated()); + assertEquals(count, serviceWithNameForDoc1.getDocCache().count()); + + // Update with empty request is a noop + response = executeUpdateApiKey(TEST_USER_NAME, UpdateApiKeyRequest.usingApiKeyId(apiKeyId)); + assertNotNull(response); + assertFalse(response.isUpdated()); + + // Update with different role descriptors is not a noop + final List newRoleDescriptors = List.of( + randomValueOtherThanMany( + rd -> (RoleDescriptorRequestValidator.validate(rd) != null) && initialRequest.getRoleDescriptors().contains(rd) == false, + () -> RoleDescriptorTests.randomRoleDescriptor(false) + ), + 
randomValueOtherThanMany( + rd -> (RoleDescriptorRequestValidator.validate(rd) != null) && initialRequest.getRoleDescriptors().contains(rd) == false, + () -> RoleDescriptorTests.randomRoleDescriptor(false) + ) + ); + response = executeUpdateApiKey(TEST_USER_NAME, new UpdateApiKeyRequest(apiKeyId, newRoleDescriptors, null)); + assertNotNull(response); + assertTrue(response.isUpdated()); + + // Update with re-ordered role descriptors is a noop + response = executeUpdateApiKey( + TEST_USER_NAME, + new UpdateApiKeyRequest(apiKeyId, List.of(newRoleDescriptors.get(1), newRoleDescriptors.get(0)), null) + ); + assertNotNull(response); + assertFalse(response.isUpdated()); + + // Update with different metadata is not a noop + response = executeUpdateApiKey( + TEST_USER_NAME, + new UpdateApiKeyRequest( + apiKeyId, + null, + randomValueOtherThanMany(md -> md == null || md.equals(initialRequest.getMetadata()), ApiKeyTests::randomMetadata) + ) + ); + assertNotNull(response); + assertTrue(response.isUpdated()); + + // Update with different creator info is not a noop + // First, ensure that the user role descriptors alone do *not* cause an update, so we can test that we correctly perform the noop + // check when we update creator info + final ServiceWithNodeName serviceWithNodeName = getServiceWithNodeName(); + PlainActionFuture listener = new PlainActionFuture<>(); + // Role descriptor corresponding to SecuritySettingsSource.TEST_ROLE_YML, i.e., should not result in update + final Set oldUserRoleDescriptors = Set.of( + new RoleDescriptor( + TEST_ROLE, + new String[] { "ALL" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").allowRestrictedIndices(true).privileges("ALL").build() }, + null + ) + ); + serviceWithNodeName.service() + .updateApiKey( + Authentication.newRealmAuthentication( + new User(TEST_USER_NAME, TEST_ROLE), + new Authentication.RealmRef("file", "file", serviceWithNodeName.nodeName()) + ), + 
UpdateApiKeyRequest.usingApiKeyId(apiKeyId), + oldUserRoleDescriptors, + listener + ); + response = listener.get(); + assertNotNull(response); + assertFalse(response.isUpdated()); + final User updatedUser = AuthenticationTestHelper.userWithRandomMetadataAndDetails(TEST_USER_NAME, TEST_ROLE); + final RealmConfig.RealmIdentifier creatorRealmOnCreatedApiKey = new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, "file"); + final boolean noUserChanges = updatedUser.equals(new User(TEST_USER_NAME, TEST_ROLE)); + final Authentication.RealmRef realmRef; + if (randomBoolean() || noUserChanges) { + final RealmConfig.RealmIdentifier otherRealmInDomain = AuthenticationTestHelper.randomRealmIdentifier(true); + final var realmDomain = new RealmDomain( + ESTestCase.randomAlphaOfLengthBetween(3, 8), + Set.of(creatorRealmOnCreatedApiKey, otherRealmInDomain) + ); + // Using other realm from domain should result in update + realmRef = new Authentication.RealmRef( + otherRealmInDomain.getName(), + otherRealmInDomain.getType(), + serviceWithNodeName.nodeName(), + realmDomain + ); + } else { + realmRef = new Authentication.RealmRef( + creatorRealmOnCreatedApiKey.getName(), + creatorRealmOnCreatedApiKey.getType(), + serviceWithNodeName.nodeName() + ); + } + final var authentication = randomValueOtherThanMany( + Authentication::isApiKey, + () -> AuthenticationTestHelper.builder().user(updatedUser).realmRef(realmRef).build() + ); + listener = new PlainActionFuture<>(); + serviceWithNodeName.service() + .updateApiKey(authentication, UpdateApiKeyRequest.usingApiKeyId(apiKeyId), oldUserRoleDescriptors, listener); + response = listener.get(); + assertNotNull(response); + assertTrue(response.isUpdated()); } public void testUpdateApiKeyClearsApiKeyDocCache() throws IOException, ExecutionException, InterruptedException { @@ -1720,7 +1907,12 @@ public void testUpdateApiKeyClearsApiKeyDocCache() throws IOException, Execution final Client client = client().filterWithHeader( 
Collections.singletonMap("Authorization", basicAuthHeaderValue(ES_TEST_ROOT_USER, TEST_PASSWORD_SECURE_STRING)) ); - client.execute(UpdateApiKeyAction.INSTANCE, new UpdateApiKeyRequest(apiKey1.v1(), List.of(), null), listener); + client.execute( + UpdateApiKeyAction.INSTANCE, + // Set metadata to ensure update + new UpdateApiKeyRequest(apiKey1.v1(), List.of(), Map.of(randomAlphaOfLength(5), randomAlphaOfLength(10))), + listener + ); final var response = listener.get(); assertNotNull(response); assertTrue(response.isUpdated()); @@ -1741,7 +1933,7 @@ public void testUpdateApiKeyClearsApiKeyDocCache() throws IOException, Execution } private List randomRoleDescriptors() { - int caseNo = randomIntBetween(0, 2); + int caseNo = randomIntBetween(0, 3); return switch (caseNo) { case 0 -> List.of(new RoleDescriptor(randomAlphaOfLength(10), new String[] { "all" }, null, null)); case 1 -> List.of( @@ -1752,6 +1944,15 @@ private List randomRoleDescriptors() { ) ); case 2 -> null; + // vary default role descriptor assigned to created API keys by name only + case 3 -> List.of( + new RoleDescriptor( + randomValueOtherThan(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), () -> randomAlphaOfLength(10)), + DEFAULT_API_KEY_ROLE_DESCRIPTOR.getClusterPrivileges(), + DEFAULT_API_KEY_ROLE_DESCRIPTOR.getIndicesPrivileges(), + DEFAULT_API_KEY_ROLE_DESCRIPTOR.getRunAs() + ) + ); default -> throw new IllegalStateException("unexpected case no"); }; } @@ -1774,6 +1975,13 @@ private void expectMetadataForApiKey(final Map expectedMetadata, assertThat("for api key doc " + actualRawApiKeyDoc, actualMetadata, equalTo(expectedMetadata)); } + private void expectCreatorForApiKey(final Map expectedCreator, final Map actualRawApiKeyDoc) { + assertNotNull(actualRawApiKeyDoc); + @SuppressWarnings("unchecked") + final var actualCreator = (Map) actualRawApiKeyDoc.get("creator"); + assertThat("for api key doc " + actualRawApiKeyDoc, actualCreator, equalTo(expectedCreator)); + } + @SuppressWarnings("unchecked") 
private void expectRoleDescriptorsForApiKey( final String roleDescriptorType, @@ -1940,12 +2148,17 @@ private void verifyGetResponse( } private Tuple> createApiKey(String user, TimeValue expiration) { - final Tuple, List>> res = createApiKeys(user, 1, expiration, "monitor"); + final Tuple, List>> res = createApiKeys( + user, + 1, + expiration, + DEFAULT_API_KEY_ROLE_DESCRIPTOR.getClusterPrivileges() + ); return new Tuple<>(res.v1().get(0), res.v2().get(0)); } private Tuple, List>> createApiKeys(int noOfApiKeys, TimeValue expiration) { - return createApiKeys(ES_TEST_ROOT_USER, noOfApiKeys, expiration, "monitor"); + return createApiKeys(ES_TEST_ROOT_USER, noOfApiKeys, expiration, DEFAULT_API_KEY_ROLE_DESCRIPTOR.getClusterPrivileges()); } private Tuple, List>> createApiKeys( @@ -1996,7 +2209,7 @@ private Tuple, List>> createApiKe List> metadatas = new ArrayList<>(noOfApiKeys); List responses = new ArrayList<>(); for (int i = 0; i < noOfApiKeys; i++) { - final RoleDescriptor descriptor = new RoleDescriptor("role", clusterPrivileges, null, null); + final RoleDescriptor descriptor = new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), clusterPrivileges, null, null); Client client = client().filterWithHeader(headers); final Map metadata = ApiKeyTests.randomMetadata(); metadatas.add(metadata); @@ -2041,13 +2254,27 @@ private void createNativeRealmUser( assertTrue(putUserResponse.created()); } + private void updateUser(User user) throws ExecutionException, InterruptedException { + final PutUserRequest putUserRequest = new PutUserRequest(); + putUserRequest.username(user.principal()); + putUserRequest.roles(user.roles()); + putUserRequest.metadata(user.metadata()); + putUserRequest.fullName(user.fullName()); + putUserRequest.email(user.email()); + final PlainActionFuture listener = new PlainActionFuture<>(); + final Client client = client().filterWithHeader( + Map.of("Authorization", basicAuthHeaderValue(ES_TEST_ROOT_USER, TEST_PASSWORD_SECURE_STRING)) + ); + 
client.execute(PutUserAction.INSTANCE, putUserRequest, listener); + final PutUserResponse putUserResponse = listener.get(); + assertFalse(putUserResponse.created()); + } + private RoleDescriptor putRoleWithClusterPrivileges(final String nativeRealmRoleName, String... clusterPrivileges) throws InterruptedException, ExecutionException { final PutRoleRequest putRoleRequest = new PutRoleRequest(); putRoleRequest.name(nativeRealmRoleName); - for (final String clusterPrivilege : clusterPrivileges) { - putRoleRequest.cluster(clusterPrivilege); - } + putRoleRequest.cluster(clusterPrivileges); final PlainActionFuture roleListener = new PlainActionFuture<>(); client().filterWithHeader(Map.of("Authorization", basicAuthHeaderValue(ES_TEST_ROOT_USER, TEST_PASSWORD_SECURE_STRING))) .execute(PutRoleAction.INSTANCE, putRoleRequest, roleListener); @@ -2066,11 +2293,9 @@ private Client getClientForRunAsUser() { ); } - private UpdateApiKeyResponse executeUpdateApiKey( - final String username, - final UpdateApiKeyRequest request, - final PlainActionFuture listener - ) throws InterruptedException, ExecutionException { + private UpdateApiKeyResponse executeUpdateApiKey(final String username, final UpdateApiKeyRequest request) throws InterruptedException, + ExecutionException { + final var listener = new PlainActionFuture(); final Client client = client().filterWithHeader( Collections.singletonMap("Authorization", basicAuthHeaderValue(username, TEST_PASSWORD_SECURE_STRING)) ); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index fe30462e9b8d6..883ad3ca98c19 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -119,6 +119,7 @@ import java.util.Collection; import 
java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -279,27 +280,27 @@ public void invalidateAll() { * Asynchronously creates a new API key based off of the request and authentication * @param authentication the authentication that this api key should be based off of * @param request the request to create the api key included any permission restrictions - * @param userRoles the user's actual roles that we always enforce + * @param userRoleDescriptors the user's actual roles that we always enforce * @param listener the listener that will be used to notify of completion */ public void createApiKey( Authentication authentication, CreateApiKeyRequest request, - Set userRoles, + Set userRoleDescriptors, ActionListener listener ) { ensureEnabled(); if (authentication == null) { listener.onFailure(new IllegalArgumentException("authentication must be provided")); } else { - createApiKeyAndIndexIt(authentication, request, userRoles, listener); + createApiKeyAndIndexIt(authentication, request, userRoleDescriptors, listener); } } private void createApiKeyAndIndexIt( Authentication authentication, CreateApiKeyRequest request, - Set roleDescriptorSet, + Set userRoleDescriptors, ActionListener listener ) { final Instant created = clock.instant(); @@ -313,7 +314,7 @@ private void createApiKeyAndIndexIt( apiKeyHashChars, request.getName(), authentication, - roleDescriptorSet, + userRoleDescriptors, created, expiration, request.getRoleDescriptors(), @@ -358,7 +359,7 @@ private void createApiKeyAndIndexIt( public void updateApiKey( final Authentication authentication, final UpdateApiKeyRequest request, - final Set userRoles, + final Set userRoleDescriptors, final ActionListener listener ) { ensureEnabled(); @@ -386,10 +387,7 @@ public void updateApiKey( validateCurrentApiKeyDocForUpdate(apiKeyId, authentication, versionedDoc.doc()); - executeBulkRequest( - 
buildBulkRequestForUpdate(versionedDoc, authentication, request, userRoles), - ActionListener.wrap(bulkResponse -> translateResponseAndClearCache(apiKeyId, bulkResponse, listener), listener::onFailure) - ); + doUpdateApiKey(authentication, request, userRoleDescriptors, versionedDoc, listener); }, listener::onFailure)); } @@ -418,10 +416,10 @@ static XContentBuilder newDocument( char[] apiKeyHashChars, String name, Authentication authentication, - Set userRoles, + Set userRoleDescriptors, Instant created, Instant expiration, - List keyRoles, + List keyRoleDescriptors, Version version, @Nullable Map metadata ) throws IOException { @@ -433,8 +431,8 @@ static XContentBuilder newDocument( .field("api_key_invalidated", false); addApiKeyHash(builder, apiKeyHashChars); - addRoleDescriptors(builder, keyRoles); - addLimitedByRoleDescriptors(builder, userRoles); + addRoleDescriptors(builder, keyRoleDescriptors); + addLimitedByRoleDescriptors(builder, userRoleDescriptors); builder.field("name", name).field("version", version.id).field("metadata_flattened", metadata); addCreator(builder, authentication); @@ -442,14 +440,22 @@ static XContentBuilder newDocument( return builder.endObject(); } - static XContentBuilder buildUpdatedDocument( + // package private for testing + + /** + * @return `null` if the update is a noop, i.e., if no changes to `currentApiKeyDoc` are required + */ + XContentBuilder maybeBuildUpdatedDocument( final ApiKeyDoc currentApiKeyDoc, + final Version targetDocVersion, final Authentication authentication, - final Set userRoles, - final List keyRoles, - final Version version, - final Map metadata + final UpdateApiKeyRequest request, + final Set userRoleDescriptors ) throws IOException { + if (isNoop(currentApiKeyDoc, targetDocVersion, authentication, request, userRoleDescriptors)) { + return null; + } + final XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject() .field("doc_type", "api_key") @@ -459,6 +465,7 @@ static XContentBuilder 
buildUpdatedDocument( addApiKeyHash(builder, currentApiKeyDoc.hash.toCharArray()); + final List keyRoles = request.getRoleDescriptors(); if (keyRoles != null) { logger.trace(() -> format("Building API key doc with updated role descriptors [{}]", keyRoles)); addRoleDescriptors(builder, keyRoles); @@ -467,14 +474,15 @@ static XContentBuilder buildUpdatedDocument( builder.rawField("role_descriptors", currentApiKeyDoc.roleDescriptorsBytes.streamInput(), XContentType.JSON); } - addLimitedByRoleDescriptors(builder, userRoles); + addLimitedByRoleDescriptors(builder, userRoleDescriptors); - builder.field("name", currentApiKeyDoc.name).field("version", version.id); + builder.field("name", currentApiKeyDoc.name).field("version", targetDocVersion.id); assert currentApiKeyDoc.metadataFlattened == null || MetadataUtils.containsReservedMetadata( XContentHelper.convertToMap(currentApiKeyDoc.metadataFlattened, false, XContentType.JSON).v2() ) == false : "API key doc to be updated contains reserved metadata"; + final Map metadata = request.getMetadata(); if (metadata != null) { logger.trace(() -> format("Building API key doc with updated metadata [{}]", metadata)); builder.field("metadata_flattened", metadata); @@ -493,6 +501,90 @@ static XContentBuilder buildUpdatedDocument( return builder.endObject(); } + private boolean isNoop( + final ApiKeyDoc apiKeyDoc, + final Version targetDocVersion, + final Authentication authentication, + final UpdateApiKeyRequest request, + final Set userRoleDescriptors + ) { + if (apiKeyDoc.version != targetDocVersion.id) { + return false; + } + + final Map currentCreator = apiKeyDoc.creator; + final var user = authentication.getEffectiveSubject().getUser(); + final var sourceRealm = authentication.getEffectiveSubject().getRealm(); + if (false == (Objects.equals(user.principal(), currentCreator.get("principal")) + && Objects.equals(user.fullName(), currentCreator.get("full_name")) + && Objects.equals(user.email(), currentCreator.get("email")) + && 
Objects.equals(user.metadata(), currentCreator.get("metadata")) + && Objects.equals(sourceRealm.getName(), currentCreator.get("realm")) + && Objects.equals(sourceRealm.getType(), currentCreator.get("realm_type")))) { + return false; + } + if (sourceRealm.getDomain() != null) { + if (currentCreator.get("realm_domain") == null) { + return false; + } + @SuppressWarnings("unchecked") + final var currentRealmDomain = RealmDomain.fromXContent( + XContentHelper.mapToXContentParser( + XContentParserConfiguration.EMPTY, + (Map) currentCreator.get("realm_domain") + ) + ); + if (sourceRealm.getDomain().equals(currentRealmDomain) == false) { + return false; + } + } else { + if (currentCreator.get("realm_domain") != null) { + return false; + } + } + + final Map newMetadata = request.getMetadata(); + if (newMetadata != null) { + if (apiKeyDoc.metadataFlattened == null) { + return false; + } + final Map currentMetadata = XContentHelper.convertToMap(apiKeyDoc.metadataFlattened, false, XContentType.JSON) + .v2(); + if (newMetadata.equals(currentMetadata) == false) { + return false; + } + } + + final List newRoleDescriptors = request.getRoleDescriptors(); + if (newRoleDescriptors != null) { + final List currentRoleDescriptors = parseRoleDescriptorsBytes( + request.getId(), + apiKeyDoc.roleDescriptorsBytes, + RoleReference.ApiKeyRoleType.ASSIGNED + ); + if (false == (newRoleDescriptors.size() == currentRoleDescriptors.size() + && Set.copyOf(newRoleDescriptors).containsAll(new HashSet<>(currentRoleDescriptors)))) { + return false; + } + } + + assert userRoleDescriptors != null; + // There is an edge case here when we update an 7.x API key that has a `LEGACY_SUPERUSER_ROLE_DESCRIPTOR` role descriptor: + // `parseRoleDescriptorsBytes` automatically transforms it to `ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR`. 
As such, when we + // perform the noop check on `ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR` we will treat it as a noop even though the actual + // role descriptor bytes on the API key are different, and correspond to `LEGACY_SUPERUSER_ROLE_DESCRIPTOR`. + // + // This does *not* present a functional issue, since whenever a `LEGACY_SUPERUSER_ROLE_DESCRIPTOR` is loaded at authentication time, + // it is likewise automatically transformed to `ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR`. + final List currentLimitedByRoleDescriptors = parseRoleDescriptorsBytes( + request.getId(), + apiKeyDoc.limitedByRoleDescriptorsBytes, + RoleReference.ApiKeyRoleType.LIMITED_BY + ); + return (userRoleDescriptors.size() == currentLimitedByRoleDescriptors.size() + && userRoleDescriptors.containsAll(currentLimitedByRoleDescriptors)); + } + void tryAuthenticate(ThreadContext ctx, ApiKeyCredentials credentials, ActionListener> listener) { if (false == isEnabled()) { listener.onResponse(AuthenticationResult.notHandled()); @@ -971,6 +1063,68 @@ public void logRemovedField(String parserName, Supplier locati } } + private void doUpdateApiKey( + final Authentication authentication, + final UpdateApiKeyRequest request, + final Set userRoleDescriptors, + final VersionedApiKeyDoc currentVersionedDoc, + final ActionListener listener + ) throws IOException { + logger.trace( + "Building update request for API key doc [{}] with seqNo [{}] and primaryTerm [{}]", + request.getId(), + currentVersionedDoc.seqNo(), + currentVersionedDoc.primaryTerm() + ); + final var targetDocVersion = clusterService.state().nodes().getMinNodeVersion(); + final var currentDocVersion = Version.fromId(currentVersionedDoc.doc().version); + assert currentDocVersion.onOrBefore(targetDocVersion) : "current API key doc version must be on or before target version"; + if (currentDocVersion.before(targetDocVersion)) { + logger.debug( + "API key update for [{}] will update version from [{}] to [{}]", + request.getId(), + 
currentDocVersion, + targetDocVersion + ); + } + + final XContentBuilder builder = maybeBuildUpdatedDocument( + currentVersionedDoc.doc(), + targetDocVersion, + authentication, + request, + userRoleDescriptors + ); + final boolean isNoop = builder == null; + if (isNoop) { + logger.debug("Detected noop update request for API key [{}]. Skipping index request.", request.getId()); + listener.onResponse(new UpdateApiKeyResponse(false)); + return; + } + + final IndexRequest indexRequest = client.prepareIndex(SECURITY_MAIN_ALIAS) + .setId(request.getId()) + .setSource(builder) + .setIfSeqNo(currentVersionedDoc.seqNo()) + .setIfPrimaryTerm(currentVersionedDoc.primaryTerm()) + .setOpType(DocWriteRequest.OpType.INDEX) + .request(); + logger.trace("Executing index request to update API key [{}]", request.getId()); + securityIndex.prepareIndexIfNeededThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin( + client.threadPool().getThreadContext(), + SECURITY_ORIGIN, + client.prepareBulk().add(indexRequest).setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).request(), + ActionListener.wrap( + bulkResponse -> translateResponseAndClearCache(request.getId(), bulkResponse, listener), + listener::onFailure + ), + client::bulk + ) + ); + } + /** * Invalidate API keys for given realm, user name, API key name and id. 
* @param realmNames realm names @@ -1235,63 +1389,11 @@ private static VersionedApiKeyDoc singleDoc(final String apiKeyId, final Collect return elements.iterator().next(); } - private BulkRequest buildBulkRequestForUpdate( - final VersionedApiKeyDoc versionedDoc, - final Authentication authentication, - final UpdateApiKeyRequest request, - final Set userRoles - ) throws IOException { - logger.trace( - "Building update request for API key doc [{}] with seqNo [{}] and primaryTerm [{}]", - request.getId(), - versionedDoc.seqNo(), - versionedDoc.primaryTerm() - ); - final var currentDocVersion = Version.fromId(versionedDoc.doc().version); - final var targetDocVersion = clusterService.state().nodes().getMinNodeVersion(); - assert currentDocVersion.onOrBefore(targetDocVersion) : "current API key doc version must be on or before target version"; - if (currentDocVersion.before(targetDocVersion)) { - logger.debug( - "API key update for [{}] will update version from [{}] to [{}]", - request.getId(), - currentDocVersion, - targetDocVersion - ); - } - final var bulkRequestBuilder = client.prepareBulk(); - bulkRequestBuilder.add( - client.prepareIndex(SECURITY_MAIN_ALIAS) - .setId(request.getId()) - .setSource( - buildUpdatedDocument( - versionedDoc.doc(), - authentication, - userRoles, - request.getRoleDescriptors(), - targetDocVersion, - request.getMetadata() - ) - ) - .setIfSeqNo(versionedDoc.seqNo()) - .setIfPrimaryTerm(versionedDoc.primaryTerm()) - .setOpType(DocWriteRequest.OpType.INDEX) - .request() - ); - bulkRequestBuilder.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - return bulkRequestBuilder.request(); - } - - private void executeBulkRequest(final BulkRequest bulkRequest, final ActionListener listener) { - securityIndex.prepareIndexIfNeededThenExecute( - listener::onFailure, - () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, bulkRequest, listener, client::bulk) - ); - } - - private static void addLimitedByRoleDescriptors(final 
XContentBuilder builder, final Set userRoles) throws IOException { - assert userRoles != null; + private static void addLimitedByRoleDescriptors(final XContentBuilder builder, final Set limitedByRoleDescriptors) + throws IOException { + assert limitedByRoleDescriptors != null; builder.startObject("limited_by_role_descriptors"); - for (RoleDescriptor descriptor : userRoles) { + for (RoleDescriptor descriptor : limitedByRoleDescriptors) { builder.field(descriptor.getName(), (contentBuilder, params) -> descriptor.toXContent(contentBuilder, params, true)); } builder.endObject(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 181dcf8211283..ae4fa08bc0806 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -51,6 +51,7 @@ import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; @@ -68,12 +69,14 @@ import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyResponse; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyResponse; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyResponse; +import org.elasticsearch.xpack.core.security.action.apikey.UpdateApiKeyRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import 
org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authc.AuthenticationTests; +import org.elasticsearch.xpack.core.security.authc.RealmDomain; import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; @@ -1671,91 +1674,135 @@ public void testValidateApiKeyDocBeforeUpdate() throws IOException { assertThat(ex.getMessage(), containsString("cannot update legacy API key [" + apiKeyId + "] without name")); } - public void testBuildUpdatedDocument() throws IOException { + public void testMaybeBuildUpdatedDocument() throws IOException { final var apiKey = randomAlphaOfLength(16); final var hasher = getFastStoredHashAlgoForTests(); final char[] hash = hasher.hash(new SecureString(apiKey.toCharArray())); - - final var oldApiKeyDoc = buildApiKeyDoc(hash, randomBoolean() ? -1 : Instant.now().toEpochMilli(), false); - - final Set newUserRoles = randomBoolean() ? 
Set.of() : Set.of(RoleDescriptorTests.randomRoleDescriptor()); - - final boolean nullKeyRoles = randomBoolean(); - final List newKeyRoles; - if (nullKeyRoles) { - newKeyRoles = null; - } else { - newKeyRoles = List.of(RoleDescriptorTests.randomRoleDescriptor()); - } - - final var metadata = ApiKeyTests.randomMetadata(); - final var version = Version.CURRENT; - final var authentication = randomValueOtherThanMany( + final var oldAuthentication = randomValueOtherThanMany( Authentication::isApiKey, - () -> AuthenticationTestHelper.builder().user(new User("user", "role")).build(false) - ); - - final var keyDocSource = ApiKeyService.buildUpdatedDocument( - oldApiKeyDoc, - authentication, - newUserRoles, - newKeyRoles, - version, - metadata + () -> AuthenticationTestHelper.builder() + .user(AuthenticationTestHelper.userWithRandomMetadataAndDetails("user", "role")) + .build(false) ); - final var updatedApiKeyDoc = ApiKeyDoc.fromXContent( - XContentHelper.createParser(XContentParserConfiguration.EMPTY, BytesReference.bytes(keyDocSource), XContentType.JSON) + final Set oldUserRoles = randomSet(0, 3, RoleDescriptorTests::randomRoleDescriptor); + final List oldKeyRoles = randomList(3, RoleDescriptorTests::randomRoleDescriptor); + final Map oldMetadata = ApiKeyTests.randomMetadata(); + final Version oldVersion = VersionUtils.randomVersion(random()); + final ApiKeyDoc oldApiKeyDoc = ApiKeyDoc.fromXContent( + XContentHelper.createParser( + XContentParserConfiguration.EMPTY, + BytesReference.bytes( + ApiKeyService.newDocument( + hash, + randomAlphaOfLength(10), + oldAuthentication, + oldUserRoles, + Instant.now(), + randomBoolean() ? 
null : Instant.now(), + oldKeyRoles, + oldVersion, + oldMetadata + ) + ), + XContentType.JSON + ) ); - assertEquals(oldApiKeyDoc.docType, updatedApiKeyDoc.docType); - assertEquals(oldApiKeyDoc.name, updatedApiKeyDoc.name); - assertEquals(oldApiKeyDoc.hash, updatedApiKeyDoc.hash); - assertEquals(oldApiKeyDoc.expirationTime, updatedApiKeyDoc.expirationTime); - assertEquals(oldApiKeyDoc.creationTime, updatedApiKeyDoc.creationTime); - assertEquals(oldApiKeyDoc.invalidated, updatedApiKeyDoc.invalidated); - - final var service = createApiKeyService(Settings.EMPTY); - final var actualUserRoles = service.parseRoleDescriptorsBytes( - "", - updatedApiKeyDoc.limitedByRoleDescriptorsBytes, - RoleReference.ApiKeyRoleType.LIMITED_BY - ); - assertEquals(newUserRoles.size(), actualUserRoles.size()); - assertEquals(new HashSet<>(newUserRoles), new HashSet<>(actualUserRoles)); + final boolean changeUserRoles = randomBoolean(); + final boolean changeKeyRoles = randomBoolean(); + final boolean changeMetadata = randomBoolean(); + final boolean changeVersion = randomBoolean(); + final boolean changeCreator = randomBoolean(); + final Set newUserRoles = changeUserRoles + ? randomValueOtherThan(oldUserRoles, () -> randomSet(0, 3, RoleDescriptorTests::randomRoleDescriptor)) + : oldUserRoles; + final List newKeyRoles = changeKeyRoles + ? randomValueOtherThan(oldKeyRoles, () -> randomList(0, 3, RoleDescriptorTests::randomRoleDescriptor)) + : (randomBoolean() ? oldKeyRoles : null); + final Map newMetadata = changeMetadata + ? randomValueOtherThanMany(md -> md == null || md.equals(oldMetadata), ApiKeyTests::randomMetadata) + : (randomBoolean() ? oldMetadata : null); + final Version newVersion = changeVersion + ? randomValueOtherThan(oldVersion, () -> VersionUtils.randomVersion(random())) + : oldVersion; + final Authentication newAuthentication = changeCreator + ? 
randomValueOtherThanMany( + (auth -> auth.isApiKey() || auth.getEffectiveSubject().getUser().equals(oldAuthentication.getEffectiveSubject().getUser())), + () -> AuthenticationTestHelper.builder() + .user(AuthenticationTestHelper.userWithRandomMetadataAndDetails("user", "role")) + .build(false) + ) + : oldAuthentication; + final var request = new UpdateApiKeyRequest(randomAlphaOfLength(10), newKeyRoles, newMetadata); + final var service = createApiKeyService(); - final var actualKeyRoles = service.parseRoleDescriptorsBytes( - "", - updatedApiKeyDoc.roleDescriptorsBytes, - RoleReference.ApiKeyRoleType.ASSIGNED + final XContentBuilder builder = service.maybeBuildUpdatedDocument( + oldApiKeyDoc, + newVersion, + newAuthentication, + request, + newUserRoles ); - if (nullKeyRoles) { - assertEquals( - service.parseRoleDescriptorsBytes("", oldApiKeyDoc.roleDescriptorsBytes, RoleReference.ApiKeyRoleType.ASSIGNED), - actualKeyRoles - ); - } else { - assertEquals(newKeyRoles.size(), actualKeyRoles.size()); - assertEquals(new HashSet<>(newKeyRoles), new HashSet<>(actualKeyRoles)); - } - if (metadata == null) { - assertEquals(oldApiKeyDoc.metadataFlattened, updatedApiKeyDoc.metadataFlattened); - } else { - assertEquals(metadata, XContentHelper.convertToMap(updatedApiKeyDoc.metadataFlattened, true, XContentType.JSON).v2()); - } - assertEquals(authentication.getEffectiveSubject().getUser().principal(), updatedApiKeyDoc.creator.getOrDefault("principal", null)); - assertEquals(authentication.getEffectiveSubject().getUser().fullName(), updatedApiKeyDoc.creator.getOrDefault("fullName", null)); - assertEquals(authentication.getEffectiveSubject().getUser().email(), updatedApiKeyDoc.creator.getOrDefault("email", null)); - assertEquals(authentication.getEffectiveSubject().getUser().metadata(), updatedApiKeyDoc.creator.getOrDefault("metadata", null)); - RealmRef realm = authentication.getEffectiveSubject().getRealm(); - assertEquals(realm.getName(), 
updatedApiKeyDoc.creator.getOrDefault("realm", null)); - assertEquals(realm.getType(), updatedApiKeyDoc.creator.getOrDefault("realm_type", null)); - if (realm.getDomain() != null) { - @SuppressWarnings("unchecked") - final var actualDomain = (Map) updatedApiKeyDoc.creator.getOrDefault("realm_domain", null); - assertEquals(realm.getDomain().name(), actualDomain.get("name")); + final boolean noop = (changeCreator || changeMetadata || changeKeyRoles || changeUserRoles || changeVersion) == false; + if (noop) { + assertNull(builder); } else { - assertFalse(updatedApiKeyDoc.creator.containsKey("realm_domain")); + final ApiKeyDoc updatedApiKeyDoc = ApiKeyDoc.fromXContent( + XContentHelper.createParser(XContentParserConfiguration.EMPTY, BytesReference.bytes(builder), XContentType.JSON) + ); + assertEquals(oldApiKeyDoc.docType, updatedApiKeyDoc.docType); + assertEquals(oldApiKeyDoc.name, updatedApiKeyDoc.name); + assertEquals(oldApiKeyDoc.hash, updatedApiKeyDoc.hash); + assertEquals(oldApiKeyDoc.expirationTime, updatedApiKeyDoc.expirationTime); + assertEquals(oldApiKeyDoc.creationTime, updatedApiKeyDoc.creationTime); + assertEquals(oldApiKeyDoc.invalidated, updatedApiKeyDoc.invalidated); + assertEquals(newVersion.id, updatedApiKeyDoc.version); + final var actualUserRoles = service.parseRoleDescriptorsBytes( + "", + updatedApiKeyDoc.limitedByRoleDescriptorsBytes, + RoleReference.ApiKeyRoleType.LIMITED_BY + ); + assertEquals(newUserRoles.size(), actualUserRoles.size()); + assertEquals(new HashSet<>(newUserRoles), new HashSet<>(actualUserRoles)); + final var actualKeyRoles = service.parseRoleDescriptorsBytes( + "", + updatedApiKeyDoc.roleDescriptorsBytes, + RoleReference.ApiKeyRoleType.ASSIGNED + ); + if (changeKeyRoles == false) { + assertEquals( + service.parseRoleDescriptorsBytes("", oldApiKeyDoc.roleDescriptorsBytes, RoleReference.ApiKeyRoleType.ASSIGNED), + actualKeyRoles + ); + } else { + assertEquals(newKeyRoles.size(), actualKeyRoles.size()); + assertEquals(new 
HashSet<>(newKeyRoles), new HashSet<>(actualKeyRoles)); + } + if (changeMetadata == false) { + assertEquals(oldApiKeyDoc.metadataFlattened, updatedApiKeyDoc.metadataFlattened); + } else { + assertEquals(newMetadata, XContentHelper.convertToMap(updatedApiKeyDoc.metadataFlattened, true, XContentType.JSON).v2()); + } + assertEquals(newAuthentication.getEffectiveSubject().getUser().principal(), updatedApiKeyDoc.creator.get("principal")); + assertEquals(newAuthentication.getEffectiveSubject().getUser().fullName(), updatedApiKeyDoc.creator.get("full_name")); + assertEquals(newAuthentication.getEffectiveSubject().getUser().email(), updatedApiKeyDoc.creator.get("email")); + assertEquals(newAuthentication.getEffectiveSubject().getUser().metadata(), updatedApiKeyDoc.creator.get("metadata")); + final RealmRef realm = newAuthentication.getEffectiveSubject().getRealm(); + assertEquals(realm.getName(), updatedApiKeyDoc.creator.get("realm")); + assertEquals(realm.getType(), updatedApiKeyDoc.creator.get("realm_type")); + if (realm.getDomain() != null) { + @SuppressWarnings("unchecked") + final var actualRealmDomain = RealmDomain.fromXContent( + XContentHelper.mapToXContentParser( + XContentParserConfiguration.EMPTY, + (Map) updatedApiKeyDoc.creator.get("realm_domain") + ) + ); + assertEquals(realm.getDomain(), actualRealmDomain); + } else { + assertFalse(updatedApiKeyDoc.creator.containsKey("realm_domain")); + } } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/30_update.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/30_update.yml index 013d28113521b..73ff3fba19b46 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/30_update.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/30_update.yml @@ -223,7 +223,7 @@ teardown: Authorization: "Basic YXBpX2tleV91c2VyXzE6eC1wYWNrLXRlc3QtcGFzc3dvcmQ=" # api_key_user_1 security.update_api_key: id: "$user1_key_id" - - match: { 
updated: true } + - match: { updated: false } # Check metadata did not change - do: From bb950414a3a5f0a0b8119586d82cd687a46f0c0a Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 12 Jul 2022 08:38:43 -0500 Subject: [PATCH 33/36] unmute test (#88454) --- .../rest-api-spec/test/security/authz/14_cat_indices.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml index 09d0d416e54da..31fb9e0810b90 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml @@ -120,8 +120,6 @@ teardown: "Test empty request while single authorized closed index": - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/47875" features: ["allowed_warnings"] - do: From 1e079154a46d37bb337b76c6130928cca86b374d Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Tue, 12 Jul 2022 16:52:32 +0300 Subject: [PATCH 34/36] INFO logging of snapshot restore and completion (#88257) But DEBUG (silent) logging of snapshot restore/completion when done in the context of CCR or searchable snapshots. 
--- docs/changelog/88257.yaml | 6 ++ .../cluster/ClusterStateDiffIT.java | 1 + .../snapshots/RestoreSnapshotIT.java | 62 +++++++++++++++++++ .../restore/RestoreSnapshotRequest.java | 34 ++++++++++ .../cluster/RestoreInProgress.java | 41 +++++++++++- .../snapshots/RestoreService.java | 14 ++++- .../restore/RestoreSnapshotRequestTests.java | 2 + .../MetadataIndexStateServiceTests.java | 1 + .../allocation/ThrottlingAllocationTests.java | 1 + .../decider/DiskThresholdDeciderTests.java | 2 +- ...storeInProgressAllocationDeciderTests.java | 1 + .../ClusterSerializationTests.java | 1 + .../InternalSnapshotsInfoServiceTests.java | 1 + .../xpack/ccr/CcrRepositoryIT.java | 18 ++++-- .../xpack/ccr/CcrRetentionLeaseIT.java | 3 +- .../ccr/action/TransportPutFollowAction.java | 3 +- ...ransportMountSearchableSnapshotAction.java | 4 +- 17 files changed, 180 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/88257.yaml diff --git a/docs/changelog/88257.yaml b/docs/changelog/88257.yaml new file mode 100644 index 0000000000000..feb5df85feafb --- /dev/null +++ b/docs/changelog/88257.yaml @@ -0,0 +1,6 @@ +pr: 88257 +summary: INFO logging of snapshot restore and completion +area: Snapshot/Restore +type: enhancement +issues: + - 86610 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 5b995482f6fa7..b669cbf11bffc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -727,6 +727,7 @@ public ClusterState.Custom randomCreate(String name) { UUIDs.randomBase64UUID(), new Snapshot(randomName("repo"), new SnapshotId(randomName("snap"), UUIDs.randomBase64UUID())), RestoreInProgress.State.fromValue((byte) randomIntBetween(0, 3)), + randomBoolean(), emptyList(), ImmutableOpenMap.of() ) diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RestoreSnapshotIT.java index c9c0c4a6cd60c..23c43a16308bd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RestoreSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RestoreSnapshotIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xcontent.XContentFactory; import java.nio.file.Path; @@ -158,6 +159,67 @@ public void testParallelRestoreOperationsFromSingleSnapshot() throws Exception { assertThat(client.prepareGet(restoredIndexName2, sameSourceIndex ? docId : docId2).get().isExists(), equalTo(true)); } + @TestLogging( + reason = "testing the logging of the start and completion of a snapshot restore", + value = "org.elasticsearch.snapshots.RestoreService:INFO" + ) + public void testRestoreLogging() throws IllegalAccessException { + final MockLogAppender mockLogAppender = new MockLogAppender(); + try { + String indexName = "testindex"; + String repoName = "test-restore-snapshot-repo"; + String snapshotName = "test-restore-snapshot"; + Path absolutePath = randomRepoPath().toAbsolutePath(); + logger.info("Path [{}]", absolutePath); + String restoredIndexName = indexName + "-restored"; + String expectedValue = "expected"; + + mockLogAppender.start(); + Loggers.addAppender(LogManager.getLogger(RestoreService.class), mockLogAppender); + + mockLogAppender.addExpectation( + new MockLogAppender.PatternSeenEventExpectation( + "not seen start of snapshot restore", + "org.elasticsearch.snapshots.RestoreService", + Level.INFO, + "started restore of snapshot \\[.*" + snapshotName + ".*\\] for indices \\[.*" + indexName + ".*\\]" + ) + ); + + 
mockLogAppender.addExpectation( + new MockLogAppender.PatternSeenEventExpectation( + "not seen completion of snapshot restore", + "org.elasticsearch.snapshots.RestoreService", + Level.INFO, + "completed restore of snapshot \\[.*" + snapshotName + ".*\\]" + ) + ); + + Client client = client(); + // Write a document + String docId = Integer.toString(randomInt()); + indexDoc(indexName, docId, "value", expectedValue); + createRepository(repoName, "fs", absolutePath); + createSnapshot(repoName, snapshotName, Collections.singletonList(indexName)); + + RestoreSnapshotResponse restoreSnapshotResponse = client.admin() + .cluster() + .prepareRestoreSnapshot(repoName, snapshotName) + .setWaitForCompletion(false) + .setRenamePattern(indexName) + .setRenameReplacement(restoredIndexName) + .get(); + + assertThat(restoreSnapshotResponse.status(), equalTo(RestStatus.ACCEPTED)); + ensureGreen(restoredIndexName); + assertThat(client.prepareGet(restoredIndexName, docId).get().isExists(), equalTo(true)); + mockLogAppender.assertAllExpectationsMatched(); + } finally { + Loggers.removeAppender(LogManager.getLogger(RestoreService.class), mockLogAppender); + mockLogAppender.stop(); + } + } + public void testRestoreIncreasesPrimaryTerms() { final String indexName = randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT); createIndex(indexName, indexSettingsNoReplicas(2).build()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 8eb5f6d48ecc2..c383b2d610e5f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.restore; +import org.elasticsearch.Version; import 
org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeRequest; @@ -48,6 +49,8 @@ public class RestoreSnapshotRequest extends MasterNodeRequest indices, Map shards) { + public record Entry( + String uuid, + Snapshot snapshot, + State state, + boolean quiet, + List indices, + Map shards + ) { /** * Creates new restore metadata * * @param uuid uuid of the restore * @param snapshot snapshot * @param state current state of the restore process + * @param quiet {@code true} if logging of the start and completion of the snapshot restore should be at {@code DEBUG} log + * level, else it should be at {@code INFO} log level * @param indices list of indices being restored * @param shards map of shards being restored to their current restore status */ - public Entry(String uuid, Snapshot snapshot, State state, List indices, Map shards) { + public Entry( + String uuid, + Snapshot snapshot, + State state, + boolean quiet, + List indices, + Map shards + ) { this.snapshot = Objects.requireNonNull(snapshot); this.state = Objects.requireNonNull(state); + this.quiet = Objects.requireNonNull(quiet); this.indices = Objects.requireNonNull(indices); if (shards == null) { this.shards = Map.of(); @@ -342,10 +360,24 @@ public RestoreInProgress(StreamInput in) throws IOException { uuid = in.readString(); Snapshot snapshot = new Snapshot(in); State state = State.fromValue(in.readByte()); + boolean quiet; + if (in.getVersion().onOrAfter(RestoreSnapshotRequest.VERSION_SUPPORTING_QUIET_PARAMETER)) { + quiet = in.readBoolean(); + } else { + // Backwards compatibility: previously there was no logging of the start or completion of a snapshot restore + quiet = true; + } List indices = in.readImmutableList(StreamInput::readString); entriesBuilder.put( uuid, - new Entry(uuid, snapshot, state, indices, in.readImmutableMap(ShardId::new, ShardRestoreStatus::readShardRestoreStatus)) + new 
Entry( + uuid, + snapshot, + state, + quiet, + indices, + in.readImmutableMap(ShardId::new, ShardRestoreStatus::readShardRestoreStatus) + ) ); } this.entries = Collections.unmodifiableMap(entriesBuilder); @@ -357,6 +389,9 @@ public void writeTo(StreamOutput out) throws IOException { o.writeString(entry.uuid); entry.snapshot().writeTo(o); o.writeByte(entry.state().value()); + if (out.getVersion().onOrAfter(RestoreSnapshotRequest.VERSION_SUPPORTING_QUIET_PARAMETER)) { + o.writeBoolean(entry.quiet()); + } o.writeStringCollection(entry.indices); o.writeMap(entry.shards); }); diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 6a2f49f99a2d7..512fe1766133f 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -238,7 +238,6 @@ public void restoreSnapshot( final BiConsumer updater ) { try { - // Try and fill in any missing repository UUIDs in case they're needed during the restore final StepListener repositoryUuidRefreshListener = new StepListener<>(); refreshRepositoryUuids(refreshRepositoryUuidOnRestore, repositoriesService, repositoryUuidRefreshListener); @@ -737,6 +736,7 @@ public static RestoreInProgress updateRestoreStateWithDeletedIndices(RestoreInPr entry.uuid(), entry.snapshot(), overallState(RestoreInProgress.State.STARTED, shards), + entry.quiet(), entry.indices(), shards ) @@ -873,7 +873,9 @@ public RestoreInProgress applyChanges(final RestoreInProgress oldRestore) { Map shards = Map.copyOf(shardsBuilder); RestoreInProgress.State newState = overallState(RestoreInProgress.State.STARTED, shards); - builder.add(new RestoreInProgress.Entry(entry.uuid(), entry.snapshot(), newState, entry.indices(), shards)); + builder.add( + new RestoreInProgress.Entry(entry.uuid(), entry.snapshot(), newState, entry.quiet(), entry.indices(), shards) + ); } else { 
builder.add(entry); } @@ -1051,6 +1053,7 @@ public ClusterState execute(ClusterState currentState) { boolean changed = false; for (RestoreInProgress.Entry entry : currentState.custom(RestoreInProgress.TYPE, RestoreInProgress.EMPTY)) { if (entry.state().completed()) { + logger.log(entry.quiet() ? Level.DEBUG : Level.INFO, "completed restore of snapshot [{}]", entry.snapshot()); changed = true; } else { restoreInProgressBuilder.add(entry); @@ -1373,6 +1376,7 @@ && isSystemIndex(snapshotIndexMetadata) == false) { restoreUUID, snapshot, overallState(RestoreInProgress.State.INIT, shards), + request.quiet(), List.copyOf(indicesToRestore.keySet()), Map.copyOf(shards) ) @@ -1569,6 +1573,12 @@ public void onFailure(Exception e) { @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + logger.log( + request.quiet() ? Level.DEBUG : Level.INFO, + "started restore of snapshot [{}] for indices {}", + snapshot, + snapshotInfo.indices() + ); listener.onResponse(new RestoreCompletionResponse(restoreUUID, snapshot, restoreInfo)); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java index ae730ed4d4a64..922e7e03c7600 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java @@ -64,6 +64,7 @@ private RestoreSnapshotRequest randomState(RestoreSnapshotRequest instance) { } instance.partial(randomBoolean()); instance.includeAliases(randomBoolean()); + instance.quiet(randomBoolean()); if (randomBoolean()) { Map indexSettings = new HashMap<>(); @@ -127,6 +128,7 @@ protected RestoreSnapshotRequest mutateInstance(RestoreSnapshotRequest instance) public void testSource() throws IOException { 
RestoreSnapshotRequest original = createTestInstance(); original.snapshotUuid(null); // cannot be set via the REST API + original.quiet(false); // cannot be set via the REST API XContentBuilder builder = original.toXContent(XContentFactory.jsonBuilder(), new ToXContent.MapParams(Collections.emptyMap())); XContentParser parser = XContentType.JSON.xContent() .createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java index 43cad5e994326..21f41912e9514 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java @@ -343,6 +343,7 @@ private static ClusterState addRestoredIndex(final String index, final int numSh "_uuid", snapshot, RestoreInProgress.State.INIT, + false, Collections.singletonList(index), shardsBuilder ); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index e616b81bebc8e..4077c878d3c4e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -413,6 +413,7 @@ private ClusterState createRecoveryStateAndInitializeAllocations( restoreUUID, snapshot, RestoreInProgress.State.INIT, + false, new ArrayList<>(snapshotIndices), restoreShards ); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 
582521729fdea..8793f6f9c63e5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -1212,7 +1212,7 @@ public void testDiskThresholdWithSnapshotShardSizes() { Map shards = Map.of(shardId, new RestoreInProgress.ShardRestoreStatus("node1")); final RestoreInProgress.Builder restores = new RestoreInProgress.Builder().add( - new RestoreInProgress.Entry("_restore_uuid", snapshot, RestoreInProgress.State.INIT, List.of("test"), shards) + new RestoreInProgress.Entry("_restore_uuid", snapshot, RestoreInProgress.State.INIT, false, List.of("test"), shards) ); ClusterState clusterState = ClusterState.builder(new ClusterName(getTestName())) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java index 6fda78125b8ed..2562d367cb6f1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java @@ -150,6 +150,7 @@ public void testCanAllocatePrimaryExistingInRestoreInProgress() { recoverySource.restoreUUID(), snapshot, restoreState, + false, singletonList("test"), shards ); diff --git a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index 1bffc1931d840..f57627649706d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -137,6 +137,7 @@ 
public void testSnapshotDeletionsInProgressSerialization() throws Exception { UUIDs.randomBase64UUID(), new Snapshot("repo2", new SnapshotId("snap2", UUIDs.randomBase64UUID())), RestoreInProgress.State.STARTED, + false, Collections.singletonList("index_name"), Map.of() ) diff --git a/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java index 45d9f37bd054b..72fb1f17a4b1e 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java @@ -451,6 +451,7 @@ private ClusterState addUnassignedShards(final ClusterState currentState, String recoverySource.restoreUUID(), recoverySource.snapshot(), RestoreInProgress.State.INIT, + false, Collections.singletonList(indexName), shards ) diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index 55e0e5a614962..65192908c28ed 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -158,7 +158,8 @@ public void testThatRepositoryRecoversEmptyIndexBasedOnLeaderSettings() throws I .renamePattern("^(.*)$") .renameReplacement(followerIndex) .masterNodeTimeout(TimeValue.MAX_VALUE) - .indexSettings(settingsBuilder); + .indexSettings(settingsBuilder) + .quiet(true); PlainActionFuture future = PlainActionFuture.newFuture(); restoreService.restoreSnapshot(restoreRequest, waitForRestore(clusterService, future)); @@ -230,7 +231,8 @@ public void testDocsAreRecovered() throws Exception { .renamePattern("^(.*)$") .renameReplacement(followerIndex) .masterNodeTimeout(new TimeValue(1L, 
TimeUnit.HOURS)) - .indexSettings(settingsBuilder); + .indexSettings(settingsBuilder) + .quiet(true); PlainActionFuture future = PlainActionFuture.newFuture(); restoreService.restoreSnapshot(restoreRequest, waitForRestore(clusterService, future)); @@ -298,7 +300,8 @@ public void testRateLimitingIsEmployed() throws Exception { .renamePattern("^(.*)$") .renameReplacement(followerIndex) .masterNodeTimeout(TimeValue.MAX_VALUE) - .indexSettings(settingsBuilder); + .indexSettings(settingsBuilder) + .quiet(true); PlainActionFuture future = PlainActionFuture.newFuture(); restoreService.restoreSnapshot(restoreRequest, waitForRestore(clusterService, future)); @@ -364,7 +367,8 @@ public void testIndividualActionsTimeout() throws Exception { .renamePattern("^(.*)$") .renameReplacement(followerIndex) .masterNodeTimeout(new TimeValue(1L, TimeUnit.HOURS)) - .indexSettings(settingsBuilder); + .indexSettings(settingsBuilder) + .quiet(true); try { final RestoreService restoreService = getFollowerCluster().getCurrentMasterNodeInstance(RestoreService.class); @@ -427,7 +431,8 @@ public void testFollowerMappingIsUpdated() throws IOException { .renamePattern("^(.*)$") .renameReplacement(followerIndex) .masterNodeTimeout(new TimeValue(1L, TimeUnit.HOURS)) - .indexSettings(settingsBuilder); + .indexSettings(settingsBuilder) + .quiet(true); List transportServices = new ArrayList<>(); CountDownLatch latch = new CountDownLatch(1); @@ -586,7 +591,8 @@ public void testCcrRepositoryFetchesSnapshotShardSizeFromIndexShardStoreStats() Settings.builder() .put(IndexMetadata.SETTING_INDEX_PROVIDED_NAME, followerIndex) .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true) - ); + ) + .quiet(true); restoreService.restoreSnapshot(restoreRequest, PlainActionFuture.newFuture()); waitForRestoreInProgress.get(30L, TimeUnit.SECONDS); diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java 
b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java index a3cd5750f0d39..8d8fc8120c36b 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java @@ -157,7 +157,8 @@ private RestoreSnapshotRequest setUpRestoreSnapshotRequest( .indicesOptions(indicesOptions) .renamePattern("^(.*)$") .renameReplacement(followerIndex) - .masterNodeTimeout(TimeValue.MAX_VALUE); + .masterNodeTimeout(TimeValue.MAX_VALUE) + .quiet(true); } public void testRetentionLeaseIsTakenAtTheStartOfRecovery() throws Exception { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index a22ac0abc9e03..88301c49c2101 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -194,7 +194,8 @@ private void createFollowerIndex( .renamePattern("^(.*)$") .renameReplacement(request.getFollowerIndex()) .masterNodeTimeout(request.masterNodeTimeout()) - .indexSettings(overrideSettings); + .indexSettings(overrideSettings) + .quiet(true); final Client clientWithHeaders = CcrLicenseChecker.wrapClient( this.client, diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java index 1b8bf8c991efc..ae476d6f8539c 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java 
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java @@ -272,7 +272,9 @@ protected void masterOperation( // Pass through the master-node timeout .masterNodeTimeout(request.masterNodeTimeout()) // Fail the restore if the snapshot found above is swapped out from under us before the restore happens - .snapshotUuid(snapshotId.getUUID()), + .snapshotUuid(snapshotId.getUUID()) + // Log snapshot restore at the DEBUG log level + .quiet(true), listener ); }, listener::onFailure), threadPool.executor(ThreadPool.Names.SNAPSHOT_META), null); From 84af49337850922a76ef1bde270f47086ac5e8d1 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 12 Jul 2022 16:08:05 +0200 Subject: [PATCH 35/36] Fix ReactiveStorageDeciderServiceTests testNodeSizeForDataBelowLowWatermark (#88452) Fix this test unexpectedly being off by one by increasing the accuracy of the fp division (better to have a larger dividend and divisor) a little. I could easily reproduce the failure without the fix but with it, the test cases we use at least run accurate with the change. 
closes #88433 --- .../autoscaling/storage/ReactiveStorageDeciderService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index c168e7b3b08f0..476c6b21df034 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -225,7 +225,7 @@ static long nodeSizeForDataBelowLowWatermark(long bytes, DiskThresholdSettings t } else { double percentThreshold = thresholdSettings.getFreeDiskThresholdLow(); if (percentThreshold >= 0.0 && percentThreshold < 100.0) { - return (long) (bytes / ((100.0 - percentThreshold) / 100)); + return (long) (100 * bytes / (100 - percentThreshold)); } else { return bytes; } From ba46bd4ad8e7e87ce0a7b1847cb1b898dfcf9ffb Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 12 Jul 2022 16:40:54 +0200 Subject: [PATCH 36/36] Avoid noisy exceptions on data nodes when aborting snapshots (#88476) Currently, an abort (especially when triggered an index delete) can manifest as either an aborted snapshot exception, a missing index exception or an NPE. The latter two show up as noise in logs. This change catches effectively all of these cleanly as aborted snapshot exceptions so they don't get logged as warnings and avoids the NPE if a shard was removed from the index service concurrently by using the API that throws on missing shards to look it up. 
--- .../org/elasticsearch/snapshots/SnapshotShardsService.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index c2ab4e808f1dc..2850dbacf73aa 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -337,7 +337,10 @@ private void snapshot( ActionListener listener ) { try { - final IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShardOrNull(shardId.id()); + if (snapshotStatus.isAborted()) { + throw new AbortedSnapshotException(); + } + final IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); if (indexShard.routingEntry().primary() == false) { throw new IndexShardSnapshotFailedException(shardId, "snapshot should be performed only on primary"); }