
Commit a721e71

Handle individual doc parsing failure gracefully in bulk request with pipeline (elastic#138624)
(cherry picked from commit 933354b)
1 parent 71d54f4 commit a721e71

File tree

docs/changelog/138624.yaml
server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java
server/src/main/java/org/elasticsearch/ingest/IngestService.java
server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java

4 files changed: +186 -1 lines changed

docs/changelog/138624.yaml

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+pr: 138624
+summary: Handle individual doc parsing failure in bulk request with pipeline
+area: Ingest Node
+type: bug
+issues:
+ - 138445
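
The user-visible effect summarized above is that a malformed source document in a bulk request with a pipeline now fails only its own item instead of failing the whole request. The snippet below is a minimal client-side sketch of that contract, not part of this commit: the index name my-index, pipeline name my-pipeline, and the surrounding class are illustrative placeholders, and the Client import path is an assumption that varies across versions. It mirrors the integration test added further down.

import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.internal.Client; // import path is an assumption; it differs in older versions
import org.elasticsearch.xcontent.XContentType;

public final class BulkWithPipelineExample {

    // Sends one well-formed and one malformed document through the same pipeline
    // and reports the outcome of each bulk item individually.
    static void indexWithPipeline(Client client) {
        BulkRequest bulk = new BulkRequest();

        bulk.add(
            new IndexRequest("my-index").id("ok")
                .source("{\"field\":\"value\"}", XContentType.JSON)
                .setPipeline("my-pipeline")
        );

        // Malformed JSON: the closing brace is missing.
        bulk.add(
            new IndexRequest("my-index").id("broken")
                .source("{\"field\":\"value\"", XContentType.JSON)
                .setPipeline("my-pipeline")
        );

        BulkResponse response = client.bulk(bulk).actionGet();

        // With this fix the request as a whole succeeds; only the malformed item is marked failed.
        for (BulkItemResponse item : response.getItems()) {
            if (item.isFailed()) {
                System.out.println(item.getId() + " failed: " + item.getFailure().getCause());
            } else {
                System.out.println(item.getId() + " -> " + item.getResponse().getResult());
            }
        }
    }
}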

server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java

Lines changed: 86 additions & 0 deletions
@@ -378,6 +378,92 @@ public void testPipelineProcessorOnFailure() throws Exception {
         assertThat(inserted.get("readme"), equalTo("pipeline with id [3] is a bad pipeline"));
     }
 
+    public void testBulkRequestWithInvalidJsonAndPipeline() throws Exception {
+        // Test that when a document with invalid JSON is in a bulk request with a pipeline,
+        // the invalid document fails gracefully without causing the entire bulk request to fail.
+        // This tests the fix for https://github.com/elastic/elasticsearch/issues/138445
+
+        createIndex("test_index");
+
+        putJsonPipeline(
+            "test-pipeline",
+            (builder, params) -> builder.field("description", "test pipeline")
+                .startArray("processors")
+                .startObject()
+                .startObject("test")
+                .endObject()
+                .endObject()
+                .endArray()
+        );
+
+        // Create a bulk request with valid and invalid documents
+        BulkRequest bulkRequest = new BulkRequest();
+
+        // Valid document
+        IndexRequest validRequest = new IndexRequest("test_index").id("valid_doc");
+        validRequest.source("{\"valid\":\"test\"}", XContentType.JSON);
+        validRequest.setPipeline("test-pipeline");
+        bulkRequest.add(validRequest);
+
+        // Invalid document with missing closing brace
+        IndexRequest invalidRequest = new IndexRequest("test_index").id("invalid_doc");
+        invalidRequest.source("{\"invalid\":\"json\"", XContentType.JSON);
+        invalidRequest.setPipeline("test-pipeline");
+        bulkRequest.add(invalidRequest);
+
+        // Invalid document with duplicate fields
+        IndexRequest invalidRequest2 = new IndexRequest("test_index").id("invalid_doc2");
+        invalidRequest2.source("{\"invalid\":\"json\", \"invalid\":\"json\"}", XContentType.JSON);
+        invalidRequest2.setPipeline("test-pipeline");
+        bulkRequest.add(invalidRequest2);
+
+        // Another valid document
+        IndexRequest validRequest2 = new IndexRequest("test_index").id("valid_doc2");
+        validRequest2.source("{\"valid\":\"test2\"}", XContentType.JSON);
+        validRequest2.setPipeline("test-pipeline");
+        bulkRequest.add(validRequest2);
+
+        BulkResponse response = client().bulk(bulkRequest).actionGet();
+
+        // The bulk request as a whole should succeed, with item-level failures for the invalid documents
+        assertThat(response.hasFailures(), is(true));
+        assertThat(response.getItems().length, equalTo(4));
+
+        // First document should succeed
+        BulkItemResponse item0 = response.getItems()[0];
+        assertThat(item0.isFailed(), is(false));
+        assertThat(item0.getResponse().getId(), equalTo("valid_doc"));
+        assertThat(item0.getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED));
+
+        // Second document should fail
+        BulkItemResponse item1 = response.getItems()[1];
+        assertThat(item1.isFailed(), is(true));
+        assertThat(item1.getFailure().getStatus(), equalTo(org.elasticsearch.rest.RestStatus.BAD_REQUEST));
+        assertThat(item1.getFailure().getCause(), instanceOf(IllegalArgumentException.class));
+
+        // Third document should fail
+        BulkItemResponse item2 = response.getItems()[2];
+        assertThat(item2.isFailed(), is(true));
+        assertThat(item2.getFailure().getStatus(), equalTo(org.elasticsearch.rest.RestStatus.BAD_REQUEST));
+        assertThat(item2.getFailure().getCause(), instanceOf(IllegalArgumentException.class));
+
+        // Fourth document should succeed
+        BulkItemResponse item3 = response.getItems()[3];
+        assertThat(item3.isFailed(), is(false));
+        assertThat(item3.getResponse().getId(), equalTo("valid_doc2"));
+        assertThat(item3.getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED));
+
+        // Verify that the valid documents were indexed
+        assertThat(client().prepareGet("test_index", "valid_doc").get().isExists(), is(true));
+        assertThat(client().prepareGet("test_index", "valid_doc2").get().isExists(), is(true));
+        // Verify that the invalid documents were not indexed
+        assertThat(client().prepareGet("test_index", "invalid_doc").get().isExists(), is(false));
+        assertThat(client().prepareGet("test_index", "invalid_doc2").get().isExists(), is(false));
+
+        // cleanup
+        deletePipeline("test-pipeline");
+    }
+
     public static class ExtendedIngestTestPlugin extends IngestTestPlugin {
 
         @Override

server/src/main/java/org/elasticsearch/ingest/IngestService.java

Lines changed: 14 additions & 1 deletion
@@ -801,7 +801,20 @@ protected void doRun() {
                 }
                 final int slot = i;
                 final Releasable ref = refs.acquire();
-                final IngestDocument ingestDocument = newIngestDocument(indexRequest);
+                final IngestDocument ingestDocument;
+                try {
+                    ingestDocument = newIngestDocument(indexRequest);
+                } catch (Exception e) {
+                    // Document parsing failed (e.g. invalid JSON). Handle this gracefully
+                    // by marking this document as failed and continuing with other documents.
+                    final long ingestTimeInNanos = System.nanoTime() - startTimeInNanos;
+                    totalMetrics.postIngest(ingestTimeInNanos);
+                    totalMetrics.ingestFailed();
+                    ref.close();
+                    i++;
+                    onFailure.accept(slot, e);
+                    continue;
+                }
                 final org.elasticsearch.script.Metadata originalDocumentMetadata = ingestDocument.getMetadata().clone();
                 // the document listener gives us three-way logic: a document can fail processing (1), or it can
                 // be successfully processed. a successfully processed document can be kept (2) or dropped (3).
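
For readers unfamiliar with the surrounding loop, the essential pattern of the new catch block is: record the failure in the ingest metrics, release the ref acquired for this slot, report the exception to the per-slot failure handler, and continue with the remaining documents, so the bulk-level completion handler still fires once every slot has been accounted for. The following plain-Java sketch only illustrates that pattern and is not Elasticsearch code; parse, handleParsedDocument, and the counter-based ref handling are simplified stand-ins.

import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;

final class PerItemErrorHandlingSketch {

    // Processes each raw document independently: a parse failure fails only its own
    // slot, and the completion callback runs once every slot has released its ref.
    static void process(List<String> rawDocs, BiConsumer<Integer, Exception> onItemFailure, Runnable onAllDone) {
        AtomicInteger outstanding = new AtomicInteger(rawDocs.size());
        Runnable releaseRef = () -> {
            if (outstanding.decrementAndGet() == 0) {
                onAllDone.run(); // completion fires only after every slot is accounted for
            }
        };

        for (int slot = 0; slot < rawDocs.size(); slot++) {
            Object parsed;
            try {
                parsed = parse(rawDocs.get(slot)); // may throw on malformed input
            } catch (Exception e) {
                onItemFailure.accept(slot, e);     // mark only this slot as failed
                releaseRef.run();                  // release this slot's ref
                continue;                          // keep going with the other documents
            }
            handleParsedDocument(parsed, releaseRef); // asynchronous processing releases the ref later
        }
    }

    private static Object parse(String raw) {
        if (raw.startsWith("{") == false || raw.endsWith("}") == false) {
            throw new IllegalArgumentException("malformed document: " + raw);
        }
        return raw;
    }

    private static void handleParsedDocument(Object doc, Runnable releaseRef) {
        // stand-in for pipeline execution; the ref is released when processing finishes
        releaseRef.run();
    }
}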

server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java

Lines changed: 80 additions & 0 deletions
@@ -66,6 +66,7 @@
 import org.elasticsearch.test.MockLog;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParseException;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xcontent.cbor.CborXContent;
 import org.junit.Before;
@@ -1558,6 +1559,85 @@ public void testBulkRequestExecutionWithFailures() throws Exception {
         verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
     }
 
+    public void testBulkRequestExecutionWithInvalidJsonDocument() throws Exception {
+        // Test that when a document with invalid JSON (e.g., duplicate keys) is in a bulk request with a pipeline,
+        // the invalid document fails gracefully without causing the entire bulk request to fail.
+        BulkRequest bulkRequest = new BulkRequest();
+        String pipelineId = "_id";
+
+        // Valid document that should succeed
+        IndexRequest validRequest = new IndexRequest("_index").id("valid").setPipeline(pipelineId).setFinalPipeline("_none");
+        validRequest.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1");
+        validRequest.setListExecutedPipelines(true);
+        bulkRequest.add(validRequest);
+
+        // Invalid document with missing closing brace
+        String invalidJson = "{\"invalid\":\"json\"";
+        IndexRequest invalidRequest = new IndexRequest("_index").id("invalid").setPipeline(pipelineId).setFinalPipeline("_none");
+        invalidRequest.source(new BytesArray(invalidJson), XContentType.JSON);
+        bulkRequest.add(invalidRequest);
+
+        // Another valid document that should succeed
+        IndexRequest validRequest2 = new IndexRequest("_index").id("valid2").setPipeline(pipelineId).setFinalPipeline("_none");
+        validRequest2.source(Requests.INDEX_CONTENT_TYPE, "field2", "value2");
+        validRequest2.setListExecutedPipelines(true);
+        bulkRequest.add(validRequest2);
+
+        // Invalid document with duplicated keys
+        String invalidJson2 = "{\"@timestamp\":\"2024-06-01T00:00:00Z\",\"@timestamp\":\"2024-06-01T00:00:00Z\"}";
+        IndexRequest invalidRequest2 = new IndexRequest("_index").id("invalid").setPipeline(pipelineId).setFinalPipeline("_none");
+        invalidRequest2.source(new BytesArray(invalidJson2), XContentType.JSON);
+        bulkRequest.add(invalidRequest2);
+
+        final Processor processor = mock(Processor.class);
+        when(processor.getType()).thenReturn("mock");
+        when(processor.getTag()).thenReturn("mockTag");
+        doAnswer(args -> {
+            BiConsumer<IngestDocument, Exception> handler = args.getArgument(1);
+            handler.accept(RandomDocumentPicks.randomIngestDocument(random()), null);
+            return null;
+        }).when(processor).execute(any(), any());
+
+        IngestService ingestService = createWithProcessors(Map.of("mock", (factories, tag, description, config, projectId) -> processor));
+        PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}");
+        ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
+            .build();
+        ClusterState previousClusterState = clusterState;
+        clusterState = executePut(putRequest, clusterState);
+        ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
+
+        BiConsumer<Integer, Exception> requestItemErrorHandler = mock();
+        final BiConsumer<Thread, Exception> onCompletion = mock();
+
+        ingestService.executeBulkRequest(
+            4,
+            bulkRequest.requests(),
+            indexReq -> {},
+            (s) -> false,
+            (slot, targetIndex, e) -> fail("Should not redirect to failure store"),
+            requestItemErrorHandler,
+            onCompletion,
+            EsExecutors.DIRECT_EXECUTOR_SERVICE
+        );
+
+        // The invalid documents should fail with a parsing error
+        verify(requestItemErrorHandler).accept(
+            eq(1), // slot 1 is the invalid document
+            argThat(e -> e instanceof XContentParseException)
+        );
+        verify(requestItemErrorHandler).accept(
+            eq(3), // slot 3 is the other invalid document
+            argThat(e -> e instanceof XContentParseException)
+        );
+
+        // The bulk listener should still be called with success
+        verify(onCompletion).accept(any(), eq(null));
+        assertStats(ingestService.stats().totalStats(), 4, 2, 0);
+        // Verify that the valid documents were processed (they should have their pipelines executed)
+        assertThat(validRequest.getExecutedPipelines(), equalTo(List.of(pipelineId)));
+        assertThat(validRequest2.getExecutedPipelines(), equalTo(List.of(pipelineId)));
+    }
+
     public void testExecuteFailureRedirection() throws Exception {
         final CompoundProcessor processor = mockCompoundProcessor();
         IngestService ingestService = createWithProcessors(
