
Commit 44e680e

fix: [Orchestration] Fixed getting OrchestrationFilterException.Input for bad requests with input filter. (#577)
* fix: [Orchestration] Fixed getting `OrchestrationFilterException.Input` for bad requests with input filter.
* Update openai old test
* Removed isError
1 parent 79b4a56 commit 44e680e

File tree: 11 files changed (+101, -14 lines)


core/src/main/java/com/sap/ai/sdk/core/common/ClientStreamingHandler.java

Lines changed: 18 additions & 2 deletions

@@ -1,5 +1,7 @@
 package com.sap.ai.sdk.core.common;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.Beta;
 import java.io.IOException;
@@ -77,12 +79,26 @@ public Stream<D> handleStreamingResponse(@Nonnull final ClassicHttpResponse resp
             line -> {
               final String data = line.substring(5); // remove "data: "
               try {
-                return objectMapper.readValue(data, successType);
-              } catch (final IOException e) { // exception message e gets lost
+                final JsonNode jsonNode = objectMapper.readTree(data);
+                if (jsonNode.has("error")) {
+                  throwErrorType(response, data);
+                }
+                return objectMapper.treeToValue(jsonNode, successType);
+              } catch (final IOException e) {
                 log.error("Failed to parse delta chunk to type {}", successType);
                 final String message = "Failed to parse delta chunk";
                 throw exceptionFactory.build(message, e).setHttpResponse(response);
               }
             });
   }
+
+  private void throwErrorType(final @Nonnull ClassicHttpResponse response, final String data)
+      throws JsonProcessingException, E {
+    final R error = objectMapper.readValue(data, errorType);
+    final String msg =
+        (error != null && error.getMessage() != null)
+            ? error.getMessage()
+            : "Error, unable to parse http response.";
+    throw exceptionFactory.build(msg).setHttpResponse(response);
+  }
 }
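
For context, the new logic can be exercised in isolation: a streamed `data:` payload is first read into a Jackson tree and only deserialized into the success type when it carries no top-level `error` field; otherwise the server's error message is surfaced. The sketch below is illustrative only: `Delta`, `ApiError`, and `SseChunkParser` are made-up stand-ins for the SDK's generic success and error types, and it assumes Jackson 2.12+ for record deserialization.

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Made-up stand-ins for the SDK's generic success (D) and error (R) payload types.
record Delta(String content) {}

record ApiError(String code, String message) {}

class SseChunkParser {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  /** Parse one "data: {...}" line; fail with the server's message if it streamed an error. */
  static Delta parseChunk(final String line) throws Exception {
    final String data = line.substring("data: ".length());
    final JsonNode node = MAPPER.readTree(data);
    if (node.has("error")) {
      // Surface the server-side message instead of a generic "failed to parse" error.
      final ApiError error = MAPPER.treeToValue(node.get("error"), ApiError.class);
      throw new IllegalStateException(error.message());
    }
    return MAPPER.treeToValue(node, Delta.class);
  }

  public static void main(final String[] args) throws Exception {
    System.out.println(parseChunk("data: {\"content\":\"Hello\"}"));
    try {
      parseChunk("data: {\"error\":{\"code\":\"429\",\"message\":\"exceeded token rate limit\"}}");
    } catch (final IllegalStateException e) {
      System.out.println("Streamed error surfaced: " + e.getMessage());
    }
  }
}
```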

docs/release_notes.md

Lines changed: 1 addition & 1 deletion

@@ -40,4 +40,4 @@
 
 ### 🐛 Fixed Issues
 
--
+- [Orchestration] Fixed getting `OrchestrationFilterException.Input` for bad requests with input filter.

foundation-models/openai/src/test/java/com/sap/ai/sdk/foundationmodels/openai/OpenAiClientGeneratedTest.java

Lines changed: 1 addition & 1 deletion

@@ -298,7 +298,7 @@ void streamChatCompletionDeltasErrorHandling() throws IOException {
     try (var stream = client.streamChatCompletionDeltas(request)) {
       assertThatThrownBy(() -> stream.forEach(System.out::println))
           .isInstanceOf(OpenAiClientException.class)
-          .hasMessage("Failed to parse response");
+          .hasMessage("exceeded token rate limit");
     }
 
     Mockito.verify(inputStream, times(1)).close();

foundation-models/openai/src/test/java/com/sap/ai/sdk/foundationmodels/openai/OpenAiClientTest.java

Lines changed: 1 addition & 1 deletion

@@ -262,7 +262,7 @@ void streamChatCompletionDeltasErrorHandling() throws IOException {
     try (var stream = client.streamChatCompletionDeltas(request)) {
      assertThatThrownBy(() -> stream.forEach(System.out::println))
          .isInstanceOf(OpenAiClientException.class)
-          .hasMessage("Failed to parse response");
+          .hasMessage("exceeded token rate limit");
     }
 
     Mockito.verify(inputStream, times(1)).close();
Lines changed: 1 addition & 1 deletion

@@ -1,2 +1,2 @@
 data: {"choices":[],"created":0,"id":"","model":"","object":"","prompt_filter_results":[{"prompt_index":0,"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}}}]}
-{"error":{"code":"429","message":"exceeded token rate limit"}}
+data: {"error":{"code":"429","message":"exceeded token rate limit"}}

orchestration/src/main/java/com/sap/ai/sdk/orchestration/OrchestrationClientException.java

Lines changed: 2 additions & 0 deletions

@@ -39,6 +39,7 @@ static Map<String, Object> extractInputFilterDetails(@Nullable final Orchestrati
         .map(ErrorResponse::getError)
         .map(Error::getIntermediateResults)
         .map(ModuleResults::getInputFiltering)
+        .filter(filter -> !filter.getMessage().equals("Input Filter passed successfully."))
         .map(GenericModuleResult::getData)
         .map(map -> (Map<String, Object>) map)
         .orElseGet(Collections::emptyMap);
@@ -47,6 +48,7 @@ static Map<String, Object> extractInputFilterDetails(@Nullable final Orchestrati
         .map(ErrorResponseStreaming::getError)
         .map(ErrorStreaming::getIntermediateResults)
         .map(ModuleResultsStreaming::getInputFiltering)
+        .filter(filter -> !filter.getMessage().equals("Input Filter passed successfully."))
         .map(GenericModuleResult::getData)
         .filter(Map.class::isInstance)
         .map(map -> (Map<String, Object>) map)
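
The key change here is the added `Optional.filter` step: when the input-filtering module reports the success sentinel "Input Filter passed successfully.", its data no longer counts as filter-violation details. A minimal, self-contained sketch of that pattern follows, with a hypothetical `FilterResult` record standing in for the SDK's `GenericModuleResult`:

```java
import java.util.Collections;
import java.util.Map;
import java.util.Optional;

// Hypothetical stand-in for the SDK's GenericModuleResult.
record FilterResult(String message, Map<String, Object> data) {}

class InputFilterDetails {
  /** Return filter details only when the input filter actually flagged the request. */
  static Map<String, Object> extract(final FilterResult inputFiltering) {
    return Optional.ofNullable(inputFiltering)
        .filter(result -> !"Input Filter passed successfully.".equals(result.message()))
        .map(FilterResult::data)
        .orElseGet(Collections::emptyMap);
  }

  public static void main(final String[] args) {
    var passed = new FilterResult("Input Filter passed successfully.", Map.of("hate", 0));
    var blocked = new FilterResult("Content filtered due to safety violations.", Map.of("hate", 4));
    System.out.println(extract(passed));  // {} -> no details, so no filter-specific exception
    System.out.println(extract(blocked)); // {hate=4} -> details that would feed a filter exception
  }
}
```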

orchestration/src/test/java/com/sap/ai/sdk/orchestration/OrchestrationUnitTest.java

Lines changed: 34 additions & 5 deletions

@@ -673,33 +673,33 @@ void testErrorHandling(@Nonnull final Runnable request) {
     softly
         .assertThatThrownBy(request::run)
         .describedAs("Server errors should be handled")
-        .isInstanceOf(OrchestrationClientException.class)
+        .isExactlyInstanceOf(OrchestrationClientException.class)
         .hasMessageContaining("500");
 
     softly
         .assertThatThrownBy(request::run)
         .describedAs("Error objects from Orchestration should be interpreted")
-        .isInstanceOf(OrchestrationClientException.class)
+        .isExactlyInstanceOf(OrchestrationClientException.class)
         .hasMessageContaining("'config' is a required property");
 
     softly
         .assertThatThrownBy(request::run)
         .describedAs("Failures while parsing error message should be handled")
-        .isInstanceOf(OrchestrationClientException.class)
+        .isExactlyInstanceOf(OrchestrationClientException.class)
         .hasMessageContaining("400")
         .extracting(e -> e.getSuppressed()[0])
         .isInstanceOf(JsonParseException.class);
 
     softly
         .assertThatThrownBy(request::run)
         .describedAs("Non-JSON responses should be handled")
-        .isInstanceOf(OrchestrationClientException.class)
+        .isExactlyInstanceOf(OrchestrationClientException.class)
         .hasMessageContaining("Failed to parse");
 
     softly
         .assertThatThrownBy(request::run)
         .describedAs("Empty responses should be handled")
-        .isInstanceOf(OrchestrationClientException.class)
+        .isExactlyInstanceOf(OrchestrationClientException.class)
         .hasMessageContaining("HTTP Response is empty");
 
     softly.assertAll();
@@ -839,6 +839,35 @@ void streamChatCompletionOutputFilterErrorHandling() throws IOException {
     }
   }
 
+  @Test
+  void testStreamingErrorHandlingBadRequest() throws IOException {
+    try (var inputStream = fileLoader.apply("streamError.txt")) {
+      final var httpClient = mock(HttpClient.class);
+      ApacheHttpClient5Accessor.setHttpClientFactory(destination -> httpClient);
+
+      // Create a mock response
+      final var mockResponse = new BasicClassicHttpResponse(200, "OK");
+      final var inputStreamEntity = new InputStreamEntity(inputStream, ContentType.TEXT_PLAIN);
+      mockResponse.setEntity(inputStreamEntity);
+      mockResponse.setHeader("Content-Type", "text/event-stream");
+
+      // Configure the HttpClient mock to return the mock response
+      doReturn(mockResponse).when(httpClient).executeOpen(any(), any(), any());
+
+      val wrongConfig =
+          new OrchestrationModuleConfig()
+              .withLlmConfig(GPT_4O_MINI.withVersion("wrong-version"))
+              .withInputFiltering(new AzureContentFilter().hate(AzureFilterThreshold.ALLOW_SAFE));
+      val prompt = new OrchestrationPrompt("HelloWorld!");
+
+      assertThatThrownBy(
+              () -> client.streamChatCompletion(prompt, wrongConfig).forEach(System.out::println))
+          .isExactlyInstanceOf(OrchestrationClientException.class)
+          .hasMessageContaining("400")
+          .hasMessageContaining("Model gpt-5 in version wrong-version not found.");
+    }
+  }
+
   @Test
   void streamChatCompletionDeltas() throws IOException {
     try (var inputStream = spy(fileLoader.apply("streamChatCompletion.txt"))) {

orchestration/src/test/resources/__files/errorResponse.json

Lines changed: 13 additions & 2 deletions

@@ -4,6 +4,17 @@
     "code": 400,
     "message": "'config' is a required property",
     "location": "request body",
-    "intermediate_results": {}
+    "intermediate_results": {
+      "input_filtering": {
+        "message": "Input Filter passed successfully.",
+        "data": {
+          "azure_content_safety": {
+            "userPromptAnalysis": {
+              "attackDetected": false
+            }
+          }
+        }
+      }
+    }
   }
-}
+}

orchestration/src/test/resources/__files/toolCallsResponse.json

Lines changed: 1 addition & 1 deletion

@@ -88,4 +88,4 @@
     "total_tokens": 121
   }
 }
-}
+}
Lines changed: 2 additions & 0 deletions

@@ -0,0 +1,2 @@
+data: {"request_id": "14424a52-0a8d-4004-a766-c6010d8091c9", "intermediate_results": {"templating": [{"content": "HelloWorld!", "role": "user"}], "input_filtering": {"message": "Input Filter passed successfully.", "data": {"azure_content_safety": {"Hate": 0}}}}, "final_result": {"id": "", "object": "", "created": 0, "model": "", "system_fingerprint": "", "choices": [{"index": 0, "delta": {"content": ""}}]}}
+data: {"error": {"request_id": "ecb33455-6983-4baa-9889-ab391ddcd9b4", "code": 400, "message": "400 - LLM Module: Model gpt-5 in version wrong-version not found.", "location": "LLM Module"}}
