Skip to content

Commit afe9730

Browse files
authored
Fix Bedrock ConverseStream: read stop sequence from the undocumented /delta/stop_sequence path (#11410)
## Background

PR #11286 attempted to expose the stop sequence from Bedrock's Converse API, but had a bug: it was reading from the wrong response path. The AWS Bedrock API actually returns the stop sequence nested under a `delta` object — `additionalModelResponseFields.delta.stop_sequence` — not directly at `additionalModelResponseFields.stop_sequence`.

## Summary

**Fixed stop sequence extraction to read from the correct API response path**

- Changed the extraction path from `additionalModelResponseFields.stop_sequence` to `additionalModelResponseFields.delta.stop_sequence`
- Updated the TypeScript schema to include the `delta` wrapper in `BedrockAdditionalModelResponseFieldsSchema`
- Updated tests to match the actual AWS Bedrock API response structure
- Verified the fix works with both `generateText` and `streamText`

## Implementation details

The AWS Bedrock Converse API returns additional model response fields nested under a `delta` object when using `additionalModelResponseFieldPaths: ['/delta/stop_sequence']`. The previous implementation incorrectly assumed the field would be at the top level of `additionalModelResponseFields`.

**Correct structure:**

```typescript
{
  messageStop: {
    stopReason: "stop_sequence",
    additionalModelResponseFields: {
      delta: {
        stop_sequence: "END" // ← nested under delta
      }
    }
  }
}
```

## Verification

Tested with `streamText` using `stopSequences: ['END']`. Confirmed that `providerMetadata.bedrock.stopSequence` now correctly returns `"END"` when the stop sequence is triggered (previously it returned `undefined`).

---

**Fixes:** #11286
1 parent ea6609a commit afe9730

File tree

3 files changed

+110
-9
lines changed

3 files changed

+110
-9
lines changed

.changeset/rotten-steaks-hammer.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@ai-sdk/amazon-bedrock': patch
3+
---
4+
5+
Fix bedrock ConverseStream using /delta/stop_sequence

packages/amazon-bedrock/src/bedrock-chat-language-model.test.ts

Lines changed: 91 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -896,7 +896,7 @@ describe('doStream', () => {
896896
expect(await server.calls[0].requestBodyJson).toStrictEqual({
897897
messages: [{ role: 'user', content: [{ text: 'Hello' }] }],
898898
system: [{ text: 'System Prompt' }],
899-
additionalModelResponseFieldPaths: ['/stop_sequence'],
899+
additionalModelResponseFieldPaths: ['/delta/stop_sequence'],
900900
});
901901
});
902902

@@ -1058,7 +1058,7 @@ describe('doStream', () => {
10581058
JSON.stringify({
10591059
messageStop: {
10601060
stopReason: 'stop_sequence',
1061-
additionalModelResponseFields: { stop_sequence: 'STOP' },
1061+
additionalModelResponseFields: { delta: { stop_sequence: 'STOP' } },
10621062
},
10631063
}) + '\n',
10641064
],
@@ -1108,6 +1108,54 @@ describe('doStream', () => {
11081108
`);
11091109
});
11101110

1111+
it('should correctly parse delta.stop_sequence structure in streaming with additional fields', async () => {
1112+
setupMockEventStreamHandler();
1113+
server.urls[streamUrl].response = {
1114+
type: 'stream-chunks',
1115+
chunks: [
1116+
JSON.stringify({
1117+
contentBlockDelta: {
1118+
contentBlockIndex: 0,
1119+
delta: { text: 'Response' },
1120+
},
1121+
}) + '\n',
1122+
JSON.stringify({
1123+
metadata: {
1124+
usage: { inputTokens: 5, outputTokens: 10, totalTokens: 15 },
1125+
},
1126+
}) + '\n',
1127+
JSON.stringify({
1128+
messageStop: {
1129+
stopReason: 'stop_sequence',
1130+
additionalModelResponseFields: {
1131+
delta: {
1132+
stop_sequence: 'CUSTOM_END',
1133+
},
1134+
// Additional fields that might be present
1135+
otherField: 'value',
1136+
},
1137+
},
1138+
}) + '\n',
1139+
],
1140+
};
1141+
1142+
const { stream } = await model.doStream({
1143+
prompt: TEST_PROMPT,
1144+
stopSequences: ['CUSTOM_END'],
1145+
});
1146+
1147+
const chunks = await convertReadableStreamToArray(stream);
1148+
const finishChunk = chunks.find(chunk => chunk.type === 'finish');
1149+
1150+
expect(finishChunk?.providerMetadata?.bedrock?.stopSequence).toBe(
1151+
'CUSTOM_END',
1152+
);
1153+
expect(finishChunk?.finishReason).toEqual({
1154+
unified: 'stop',
1155+
raw: 'stop_sequence',
1156+
});
1157+
});
1158+
11111159
it('should include response headers in rawResponse', async () => {
11121160
setupMockEventStreamHandler();
11131161
server.urls[streamUrl].response = {
@@ -2718,7 +2766,7 @@ describe('doGenerate', () => {
27182766
expect(await server.calls[0].requestBodyJson).toStrictEqual({
27192767
messages: [{ role: 'user', content: [{ text: 'Hello' }] }],
27202768
system: [{ text: 'System Prompt' }],
2721-
additionalModelResponseFieldPaths: ['/stop_sequence'],
2769+
additionalModelResponseFieldPaths: ['/delta/stop_sequence'],
27222770
});
27232771
});
27242772

@@ -2789,7 +2837,7 @@ describe('doGenerate', () => {
27892837
},
27902838
},
27912839
stopReason: 'stop_sequence',
2792-
additionalModelResponseFields: { stop_sequence: 'STOP' },
2840+
additionalModelResponseFields: { delta: { stop_sequence: 'STOP' } },
27932841
usage: {
27942842
inputTokens: 4,
27952843
outputTokens: 30,
@@ -2833,7 +2881,7 @@ describe('doGenerate', () => {
28332881
},
28342882
},
28352883
stopReason: 'tool_use',
2836-
additionalModelResponseFields: { stop_sequence: null },
2884+
additionalModelResponseFields: { delta: { stop_sequence: null } },
28372885
usage: {
28382886
inputTokens: 10,
28392887
outputTokens: 20,
@@ -2886,6 +2934,44 @@ describe('doGenerate', () => {
28862934
`);
28872935
});
28882936

2937+
it('should correctly parse delta.stop_sequence structure from additionalModelResponseFields', async () => {
2938+
server.urls[generateUrl].response = {
2939+
type: 'json-value',
2940+
body: {
2941+
output: {
2942+
message: {
2943+
role: 'assistant',
2944+
content: [{ text: 'Response text' }],
2945+
},
2946+
},
2947+
stopReason: 'stop_sequence',
2948+
additionalModelResponseFields: {
2949+
delta: {
2950+
stop_sequence: 'CUSTOM_STOP',
2951+
},
2952+
// Additional fields that might be present
2953+
otherField: 'value',
2954+
},
2955+
usage: {
2956+
inputTokens: 10,
2957+
outputTokens: 20,
2958+
totalTokens: 30,
2959+
},
2960+
},
2961+
};
2962+
2963+
const result = await model.doGenerate({
2964+
prompt: TEST_PROMPT,
2965+
stopSequences: ['CUSTOM_STOP'],
2966+
});
2967+
2968+
expect(result.providerMetadata?.bedrock.stopSequence).toBe('CUSTOM_STOP');
2969+
expect(result.finishReason).toEqual({
2970+
unified: 'stop',
2971+
raw: 'stop_sequence',
2972+
});
2973+
});
2974+
28892975
it('should include response headers in rawResponse', async () => {
28902976
server.urls[generateUrl].response = {
28912977
type: 'json-value',

packages/amazon-bedrock/src/bedrock-chat-language-model.ts

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -311,13 +311,19 @@ export class BedrockChatLanguageModel implements LanguageModelV3 {
311311
...filteredBedrockOptions
312312
} = providerOptions?.bedrock || {};
313313

314+
const additionalModelResponseFieldPaths = isAnthropicModel
315+
? ['/delta/stop_sequence']
316+
: undefined;
317+
314318
return {
315319
command: {
316320
system,
317321
messages,
318322
additionalModelRequestFields:
319323
bedrockOptions.additionalModelRequestFields,
320-
additionalModelResponseFieldPaths: ['/stop_sequence'],
324+
...(additionalModelResponseFieldPaths && {
325+
additionalModelResponseFieldPaths,
326+
}),
321327
...(Object.keys(inferenceConfig).length > 0 && {
322328
inferenceConfig,
323329
}),
@@ -435,7 +441,7 @@ export class BedrockChatLanguageModel implements LanguageModelV3 {
435441

436442
// provider metadata:
437443
const stopSequence =
438-
response.additionalModelResponseFields?.stop_sequence ?? null;
444+
response.additionalModelResponseFields?.delta?.stop_sequence ?? null;
439445

440446
const providerMetadata =
441447
response.trace || response.usage || isJsonResponseFromTool || stopSequence
@@ -575,7 +581,7 @@ export class BedrockChatLanguageModel implements LanguageModelV3 {
575581
raw: value.messageStop.stopReason ?? undefined,
576582
};
577583
stopSequence =
578-
value.messageStop.additionalModelResponseFields
584+
value.messageStop.additionalModelResponseFields?.delta
579585
?.stop_sequence ?? null;
580586
}
581587

@@ -849,7 +855,11 @@ const BedrockStopReasonSchema = z.union([
849855

850856
const BedrockAdditionalModelResponseFieldsSchema = z
851857
.object({
852-
stop_sequence: z.string().nullish(),
858+
delta: z
859+
.object({
860+
stop_sequence: z.string().nullish(),
861+
})
862+
.nullish(),
853863
})
854864
.catchall(z.unknown());
855865

0 commit comments

Comments
 (0)