
Commit 23f02f1

robert-j-y, kalaomer, claude, and devin-ai-integration[bot] authored
fix: include accumulated reasoning_details in reasoning-end event + regression test (#395)
* fix: include accumulated reasoning_details with signature in reasoning-end event

  The reasoning-start event only carries the first streaming delta's providerMetadata, but Anthropic signatures arrive in the last delta. The AI SDK updates the reasoning part's providerMetadata from the reasoning-end event. Without this fix, text-only responses (no tool calls) lose the signature, causing "Invalid signature in thinking block" errors on subsequent turns in multi-turn conversations.

  Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* style: fix formatting in test file

  Co-Authored-By: Robert Yeakel <robert.yeakel@openrouter.ai>

* test: add e2e regression test for issue #394 reasoning-end signature

  Co-Authored-By: Robert Yeakel <robert.yeakel@openrouter.ai>

* chore: add patch changeset for reasoning-end signature fix

  Co-Authored-By: Robert Yeakel <robert.yeakel@openrouter.ai>

* style: fix formatting in regression test

  Co-Authored-By: Robert Yeakel <robert.yeakel@openrouter.ai>

* style: fix formatting in regression test

  Co-Authored-By: Robert Yeakel <robert.yeakel@openrouter.ai>

---------

Co-authored-by: Ömer Kala <kalamustafa@gmail.com>
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
1 parent 2dabe2c commit 23f02f1

4 files changed: +259 −0 lines

Lines changed: 7 additions & 0 deletions (new changeset file)
@@ -0,0 +1,7 @@
+---
+"@openrouter/ai-sdk-provider": patch
+---
+
+fix: include accumulated reasoning_details with signature in reasoning-end stream event
+
+When streaming a text-only response (no tool calls) with reasoning enabled, the reasoning-end event now includes the accumulated reasoning_details (with signature) in providerMetadata. This fixes multi-turn conversation failures with Anthropic models where the signature was lost, causing "Invalid signature in thinking block" errors on subsequent turns.
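
For context (not part of the diff): a minimal consumer-side sketch of what the patched stream exposes, using the same model id and providerOptions as the e2e test added below. The exact shape of the reasoning_details entries is an assumption taken from that test.

import { streamText } from 'ai';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const result = streamText({
  model: openrouter('anthropic/claude-sonnet-4'),
  prompt: 'What is 2+2? Answer briefly.',
  providerOptions: { openrouter: { reasoning: 'enabled' } },
});

for await (const chunk of result.fullStream) {
  if (chunk.type === 'reasoning-end') {
    // After this patch, providerMetadata carries the accumulated reasoning_details,
    // including the Anthropic signature that arrived in the last reasoning delta.
    const details = (
      chunk.providerMetadata?.openrouter as
        | { reasoning_details?: Array<{ type: string; signature?: string }> }
        | undefined
    )?.reasoning_details;
    console.log(details?.find((d) => d.type === 'reasoning.text')?.signature);
  }
}
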
Lines changed: 132 additions & 0 deletions (new e2e regression test file)
@@ -0,0 +1,132 @@
+/**
+ * Regression test for GitHub PR #394
+ * https://github.com/OpenRouterTeam/ai-sdk-provider/pull/394
+ *
+ * Reported error: Multi-turn conversation failure with Anthropic models when
+ * the first turn is a text-only response (no tool calls) with reasoning enabled.
+ * The reasoning-end stream event was emitted without providerMetadata, causing
+ * the Anthropic signature to be lost. On the next turn, Anthropic rejects with
+ * "Invalid signature in thinking block".
+ */
+import { streamText } from 'ai';
+import { describe, expect, it, vi } from 'vitest';
+import { createOpenRouter } from '@/src';
+
+vi.setConfig({
+  testTimeout: 120_000,
+});
+
+describe('Issue #394: reasoning-end should include accumulated reasoning_details with signature', () => {
+  const provider = createOpenRouter({
+    apiKey: process.env.OPENROUTER_API_KEY,
+    baseUrl: `${process.env.OPENROUTER_API_BASE}/api/v1`,
+  });
+
+  it('should include reasoning_details with signature in reasoning-end providerMetadata for text-only streaming response', async () => {
+    const model = provider('anthropic/claude-sonnet-4');
+
+    const stream = streamText({
+      model,
+      prompt: 'What is 2+2? Answer briefly.',
+      providerOptions: {
+        openrouter: {
+          reasoning: 'enabled',
+        },
+      },
+    });
+
+    let hasReasoningStart = false;
+    let hasReasoningEnd = false;
+    let reasoningEndProviderMetadata: Record<string, unknown> | undefined;
+    let reasoning = '';
+
+    for await (const chunk of stream.fullStream) {
+      if (chunk.type === 'reasoning-start') {
+        hasReasoningStart = true;
+      }
+      if (chunk.type === 'reasoning-delta') {
+        reasoning += chunk.text;
+      }
+      if (chunk.type === 'reasoning-end') {
+        hasReasoningEnd = true;
+        reasoningEndProviderMetadata = chunk.providerMetadata as
+          | Record<string, unknown>
+          | undefined;
+      }
+    }
+
+    expect(hasReasoningStart).toBe(true);
+    expect(hasReasoningEnd).toBe(true);
+    expect(reasoning.length).toBeGreaterThan(0);
+
+    expect(reasoningEndProviderMetadata).toBeDefined();
+
+    const openrouterMeta = reasoningEndProviderMetadata?.openrouter as
+      | Record<string, unknown>
+      | undefined;
+    expect(openrouterMeta).toBeDefined();
+
+    const reasoningDetails = openrouterMeta?.reasoning_details as
+      | Array<Record<string, unknown>>
+      | undefined;
+    expect(reasoningDetails).toBeDefined();
+    expect(reasoningDetails!.length).toBeGreaterThan(0);
+
+    const textDetail = reasoningDetails!.find(
+      (d) => d.type === 'reasoning.text',
+    );
+    expect(textDetail).toBeDefined();
+    expect(textDetail!.signature).toBeDefined();
+    expect(typeof textDetail!.signature).toBe('string');
+    expect((textDetail!.signature as string).length).toBeGreaterThan(0);
+  });
+
+  it('should produce valid reasoning parts for multi-turn continuation', async () => {
+    const model = provider('anthropic/claude-sonnet-4');
+
+    const result = await streamText({
+      model,
+      prompt: 'What is the capital of France? Answer in one word.',
+      providerOptions: {
+        openrouter: {
+          reasoning: 'enabled',
+        },
+      },
+    });
+
+    const response = await result.response;
+    const messages = response.messages;
+
+    expect(messages.length).toBeGreaterThan(0);
+
+    const assistantMessage = messages.find((m) => m.role === 'assistant');
+    expect(assistantMessage).toBeDefined();
+
+    const content = assistantMessage?.content;
+    if (typeof content === 'string') {
+      return;
+    }
+
+    const reasoningParts = content?.filter(
+      (p: { type: string }) => p.type === 'reasoning',
+    );
+
+    if (reasoningParts && reasoningParts.length > 0) {
+      for (const part of reasoningParts) {
+        if ('providerMetadata' in part) {
+          expect(part.providerMetadata).toBeDefined();
+
+          const openrouterMeta = (
+            part as { providerMetadata?: Record<string, unknown> }
+          ).providerMetadata?.openrouter as Record<string, unknown> | undefined;
+          expect(openrouterMeta).toBeDefined();
+
+          const details = openrouterMeta?.reasoning_details as
+            | Array<Record<string, unknown>>
+            | undefined;
+          expect(details).toBeDefined();
+        }
+      }
+    }
+  });
+});
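
Not part of the commit: a rough sketch of the multi-turn flow this regression test guards, assuming the assistant message from turn 1 (including its reasoning part and providerMetadata) is replayed verbatim on turn 2. The follow-up question and provider setup are illustrative only.

import { streamText } from 'ai';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });
const model = openrouter('anthropic/claude-sonnet-4');
const reasoningOptions = { openrouter: { reasoning: 'enabled' } };

// Turn 1: text-only answer with reasoning enabled.
const turn1 = streamText({
  model,
  prompt: 'What is the capital of France? Answer in one word.',
  providerOptions: reasoningOptions,
});
await turn1.consumeStream();
const { messages: turn1Messages } = await turn1.response;

// Turn 2: replay the assistant message. Before this fix the reasoning part carried no
// signature, and Anthropic rejected the request with "Invalid signature in thinking block".
const turn2 = streamText({
  model,
  messages: [
    { role: 'user', content: 'What is the capital of France? Answer in one word.' },
    ...turn1Messages,
    { role: 'user', content: 'And what is its population?' }, // hypothetical follow-up
  ],
  providerOptions: reasoningOptions,
});
console.log(await turn2.text);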

src/chat/index.test.ts

Lines changed: 98 additions & 0 deletions
@@ -2308,6 +2308,104 @@ describe('doStream', () => {
     expect(openrouterMetadata?.annotations?.[0]?.file.hash).toBe('hash1');
     expect(openrouterMetadata?.annotations?.[1]?.file.hash).toBe('hash2');
   });
+
+  it('should include accumulated reasoning_details with signature in reasoning-end providerMetadata for text-only responses', async () => {
+    // This test reproduces the Anthropic multi-turn signature bug:
+    // When streaming a text-only response (no tool calls), the signature arrives
+    // in the LAST reasoning delta. The reasoning-start event gets the FIRST delta's
+    // metadata (no signature). The AI SDK uses reasoning-end's providerMetadata to
+    // update the reasoning part's providerMetadata. So the provider MUST include
+    // the accumulated reasoning_details (with signature) in the reasoning-end event.
+    // Without this fix, the saved reasoning part has no signature, and the next turn
+    // fails with "Invalid signature in thinking block".
+    server.urls['https://openrouter.ai/api/v1/chat/completions']!.response = {
+      type: 'stream-chunks',
+      chunks: [
+        // First chunk: reasoning starts, NO signature yet
+        `data: {"id":"chatcmpl-sig-test","object":"chat.completion.chunk","created":1711357598,"model":"anthropic/claude-opus-4.6",` +
+          `"system_fingerprint":"fp_test","choices":[{"index":0,"delta":{"role":"assistant","content":"",` +
+          `"reasoning_details":[{"type":"${ReasoningDetailType.Text}","text":"Let me think about this","index":0,"format":"anthropic-claude-v1"}]},` +
+          `"logprobs":null,"finish_reason":null}]}\n\n`,
+        // Second chunk: more reasoning text, still no signature
+        `data: {"id":"chatcmpl-sig-test","object":"chat.completion.chunk","created":1711357598,"model":"anthropic/claude-opus-4.6",` +
+          `"system_fingerprint":"fp_test","choices":[{"index":0,"delta":{` +
+          `"reasoning_details":[{"type":"${ReasoningDetailType.Text}","text":" step by step.","index":0,"format":"anthropic-claude-v1"}]},` +
+          `"logprobs":null,"finish_reason":null}]}\n\n`,
+        // Third chunk: last reasoning delta WITH signature
+        `data: {"id":"chatcmpl-sig-test","object":"chat.completion.chunk","created":1711357598,"model":"anthropic/claude-opus-4.6",` +
+          `"system_fingerprint":"fp_test","choices":[{"index":0,"delta":{` +
+          `"reasoning_details":[{"type":"${ReasoningDetailType.Text}","text":" Done.","index":0,"format":"anthropic-claude-v1","signature":"erX9OCAqSEO90HsfvNlBn5J3BQ9cEI/Hg2wHFo5iA8w3L+a"}]},` +
+          `"logprobs":null,"finish_reason":null}]}\n\n`,
+        // Fourth chunk: text content starts (reasoning ends)
+        `data: {"id":"chatcmpl-sig-test","object":"chat.completion.chunk","created":1711357598,"model":"anthropic/claude-opus-4.6",` +
+          `"system_fingerprint":"fp_test","choices":[{"index":0,"delta":{"content":"Hello! How can I help?"},` +
+          `"logprobs":null,"finish_reason":null}]}\n\n`,
+        // Finish chunk
+        `data: {"id":"chatcmpl-sig-test","object":"chat.completion.chunk","created":1711357598,"model":"anthropic/claude-opus-4.6",` +
+          `"system_fingerprint":"fp_test","choices":[{"index":0,"delta":{},` +
+          `"logprobs":null,"finish_reason":"stop"}]}\n\n`,
+        `data: {"id":"chatcmpl-sig-test","object":"chat.completion.chunk","created":1711357598,"model":"anthropic/claude-opus-4.6",` +
+          `"system_fingerprint":"fp_test","choices":[],"usage":{"prompt_tokens":100,"completion_tokens":50,"total_tokens":150}}\n\n`,
+        'data: [DONE]\n\n',
+      ],
+    };
+
+    const { stream } = await model.doStream({
+      prompt: TEST_PROMPT,
+    });
+
+    const elements = await convertReadableStreamToArray(stream);
+
+    // Find reasoning-end event
+    const reasoningEnd = elements.find(
+      (
+        el,
+      ): el is Extract<LanguageModelV3StreamPart, { type: 'reasoning-end' }> =>
+        el.type === 'reasoning-end',
+    );
+
+    expect(reasoningEnd).toBeDefined();
+
+    // The reasoning-end event MUST have providerMetadata with the full accumulated
+    // reasoning_details including the signature from the last delta.
+    // This is critical because the AI SDK updates the reasoning part's providerMetadata
+    // from reasoning-end, and the signature is needed for multi-turn conversations.
+    expect(reasoningEnd?.providerMetadata).toBeDefined();
+
+    const reasoningDetails = (
+      reasoningEnd?.providerMetadata?.openrouter as {
+        reasoning_details: ReasoningDetailUnion[];
+      }
+    )?.reasoning_details;
+
+    expect(reasoningDetails).toBeDefined();
+    expect(reasoningDetails).toHaveLength(1);
+    expect(reasoningDetails[0]).toMatchObject({
+      type: ReasoningDetailType.Text,
+      text: 'Let me think about this step by step. Done.',
+      signature: 'erX9OCAqSEO90HsfvNlBn5J3BQ9cEI/Hg2wHFo5iA8w3L+a',
+      format: 'anthropic-claude-v1',
+    });
+
+    // Also verify that the finish event has the same accumulated data
+    const finishEvent = elements.find(
+      (el): el is Extract<LanguageModelV3StreamPart, { type: 'finish' }> =>
+        el.type === 'finish',
+    );
+
+    const finishReasoningDetails = (
+      finishEvent?.providerMetadata?.openrouter as {
+        reasoning_details: ReasoningDetailUnion[];
+      }
+    )?.reasoning_details;
+
+    expect(finishReasoningDetails).toHaveLength(1);
+    expect(finishReasoningDetails[0]).toMatchObject({
+      type: ReasoningDetailType.Text,
+      text: 'Let me think about this step by step. Done.',
+      signature: 'erX9OCAqSEO90HsfvNlBn5J3BQ9cEI/Hg2wHFo5iA8w3L+a',
+    });
+  });
 });

 describe('debug settings', () => {

src/chat/index.ts

Lines changed: 22 additions & 0 deletions
@@ -822,6 +822,18 @@ export class OpenRouterChatLanguageModel implements LanguageModelV3 {
           controller.enqueue({
             type: 'reasoning-end',
             id: reasoningId || generateId(),
+            // Include accumulated reasoning_details so the AI SDK can update
+            // the reasoning part's providerMetadata with the correct signature.
+            // The signature typically arrives in the last reasoning delta,
+            // but reasoning-start only carries the first delta's metadata.
+            providerMetadata:
+              accumulatedReasoningDetails.length > 0
+                ? {
+                    openrouter: {
+                      reasoning_details: accumulatedReasoningDetails,
+                    },
+                  }
+                : undefined,
           });
           reasoningStarted = false; // Mark as ended so we don't end it again in flush
         }
@@ -1096,6 +1108,16 @@ export class OpenRouterChatLanguageModel implements LanguageModelV3 {
           controller.enqueue({
             type: 'reasoning-end',
             id: reasoningId || generateId(),
+            // Include accumulated reasoning_details so the AI SDK can update
+            // the reasoning part's providerMetadata with the correct signature.
+            providerMetadata:
+              accumulatedReasoningDetails.length > 0
+                ? {
+                    openrouter: {
+                      reasoning_details: accumulatedReasoningDetails,
+                    },
+                  }
+                : undefined,
           });
         }
         if (textStarted) {
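
The accumulation itself happens in pre-existing code that this diff does not touch. The sketch below is an illustration only: the type and the merge-by-index rule are assumptions inferred from the unit test above, showing how per-delta reasoning_details could be folded into accumulatedReasoningDetails so the text grows across deltas while the signature from the last delta that carries one is kept.

// Illustrative sketch only -- not the provider's actual implementation.
type ReasoningTextDetail = {
  type: 'reasoning.text';
  text: string;
  index: number;
  format?: string;
  signature?: string;
};

function accumulateReasoningDetails(
  accumulated: ReasoningTextDetail[],
  incoming: ReasoningTextDetail[],
): ReasoningTextDetail[] {
  for (const delta of incoming) {
    const existing = accumulated.find((d) => d.index === delta.index);
    if (!existing) {
      accumulated.push({ ...delta });
      continue;
    }
    existing.text += delta.text; // concatenate streamed reasoning text
    existing.signature = delta.signature ?? existing.signature; // keep the last signature seen
    existing.format = delta.format ?? existing.format;
  }
  return accumulated;
}

With the mocked chunks from the unit test, this yields a single entry whose text is "Let me think about this step by step. Done." and whose signature comes from the third delta, which is exactly what the reasoning-end and finish events are asserted to carry.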

0 commit comments
